Lucio Lelii 2019-03-08 16:08:39 +00:00
parent abdcc0d4cc
commit abd80189ba
102 changed files with 1025 additions and 2604 deletions

View File

@@ -62,11 +62,6 @@
 <artifactId>commons-httpclient</artifactId>
 <version>3.1</version>
 </dependency>
-<dependency>
-<groupId>commons-logging</groupId>
-<artifactId>commons-logging</artifactId>
-<version>1.0.4</version>
-</dependency>
 <dependency>
 <groupId>dom4j</groupId>
 <artifactId>dom4j</artifactId>

View File

@@ -11,7 +11,6 @@ import javax.imageio.ImageIO;
 import javax.swing.JPanel;
 import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
-import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.portlets.user.timeseries.charts.support.types.GraphData;
 import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups;
 import org.gcube.portlets.user.timeseries.charts.support.types.Point;
@@ -21,6 +20,8 @@ import org.jfree.data.category.DefaultCategoryDataset;
 import org.jfree.data.general.Dataset;
 import org.jfree.ui.ApplicationFrame;
 import org.jfree.ui.RefineryUtilities;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 /*
  * Converts a GraphData into a graphicable structure DataSet
@@ -28,6 +29,8 @@ import org.jfree.ui.RefineryUtilities;
  */
 public abstract class GenericStandaloneGraph extends ApplicationFrame {
+private static Logger logger = LoggerFactory.getLogger(GenericStandaloneGraph.class);
 /**
 *
 */
@@ -108,7 +111,7 @@ public abstract class GenericStandaloneGraph extends ApplicationFrame {
 try{
 ImageIO.write(bimage, "png", outputfile);
 }catch(Exception e){
-AnalysisLogger.getLogger().error("renderImages->Error in writing files ",e);
+logger.error("renderImages->Error in writing files ",e);
 }
 i++;
 }
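The hunks above are representative of the change applied across the whole commit: the shared AnalysisLogger helper is dropped in favour of a per-class SLF4J logger, and the caught exception is handed to the logger. A minimal sketch of the pattern, assuming only slf4j-api on the classpath (the class name and the work inside the try block are illustrative, not taken from the repository):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class RenderingExample {

    // one static logger per class, named after the class, replaces AnalysisLogger.getLogger()
    private static final Logger logger = LoggerFactory.getLogger(RenderingExample.class);

    public void render(java.awt.image.BufferedImage bimage, java.io.File outputfile) {
        try {
            javax.imageio.ImageIO.write(bimage, "png", outputfile);
        } catch (Exception e) {
            // the throwable is passed as the last argument so the stack trace stays with the message
            logger.error("renderImages->Error in writing files ", e);
        }
    }
}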

View File

@@ -10,12 +10,13 @@ import org.gcube.contentmanagement.graphtools.data.GraphSamplesTable;
 import org.gcube.contentmanagement.graphtools.data.conversions.GraphConverter2D;
 import org.gcube.contentmanagement.graphtools.data.databases.CommonDBExtractor;
 import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
-import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory;
 import org.gcube.portlets.user.timeseries.charts.support.types.GraphData;
 import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups;
 import org.gcube.portlets.user.timeseries.charts.support.types.Point;
 import org.hibernate.SessionFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import com.rapidminer.RapidMiner;
 import com.rapidminer.example.ExampleSet;
@@ -26,31 +27,8 @@ import com.thoughtworks.xstream.io.xml.DomDriver;
 public class StatisticsGenerator {
-public static void main(String[] args) throws Exception {
-String table = "ts_161efa00_2c32_11df_b8b3_aa10916debe6";
-String xDimension = "field5";
-String yDimension = "field6";
-String groupDimension = "field1";
-String speciesColumn = "field3";
-String filter1 = "Brown seaweeds";
-String filter2 = "River eels";
-StatisticsGenerator stg = new StatisticsGenerator();
-LexicalEngineConfiguration conf = new LexicalEngineConfiguration();
-// database Parameters
-conf.setDatabaseUserName("root");
-// conf.setDatabasePassword("password");
-conf.setDatabaseDriver("com.mysql.jdbc.Driver");
-conf.setDatabaseURL("jdbc:mysql://localhost/timeseries");
-conf.setDatabaseDialect("org.hibernate.dialect.MySQLDialect");
-conf.setDatabaseAutomaticTestTable("connectiontesttable");
-conf.setDatabaseIdleConnectionTestPeriod("3600");
-// stg.init("./cfg/");
-stg.init("./cfg/", conf);
-stg.generateGraphs(3, table, xDimension, yDimension, groupDimension, speciesColumn, filter1, filter2);
-}
+private static Logger logger = LoggerFactory.getLogger(StatisticsGenerator.class);
 SessionFactory referenceDBSession;
 CommonDBExtractor extractor;
@@ -71,7 +49,6 @@ public class StatisticsGenerator {
 }
 public void init(String cfgPath, LexicalEngineConfiguration config) throws Exception {
-AnalysisLogger.setLogger(cfgPath + "/" + LogFile);
 if (config == null)
 referenceDBSession = DatabaseFactory.initDBConnection(cfgPath + HibFile);
 else
@@ -81,7 +58,7 @@ public class StatisticsGenerator {
 extractor = new CommonDBExtractor(referenceDBSession);
-AnalysisLogger.getLogger().info("StatisticsGenerator->initialization complete");
+logger.info("StatisticsGenerator->initialization complete");
 System.setProperty("rapidminer.init.operators", cfgPath + OperatorsFile);
 xStream = new XStream(new DomDriver());
 RapidMiner.init();
@@ -112,8 +89,8 @@
 Map<String, SamplesTable> samplesMap = extractor.getMultiDimTemporalTables(ColumnFilters, YRangeFilter, timeSeriesTable, xDimension, groupDimension, yDimension, speciesColumn, filters);
-AnalysisLogger.getLogger().info("StatisticsGenerator-> samplesMap has been generated");
-AnalysisLogger.getLogger().trace(samplesMap.toString());
+logger.info("StatisticsGenerator-> samplesMap has been generated");
+logger.trace(samplesMap.toString());
 // setup Absolute Sampling operator
 AbsoluteSampling asop = (AbsoluteSampling) OperatorService.createOperator("AbsoluteSampling");
 asop.setParameter("sample_size", "" + maxElements);
@@ -149,14 +126,14 @@
 // get the points list from the graph samples table
 singlegraph = graphSamples.getGraph();
-AnalysisLogger.getLogger().trace("Regeneration\n" + graphSamples.toString());
+logger.trace("Regeneration\n" + graphSamples.toString());
 }
 // reorder the elements of the points list
 // this steps performs re-enumeration and reordering of the rows after the sampling operations
 singlegraph = GraphConverter2D.reorder(singlegraph);
-AnalysisLogger.getLogger().trace("Reordering\n" + singlegraph.toString());
+logger.trace("Reordering\n" + singlegraph.toString());
 if ((singlegraph != null)&&(singlegraph.size()>0)) {
 // build up the GraphData for visualization
 GraphData grd = new GraphData(singlegraph, true);
@@ -171,12 +148,12 @@
 // add the points list
 graphgroups.addGraph("Distribution for " + key, grd);
-AnalysisLogger.getLogger().trace("StatisticsGenerator-> graphgroup " + i + " generated with key: " + key);
+logger.trace("StatisticsGenerator-> graphgroup " + i + " generated with key: " + key);
 i++;
 }
 }
-AnalysisLogger.getLogger().info("StatisticsGenerator-> graphgroups have been generated");
+logger.info("StatisticsGenerator-> graphgroups have been generated");
 return graphgroups;
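For reference, the main() deleted above doubled as the usage example for this class. A hedged sketch of the equivalent call sequence after this commit (the wrapper class name is invented for illustration; the table name, field names and connection values are taken from the removed code, and log output now depends on the SLF4J binding configured on the classpath rather than on AnalysisLogger.setLogger):

import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;

public class StatisticsGeneratorUsage {
    public static void main(String[] args) throws Exception {
        StatisticsGenerator stg = new StatisticsGenerator();
        LexicalEngineConfiguration conf = new LexicalEngineConfiguration();
        // database parameters, as in the deleted example code
        conf.setDatabaseUserName("root");
        conf.setDatabaseDriver("com.mysql.jdbc.Driver");
        conf.setDatabaseURL("jdbc:mysql://localhost/timeseries");
        conf.setDatabaseDialect("org.hibernate.dialect.MySQLDialect");
        conf.setDatabaseAutomaticTestTable("connectiontesttable");
        conf.setDatabaseIdleConnectionTestPeriod("3600");
        // no AnalysisLogger.setLogger(...) call any more: logging is configured by the SLF4J backend
        stg.init("./cfg/", conf);
        stg.generateGraphs(3, "ts_161efa00_2c32_11df_b8b3_aa10916debe6",
                "field5", "field6", "field1", "field3", "Brown seaweeds", "River eels");
    }
}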

View File

@@ -5,7 +5,6 @@ import java.util.List;
 import org.gcube.contentmanagement.graphtools.abstracts.SamplesTable;
 import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
-import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.portlets.user.timeseries.charts.support.types.GraphData;
 import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups;
 import org.gcube.portlets.user.timeseries.charts.support.types.Point;
@@ -17,6 +16,7 @@ import org.gcube.portlets.user.timeseries.charts.support.types.ValueEntry;
 */
 public class GraphConverter2D {
 private static Point<? extends Number, ? extends Number> searchPoint(Point<? extends Number, ? extends Number> x, List<Point<? extends Number, ? extends Number>> samples) {
 Point<? extends Number, ? extends Number> novelpoint = null;
 for (Point<? extends Number, ? extends Number> point : samples) {

View File

@@ -10,12 +10,15 @@ import org.gcube.contentmanagement.graphtools.abstracts.SamplesTable;
 import org.gcube.contentmanagement.graphtools.core.filters.Filter;
 import org.gcube.contentmanagement.graphtools.data.BigSamplesTable;
 import org.gcube.contentmanagement.lexicalmatcher.analysis.core.DataTypeRecognizer;
-import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory;
 import org.hibernate.SessionFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 public class CommonDBExtractor implements GenericDBExtractor {
+private static Logger logger = LoggerFactory.getLogger(CommonDBExtractor.class);
 SessionFactory dbSession;
 public CommonDBExtractor(SessionFactory DbSession) {
@@ -76,7 +79,7 @@ public class CommonDBExtractor implements GenericDBExtractor {
 HashMap<String, String> typesMap = new HashMap<String, String>();
 String queryDesc = String.format(descriptionQuery, table);
-AnalysisLogger.getLogger().trace("Query for Description: " + queryDesc);
+logger.trace("Query for Description: " + queryDesc);
 List<Object> resultSet = DatabaseFactory.executeSQLQuery(queryDesc, dbSession);
 for (Object result : resultSet) {
@@ -169,7 +172,7 @@ public class CommonDBExtractor implements GenericDBExtractor {
 query += String.format(staticOrderBy, xDimension);
-AnalysisLogger.getLogger().trace("Query: " + query);
+logger.trace("Query: " + query);
 List<Object> resultSet = DatabaseFactory.executeSQLQuery(query, dbSession);
 // xdim, group, quantity, yLabel

View File

@ -1,42 +0,0 @@
package org.gcube.contentmanagement.graphtools.examples;
import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class ExampleExternalConfigPostGressProd {
public static void main(String[] args) throws Exception{
String table = "ts_c4bdfaa0_6c16_11e0_bb1f_fb760af5afc7";
String xDimension = "field4"; // the dates
String yDimension = "field6"; // values on Y
String groupDimension = "field1"; // group names
String speciesColumn = "field2"; // lines labels
String filter1 = "ABW";
// String filter2 = "Osteichthyes";
StatisticsGenerator stg = new StatisticsGenerator();
AnalysisLogger.setLogger("./cfg/ALog.properties");
LexicalEngineConfiguration conf = new LexicalEngineConfiguration();
//database Parameters
conf.setDatabaseUserName("gcube1");
conf.setDatabasePassword("d4science");
conf.setDatabaseDriver("org.postgresql.Driver");
conf.setDatabaseURL("jdbc:postgresql://node28.p.d4science.research-infrastructures.eu/timeseries");
// conf.setDatabaseDialect("org.hibernate.dialect.MySQLDialect");
conf.setDatabaseDialect("org.hibernate.dialect.PostgreSQLDialect");
conf.setDatabaseAutomaticTestTable("connectiontesttable");
conf.setDatabaseIdleConnectionTestPeriod("3600");
stg.init("./cfg/",conf);
// stg.generateGraphs(200, table, xDimension, yDimension, groupDimension, speciesColumn, filter1, filter2);
stg.generateGraphs(200, table, xDimension, yDimension, groupDimension, speciesColumn,filter1);
System.out.println();
}
}

View File

@ -1,42 +0,0 @@
package org.gcube.contentmanagement.graphtools.examples;
import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator;
import org.gcube.contentmanagement.graphtools.plotting.graphs.RadarGraph;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups;
public class ExamplePostGressLocalRadar {
public static void main(String[] args) throws Exception{
AnalysisLogger.setLogger("./cfg/ALog.properties");
String table = "ts_3bdaf790_edbe_11e0_93e3_f6a9821baa29";
String xDimension = "field2"; // the dates
String yDimension = "field4"; // values on Y
String groupDimension = "field0"; // group names
String speciesColumn = "field3"; // lines labels
String filter2 = "Perciformes";
String filter1 = "Boregadus";
StatisticsGenerator stg = new StatisticsGenerator();
LexicalEngineConfiguration conf = new LexicalEngineConfiguration();
//database Parameters
conf.setDatabaseUserName("gcube");
conf.setDatabasePassword("d4science2");
conf.setDatabaseDriver("org.postgresql.Driver");
conf.setDatabaseURL("jdbc:postgresql://dbtest.next.research-infrastructures.eu/timeseries");
conf.setDatabaseDialect("org.hibernate.dialect.PostgreSQLDialect");
stg.init("./cfg/",conf);
GraphGroups gg = stg.generateGraphs(200, table, xDimension, yDimension, groupDimension, speciesColumn, filter1);
RadarGraph radar = new RadarGraph("");
radar.renderGraphGroup(gg);
}
}

View File

@ -1,113 +0,0 @@
package org.gcube.contentmanagement.graphtools.examples.graphsTypes;
import java.applet.Applet;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Image;
import java.awt.image.BufferedImage;
import java.io.File;
import java.util.List;
import javax.imageio.ImageIO;
import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator;
import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
import org.gcube.contentmanagement.graphtools.plotting.graphs.GaussianDistributionGraph;
import org.gcube.contentmanagement.graphtools.plotting.graphs.ScatterGraphGeneric;
import org.gcube.contentmanagement.graphtools.plotting.graphs.ScatterGraphNumeric;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups;
import com.thoughtworks.xstream.XStream;
import com.thoughtworks.xstream.io.xml.DomDriver;
public class ExampleImage extends Applet {
public void paint1(Graphics g) {
BufferedImage bgimg = loadImage();
g.drawImage(bgimg, 0, 0, this);
// g.fillRect(0, 0, 10, 10);
}
static int width = 320;
static int height = 280;
private BufferedImage loadImage() {
String imgFileName = "C:/Users/coro/Desktop/WorkFolder/Workspace/StatisticsExtractor/weather-cloud.png";
BufferedImage img = null;
try {
img = ImageIO.read(new File(imgFileName));
} catch (Exception e) {
e.printStackTrace();
}
return img;
}
public void init() {
setBackground( Color.LIGHT_GRAY );
setSize(width, height);
}
// public void paint(Graphics g){
public static void main(String[] args){
try{
String table = "ts_161efa00_2c32_11df_b8b3_aa10916debe6";
String xDimension = "field5";
String yDimension = "field6";
String groupDimension = "field1";
String speciesColumn = "field3";
String filter1 = "Brown seaweeds";
StatisticsGenerator stg = new StatisticsGenerator();
stg.init("C:/Users/coro/Desktop/WorkFolder/Workspace/StatisticsExtractor/cfg/");
GraphGroups gg = stg.generateGraphs(200, table, xDimension, yDimension, groupDimension, speciesColumn, filter1);
// ScatterGraphNumeric series = new ScatterGraphNumeric("");
// GaussianDistributionGraph series = new GaussianDistributionGraph("");
ScatterGraphGeneric series = new ScatterGraphGeneric("");
series.renderImages("./saved",width,height,gg);
AnalysisLogger.getLogger().debug("finished");
System.exit(0);
/* OLD CODE
List<Image> image = series.renderGraphGroupImage(width,height,gg);
Image singleimage = image.get(1);
BufferedImage bimage = ImageTools.toBufferedImage(singleimage);
XStream xStream = new XStream(new DomDriver());
String xmlimage = xStream.toXML(singleimage);
System.out.println(xmlimage);
File outputfile = new File("saved.png");
ImageIO.write(bimage, "png", outputfile);
*/
}catch(Exception e){
e.printStackTrace();
}
}
}

View File

@@ -49,7 +49,6 @@ import java.util.Map;
 import org.gcube.contentmanagement.graphtools.abstracts.GenericStandaloneGraph;
 import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
-import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.portlets.user.timeseries.charts.support.types.GraphData;
 import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups;
 import org.gcube.portlets.user.timeseries.charts.support.types.Point;
@@ -63,12 +62,13 @@ import org.jfree.data.function.NormalDistributionFunction2D;
 import org.jfree.data.general.Dataset;
 import org.jfree.data.general.DatasetUtilities;
 import org.jfree.data.xy.XYSeriesCollection;
-import com.thoughtworks.xstream.XStream;
-import com.thoughtworks.xstream.io.xml.DomDriver;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 public class GaussianDistributionGraph extends GenericStandaloneGraph {
+private static Logger logger = LoggerFactory.getLogger(GaussianDistributionGraph.class);
 private static final long serialVersionUID = 1L;
 public GaussianDistributionGraph(String title) {
@@ -173,7 +173,7 @@ public class GaussianDistributionGraph extends GenericStandaloneGraph {
 if (variance==0)
 variance = 0.1;
-AnalysisLogger.getLogger().debug("mean "+mean+" variance "+variance);
+logger.debug("mean "+mean+" variance "+variance);
 //build up normal distribution and add to the series
 NormalDistributionFunction2D normaldistributionfunction2d = new NormalDistributionFunction2D(mean, variance);
 //make the representation a bit longer
@@ -213,7 +213,7 @@ public class GaussianDistributionGraph extends GenericStandaloneGraph {
 if (variance==0)
 variance = 0.1;
-AnalysisLogger.getLogger().debug("mean "+mean+" variance "+variance);
+logger.debug("mean "+mean+" variance "+variance);
 //build up normal distribution and add to the series
 NormalDistributionFunction2D normaldistributionfunction2d = new NormalDistributionFunction2D(mean, variance);
 normalsList.add(normaldistributionfunction2d);

View File

@@ -44,7 +44,6 @@ package org.gcube.contentmanagement.graphtools.plotting.graphs;
 import java.util.List;
 import org.gcube.contentmanagement.graphtools.abstracts.GenericStandaloneGraph;
-import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.portlets.user.timeseries.charts.support.types.GraphData;
 import org.gcube.portlets.user.timeseries.charts.support.types.Point;
 import org.jfree.chart.ChartFactory;
@@ -53,22 +52,19 @@ import org.jfree.data.category.DefaultCategoryDataset;
 import org.jfree.data.general.Dataset;
 import org.jfree.data.general.DefaultPieDataset;
 import org.jfree.util.TableOrder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 public class PieGraph extends GenericStandaloneGraph {
+private static Logger logger = LoggerFactory.getLogger(PieGraph.class);
 private static final long serialVersionUID = 1L;
 public PieGraph(String title) {
 super(title);
 }
-/**
-* @param args
-*/
-public static void main(String[] args) {
-// TODO Auto-generated method stub
-}
 protected Dataset generateDataset() {
 DefaultPieDataset dataset = new DefaultPieDataset();
@@ -142,7 +138,7 @@ public class PieGraph extends GenericStandaloneGraph {
 double value = pointslist.get(x).getEntries().get(y).getValue().doubleValue();
 value = (value/max)*100;
 String ylabel = pointslist.get(x).getEntries().get(y).getLabel();
-AnalysisLogger.getLogger().info(xlabel + ":" + ylabel +"->" + value);
+logger.info(xlabel + ":" + ylabel +"->" + value);
 dataset.setValue(xlabel + ":" + ylabel, value);
 }

View File

@@ -49,7 +49,6 @@ import java.util.List;
 import org.gcube.contentmanagement.graphtools.abstracts.GenericStandaloneGraph;
 import org.gcube.contentmanagement.graphtools.utils.DateGuesser;
-import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.portlets.user.timeseries.charts.support.types.GraphData;
 import org.gcube.portlets.user.timeseries.charts.support.types.Point;
 import org.jfree.chart.ChartFactory;
@@ -65,9 +64,13 @@ import org.jfree.data.time.TimeSeries;
 import org.jfree.data.time.TimeSeriesCollection;
 import org.jfree.data.xy.XYDataset;
 import org.jfree.ui.RectangleInsets;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 public class TimeSeriesGraph extends GenericStandaloneGraph {
+private static Logger logger = LoggerFactory.getLogger(TimeSeriesGraph.class);
 private static final long serialVersionUID = 1L;
 private static final String formatYear= "yyyy";
 private static final String formatMonth= "MM-yyyy";
@@ -206,7 +209,7 @@ public class TimeSeriesGraph extends GenericStandaloneGraph {
 else if (granularity.equals(DateGuesser.MONTH)) timeseriesformat = formatMonth;
 else if (granularity.equals(DateGuesser.DAY)) timeseriesformat = formatDay;
-AnalysisLogger.getLogger().debug("TimeSeriesGraph-> granularity "+granularity+" format "+timeseriesformat);
+logger.debug("TimeSeriesGraph-> granularity "+granularity+" format "+timeseriesformat);
 xyseries.add(new Day(new Date(cal.getTimeInMillis())),value);

View File

@@ -14,13 +14,16 @@ import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure
 import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.chunks.SingletonChunkSet;
 import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.chunks.TimeSeriesChunk;
 import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.chunks.TimeSeriesChunkSet;
-import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory;
 import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions;
 import org.hibernate.SessionFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 public class Engine {
+private static Logger logger = LoggerFactory.getLogger(Engine.class);
 private String ConfigurationFileNameLocal = "lexicalguesser/hibernate.cfg.xml";
 private SessionFactory referenceDBSession;
@@ -85,7 +88,7 @@ public class Engine {
 tsChunkSet = new TimeSeriesChunkSet(config.TimeSeriesChunksToTake, config.chunkSize, unknownSeriesName, unknownSeriesColumn,config, this);
 } catch (Exception e) {
 e.printStackTrace();
-AnalysisLogger.getLogger().error("Engine->calcLike-> ERROR could not retrieve time series chunks " + e.getLocalizedMessage());
+logger.error("Engine->calcLike-> ERROR could not retrieve time series chunks " + e.getLocalizedMessage());
 }
 // if we took the ts chunk set correctly perform calculation
 if (tsChunkSet != null) {
@@ -108,7 +111,7 @@
 tsChunk.compareToReferenceChunk(scoresTable, refChunk);
 } catch (Exception e) {
 e.printStackTrace();
-AnalysisLogger.getLogger().error("Engine->calcLike-> ERROR could not compare time series chunk with reference chunk " + e.getLocalizedMessage());
+logger.error("Engine->calcLike-> ERROR could not compare time series chunk with reference chunk " + e.getLocalizedMessage());
 }
 // take another chunk in the reference chunk set
 refChunk = refChunkSet.nextChunk();
@@ -150,7 +153,7 @@
 ThreadCalculator tc = new ThreadCalculator(tsChunk, refChunkSet,index);
 Thread t = new Thread(tc);
 t.start();
-// AnalysisLogger.getLogger().info("ThreadCalculator<-go "+index);
+// logger.info("ThreadCalculator<-go "+index);
 }
@@ -178,8 +181,7 @@
 }
 } catch (Exception e) {
-e.printStackTrace();
-AnalysisLogger.getLogger().error("Engine->calcLike-> ERROR could not retrieve time series chunks " + e.getLocalizedMessage());
+logger.error("Engine->calcLike-> ERROR could not retrieve time series chunks ",e);
 }
 // if we took the ts chunk set correctly perform calculation
 if (tsChunkSet != null) {
@@ -189,7 +191,7 @@
 TimeSeriesChunk tsChunk = (TimeSeriesChunk)tsChunkSet.nextChunk();
-AnalysisLogger.getLogger().debug("tsChunk is null "+(tsChunk != null));
+logger.debug("tsChunk is null "+(tsChunk != null));
 // for all ts chunks
 while (tsChunk != null) {
@@ -240,7 +242,7 @@
 tsChunk.compareToReferenceChunk(scoresTable, refChunk,columnFilter);
 } catch (Exception e) {
 e.printStackTrace();
-AnalysisLogger.getLogger().error("Engine->calcLike-> ERROR could not compare time series chunk with reference chunk " + e.getLocalizedMessage());
+logger.error("Engine->calcLike-> ERROR could not compare time series chunk with reference chunk " + e.getLocalizedMessage());
 }
 //if the TimeSeries chunk states the processing must be interrupted, don't perform other comparisons
@@ -266,8 +268,8 @@
 score = categoryScore.getScore(bestColumn,singletonMatch);
 }
-AnalysisLogger.getLogger().trace("Engine->UpdateScores-> \tBEST SUITABLE COLUMN IS: " + bestColumn);
-AnalysisLogger.getLogger().trace("Engine->UpdateScores-> \tBEST SCORE IS: " + score);
+logger.trace("Engine->UpdateScores-> \tBEST SUITABLE COLUMN IS: " + bestColumn);
+logger.trace("Engine->UpdateScores-> \tBEST SCORE IS: " + score);
 // order this column
 if (score > config.categoryDiscardThreshold) {
@@ -314,11 +316,11 @@
 index++;
 }
-// AnalysisLogger.getLogger().info("chechAndAddColumns -> column to add "+column+" category "+categoryName+" with value "+score+" previous "+(bestScore - 0.5 * bestScore));
+// logger.info("chechAndAddColumns -> column to add "+column+" category "+categoryName+" with value "+score+" previous "+(bestScore - 0.5 * bestScore));
 bestColumns.add(index,column);
 bestScores.add(index,score);
 bestCategories.add(index,categoryName);
-// AnalysisLogger.getLogger().info("chechAndAddColumns -> "+bestCategories);
+// logger.info("chechAndAddColumns -> "+bestCategories);
 }
 }
 }
@@ -337,10 +339,10 @@
 }
 public void run() {
-// AnalysisLogger.getLogger().info("ThreadCalculator->started "+index);
+// logger.info("ThreadCalculator->started "+index);
 makeComparisonsTSChunk2RefChunks(tsChunk, refChunksSet);
 threadActivity[index]=false;
-// AnalysisLogger.getLogger().info("ThreadCalculator>-finished "+index);
+// logger.info("ThreadCalculator>-finished "+index);
 }
 }
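One behavioural detail in the Engine changes above: in the @@ -178,8 +181,7 @@ hunk the old pair of e.printStackTrace() plus a message built from e.getLocalizedMessage() is collapsed into a single logger.error(message, e) call, which attaches the full stack trace to the log event instead of sending it to stderr. A minimal sketch of the difference, with an illustrative class name and exception:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ErrorLoggingSketch {

    private static final Logger logger = LoggerFactory.getLogger(ErrorLoggingSketch.class);

    public static void main(String[] args) {
        try {
            throw new IllegalStateException("could not retrieve time series chunks");
        } catch (Exception e) {
            // old style: stack trace goes to stderr, the log record carries only the message text
            // e.printStackTrace();
            // logger.error("Engine->calcLike-> ERROR " + e.getLocalizedMessage());

            // new style: the throwable travels with the log event, so the configured
            // appender prints the message and the stack trace together
            logger.error("Engine->calcLike-> ERROR could not retrieve time series chunks ", e);
        }
    }
}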

View File

@ -1,32 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.examples;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class Example1_Species {
public static void main(String[] args) {
try {
int attempts = 1;
String configPath = ".";
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String seriesName = "import_2c97f580_35a0_11df_b8b3_aa10916debe6";
String column = "field1";
String correctFamily = "SPECIES";
String correctColumn = "SCIENTIFIC_NAME";
CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@ -1,32 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.examples;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class Example2_Area {
public static void main(String[] args) {
try {
int attempts = 1;
String configPath = ".";
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String seriesName = "import_2c97f580_35a0_11df_b8b3_aa10916debe6";
String column = "field3";
String correctFamily = "AREA";
String correctColumn = "NAME_EN";
CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@ -1,48 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.examples;
import java.util.ArrayList;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class Example3_SingleMatchShark {
public static void main(String[] args) {
try {
String configPath = ".";
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String singleton = "shark";
String family = "species";
String column = "name_en";
LexicalEngineConfiguration conf = new LexicalEngineConfiguration();
//CHANGE THIS TO ENHANCE THE RECALL
conf.setEntryAcceptanceThreshold(30);
conf.setReferenceChunksToTake(-1);
conf.setTimeSeriesChunksToTake(-1);
conf.setUseSimpleDistance(false);
guesser.runGuesser(configPath, singleton, conf, family,column );
ArrayList<SingleResult> detailedResults = guesser.getDetailedMatches();
AnalysisLogger.getLogger().warn("Detailed Match on Name :"+singleton);
CategoryGuesser.showResults(detailedResults);
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@ -1,49 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.examples;
import java.util.ArrayList;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class Example4_SingleMatchMitella {
public static void main(String[] args) {
try {
String configPath = ".";
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String singleton = "Mitella pollicipes";
// String singleton = "policipes";
String family = "species";
String column = "scientific_name";
LexicalEngineConfiguration conf = new LexicalEngineConfiguration();
//CHANGE THIS TO ENHANCE THE RECALL
conf.setEntryAcceptanceThreshold(30);
conf.setReferenceChunksToTake(-1);
conf.setTimeSeriesChunksToTake(-1);
conf.setUseSimpleDistance(false);
guesser.runGuesser(configPath, singleton, conf, family,column );
ArrayList<SingleResult> detailedResults = guesser.getDetailedMatches();
AnalysisLogger.getLogger().warn("Detailed Match on Name :"+singleton);
CategoryGuesser.showResults(detailedResults);
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@ -1,48 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.examples;
import java.util.ArrayList;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class Example5_SingleMatchMitella {
public static void main(String[] args) {
try {
String configPath = ".";
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String singleton = "Mirella policepes";
String family = "species";
String column = "scientific_name";
LexicalEngineConfiguration conf = new LexicalEngineConfiguration();
//CHANGE THIS TO ENHANCE THE RECALL
conf.setEntryAcceptanceThreshold(30);
conf.setReferenceChunksToTake(-1);
conf.setTimeSeriesChunksToTake(-1);
conf.setUseSimpleDistance(false);
guesser.runGuesser(configPath, singleton, conf, family,column );
ArrayList<SingleResult> detailedResults = guesser.getDetailedMatches();
AnalysisLogger.getLogger().warn("Detailed Match on Name :"+singleton);
CategoryGuesser.showResults(detailedResults);
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@ -1,64 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.examples;
import java.util.ArrayList;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
public class ExampleGuessingExternalCfg {
public static void main(String[] args) {
try {
String configPath = "./";
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
System.out.println("----------------------BENCH 1-------------------------");
String seriesName = "import_532bba80_1c8f_11df_a4ee_87804054691e";
String column = "field2";
LexicalEngineConfiguration conf = new LexicalEngineConfiguration();
conf.setCategoryDiscardDifferencialThreshold(10);
conf.setCategoryDiscardThreshold(0);
conf.setChunkSize(25);
conf.setEntryAcceptanceThreshold(50);
conf.setNumberOfThreadsToUse(2);
conf.setRandomTake(true);
conf.setReferenceChunksToTake(20);
conf.setTimeSeriesChunksToTake(1);
conf.setUseSimpleDistance(false);
//database Parameters
conf.setDatabaseUserName("root");
conf.setDatabasePassword("ash_ash80");
conf.setDatabaseDriver("com.mysql.jdbc.Driver");
conf.setDatabaseURL("jdbc:mysql://localhost/timeseries");
conf.setDatabaseDialect("org.hibernate.dialect.MySQLDialect");
conf.setDatabaseAutomaticTestTable("connectiontesttable");
conf.setDatabaseIdleConnectionTestPeriod("3600");
//reference parameters
conf.setReferenceTable("reference_table");
conf.setReferenceColumn("table_name");
conf.setIdColumn("id");
conf.setNameHuman("name_human");
conf.setDescription("description");
guesser.init(conf);
guesser.runGuesser(seriesName, column, conf);
ArrayList<SingleResult> results = guesser.getClassification();
CategoryGuesser.showResults(results);
System.out.println("--------------------END BENCH 1-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -7,13 +7,16 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
-import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 //score relative to a certain category and column
 public class CategoryScores {
+private static Logger logger = LoggerFactory.getLogger(CategoryScores.class);
 // column names vs percentage
 private HashMap<String, Float> columnsScore;
@@ -152,7 +155,7 @@
 try {
 score = columnsScore.get(column);
 } catch (Exception e) {
-AnalysisLogger.getLogger().error("ERROR in getting SCORE " + e.getLocalizedMessage());
+logger.error("ERROR in getting SCORE ",e);
 }
 if (bestscore.compareTo(score) < 0) {
 bestscore = score;
@@ -175,7 +178,7 @@
 try {
 score = columnsScore.get(column);
 } catch (Exception e) {
-AnalysisLogger.getLogger().error("ERROR in getting SCORE " + e.getLocalizedMessage());
+logger.error("ERROR in getting SCORE ",e);
 }
 // find best place where to put column

View File

@@ -6,12 +6,15 @@ import java.math.BigInteger;
 import java.util.ArrayList;
 import java.util.HashMap;
-import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 //score relative to a certain category and column
 public class CategoryScoresOld {
+private static Logger logger = LoggerFactory.getLogger(CategoryScoresOld.class);
 // column names vs percentage
 private HashMap<String, BigDecimal> columnsScore;
@@ -52,7 +55,7 @@
 if (percentage == null)
 percentage = BigDecimal.ZERO;
-AnalysisLogger.getLogger().trace("getScore -> Score for "+columnName+": " + percentage + " vs " + maximumElements);
+logger.trace("getScore -> Score for "+columnName+": " + percentage + " vs " + maximumElements);
 percentage = percentage.divide(maximumElements, 2, BigDecimal.ROUND_DOWN);
 } catch (ArithmeticException e) {
 percentage = BigDecimal.ZERO;
@@ -78,7 +81,7 @@
 try {
 score = columnsScore.get(column);
 } catch (Exception e) {
-AnalysisLogger.getLogger().error("ERROR in getting SCORE " + e.getLocalizedMessage());
+logger.error("ERROR in getting SCORE ",e);
 }
 if (bestscore.compareTo(score) < 0) {
 bestscore = score;
@@ -101,7 +104,7 @@
 try {
 score = columnsScore.get(column);
 } catch (Exception e) {
-AnalysisLogger.getLogger().error("ERROR in getting SCORE " + e.getLocalizedMessage());
+logger.error("ERROR in getting SCORE ",e);
 }
 // find best place where to put column

View File

@@ -5,16 +5,15 @@ import java.util.ArrayList;
 import java.util.List;
 import org.gcube.contentmanagement.lexicalmatcher.analysis.core.DataTypeRecognizer;
-import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory;
 import org.hibernate.SessionFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 public class DBObjectTranslator {
-public static void main(String[] args) {
-}
+private static Logger logger = LoggerFactory.getLogger(DBObjectTranslator.class);
 public ArrayList<RelationEdge> relations;
 public ArrayList<Category> categories;
@@ -44,7 +43,7 @@
 BigInteger resultcount = (BigInteger) result;
 totalEntries = totalEntries.add(resultcount);
 count = resultcount;
-AnalysisLogger.getLogger().trace("DBObjectTranslator->calculateTotalEntries: Time Series " + timeSeriesName + " total " + totalEntries);
+logger.trace("DBObjectTranslator->calculateTotalEntries: Time Series " + timeSeriesName + " total " + totalEntries);
 } catch (Exception e) {
 }
 }
@@ -56,7 +55,7 @@
 // String query = "SELECT distinct "+timeSeriesColumn+" FROM "+timeSeriesName+" r limit "+min+","+numberOfElements;
 String query = "SELECT distinct " + timeSeriesColumn + " FROM " + timeSeriesName + " r limit " + numberOfElements + " offset " + min;
-AnalysisLogger.getLogger().trace("DBObjectTranslator->query: " + query);
+logger.trace("DBObjectTranslator->query: " + query);
 List<Object> resultSet = DatabaseFactory.executeSQLQuery(query, dbSession);
 ArrayList<String> column = new ArrayList<String>();
@@ -69,14 +68,14 @@
 column.add(value);
-// AnalysisLogger.getLogger().debug("DBObjectTranslator->retrieveColumnRange: Column Element Added " + value);
+// logger.debug("DBObjectTranslator->retrieveColumnRange: Column Element Added " + value);
 } catch (Exception e) {
 e.printStackTrace();
-AnalysisLogger.getLogger().trace("DBObjectTranslator->retrieveTimeSeriesEntries: Error in adding entry :" + e.getLocalizedMessage());
+logger.trace("DBObjectTranslator->retrieveTimeSeriesEntries: Error in adding entry :" + e.getLocalizedMessage());
 }
 }
-AnalysisLogger.getLogger().trace("DBObjectTranslator->retrieveColumnRange: Column " + column.toString());
+logger.trace("DBObjectTranslator->retrieveColumnRange: Column " + column.toString());
 return column;
 }
@@ -105,7 +104,7 @@
 // String query = "SELECT DISTINCT * FROM " + timeSeriesName + " r where id>=" + min.toString() + " and id<=" + max.toString();
 // String query = "SELECT DISTINCT * FROM " + timeSeriesName + " r limit "+min+","+numberOfElements;
 String query = "SELECT DISTINCT * FROM " + timeSeriesName + " r limit " + numberOfElements + " offset " + min;
-AnalysisLogger.getLogger().trace("DBObjectTranslator->retrieveEntries: query " + query);
+logger.trace("DBObjectTranslator->retrieveEntries: query " + query);
 List<Object> resultSet = DatabaseFactory.executeSQLQuery(query, dbSession);
@@ -126,15 +125,15 @@
 }
 // add entry
 currentEntries.add(entry);
-// AnalysisLogger.getLogger().debug("DBObjectTranslator->retrieveEntries: Entry Added " + entry.toString());
+// logger.debug("DBObjectTranslator->retrieveEntries: Entry Added " + entry.toString());
 } catch (Exception e) {
 // e.printStackTrace();
-AnalysisLogger.getLogger().trace("DBObjectTranslator->retrieveEntries: Error in adding entry :" + e.getLocalizedMessage());
+logger.trace("DBObjectTranslator->retrieveEntries: Error in adding entry :" + e.getLocalizedMessage());
 }
 }
 }
-// AnalysisLogger.getLogger().trace("DBObjectTranslator->retrieveEntries: Entries " + currentEntries);
+// logger.trace("DBObjectTranslator->retrieveEntries: Entries " + currentEntries);
 return currentEntries;
 }
@@ -152,7 +151,7 @@
 }
 if (re != null) {
 relations.add(re);
-AnalysisLogger.getLogger().trace("DBObjectTranslator->buildRelationsEdges: add relation " + re.toString());
+logger.trace("DBObjectTranslator->buildRelationsEdges: add relation " + re.toString());
 }
 }
 }
@@ -180,7 +179,7 @@
 }
 if (cat != null) {
 categories.add(cat);
-AnalysisLogger.getLogger().trace("DBObjectTranslator->buildCategories: add category " + cat.toString());
+logger.trace("DBObjectTranslator->buildCategories: add category " + cat.toString());
 }
 }
 }
@@ -208,7 +207,7 @@
 Category to = getCategoryfromIndex(re.getTo());
 re.setCategoryFrom(from.getName());
 re.setCategoryTo(to.getName());
-AnalysisLogger.getLogger().trace("DBObjectTranslator->populateRelationWithCategories: modified Relation " + re.toString());
+logger.trace("DBObjectTranslator->populateRelationWithCategories: modified Relation " + re.toString());
 }
 }
@@ -225,7 +224,7 @@
 BigInteger resultcount = (BigInteger) result;
 re.setWeigth(resultcount);
 totalRelationElements = totalRelationElements.add(resultcount);
-AnalysisLogger.getLogger().trace("DBObjectTranslator->calculateRelationWeights: Relation " + re.getName() + " weight " + re.getWeigth());
+logger.trace("DBObjectTranslator->calculateRelationWeights: Relation " + re.getName() + " weight " + re.getWeigth());
 } catch (Exception e) {
 }
 }
@@ -246,7 +245,7 @@
 BigInteger resultcount = (BigInteger) result;
 cat.setNumberOfElements(resultcount);
 totalCatElements = totalCatElements.add(resultcount);
-AnalysisLogger.getLogger().trace("DBObjectTranslator->calculateCategoriesWeights: Category " + cat.getName() + " weight " + cat.getNumberOfElements() + " total " + totalCatElements);
+logger.trace("DBObjectTranslator->calculateCategoriesWeights: Category " + cat.getName() + " weight " + cat.getNumberOfElements() + " total " + totalCatElements);
 } catch (Exception e) {
 }
 }
@@ -256,7 +255,7 @@
 public void buildCategoriesStructure(SessionFactory dbSession, String referenceTable, String referenceColumn, String idColumn, String nameHuman, String description) {
 buildCategories(dbSession, referenceTable, referenceColumn, idColumn, nameHuman, description);
 calculateCategoriesWeights(dbSession);
-AnalysisLogger.getLogger().trace("DBObjectTranslator->buildWholeStructure: Total Categories Elements " + totalCatElements + " Total Relation Elements " + totalRelationElements);
+logger.trace("DBObjectTranslator->buildWholeStructure: Total Categories Elements " + totalCatElements + " Total Relation Elements " + totalRelationElements);
 }
 public void buildWholeStructure(SessionFactory dbSession, String referenceTable, String referenceColumn, String idColumn, String nameHuman, String description) {
@@ -267,7 +266,7 @@
 calculateRelationWeights(dbSession);
 calculateCategoriesWeights(dbSession);
-AnalysisLogger.getLogger().trace("DBObjectTranslator->buildWholeStructure: Total Categories Elements " + totalCatElements + " Total Relation Elements " + totalRelationElements);
+logger.trace("DBObjectTranslator->buildWholeStructure: Total Categories Elements " + totalCatElements + " Total Relation Elements " + totalRelationElements);
 }
 }

View File

@ -7,11 +7,13 @@ import java.util.ArrayList;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.Engine; import org.gcube.contentmanagement.lexicalmatcher.analysis.core.Engine;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions; import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class ChunkSet { public abstract class ChunkSet {
private static Logger logger = LoggerFactory.getLogger(ChunkSet.class);
protected String seriesName; protected String seriesName;
protected String seriesColumn; protected String seriesColumn;
@@ -52,7 +54,7 @@ public abstract class ChunkSet {
public void generateChunkSet() throws Exception{ public void generateChunkSet() throws Exception{
AnalysisLogger.getLogger().trace("ChunkSet->generateChunkSet-> \tGenerating Chunk Set for " + seriesName+ " "+seriesColumn); logger.trace("ChunkSet->generateChunkSet-> \tGenerating Chunk Set for " + seriesName+ " "+seriesColumn);
int numberOfChunks = calculateNumberOfCycles(); int numberOfChunks = calculateNumberOfCycles();
//generate chunks to be processed //generate chunks to be processed
chunkSet = MathFunctions.generateRandoms(maxNumberOfChunks, 0, numberOfChunks); chunkSet = MathFunctions.generateRandoms(maxNumberOfChunks, 0, numberOfChunks);

View File

@@ -7,12 +7,13 @@ import java.util.ArrayList;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.Engine; import org.gcube.contentmanagement.lexicalmatcher.analysis.core.Engine;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.DBObjectTranslator; import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.DBObjectTranslator;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.Entry; import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.Entry;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ReferenceChunk extends Chunk{ public class ReferenceChunk extends Chunk{
private static Logger logger = LoggerFactory.getLogger(ReferenceChunk.class);
private String categoryName; private String categoryName;
private String categoryTableName; private String categoryTableName;
@@ -27,7 +28,7 @@ public class ReferenceChunk extends Chunk{
categoryName = CategoryName; categoryName = CategoryName;
categoryTableName = CategoryTableName; categoryTableName = CategoryTableName;
startPoint = StartPoint; startPoint = StartPoint;
AnalysisLogger.getLogger().trace("ReferenceChunk-> \t\tTOOK CATEGORY CHUNK FOR CATEGORY: " + categoryName+" - index : "+startPoint); logger.trace("ReferenceChunk-> \t\tTOOK CATEGORY CHUNK FOR CATEGORY: " + categoryName+" - index : "+startPoint);
} }

View File

@@ -12,12 +12,14 @@ import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.Category
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.DBObjectTranslator; import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.DBObjectTranslator;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.Entry; import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.Entry;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult; import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.DistanceCalculator; import org.gcube.contentmanagement.lexicalmatcher.utils.DistanceCalculator;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TimeSeriesChunk extends Chunk{ public class TimeSeriesChunk extends Chunk{
private static Logger logger = LoggerFactory.getLogger(TimeSeriesChunk.class);
private ArrayList<String> columnEntries; private ArrayList<String> columnEntries;
private String columnType; private String columnType;
@@ -49,7 +51,7 @@ public class TimeSeriesChunk extends Chunk{
columnEntries = dbo.retrieveTimeSeriesEntries(sess, timeSeriesName, timeSeriesColumn, start, ChunkSize); columnEntries = dbo.retrieveTimeSeriesEntries(sess, timeSeriesName, timeSeriesColumn, start, ChunkSize);
if (ColumnType==null){ if (ColumnType==null){
columnType = DataTypeRecognizer.guessType(columnEntries); columnType = DataTypeRecognizer.guessType(columnEntries);
AnalysisLogger.getLogger().trace("TimeSeriesChunk-> GUESSED TYPE " + columnType + " FOR COLUMN "+timeSeriesColumn); logger.trace("TimeSeriesChunk-> GUESSED TYPE " + columnType + " FOR COLUMN "+timeSeriesColumn);
} }
mustInterrupt = false; mustInterrupt = false;
config = Config; config = Config;
@@ -62,7 +64,7 @@ public class TimeSeriesChunk extends Chunk{
columnEntries.add(singletonString); columnEntries.add(singletonString);
if (ColumnType==null){ if (ColumnType==null){
columnType = DataTypeRecognizer.guessType(columnEntries); columnType = DataTypeRecognizer.guessType(columnEntries);
AnalysisLogger.getLogger().trace("TimeSeriesChunk-> GUESSED TYPE " + columnType + " FOR SINGLETON "+singletonString); logger.trace("TimeSeriesChunk-> GUESSED TYPE " + columnType + " FOR SINGLETON "+singletonString);
} }
mustInterrupt = false; mustInterrupt = false;
config = Config; config = Config;
@@ -108,17 +110,17 @@ public class TimeSeriesChunk extends Chunk{
// perform calculation only if the column type is the same // perform calculation only if the column type is the same
if (types.get(referenceColumn).equals(columnType)&&((ColumnFilter==null)||(ColumnFilter.equalsIgnoreCase(referenceColumn)))) { if (types.get(referenceColumn).equals(columnType)&&((ColumnFilter==null)||(ColumnFilter.equalsIgnoreCase(referenceColumn)))) {
// AnalysisLogger.getLogger().debug("CategoryOrderedList->checkAllEntriesOnEntireCategory-> REFERENCE COLUMN "+referenceColumn+" HAS TYPE "+types.get(referenceColumn)); // logger.debug("CategoryOrderedList->checkAllEntriesOnEntireCategory-> REFERENCE COLUMN "+referenceColumn+" HAS TYPE "+types.get(referenceColumn));
// take the attribute value of the entry // take the attribute value of the entry
String attribute = attributes.get(referenceColumn); String attribute = attributes.get(referenceColumn);
// calculate the distance between the unknown entry and the attribute // calculate the distance between the unknown entry and the attribute
DistanceCalculator d = new DistanceCalculator(); DistanceCalculator d = new DistanceCalculator();
double percentage = d.CD(config.useSimpleDistance, timeSeriesElement, attribute, isSingleton, isSingleton) * 100f; double percentage = d.CD(config.useSimpleDistance, timeSeriesElement, attribute, isSingleton, isSingleton) * 100f;
// AnalysisLogger.getLogger().debug("CategoryOrderedList->checkUnkEntriesOnEntireCategory-> Percentage between " +timeSeriesElement + " and " + attribute + " is: "+percentage ); // logger.debug("CategoryOrderedList->checkUnkEntriesOnEntireCategory-> Percentage between " +timeSeriesElement + " and " + attribute + " is: "+percentage );
// if they are similar // if they are similar
if (percentage > config.entryAcceptanceThreshold) { if (percentage > config.entryAcceptanceThreshold) {
// if (catChunk.getCategoryName().equals("COUNTRY_OLD")) // if (catChunk.getCategoryName().equals("COUNTRY_OLD"))
AnalysisLogger.getLogger().trace("TimeSeriesChunk->compareToCategoryChunk-> \t\tPercentage between " + timeSeriesElement + " vs. " + attribute + " is: " + percentage+" in "+catChunk.getCategoryName()+":"+referenceColumn); logger.trace("TimeSeriesChunk->compareToCategoryChunk-> \t\tPercentage between " + timeSeriesElement + " vs. " + attribute + " is: " + percentage+" in "+catChunk.getCategoryName()+":"+referenceColumn);
categoryScores.incrementScore(referenceColumn, (float)percentage,anotherReference); categoryScores.incrementScore(referenceColumn, (float)percentage,anotherReference);
@@ -138,7 +140,7 @@ public class TimeSeriesChunk extends Chunk{
detailedResults.add(index, new SingleResult(attribute, null, percentage,null,"0")); detailedResults.add(index, new SingleResult(attribute, null, percentage,null,"0"));
} }
else{ else{
AnalysisLogger.getLogger().trace("TimeSeriesChunk->compareToCategoryChunk-> "+categoryScores.showScores()); logger.trace("TimeSeriesChunk->compareToCategoryChunk-> "+categoryScores.showScores());
} }
//if exact match is reached, exit //if exact match is reached, exit
if ((percentage==100)&&(isSingleton)) if ((percentage==100)&&(isSingleton))
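The migrated trace calls in this class keep the original string concatenation. slf4j also accepts parameterized messages, where the {} placeholders are rendered only if the level is enabled; a hedged alternative for the comparison trace above, shown as an option and not part of this commit:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ComparisonTraceSketch {

    private static final Logger logger = LoggerFactory.getLogger(ComparisonTraceSketch.class);

    void tracePercentage(String element, String attribute, double percentage,
                         String categoryName, String referenceColumn) {
        // the message is only formatted when TRACE is actually enabled
        logger.trace("TimeSeriesChunk->compareToCategoryChunk-> Percentage between {} vs. {} is: {} in {}:{}",
                element, attribute, percentage, categoryName, referenceColumn);
    }
}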

View File

@@ -5,7 +5,6 @@ import java.awt.Color;
import java.awt.Dimension; import java.awt.Dimension;
import java.awt.Rectangle; import java.awt.Rectangle;
import java.awt.geom.Rectangle2D; import java.awt.geom.Rectangle2D;
import java.math.BigInteger;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
@@ -13,13 +12,17 @@ import java.util.Random;
import javax.swing.JApplet; import javax.swing.JApplet;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.jgraph.JGraph; import org.jgraph.JGraph;
import org.jgraph.graph.DefaultGraphCell; import org.jgraph.graph.DefaultGraphCell;
import org.jgraph.graph.GraphConstants; import org.jgraph.graph.GraphConstants;
import org.jgrapht.ext.JGraphModelAdapter; import org.jgrapht.ext.JGraphModelAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class GraphDisplayer extends JApplet { public class GraphDisplayer extends JApplet {
private static Logger logger = LoggerFactory.getLogger(GraphDisplayer.class);
private static final Color DEFAULT_BG_COLOR = Color.decode("#FAFBFF"); private static final Color DEFAULT_BG_COLOR = Color.decode("#FAFBFF");
private static final Dimension DEFAULT_SIZE = new Dimension(530, 320); private static final Dimension DEFAULT_SIZE = new Dimension(530, 320);
@@ -64,7 +67,7 @@ public class GraphDisplayer extends JApplet {
} }
public void init() { public void init() {
AnalysisLogger.getLogger().debug("INIZIALIZZATO!"); logger.debug("INIZIALIZZATO!");
JGraph jgraph = new JGraph(m_jgAdapter); JGraph jgraph = new JGraph(m_jgAdapter);
@@ -72,7 +75,7 @@ public class GraphDisplayer extends JApplet {
getContentPane().add(jgraph); getContentPane().add(jgraph);
resize(DEFAULT_SIZE); resize(DEFAULT_SIZE);
AnalysisLogger.getLogger().debug("RESIZED!"); logger.debug("RESIZED!");
} }
public void generateGraph() { public void generateGraph() {

View File

@@ -13,27 +13,26 @@ import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.DBObject
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult; import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.TSObjectTransformer; import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.TSObjectTransformer;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.interfaces.Reference; import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.interfaces.Reference;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class CategoryGuesser { public class CategoryGuesser {
/**
* @param args
* @throws Exception
*/
private static Logger logger = LoggerFactory.getLogger(CategoryGuesser.class);
private final static int MAXRESULTS = 10; private final static int MAXRESULTS = 10;
public static void showResults(ArrayList<SingleResult> results) { public static void showResults(ArrayList<SingleResult> results) {
AnalysisLogger.getLogger().warn("CLASSIFICATION RESULT:\n"); logger.warn("CLASSIFICATION RESULT:\n");
int i = 1; int i = 1;
for (SingleResult result : results) { for (SingleResult result : results) {
if (result.getColumn() != null) if (result.getColumn() != null)
AnalysisLogger.getLogger().warn(i + ": " + result.getCategory() + " - " + result.getColumn() + " ; SCORE: " + result.getStringScore() + "%"); logger.warn(i + ": " + result.getCategory() + " - " + result.getColumn() + " ; SCORE: " + result.getStringScore() + "%");
else else
AnalysisLogger.getLogger().warn(i + ": " + result.getCategory() + " ; SCORE: " + result.getStringScore() + "%"); logger.warn(i + ": " + result.getCategory() + " ; SCORE: " + result.getStringScore() + "%");
i++; i++;
} }
@@ -57,7 +56,7 @@ public class CategoryGuesser {
String result = results.toString(); String result = results.toString();
showResults(results); showResults(results);
AnalysisLogger.getLogger().info("CLASSIFICATION RESULT " + result + " " + CategoryGuesser.resultString(result, correctFamily, correctColumn)); logger.info("CLASSIFICATION RESULT " + result + " " + CategoryGuesser.resultString(result, correctFamily, correctColumn));
if (CategoryGuesser.CheckCompleteResult(result, correctFamily, correctColumn)) if (CategoryGuesser.CheckCompleteResult(result, correctFamily, correctColumn))
columnscore++; columnscore++;
@@ -70,7 +69,7 @@ public class CategoryGuesser {
double percColumn = ((double) columnscore / (double) attempts) * 100; double percColumn = ((double) columnscore / (double) attempts) * 100;
double percFamily = ((double) familyscore / (double) attempts) * 100; double percFamily = ((double) familyscore / (double) attempts) * 100;
AnalysisLogger.getLogger().info("->ACCURACY ON FAMILY " + correctFamily + ":" + percFamily + " ACCURACY ON COLUMN " + correctColumn + ":" + percColumn); logger.info("->ACCURACY ON FAMILY " + correctFamily + ":" + percFamily + " ACCURACY ON COLUMN " + correctColumn + ":" + percColumn);
} }
public static String resultString(String result, String family, String column) { public static String resultString(String result, String family, String column) {
@@ -142,9 +141,8 @@ public class CategoryGuesser {
public void init(String categoryFilter, String columnFilter, LexicalEngineConfiguration externalConfig) throws Exception { public void init(String categoryFilter, String columnFilter, LexicalEngineConfiguration externalConfig) throws Exception {
String cfgFileCompletePath = cfgFile; String cfgFileCompletePath = cfgFile;
AnalysisLogger.setLogger(LogFile);
AnalysisLogger.getLogger().trace("******************INITIALIZING******************"); logger.trace("******************INITIALIZING******************");
config = new LexicalEngineConfiguration(); config = new LexicalEngineConfiguration();
config.configureByStream(cfgFileCompletePath); config.configureByStream(cfgFileCompletePath);
@@ -159,7 +157,7 @@ public class CategoryGuesser {
DBObjectTranslator dbo = new DBObjectTranslator(); DBObjectTranslator dbo = new DBObjectTranslator();
if (col == null) { if (col == null) {
AnalysisLogger.getLogger().trace("******************Order Category******************"); logger.trace("******************Order Category******************");
if (externalConfig == null) if (externalConfig == null)
externalConfig = new LexicalEngineConfiguration(); externalConfig = new LexicalEngineConfiguration();
@@ -169,7 +167,7 @@ public class CategoryGuesser {
dbo.buildCategoriesStructure(dbSession, externalConfig.getReferenceTable(), externalConfig.getReferenceColumn(), externalConfig.getIdColumn(), externalConfig.getNameHuman(), externalConfig.getDescription()); dbo.buildCategoriesStructure(dbSession, externalConfig.getReferenceTable(), externalConfig.getReferenceColumn(), externalConfig.getIdColumn(), externalConfig.getNameHuman(), externalConfig.getDescription());
col = TSObjectTransformer.transform2List(dbo, config, categoryFilter); col = TSObjectTransformer.transform2List(dbo, config, categoryFilter);
AnalysisLogger.getLogger().trace("***************End Ordering********************"); logger.trace("***************End Ordering********************");
originalCol = col.generateNovelList(); originalCol = col.generateNovelList();
} else { } else {
col = originalCol.generateNovelList(); col = originalCol.generateNovelList();
@@ -181,7 +179,6 @@ public class CategoryGuesser {
public void initSingleMatcher(LexicalEngineConfiguration externalConfig, String ColumnFilter) throws Exception { public void initSingleMatcher(LexicalEngineConfiguration externalConfig, String ColumnFilter) throws Exception {
String cfgFileCompletePath = cfgFile; String cfgFileCompletePath = cfgFile;
AnalysisLogger.setLogger(LogFile);
config = new LexicalEngineConfiguration(); config = new LexicalEngineConfiguration();
config.configureByStream(cfgFileCompletePath); config.configureByStream(cfgFileCompletePath);
@@ -220,9 +217,8 @@ public class CategoryGuesser {
public void runGuesser(String seriesName, String columnName, LexicalEngineConfiguration externalConfig, String CategoryFilter, String ColumnFilter, String SingletonString) throws Exception { public void runGuesser(String seriesName, String columnName, LexicalEngineConfiguration externalConfig, String CategoryFilter, String ColumnFilter, String SingletonString) throws Exception {
SessionFactory dbSession = null; SessionFactory dbSession = null;
String cfgFileCompletePath = cfgFile; String cfgFileCompletePath = cfgFile;
AnalysisLogger.setLogger(LogFile);
AnalysisLogger.getLogger().debug("Guessing Table " + seriesName + " column " + columnName); logger.debug("Guessing Table " + seriesName + " column " + columnName);
if (externalConfig != null) { if (externalConfig != null) {
config = new LexicalEngineConfiguration(); config = new LexicalEngineConfiguration();
config.configureByStream(cfgFileCompletePath); config.configureByStream(cfgFileCompletePath);
@@ -247,7 +243,7 @@ public class CategoryGuesser {
//modification of 10/10/11 calculate structure each time //modification of 10/10/11 calculate structure each time
// if (col == null) { // if (col == null) {
AnalysisLogger.getLogger().trace("******************Order Category******************"); logger.trace("******************Order Category******************");
if (externalConfig !=null){ if (externalConfig !=null){
ArrayList<Category> externalcategories = externalConfig.getCategories(); ArrayList<Category> externalcategories = externalConfig.getCategories();
if ((externalcategories!=null) && (externalcategories.size()>0)){ if ((externalcategories!=null) && (externalcategories.size()>0)){
@@ -259,7 +255,7 @@ public class CategoryGuesser {
} }
col = TSObjectTransformer.transform2List(dbo, config, CategoryFilter); col = TSObjectTransformer.transform2List(dbo, config, CategoryFilter);
AnalysisLogger.getLogger().trace("***************End Ordering********************"); logger.trace("***************End Ordering********************");
originalCol = col.generateNovelList(); originalCol = col.generateNovelList();
/* /*
} else { } else {
@@ -267,7 +263,7 @@ public class CategoryGuesser {
} }
*/ */
AnalysisLogger.getLogger().warn("Starting Calculation...wait..."); logger.warn("Starting Calculation...wait...");
long t0 = System.currentTimeMillis(); long t0 = System.currentTimeMillis();
@@ -284,16 +280,16 @@ public class CategoryGuesser {
checkingResults = getClassification(); checkingResults = getClassification();
while ((checkingResults == null || checkingResults.size() == 0) && (triesCounter < maxTriesClassification)) { while ((checkingResults == null || checkingResults.size() == 0) && (triesCounter < maxTriesClassification)) {
AnalysisLogger.getLogger().warn("..another processing pass is required. Attempt number " + (triesCounter + 1)); logger.warn("..another processing pass is required. Attempt number " + (triesCounter + 1));
triesCounter++; triesCounter++;
float differencialThr = config.getCategoryDiscardDifferencialThreshold(); float differencialThr = config.getCategoryDiscardDifferencialThreshold();
float acceptanceThr = config.getEntryAcceptanceThreshold(); float acceptanceThr = config.getEntryAcceptanceThreshold();
// reduce the thresholds of 10 points and recalculate // reduce the thresholds of 10 points and recalculate
config.setCategoryDiscardDifferencialThreshold(Math.max(differencialThr - 20, 0)); config.setCategoryDiscardDifferencialThreshold(Math.max(differencialThr - 20, 0));
config.setEntryAcceptanceThreshold(Math.max(acceptanceThr - 20, 0)); config.setEntryAcceptanceThreshold(Math.max(acceptanceThr - 20, 0));
AnalysisLogger.getLogger().trace("Performing next processing pass"); logger.trace("Performing next processing pass");
runGuesser(seriesName, columnName, externalConfig, CategoryFilter, ColumnFilter, SingletonString); runGuesser(seriesName, columnName, externalConfig, CategoryFilter, ColumnFilter, SingletonString);
AnalysisLogger.getLogger().debug("End processing pass"); logger.debug("End processing pass");
// if (oneshotMode) // if (oneshotMode)
// checkingResults = getClassification(); // checkingResults = getClassification();
@@ -306,7 +302,7 @@ public class CategoryGuesser {
long t1 = System.currentTimeMillis() - t0; long t1 = System.currentTimeMillis() - t0;
AnalysisLogger.getLogger().warn("...End Calculation in " + t1 + "ms"); logger.warn("...End Calculation in " + t1 + "ms");
triesCounter = 0; triesCounter = 0;
lastResults=checkingResults; lastResults=checkingResults;
@@ -416,7 +412,7 @@ public class CategoryGuesser {
// multiply for importance // multiply for importance
BigInteger catElements = col.getScoresTable().get(processor.bestCategories.get(i)).getCategoryElements(); BigInteger catElements = col.getScoresTable().get(processor.bestCategories.get(i)).getCategoryElements();
// AnalysisLogger.getLogger().warn("\t elements "+catElements+" sum "+sumElements); // logger.warn("\t elements "+catElements+" sum "+sumElements);
double weight = new BigDecimal(catElements).divide(sumElements, 2, BigDecimal.ROUND_HALF_UP).doubleValue(); double weight = new BigDecimal(catElements).divide(sumElements, 2, BigDecimal.ROUND_HALF_UP).doubleValue();
@@ -429,7 +425,7 @@ public class CategoryGuesser {
else if (weight < 0.05) else if (weight < 0.05)
weight = 0.05; weight = 0.05;
AnalysisLogger.getLogger().warn("WEIGHT FOR CATEGORY " + processor.bestCategories.get(i) + "-" + processor.bestColumns.get(i) + " : " + weight + " SCORE " + score); logger.warn("WEIGHT FOR CATEGORY " + processor.bestCategories.get(i) + "-" + processor.bestColumns.get(i) + " : " + weight + " SCORE " + score);
// recalculate weights // recalculate weights
score = score * weight; score = score * weight;
@@ -441,21 +437,21 @@ public class CategoryGuesser {
subscores.add(score); subscores.add(score);
} }
// AnalysisLogger.getLogger().warn("MAX SCORE "+maxscore); // logger.warn("MAX SCORE "+maxscore);
for (int i = 0; i < size; i++) { for (int i = 0; i < size; i++) {
// double score = processor.bestScores.get(i); // double score = processor.bestScores.get(i);
double score = subscores.get(i); double score = subscores.get(i);
// AnalysisLogger.getLogger().warn("SCORE FOR CATEGORY "+processor.bestCategories.get(i)+" -COLUMN : "+processor.bestColumns.get(i)+" - "+score); // logger.warn("SCORE FOR CATEGORY "+processor.bestCategories.get(i)+" -COLUMN : "+processor.bestColumns.get(i)+" - "+score);
// normalizing percentages!!! // normalizing percentages!!!
score = (score / (maxscore + ((size > 1) ? 1 : 0))) * 100; score = (score / (maxscore + ((size > 1) ? 1 : 0))) * 100;
// AnalysisLogger.getLogger().warn("SCORE FOR CATEGORY "+processor.bestCategories.get(i)+" -COLUMN : "+processor.bestColumns.get(i)+" - "+score); // logger.warn("SCORE FOR CATEGORY "+processor.bestCategories.get(i)+" -COLUMN : "+processor.bestColumns.get(i)+" - "+score);
if (score > config.categoryDiscardDifferencialThreshold) { if (score > config.categoryDiscardDifferencialThreshold) {
// AnalysisLogger.getLogger().warn("SCORE "+score); // logger.warn("SCORE "+score);
// insert into the right place // insert into the right place
int index = results.size(); int index = results.size();
int j = 0; int j = 0;
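Besides the logger swap, the two AnalysisLogger.setLogger(LogFile) calls in init and runGuesser are removed without a replacement, so callers no longer pass a logging configuration file through this class; output is assumed to be governed by whatever slf4j binding and configuration sits on the classpath. A hypothetical caller sketch under that assumption, relying on the three-argument runGuesser overload used by the example classes removed later in this commit (table and column names are placeholders, not values from the repository):

import java.util.ArrayList;

import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;

public class GuesserCallerSketch {

    public static void main(String[] args) throws Exception {
        // no AnalysisLogger.setLogger("...") call any more: logging is configured
        // by the slf4j binding, not through the guesser API
        CategoryGuesser guesser = new CategoryGuesser();
        LexicalEngineConfiguration conf = new LexicalEngineConfiguration();

        // placeholder table and column names
        guesser.runGuesser("my_import_table", "field1", conf);

        ArrayList<SingleResult> classification = guesser.getLastResults();
        CategoryGuesser.showResults(classification);
    }
}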

View File

@@ -1,36 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.run;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.DBObjectTranslator;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.TSObjectTransformer;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory;
import org.hibernate.SessionFactory;
public class StarGraphExtraction {
/**
* @param args
*/
public static void main(String[] args) {
try {
RunMain();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
private final static String ConfigurationFileNameLocal = "hibernate.cfg.xml";
public static void RunMain() throws Exception{
AnalysisLogger.setLogger("./ALog.properties");
//DB configuration - initialize the session and connect
SessionFactory dbSession = DatabaseFactory.initDBConnection(ConfigurationFileNameLocal);
DBObjectTranslator dbo = new DBObjectTranslator();
dbo.buildWholeStructure(dbSession,null,null,null,null,null);
TSObjectTransformer.transform2Graph(dbo);
}
}

View File

@@ -1,52 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.test;
import java.util.ArrayList;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.Category;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
public class ExampleGuesser {
public static void main(String[] args) {
try {
String seriesName = "IMPORT_ecd2e3a0_ee90_11e0_be9e_90f3621758ee";
String column = "field4";
CategoryGuesser guesser = new CategoryGuesser();
LexicalEngineConfiguration conf = new LexicalEngineConfiguration();
ArrayList<Category> categories = new ArrayList<Category>();
//human name, index, table name, description
categories.add(new Category("COUNTRY_OLD","39c98800-dd3c-11e0-b8d1-d1e2e7ba4f9d","rdf39c98800dd3c11e0b8d1d1e2e7ba4f9d","country"));
categories.add(new Category("CONTINENT_OLD","1d5d51f0-dd42-11e0-b8d3-d1e2e7ba4f9d","rdf1d5d51f0dd4211e0b8d3d1e2e7ba4f9d","continent reference data"));
categories.add(new Category("SPECIES_OLD","0a7fb500-dd3d-11e0-b8d1-d1e2e7ba4f9d","rdf0a7fb500dd3d11e0b8d1d1e2e7ba4f9d","species"));
categories.add(new Category("CodeListCountry","4c8d93a0-edc2-11e0-93e4-f6a9821baa29","rdf4c8d93a0edc211e093e4f6a9821baa29","Country"));
categories.add(new Category("CL_DIVISION","1140bdf0-dd2c-11e0-9220-ae17b3db32b7","rdf1140bdf0dd2c11e09220ae17b3db32b7","undefined"));
categories.add(new Category("CL_ASFIS_TAX","f87360f0-d9f9-11e0-ba05-d9adb0db767c","rdff87360f0d9f911e0ba05d9adb0db767c","undefined"));
conf.setCategories(categories);
//database Parameters
conf.setDatabaseUserName("gcube");
conf.setDatabasePassword("d4science2");
conf.setDatabaseDriver("org.postgresql.Driver");
conf.setDatabaseURL("jdbc:postgresql://localhost/testdb");
conf.setDatabaseDialect("org.hibernate.dialect.PostgreSQLDialect");
guesser.runGuesser(seriesName, column, conf);
ArrayList<SingleResult> classification = guesser.getLastResults();
//only for debug
guesser.showResults(classification);
guesser.shutdown();
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -1,58 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.test;
import java.util.ArrayList;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.Category;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class ExampleLexicalMatcher {
public static void main(String[] args) {
try {
String singleton = "Faroe Island";
String family = "COUNTRY_OLD";
String column = "field6";
CategoryGuesser guesser = new CategoryGuesser();
LexicalEngineConfiguration conf = new LexicalEngineConfiguration();
ArrayList<Category> categories = new ArrayList<Category>();
//human name, index, table name, description
categories.add(new Category("COUNTRY_OLD","39c98800-dd3c-11e0-b8d1-d1e2e7ba4f9d","rdf39c98800dd3c11e0b8d1d1e2e7ba4f9d","country"));
categories.add(new Category("CONTINENT_OLD","1d5d51f0-dd42-11e0-b8d3-d1e2e7ba4f9d","rdf1d5d51f0dd4211e0b8d3d1e2e7ba4f9d","continent reference data"));
categories.add(new Category("SPECIES_OLD","0a7fb500-dd3d-11e0-b8d1-d1e2e7ba4f9d","rdf0a7fb500dd3d11e0b8d1d1e2e7ba4f9d","species"));
categories.add(new Category("CodeListCountry","4c8d93a0-edc2-11e0-93e4-f6a9821baa29","rdf4c8d93a0edc211e093e4f6a9821baa29","Country"));
categories.add(new Category("CL_DIVISION","1140bdf0-dd2c-11e0-9220-ae17b3db32b7","rdf1140bdf0dd2c11e09220ae17b3db32b7","undefined"));
categories.add(new Category("CL_ASFIS_TAX","f87360f0-d9f9-11e0-ba05-d9adb0db767c","rdff87360f0d9f911e0ba05d9adb0db767c","undefined"));
conf.setCategories(categories);
//CHANGE THIS TO ENHANCE THE RECALL
conf.setEntryAcceptanceThreshold(30);
conf.setReferenceChunksToTake(-1);
conf.setTimeSeriesChunksToTake(-1);
conf.setUseSimpleDistance(false);
//database Parameters
conf.setDatabaseUserName("gcube");
conf.setDatabasePassword("d4science2");
conf.setDatabaseURL("jdbc:postgresql://localhost/testdb");
conf.setDatabaseDialect("org.hibernate.dialect.PostgreSQLDialect");
guesser.runGuesser(singleton, conf, family,column );
ArrayList<SingleResult> detailedResults = guesser.getDetailedMatches();
AnalysisLogger.getLogger().warn("Detailed Match on Name :"+singleton);
CategoryGuesser.showResults(detailedResults);
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -1,65 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.test;
import java.util.ArrayList;
import java.util.Properties;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.Category;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class TestExternalCfgProduction {
public static void main(String[] args) {
try {
int attempts = 1;
// new Properties().load(ClassLoader.getSystemResourceAsStream("lexicalguesser/lexicalGuesser.properties"));
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String seriesName = "IMPORT_ecd2e3a0_ee90_11e0_be9e_90f3621758ee";
String column = "field4";
LexicalEngineConfiguration conf = new LexicalEngineConfiguration();
conf.setReferenceTable("codelist1733371938");
conf.setReferenceColumn("ifield14");
conf.setNameHuman("ifield1");
conf.setIdColumn("ifield0");
conf.setDescription("ifield2");
ArrayList<Category> categories = new ArrayList<Category>();
//human name, index, table name, description
categories.add(new Category("COUNTRY_OLD","39c98800-dd3c-11e0-b8d1-d1e2e7ba4f9d","rdf39c98800dd3c11e0b8d1d1e2e7ba4f9d","country"));
categories.add(new Category("CONTINENT_OLD","1d5d51f0-dd42-11e0-b8d3-d1e2e7ba4f9d","rdf1d5d51f0dd4211e0b8d3d1e2e7ba4f9d","continent reference data"));
categories.add(new Category("SPECIES_OLD","0a7fb500-dd3d-11e0-b8d1-d1e2e7ba4f9d","rdf0a7fb500dd3d11e0b8d1d1e2e7ba4f9d","species"));
categories.add(new Category("CodeListCountry","4c8d93a0-edc2-11e0-93e4-f6a9821baa29","rdf4c8d93a0edc211e093e4f6a9821baa29","Country"));
categories.add(new Category("CL_DIVISION","1140bdf0-dd2c-11e0-9220-ae17b3db32b7","rdf1140bdf0dd2c11e09220ae17b3db32b7","undefined"));
categories.add(new Category("CL_ASFIS_TAX","f87360f0-d9f9-11e0-ba05-d9adb0db767c","rdff87360f0d9f911e0ba05d9adb0db767c","undefined"));
conf.setCategories(categories);
//database Parameters
conf.setDatabaseUserName("gcube");
conf.setDatabasePassword("d4science2");
conf.setDatabaseDriver("org.postgresql.Driver");
conf.setDatabaseURL("jdbc:postgresql://localhost/testdb");
conf.setDatabaseDialect("org.hibernate.dialect.PostgreSQLDialect");
guesser.runGuesser(seriesName, column, conf);
guesser.showResults(guesser.getClassification());
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -1,76 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.test;
import java.util.ArrayList;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.Category;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class TestSingleExternalCfgProduction {
public static void main(String[] args) {
try {
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String singleton = "Faroe Island";
String family = "COUNTRY_OLD";
String column = "field6";
LexicalEngineConfiguration conf = new LexicalEngineConfiguration();
/*
conf.setReferenceTable("codelist1733371938");
conf.setReferenceColumn("ifield14");
conf.setNameHuman("ifield1");
conf.setIdColumn("ifield0");
conf.setDescription("ifield2");
*/
ArrayList<Category> categories = new ArrayList<Category>();
//human name, index, table name, description
categories.add(new Category("COUNTRY_OLD","39c98800-dd3c-11e0-b8d1-d1e2e7ba4f9d","rdf39c98800dd3c11e0b8d1d1e2e7ba4f9d","country"));
categories.add(new Category("CONTINENT_OLD","1d5d51f0-dd42-11e0-b8d3-d1e2e7ba4f9d","rdf1d5d51f0dd4211e0b8d3d1e2e7ba4f9d","continent reference data"));
categories.add(new Category("SPECIES_OLD","0a7fb500-dd3d-11e0-b8d1-d1e2e7ba4f9d","rdf0a7fb500dd3d11e0b8d1d1e2e7ba4f9d","species"));
categories.add(new Category("CodeListCountry","4c8d93a0-edc2-11e0-93e4-f6a9821baa29","rdf4c8d93a0edc211e093e4f6a9821baa29","Country"));
categories.add(new Category("CL_DIVISION","1140bdf0-dd2c-11e0-9220-ae17b3db32b7","rdf1140bdf0dd2c11e09220ae17b3db32b7","undefined"));
conf.setCategories(categories);
//CHANGE THIS TO ENHANCE THE RECALL
conf.setEntryAcceptanceThreshold(30);
conf.setReferenceChunksToTake(-1);
conf.setTimeSeriesChunksToTake(-1);
conf.setUseSimpleDistance(false);
//database Parameters
conf.setDatabaseUserName("gcube");
conf.setDatabasePassword("d4science2");
// conf.setDatabaseDriver("org.postgresql.Driver");
conf.setDatabaseURL("jdbc:postgresql://localhost/testdb");
conf.setDatabaseDialect("org.hibernate.dialect.PostgreSQLDialect");
guesser.runGuesser(singleton, conf, family,column );
ArrayList<SingleResult> detailedResults = guesser.getDetailedMatches();
AnalysisLogger.getLogger().warn("Detailed Match on Name :"+singleton);
CategoryGuesser.showResults(detailedResults);
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -1,58 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class BenchMarkTest1 {
public static void main(String[] args) {
try {
int attempts = 1;
String configPath = ".";
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String seriesName = "import_bdefb470_5cea_11df_a0a6_909e7d074592";
String column = "field1";
String correctFamily = "country";
String correctColumn = "name_en";
CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
//bench 2
AnalysisLogger.getLogger().warn("----------------------BENCH 2-------------------------");
seriesName = "import_bdefb470_5cea_11df_a0a6_909e7d074592";
column = "field2";
correctFamily = "area";
correctColumn = "name_en";
CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 2-----------------------\n");
//bench 3
AnalysisLogger.getLogger().warn("----------------------BENCH 3-------------------------");
seriesName = "import_bdefb470_5cea_11df_a0a6_909e7d074592";
column = "field4";
correctFamily = "species";
correctColumn = "scientific_name";
CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 3-----------------------\n");
//bench 4
AnalysisLogger.getLogger().warn("----------------------BENCH 4-------------------------");
seriesName = "import_bdefb470_5cea_11df_a0a6_909e7d074592";
column = "field3";
correctFamily = "species";
correctColumn = "scientific_name";
// CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 4-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -1,54 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class BenchMarkTest2 {
public static void main(String[] args) {
try {
int attempts = 1;
String configPath = ".";
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String seriesName = "import_2c97f580_35a0_11df_b8b3_aa10916debe6";
String column = "field1";
String correctFamily = "SPECIES";
String correctColumn = "SCIENTIFIC_NAME";
CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
//bench 2
AnalysisLogger.getLogger().warn("----------------------BENCH 2-------------------------");
seriesName = "import_2c97f580_35a0_11df_b8b3_aa10916debe6";
column = "field2";
correctFamily = "COUNTRY";
correctColumn = "ISO_3_CODE";
CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 2-----------------------\n");
//bench 4
AnalysisLogger.getLogger().warn("----------------------BENCH 4-------------------------");
seriesName = "import_2c97f580_35a0_11df_b8b3_aa10916debe6";
column = "field3";
correctFamily = "AREA";
correctColumn = "NAME_EN";
CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 4-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -1,31 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class BenchMarkTest3 {
public static void main(String[] args) {
try {
int attempts = 1;
String configPath = ".";
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String seriesName = "import_2c97f580_35a0_11df_b8b3_aa10916debe6";
String column = "field1";
String correctFamily = "SPECIES";
String correctColumn = "SCIENTIFIC_NAME";
CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -1,31 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class BenchMarkTest4 {
public static void main(String[] args) {
try {
int attempts = 1;
String configPath = ".";
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String seriesName = "import_2c97f580_35a0_11df_b8b3_aa10916debe6";
String column = "field3";
String correctFamily = "AREA";
String correctColumn = "NAME_EN";
CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -1,31 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class BenchMarkTest5 {
public static void main(String[] args) {
try {
int attempts = 1;
String configPath = ".";
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String seriesName = "import_532bba80_1c8f_11df_a4ee_87804054691e";
String column = "field2";
String correctFamily = "ISSCAAP GROUP";
String correctColumn = "NAME_EN";
CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -1,52 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class BenchMarkTestExternalCfg {
public static void main(String[] args) {
try {
int attempts = 1;
String configPath = ".";
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String seriesName = "import_532bba80_1c8f_11df_a4ee_87804054691e";
String column = "field2";
String correctFamily = "ISSCAAP GROUP";
String correctColumn = "NAME_EN";
LexicalEngineConfiguration conf = new LexicalEngineConfiguration();
conf.setCategoryDiscardDifferencialThreshold(5);
conf.setCategoryDiscardThreshold(0);
conf.setChunkSize(25);
conf.setEntryAcceptanceThreshold(50);
conf.setNumberOfThreadsToUse(2);
conf.setRandomTake(true);
conf.setReferenceChunksToTake(20);
conf.setTimeSeriesChunksToTake(1);
conf.setUseSimpleDistance(false);
//database Parameters
conf.setDatabaseUserName("root");
// conf.setDatabasePassword("password");
conf.setDatabaseDriver("com.mysql.jdbc.Driver");
conf.setDatabaseURL("jdbc:mysql://localhost/timeseries");
conf.setDatabaseDialect("org.hibernate.dialect.MySQLDialect");
conf.setDatabaseAutomaticTestTable("connectiontesttable");
conf.setDatabaseIdleConnectionTestPeriod("3600");
CategoryGuesser.AccuracyCalc(conf,guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -1,38 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old;
import java.util.ArrayList;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class BenchMarkTestFilterCategory {
public static void main(String[] args) {
try {
String configPath = ".";
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String seriesName = "ref_order";
String column = "scientific_name";
String correctFamily = "order";
String correctColumn = "scientific_name";
guesser.runGuesser(seriesName, column, null, correctFamily, correctColumn);
ArrayList<SingleResult> results = guesser.getClassification();
CategoryGuesser.showResults(results);
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -1,51 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old;
import java.util.ArrayList;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class BenchMarkTestSingleton {
public static void main(String[] args) {
try {
String configPath = ".";
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String singleton = "sarda sarda";
// String singleton = "Mitella pollicipes";
// String singleton = "policipes";
// String singleton = "";
String family = "catalog life";
String column = "scientific_name";
LexicalEngineConfiguration conf = new LexicalEngineConfiguration();
//CHANGE THIS TO ENHANCE THE RECALL
conf.setEntryAcceptanceThreshold(30);
conf.setReferenceChunksToTake(-1);
conf.setTimeSeriesChunksToTake(-1);
conf.setUseSimpleDistance(false);
guesser.runGuesser(singleton, conf, family,column );
ArrayList<SingleResult> detailedResults = guesser.getDetailedMatches();
AnalysisLogger.getLogger().warn("Detailed Match on Name :"+singleton);
CategoryGuesser.showResults(detailedResults);
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -1,31 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class BenchMarkTestTSCountry {
public static void main(String[] args) {
try {
int attempts = 1;
String configPath = ".";
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String seriesName = "import_bdefb470_5cea_11df_a0a6_909e7d074592";
String column = "field1";
String correctFamily = "country";
String correctColumn = "name_en";
CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -1,88 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class BenchMarkTrainingSet {
public static void main(String[] args) {
try {
String configPath =".";
int attempts = 1;
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String seriesName = "ref_commission";
String column = "name_en";
String correctFamily = "commission";
String correctColumn = "name_en";
CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
AnalysisLogger.getLogger().warn("----------------------BENCH 2-------------------------");
seriesName = "ref_species";
column = "scientific_name";
correctFamily = "species";
correctColumn = "scientific_name";
CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 2-----------------------\n");
AnalysisLogger.getLogger().warn("----------------------BENCH 3-------------------------");
seriesName = "ref_area";
column = "name_en";
correctFamily = "area";
correctColumn = "name_en";
// CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 3-----------------------\n");
AnalysisLogger.getLogger().warn("----------------------BENCH 4-------------------------");
seriesName = "ref_ocean";
column = "name_en";
correctFamily = "ocean";
correctColumn = "name_en";
// CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 4-----------------------\n");
AnalysisLogger.getLogger().warn("----------------------BENCH 5-------------------------");
seriesName = "ref_geo_region";
column = "name_en";
correctFamily = "geo region";
correctColumn = "name_en";
// CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 5-----------------------\n");
AnalysisLogger.getLogger().warn("----------------------BENCH 6-------------------------");
seriesName = "ref_fa_region";
column = "name_en";
correctFamily = "fa region";
correctColumn = "name_en";
// CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 6-----------------------\n");
AnalysisLogger.getLogger().warn("----------------------BENCH 7-------------------------");
seriesName = "ref_order";
column = "scientific_name";
correctFamily = "order";
correctColumn = "scientific_name";
// CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 7-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -1,33 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class BenchMarkTrainingSetScientificName {
public static void main(String[] args) {
try {
String configPath =".";
int attempts = 1;
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String seriesName = "ref_species";
String column = "scientific_name";
String correctFamily = "species";
String correctColumn = "scientific_name";
CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn);
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -1,64 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class TestExternalCfgProduction {
public static void main(String[] args) {
try {
int attempts = 1;
String configPath = ".";
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
// String seriesName = "rdmc366dfe0ddf511e086b1b1c5d6fb1c27";
String seriesName = "IMPORT_ecd2e3a0_ee90_11e0_be9e_90f3621758ee";
String column = "field4";
LexicalEngineConfiguration conf = new LexicalEngineConfiguration();
/*
conf.setCategoryDiscardDifferencialThreshold(5);
conf.setCategoryDiscardThreshold(0);
conf.setChunkSize(25);
conf.setEntryAcceptanceThreshold(50);
conf.setNumberOfThreadsToUse(2);
conf.setRandomTake(true);
conf.setReferenceChunksToTake(20);
conf.setTimeSeriesChunksToTake(1);
conf.setUseSimpleDistance(false);
*/
//database Parameters
conf.setDatabaseUserName("utente");
conf.setDatabasePassword("d4science");
// conf.setDatabaseDriver("org.postgresql.Driver");
conf.setDatabaseURL("jdbc:postgresql://dbtest.next.research-infrastructures.eu/timeseries");
conf.setDatabaseDialect("org.hibernate.dialect.PostgreSQLDialect");
conf.setDatabaseAutomaticTestTable("connectiontesttable");
conf.setDatabaseIdleConnectionTestPeriod("3600");
conf.setReferenceTable("codelist1733371938");
conf.setReferenceColumn("ifield14");
conf.setNameHuman("ifield1");
conf.setIdColumn("ifield0");
conf.setDescription("ifield2");
guesser.runGuesser(seriesName, column, conf);
guesser.showResults(guesser.getClassification());
// AnalysisLogger.getLogger().warn();
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -1,71 +0,0 @@
package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old;
import java.util.ArrayList;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult;
import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class TestSingleExternalCfgProduction {
public static void main(String[] args) {
try {
String configPath = ".";
CategoryGuesser guesser = new CategoryGuesser();
//bench 1
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------");
String singleton = "Faroe Island";
// String singleton = "Mitella pollicipes";
// String singleton = "policipes";
// String singleton = "";
// String family = "rdf0a7fb500dd3d11e0b8d1d1e2e7ba4f9d";
String family = "COUNTRY_OLD";
String column = "field6";
LexicalEngineConfiguration conf = new LexicalEngineConfiguration();
//CHANGE THIS TO ENHANCE THE RECALL
conf.setEntryAcceptanceThreshold(30);
conf.setReferenceChunksToTake(-1);
conf.setTimeSeriesChunksToTake(-1);
conf.setUseSimpleDistance(false);
//database Parameters
conf.setDatabaseUserName("utente");
conf.setDatabasePassword("d4science");
// conf.setDatabaseDriver("org.postgresql.Driver");
conf.setDatabaseURL("jdbc:postgresql://dbtest.next.research-infrastructures.eu/timeseries");
conf.setDatabaseDialect("org.hibernate.dialect.PostgreSQLDialect");
conf.setDatabaseAutomaticTestTable("connectiontesttable");
conf.setDatabaseIdleConnectionTestPeriod("3600");
conf.setReferenceTable("codelist1733371938");
conf.setReferenceColumn("ifield14");
conf.setNameHuman("ifield1");
conf.setIdColumn("ifield0");
conf.setDescription("ifield2");
guesser.initSingleMatcher(conf,column );
guesser.runGuesser(singleton, null, family,column );
ArrayList<SingleResult> detailedResults = guesser.getDetailedMatches();
AnalysisLogger.getLogger().warn("Detailed Match on Name :"+singleton);
CategoryGuesser.showResults(detailedResults);
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n");
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -1,23 +1,26 @@
package org.gcube.contentmanagement.lexicalmatcher.utils; package org.gcube.contentmanagement.lexicalmatcher.utils;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
@Deprecated @Deprecated
public class AnalysisLogger { public class AnalysisLogger {
private static Logger logger = Logger.getLogger("AnalysisLogger"); private static Logger logger = Logger.getLogger("AnalysisLogger");
//private static Logger hibernateLogger = LoggerFactory.getLogger("HibernateLogger"); //private static Logger hibernateLogger = LoggerFactory.getLogger("HibernateLogger");
public static Logger getLogger() { @Deprecated
return logger; public static Logger getLogger() {
} return logger;
}
// takes the path to the log4j config file as input
public static void setLogger(String path) { // takes the path to the log4j config file as input
@Deprecated
} public static void setLogger(String path) {
public static void printStackTrace(Exception e) { }
logger.error("error ",e);
} @Deprecated
} public static void printStackTrace(Exception e) {
logger.error("error ",e);
}
}
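The hunk above keeps AnalysisLogger only as a deprecated shim; the rest of the changeset points call sites at per-class SLF4J loggers instead. Below is a minimal sketch of that target pattern, for orientation only: the class name MigratedComponent and its method are illustrative and not part of the commit.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MigratedComponent {

    // one logger per class, named after the class, replacing the shared AnalysisLogger singleton
    private static final Logger logger = LoggerFactory.getLogger(MigratedComponent.class);

    public void dropTable(String table) {
        // before: AnalysisLogger.getLogger().debug("dropping table " + table);
        logger.debug("dropping table " + table);
    }
}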

View File

@@ -3,16 +3,14 @@ package org.gcube.contentmanagement.lexicalmatcher.utils;
import java.math.BigInteger; import java.math.BigInteger;
import java.util.ArrayList; import java.util.ArrayList;
import org.gcube.contentmanagement.graphtools.abstracts.GenericStandaloneGraph;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class MathFunctions { public class MathFunctions {
/** private static Logger logger = LoggerFactory.getLogger(MathFunctions.class);
* @param args
*/
public static void main(String[] args) {
// TODO Auto-generated method stub
}
//increments a percentage o mean calculation when a lot of elements are present //increments a percentage o mean calculation when a lot of elements are present
public static float incrementPerc(float perc, float quantity, int N){ public static float incrementPerc(float perc, float quantity, int N){
@@ -78,7 +76,7 @@ public class MathFunctions {
} }
} }
AnalysisLogger.getLogger().trace("MathFunctions-> generateRandoms " + randomsSet.toString()); logger.trace("MathFunctions-> generateRandoms " + randomsSet.toString());
return randomsSet; return randomsSet;
} }

View File

@@ -3,7 +3,6 @@ package org.gcube.dataanalysis.ecoengine.clustering;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList; import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
@@ -13,7 +12,6 @@ import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType; import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
@@ -23,6 +21,8 @@ import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory; import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
import org.gcube.dataanalysis.ecoengine.utils.Transformations; import org.gcube.dataanalysis.ecoengine.utils.Transformations;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.rapidminer.example.Attribute; import com.rapidminer.example.Attribute;
import com.rapidminer.example.Attributes; import com.rapidminer.example.Attributes;
@@ -36,6 +36,8 @@ import com.rapidminer.tools.OperatorService;
public class DBScan implements Clusterer{ public class DBScan implements Clusterer{
private static Logger logger = LoggerFactory.getLogger(DBScan.class);
protected AlgorithmConfiguration config; protected AlgorithmConfiguration config;
protected String epsilon; protected String epsilon;
protected String minPoints; protected String minPoints;
@@ -170,20 +172,20 @@ public class DBScan implements Clusterer{
status = 0; status = 0;
if ((config!=null) && (initrapidminer)) if ((config!=null) && (initrapidminer))
config.initRapidMiner(); config.initRapidMiner();
AnalysisLogger.getLogger().debug("Initialized Rapid Miner "); logger.debug("Initialized Rapid Miner ");
AnalysisLogger.getLogger().debug("Initializing Database Connection"); logger.debug("Initializing Database Connection");
dbHibConnection=DatabaseUtils.initDBSession(config); dbHibConnection=DatabaseUtils.initDBSession(config);
//create the final table //create the final table
try{ try{
AnalysisLogger.getLogger().debug("dropping table "+OccurrencePointsClusterTable); logger.debug("dropping table "+OccurrencePointsClusterTable);
String dropStatement = DatabaseUtils.dropTableStatement(OccurrencePointsClusterTable); String dropStatement = DatabaseUtils.dropTableStatement(OccurrencePointsClusterTable);
AnalysisLogger.getLogger().debug("dropping table "+dropStatement); logger.debug("dropping table "+dropStatement);
DatabaseFactory.executeSQLUpdate(dropStatement, dbHibConnection); DatabaseFactory.executeSQLUpdate(dropStatement, dbHibConnection);
}catch(Exception e){ }catch(Exception e){
AnalysisLogger.getLogger().debug("Could not drop table "+OccurrencePointsClusterTable); logger.debug("Could not drop table "+OccurrencePointsClusterTable);
} }
//create Table //create Table
AnalysisLogger.getLogger().debug("Creating table "+OccurrencePointsClusterTable); logger.debug("Creating table "+OccurrencePointsClusterTable);
String [] features = FeaturesColumnNames.split(AlgorithmConfiguration.getListSeparator()); String [] features = FeaturesColumnNames.split(AlgorithmConfiguration.getListSeparator());
String columns = ""; String columns = "";
@@ -195,18 +197,18 @@ public class DBScan implements Clusterer{
String createStatement = "create table "+OccurrencePointsClusterTable+" ( "+columns+")"; String createStatement = "create table "+OccurrencePointsClusterTable+" ( "+columns+")";
// String createStatement = new DatabaseUtils(dbHibConnection).buildCreateStatement(OccurrencePointsTable,OccurrencePointsClusterTable); // String createStatement = new DatabaseUtils(dbHibConnection).buildCreateStatement(OccurrencePointsTable,OccurrencePointsClusterTable);
AnalysisLogger.getLogger().debug("Statement: "+createStatement); logger.debug("Statement: "+createStatement);
DatabaseFactory.executeSQLUpdate(createStatement, dbHibConnection); DatabaseFactory.executeSQLUpdate(createStatement, dbHibConnection);
//add two columns one for cluster and another for outliers //add two columns one for cluster and another for outliers
AnalysisLogger.getLogger().debug("Adding Columns"); logger.debug("Adding Columns");
DatabaseFactory.executeSQLUpdate(DatabaseUtils.addColumnStatement(OccurrencePointsClusterTable, clusterColumn, clusterColumnType), dbHibConnection); DatabaseFactory.executeSQLUpdate(DatabaseUtils.addColumnStatement(OccurrencePointsClusterTable, clusterColumn, clusterColumnType), dbHibConnection);
DatabaseFactory.executeSQLUpdate(DatabaseUtils.addColumnStatement(OccurrencePointsClusterTable, outliersColumn, outliersColumnType), dbHibConnection); DatabaseFactory.executeSQLUpdate(DatabaseUtils.addColumnStatement(OccurrencePointsClusterTable, outliersColumn, outliersColumnType), dbHibConnection);
AnalysisLogger.getLogger().debug("Getting Samples"); logger.debug("Getting Samples");
//build samples //build samples
try{ try{
getSamples(); getSamples();
}catch(Throwable e){ }catch(Throwable e){
AnalysisLogger.getLogger().debug("Error getting samples for clustering: "+e.getLocalizedMessage()); logger.debug("Error getting samples for clustering: "+e.getLocalizedMessage());
} }
status = 10f; status = 10f;
} }
@@ -242,7 +244,7 @@ public class DBScan implements Clusterer{
// limit = (int)(11d*Math.pow(N,2d/(k+1))); // limit = (int)(11d*Math.pow(N,2d/(k+1)));
// limit =(int) ((double)N/(1.3d)); // limit =(int) ((double)N/(1.3d));
AnalysisLogger.getLogger().debug("Clustering limit: "+limit); logger.debug("Clustering limit: "+limit);
List<Object> samples = DatabaseFactory.executeSQLQuery(DatabaseUtils.getColumnsElementsStatement(OccurrencePointsTable, FeaturesColumnNames, "limit "+limit), dbHibConnection); List<Object> samples = DatabaseFactory.executeSQLQuery(DatabaseUtils.getColumnsElementsStatement(OccurrencePointsTable, FeaturesColumnNames, "limit "+limit), dbHibConnection);
@@ -272,9 +274,9 @@ public class DBScan implements Clusterer{
ir++; ir++;
} }
AnalysisLogger.getLogger().debug("Building Sample Set For Miner"); logger.debug("Building Sample Set For Miner");
produceSamples(samplesVector); produceSamples(samplesVector);
AnalysisLogger.getLogger().debug("Obtained "+samplesVector.length+" chunks"); logger.debug("Obtained "+samplesVector.length+" chunks");
} }
public void produceSamples(double[][] sampleVectors) throws Exception{ public void produceSamples(double[][] sampleVectors) throws Exception{
@@ -292,7 +294,7 @@ public class DBScan implements Clusterer{
throw new Exception("DBScan: Error incomplete parameters"); throw new Exception("DBScan: Error incomplete parameters");
} }
AnalysisLogger.getLogger().debug("DBScan: Settin up the cluster"); logger.debug("DBScan: Settin up the cluster");
//take elements and produce example set //take elements and produce example set
com.rapidminer.operator.clustering.clusterer.DBScan clusterer = (com.rapidminer.operator.clustering.clusterer.DBScan) OperatorService.createOperator("DBScanClustering"); com.rapidminer.operator.clustering.clusterer.DBScan clusterer = (com.rapidminer.operator.clustering.clusterer.DBScan) OperatorService.createOperator("DBScanClustering");
clusterer.setParameter("local_random_seed", "-1"); clusterer.setParameter("local_random_seed", "-1");
@@ -303,11 +305,11 @@ public class DBScan implements Clusterer{
IOContainer innerInput = new IOContainer(points); IOContainer innerInput = new IOContainer(points);
AnalysisLogger.getLogger().debug("DBScan: Clustering..."); logger.debug("DBScan: Clustering...");
long ti= System.currentTimeMillis(); long ti= System.currentTimeMillis();
IOContainer output = clusterer.apply(innerInput); IOContainer output = clusterer.apply(innerInput);
AnalysisLogger.getLogger().debug("DBScan: ...ELAPSED CLUSTERING TIME: "+(System.currentTimeMillis()-ti)); logger.debug("DBScan: ...ELAPSED CLUSTERING TIME: "+(System.currentTimeMillis()-ti));
AnalysisLogger.getLogger().debug("DBScan: ...Clustering Finished in "+(System.currentTimeMillis()-t00)); logger.debug("DBScan: ...Clustering Finished in "+(System.currentTimeMillis()-t00));
status = 70f; status = 70f;
IOObject[] outputvector = output.getIOObjects(); IOObject[] outputvector = output.getIOObjects();
@@ -315,7 +317,7 @@ public class DBScan implements Clusterer{
BuildClusterTable(outputvector); BuildClusterTable(outputvector);
}catch(Exception e){ }catch(Exception e){
AnalysisLogger.getLogger().debug("ERROR "+e.getLocalizedMessage()); logger.debug("ERROR "+e.getLocalizedMessage());
e.printStackTrace(); e.printStackTrace();
throw e; throw e;
} }
@@ -332,11 +334,11 @@ public class DBScan implements Clusterer{
ExampleSet es = (ExampleSet) outputvector[1]; ExampleSet es = (ExampleSet) outputvector[1];
String columnsNames =FeaturesColumnNames+","+clusterColumn+","+outliersColumn; String columnsNames =FeaturesColumnNames+","+clusterColumn+","+outliersColumn;
int minpoints = Integer.parseInt(minPoints); int minpoints = Integer.parseInt(minPoints);
AnalysisLogger.getLogger().debug("Analyzing Cluster ->"+" minpoints"+minpoints); logger.debug("Analyzing Cluster ->"+" minpoints"+minpoints);
int nClusters = innermodel.getClusters().size(); int nClusters = innermodel.getClusters().size();
float statusstep = ((100f-status)/ (float)(nClusters+1)); float statusstep = ((100f-status)/ (float)(nClusters+1));
AnalysisLogger.getLogger().debug("Start Write On DB"); logger.debug("Start Write On DB");
for (Cluster c : innermodel.getClusters()){ for (Cluster c : innermodel.getClusters()){
StringBuffer bufferRows = new StringBuffer(); StringBuffer bufferRows = new StringBuffer();
//take cluster id //take cluster id
@@ -344,7 +346,7 @@ public class DBScan implements Clusterer{
boolean outlier = false; boolean outlier = false;
//take cluster element indexes //take cluster element indexes
int npoints = c.getExampleIds().size(); int npoints = c.getExampleIds().size();
AnalysisLogger.getLogger().debug("Analyzing Cluster ->"+id+" with "+npoints); logger.debug("Analyzing Cluster ->"+id+" with "+npoints);
if (npoints<=minpoints) if (npoints<=minpoints)
outlier=true; outlier=true;
@@ -375,20 +377,20 @@ public class DBScan implements Clusterer{
} }
k++; k++;
// AnalysisLogger.getLogger().trace("DBScan: Classification : "+towrite+"->"+id+" is outlier?"+outlier); // logger.trace("DBScan: Classification : "+towrite+"->"+id+" is outlier?"+outlier);
} }
if (bufferRows.length()>0){ if (bufferRows.length()>0){
// AnalysisLogger.getLogger().debug("DBScan: Inserting Buffer "+DatabaseUtils.insertFromBuffer(OccurrencePointsClusterTable, columnsNames, bufferRows)); // logger.debug("DBScan: Inserting Buffer "+DatabaseUtils.insertFromBuffer(OccurrencePointsClusterTable, columnsNames, bufferRows));
AnalysisLogger.getLogger().debug("Writing into DB"); logger.debug("Writing into DB");
DatabaseFactory.executeSQLUpdate(DatabaseUtils.insertFromBuffer(OccurrencePointsClusterTable, columnsNames, bufferRows),dbHibConnection); DatabaseFactory.executeSQLUpdate(DatabaseUtils.insertFromBuffer(OccurrencePointsClusterTable, columnsNames, bufferRows),dbHibConnection);
AnalysisLogger.getLogger().debug("Finished with writing into DB"); logger.debug("Finished with writing into DB");
}else }else
AnalysisLogger.getLogger().debug("Nothing to write in the buffer"); logger.debug("Nothing to write in the buffer");
float instatus = status + statusstep; float instatus = status + statusstep;
status = Math.min(95f, instatus); status = Math.min(95f, instatus);
AnalysisLogger.getLogger().debug("Status: "+status); logger.debug("Status: "+status);
} }
} }
@@ -397,11 +399,11 @@ public class DBScan implements Clusterer{
@Override @Override
public void shutdown() { public void shutdown() {
try{ try{
AnalysisLogger.getLogger().debug("Closing DB Connection "); logger.debug("Closing DB Connection ");
if (dbHibConnection!=null) if (dbHibConnection!=null)
dbHibConnection.close(); dbHibConnection.close();
}catch(Exception e){ }catch(Exception e){
AnalysisLogger.getLogger().debug("Could not shut down connection"); logger.debug("Could not shut down connection");
} }
} }
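The migrated DBScan calls above keep the original string concatenation inside the logger calls. As an optional refinement under the same SLF4J API (not applied by this commit), {} placeholders defer message formatting until the level is actually enabled; the class DbScanLoggingSketch and its arguments below are illustrative only.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class DbScanLoggingSketch {

    private static final Logger logger = LoggerFactory.getLogger(DbScanLoggingSketch.class);

    public void logClustering(String table, long elapsedMillis) {
        // concatenation, as used throughout this commit: the message string is always built
        logger.debug("dropping table " + table);
        // parameterized form: formatting is skipped when DEBUG is disabled
        logger.debug("dropping table {}", table);
        logger.debug("DBScan: ...ELAPSED CLUSTERING TIME: {}", elapsedMillis);
    }
}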

View File

@@ -3,7 +3,7 @@ package org.gcube.dataanalysis.ecoengine.clustering;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.contentmanagement.graphtools.abstracts.GenericStandaloneGraph;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList; import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType; import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
@@ -11,11 +11,12 @@ import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType; import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory; import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.rapidminer.operator.IOContainer; import com.rapidminer.operator.IOContainer;
import com.rapidminer.operator.IOObject; import com.rapidminer.operator.IOObject;
@@ -23,38 +24,12 @@ import com.rapidminer.tools.OperatorService;
public class KMeans extends DBScan{ public class KMeans extends DBScan{
private static Logger logger = LoggerFactory.getLogger(KMeans.class);
private String kk; private String kk;
private String maxRuns; private String maxRuns;
private String maxOptimizations; private String maxOptimizations;
public static void main(String[] args) throws Exception{
long t0 = System.currentTimeMillis();
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("OccurrencePointsTable","presence_basking_cluster");
config.setParam("FeaturesColumnNames","centerlat"+AlgorithmConfiguration.getListSeparator()+"centerlong");
config.setParam("OccurrencePointsClusterTable","occCluster_kmeans");
config.setParam("k","50");
config.setParam("max_runs","10");
config.setParam("max_optimization_steps","10");
config.setParam("min_points","2");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://146.48.87.169/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
KMeans cluster = new KMeans();
cluster.setConfiguration(config);
cluster.init();
cluster.compute();
System.out.println("ELAPSED "+(System.currentTimeMillis()-t0));
}
@Override @Override
public void setConfiguration(AlgorithmConfiguration config) { public void setConfiguration(AlgorithmConfiguration config) {
@@ -79,7 +54,7 @@ public class KMeans extends DBScan{
throw new Exception("KMeans: Error incomplete parameters"); throw new Exception("KMeans: Error incomplete parameters");
} }
AnalysisLogger.getLogger().debug("KMeans: Settin up the cluster"); logger.debug("KMeans: Settin up the cluster");
//take elements and produce example set //take elements and produce example set
com.rapidminer.operator.clustering.clusterer.KMeans kmeans = (com.rapidminer.operator.clustering.clusterer.KMeans) OperatorService.createOperator("KMeans"); com.rapidminer.operator.clustering.clusterer.KMeans kmeans = (com.rapidminer.operator.clustering.clusterer.KMeans) OperatorService.createOperator("KMeans");
@@ -93,11 +68,11 @@ public class KMeans extends DBScan{
IOContainer innerInput = new IOContainer(points); IOContainer innerInput = new IOContainer(points);
AnalysisLogger.getLogger().debug("KMeans: Clustering..."); logger.debug("KMeans: Clustering...");
long ti= System.currentTimeMillis(); long ti= System.currentTimeMillis();
IOContainer output = kmeans.apply(innerInput); IOContainer output = kmeans.apply(innerInput);
AnalysisLogger.getLogger().debug("KMEANS: ...ELAPSED CLUSTERING TIME: "+(System.currentTimeMillis()-ti)); logger.debug("KMEANS: ...ELAPSED CLUSTERING TIME: "+(System.currentTimeMillis()-ti));
AnalysisLogger.getLogger().debug("KMeans: ...Clustering Finished"); logger.debug("KMeans: ...Clustering Finished");
status = 70f; status = 70f;
IOObject[] outputvector = output.getIOObjects(); IOObject[] outputvector = output.getIOObjects();

View File

@@ -1,11 +1,9 @@
package org.gcube.dataanalysis.ecoengine.clustering; package org.gcube.dataanalysis.ecoengine.clustering;
import java.lang.reflect.Field; import java.lang.reflect.Field;
import java.security.acl.LastOwnerException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList; import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType; import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
@@ -20,26 +18,21 @@ import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils; import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.DynamicEnum; import org.gcube.dataanalysis.ecoengine.utils.DynamicEnum;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory; import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.rapidminer.example.Attribute; import com.rapidminer.example.Attribute;
import com.rapidminer.example.Attributes;
import com.rapidminer.example.Example;
import com.rapidminer.example.ExampleSet;
import com.rapidminer.example.set.SimpleExampleSet; import com.rapidminer.example.set.SimpleExampleSet;
import com.rapidminer.example.table.AttributeFactory;
import com.rapidminer.example.table.DataRow; import com.rapidminer.example.table.DataRow;
import com.rapidminer.example.table.DoubleArrayDataRow;
import com.rapidminer.example.table.ExampleTable;
import com.rapidminer.example.table.MemoryExampleTable; import com.rapidminer.example.table.MemoryExampleTable;
import com.rapidminer.operator.IOContainer; import com.rapidminer.operator.IOContainer;
import com.rapidminer.operator.IOObject; import com.rapidminer.operator.IOObject;
import com.rapidminer.operator.clustering.Cluster;
import com.rapidminer.operator.clustering.ClusterModel;
import com.rapidminer.tools.Ontology;
import com.rapidminer.tools.OperatorService; import com.rapidminer.tools.OperatorService;
public class LOF extends DBScan { public class LOF extends DBScan {
private static Logger logger = LoggerFactory.getLogger(LOF.class);
String minimal_points_lower_bound = "1"; String minimal_points_lower_bound = "1";
String minimal_points_upper_bound = "10"; String minimal_points_upper_bound = "10";
String lof_threshold = "2"; String lof_threshold = "2";
@@ -64,20 +57,20 @@ public class LOF extends DBScan {
status = 0; status = 0;
if ((config!=null) && (initrapidminer)) if ((config!=null) && (initrapidminer))
config.initRapidMiner(); config.initRapidMiner();
AnalysisLogger.getLogger().debug("Initialized Rapid Miner "); logger.debug("Initialized Rapid Miner ");
AnalysisLogger.getLogger().debug("Initializing Database Connection"); logger.debug("Initializing Database Connection");
dbHibConnection=DatabaseUtils.initDBSession(config); dbHibConnection=DatabaseUtils.initDBSession(config);
//create the final table //create the final table
try{ try{
AnalysisLogger.getLogger().debug("dropping table "+OccurrencePointsClusterTable); logger.debug("dropping table "+OccurrencePointsClusterTable);
String dropStatement = DatabaseUtils.dropTableStatement(OccurrencePointsClusterTable); String dropStatement = DatabaseUtils.dropTableStatement(OccurrencePointsClusterTable);
AnalysisLogger.getLogger().debug("dropping table "+dropStatement); logger.debug("dropping table "+dropStatement);
DatabaseFactory.executeSQLUpdate(dropStatement, dbHibConnection); DatabaseFactory.executeSQLUpdate(dropStatement, dbHibConnection);
}catch(Exception e){ }catch(Exception e){
AnalysisLogger.getLogger().debug("Could not drop table "+OccurrencePointsClusterTable); logger.debug("Could not drop table "+OccurrencePointsClusterTable);
} }
//create Table //create Table
AnalysisLogger.getLogger().debug("Creating table "+OccurrencePointsClusterTable); logger.debug("Creating table "+OccurrencePointsClusterTable);
String [] features = FeaturesColumnNames.split(AlgorithmConfiguration.getListSeparator()); String [] features = FeaturesColumnNames.split(AlgorithmConfiguration.getListSeparator());
String columns = ""; String columns = "";
@@ -89,13 +82,13 @@ public class LOF extends DBScan {
String createStatement = "create table "+OccurrencePointsClusterTable+" ( "+columns+")"; String createStatement = "create table "+OccurrencePointsClusterTable+" ( "+columns+")";
// String createStatement = new DatabaseUtils(dbHibConnection).buildCreateStatement(OccurrencePointsTable,OccurrencePointsClusterTable); // String createStatement = new DatabaseUtils(dbHibConnection).buildCreateStatement(OccurrencePointsTable,OccurrencePointsClusterTable);
AnalysisLogger.getLogger().debug("Statement: "+createStatement); logger.debug("Statement: "+createStatement);
DatabaseFactory.executeSQLUpdate(createStatement, dbHibConnection); DatabaseFactory.executeSQLUpdate(createStatement, dbHibConnection);
//add two columns one for cluster and another for outliers //add two columns one for cluster and another for outliers
AnalysisLogger.getLogger().debug("Adding Columns"); logger.debug("Adding Columns");
DatabaseFactory.executeSQLUpdate(DatabaseUtils.addColumnStatement(OccurrencePointsClusterTable, lofcolumn, lofcolumntype), dbHibConnection); DatabaseFactory.executeSQLUpdate(DatabaseUtils.addColumnStatement(OccurrencePointsClusterTable, lofcolumn, lofcolumntype), dbHibConnection);
DatabaseFactory.executeSQLUpdate(DatabaseUtils.addColumnStatement(OccurrencePointsClusterTable, outliersColumn, outliersColumnType), dbHibConnection); DatabaseFactory.executeSQLUpdate(DatabaseUtils.addColumnStatement(OccurrencePointsClusterTable, outliersColumn, outliersColumnType), dbHibConnection);
AnalysisLogger.getLogger().debug("Getting Samples"); logger.debug("Getting Samples");
//build samples //build samples
getSamples(); getSamples();
status = 10f; status = 10f;
@@ -127,7 +120,7 @@ public class LOF extends DBScan {
throw new Exception("LOF: Error incomplete parameters"); throw new Exception("LOF: Error incomplete parameters");
} }
status = 10f; status = 10f;
AnalysisLogger.getLogger().debug("LOF: Settin up the cluster"); logger.debug("LOF: Settin up the cluster");
// take elements and produce example set // take elements and produce example set
com.rapidminer.operator.preprocessing.outlier.LOFOutlierOperator clusterer = (com.rapidminer.operator.preprocessing.outlier.LOFOutlierOperator) OperatorService.createOperator("LOFOutlierDetection"); com.rapidminer.operator.preprocessing.outlier.LOFOutlierOperator clusterer = (com.rapidminer.operator.preprocessing.outlier.LOFOutlierOperator) OperatorService.createOperator("LOFOutlierDetection");
clusterer.setParameter("minimal_points_lower_bound", minimal_points_lower_bound); clusterer.setParameter("minimal_points_lower_bound", minimal_points_lower_bound);
@@ -136,11 +129,11 @@ public class LOF extends DBScan {
IOContainer innerInput = new IOContainer(points); IOContainer innerInput = new IOContainer(points);
AnalysisLogger.getLogger().debug("LOF: Clustering..."); logger.debug("LOF: Clustering...");
long ti = System.currentTimeMillis(); long ti = System.currentTimeMillis();
IOContainer output = clusterer.apply(innerInput); IOContainer output = clusterer.apply(innerInput);
AnalysisLogger.getLogger().debug("LOF: ...ELAPSED CLUSTERING TIME: " + (System.currentTimeMillis() - ti)); logger.debug("LOF: ...ELAPSED CLUSTERING TIME: " + (System.currentTimeMillis() - ti));
AnalysisLogger.getLogger().debug("LOF: ...Clustering Finished"); logger.debug("LOF: ...Clustering Finished");
status = 70f; status = 70f;
IOObject[] outputvector = output.getIOObjects(); IOObject[] outputvector = output.getIOObjects();
@@ -165,7 +158,7 @@ public class LOF extends DBScan {
double lofthr = 2; double lofthr = 2;
if (lof_threshold!=null) if (lof_threshold!=null)
try{lofthr = Double.parseDouble(lof_threshold);}catch(Exception e){}; try{lofthr = Double.parseDouble(lof_threshold);}catch(Exception e){};
AnalysisLogger.getLogger().debug("LOF: using lof threshold :"+lofthr); logger.debug("LOF: using lof threshold :"+lofthr);
for (int i = 0; i < numofrows; i++) { for (int i = 0; i < numofrows; i++) {
@@ -188,7 +181,7 @@ public class LOF extends DBScan {
bufferRows.append(","); bufferRows.append(",");
} }
AnalysisLogger.getLogger().debug("LOF: Finished in retrieving and building output to write"); logger.debug("LOF: Finished in retrieving and building output to write");
String columnsNames = FeaturesColumnNames + ","+lofcolumn+","+outliersColumn; String columnsNames = FeaturesColumnNames + ","+lofcolumn+","+outliersColumn;
@@ -196,15 +189,15 @@ public class LOF extends DBScan {
if (bufferRows.length() > 0) { if (bufferRows.length() > 0) {
AnalysisLogger.getLogger().debug("Writing into DB"); logger.debug("Writing into DB");
// AnalysisLogger.getLogger().debug("Query to execute: "+DatabaseUtils.insertFromBuffer(OccurrencePointsClusterTable, columnsNames, bufferRows)); // logger.debug("Query to execute: "+DatabaseUtils.insertFromBuffer(OccurrencePointsClusterTable, columnsNames, bufferRows));
DatabaseFactory.executeSQLUpdate(DatabaseUtils.insertFromBuffer(OccurrencePointsClusterTable, columnsNames, bufferRows), dbHibConnection); DatabaseFactory.executeSQLUpdate(DatabaseUtils.insertFromBuffer(OccurrencePointsClusterTable, columnsNames, bufferRows), dbHibConnection);
AnalysisLogger.getLogger().debug("Finished with writing into DB"); logger.debug("Finished with writing into DB");
} else } else
AnalysisLogger.getLogger().debug("Nothing to write in the buffer"); logger.debug("Nothing to write in the buffer");
status = 100; status = 100;
AnalysisLogger.getLogger().debug("Status: " + status); logger.debug("Status: " + status);
} }

View File

@@ -1,12 +1,10 @@
package org.gcube.dataanalysis.ecoengine.clustering; package org.gcube.dataanalysis.ecoengine.clustering;
import java.io.ByteArrayInputStream; import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.InputStream; import java.io.InputStream;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList; import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType; import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
@@ -14,22 +12,23 @@ import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType; import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils; import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import weka.clusterers.ClusterEvaluation;
import weka.core.DenseInstance; import weka.core.DenseInstance;
import weka.core.Instance; import weka.core.Instance;
import weka.core.Instances; import weka.core.Instances;
import weka.core.converters.ArffSaver;
import weka.core.converters.CSVLoader; import weka.core.converters.CSVLoader;
public class XMeansWrapper extends DBScan { public class XMeansWrapper extends DBScan {
private static Logger logger = LoggerFactory.getLogger(XMeansWrapper.class);
private String maxIterations; private String maxIterations;
private String minClusters; private String minClusters;
private String maxClusters; private String maxClusters;
@@ -38,22 +37,7 @@ public class XMeansWrapper extends DBScan {
super(); super();
initrapidminer=false; initrapidminer=false;
} }
public static void main1(String[] args) throws Exception {
args = new String[2];
args[0] = "input.csv";
args[1] = "c:/tmp/output.arff";
// load CSV
CSVLoader loader = new CSVLoader();
loader.setSource(new File(args[0]));
Instances data = loader.getDataSet();
// save ARFF
ArffSaver saver = new ArffSaver();
saver.setInstances(data);
saver.setFile(new File(args[1]));
// saver.setDestination(new File(args[1]));
saver.writeBatch();
}
public class CSV2Arff { public class CSV2Arff {
/** /**
@@ -62,56 +46,6 @@ public class XMeansWrapper extends DBScan {
} }
public static void main(String[] args) throws Exception {
XMeans xmeans = new XMeans();
// xmeans.setInputCenterFile(new File("./clusterinput.arf"));
// String[] options = {"-I 10","-M 1000","-J 1000","-L 2","-H 50","-B 1.0","-use-kdtree no","-N clusterinput.arf","-O clusterout.txt","-U 3"};
// String[] options = {"-I 10","-M 1000","-J 1000","-L 2","-H 50","-B 1.0","-use-kdtree no","-t clusterinput.arf","-O clusterout.txt","-U 3"};
// String optionsS = "-t c:/tmp/output.arff -O c:/tmp/clusterout.arff";
String optionsS = "-t c:/tmp/output.arff";
String[] options = optionsS.split(" ");
String elements = "ciao,tutti\n5.1,3.5\n4.9,3.0\n4.7,3.2\n4.6,3.1\n5.0,3.6\n5.4,3.9\n4.6,3.4\n5.0,3.4\n4.4,2.9\n4.9,3.1\n";
// xmeans.setInputCenterFile(new File("./clusterinput.arf"));
CSVLoader loader = new CSVLoader();
InputStream tis = new ByteArrayInputStream(elements.getBytes("UTF-8"));
loader.setSource(tis);
Instances id = loader.getDataSet();
System.out.println("ids: "+id.numInstances());
System.exit(0);
xmeans.buildClusterer(id);
// xmeans.main(options);
// ClusterEvaluation.evaluateClusterer(xmeans, options);
/*
* String[] opts = xmeans.getOptions(); for (int i=0;i<opts.length;i++){ System.out.println("options: "+opts[i]); }
*/
System.out.println(ClusterEvaluation.evaluateClusterer(xmeans, options));
// ClusterEvaluation.evaluateClusterer(xmeans, options);
System.out.println("*************");
Instances is = xmeans.getClusterCenters();
for (Instance i : is) {
DenseInstance di = (DenseInstance) i;
System.out.println("Attributes: " + i.numAttributes());
System.out.print("->" + di.toString(0));
System.out.println(" " + di.toString(1));
// System.out.println(i);
System.out.println("-------------------------------");
}
System.out.println(xmeans.m_Bic);
// System.out.println(xmeans.clusterInstance(instance));
int[] ii = xmeans.m_ClusterAssignments;
for (int ix : ii)
System.out.print(ix + " ");
// xmeans.main(options);
}
@Override @Override
public List<StatisticalType> getInputParameters() { public List<StatisticalType> getInputParameters() {
@@ -172,7 +106,7 @@ public class XMeansWrapper extends DBScan {
if ((samplesVector != null) && (samplesVector.length > 0)) { if ((samplesVector != null) && (samplesVector.length > 0)) {
AnalysisLogger.getLogger().debug("XMeans: Setting up the cluster"); logger.debug("XMeans: Setting up the cluster");
CSVLoader loader = new CSVLoader(); CSVLoader loader = new CSVLoader();
StringBuffer sb = new StringBuffer(); StringBuffer sb = new StringBuffer();
@@ -198,22 +132,22 @@ public class XMeansWrapper extends DBScan {
xmeans.setMinNumClusters(Integer.parseInt(minClusters)); xmeans.setMinNumClusters(Integer.parseInt(minClusters));
xmeans.setMaxNumClusters(Integer.parseInt(maxClusters)); xmeans.setMaxNumClusters(Integer.parseInt(maxClusters));
xmeans.buildClusterer(id); xmeans.buildClusterer(id);
AnalysisLogger.getLogger().debug("XMEANS: ...ELAPSED CLUSTERING TIME: "+(System.currentTimeMillis()-ti)); logger.debug("XMEANS: ...ELAPSED CLUSTERING TIME: "+(System.currentTimeMillis()-ti));
status = 50f; status = 50f;
// do clustering // do clustering
AnalysisLogger.getLogger().debug("XMeans: Clustering ..."); logger.debug("XMeans: Clustering ...");
Instances is = xmeans.getClusterCenters(); Instances is = xmeans.getClusterCenters();
int nClusters = is.numInstances(); int nClusters = is.numInstances();
// take results // take results
AnalysisLogger.getLogger().debug("XMeans: Found "+nClusters+" Centroids"); logger.debug("XMeans: Found "+nClusters+" Centroids");
for (Instance i : is) { for (Instance i : is) {
DenseInstance di = (DenseInstance) i; DenseInstance di = (DenseInstance) i;
int nCluster = di.numAttributes(); int nCluster = di.numAttributes();
for (int k = 0; k < nCluster; k++) { for (int k = 0; k < nCluster; k++) {
AnalysisLogger.getLogger().debug(di.toString(k)); logger.debug(di.toString(k));
} }
AnalysisLogger.getLogger().debug("-------------------------------"); logger.debug("-------------------------------");
} }
int[] clusteringAssignments = xmeans.m_ClusterAssignments; int[] clusteringAssignments = xmeans.m_ClusterAssignments;
@@ -223,11 +157,11 @@ public class XMeansWrapper extends DBScan {
counters[cluster]++; counters[cluster]++;
} }
AnalysisLogger.getLogger().debug("XMeans: Building Table"); logger.debug("XMeans: Building Table");
BuildClusterTable(clusteringAssignments, counters); BuildClusterTable(clusteringAssignments, counters);
} else } else
AnalysisLogger.getLogger().debug("XMeans: Warning - Empty Training Set"); logger.debug("XMeans: Warning - Empty Training Set");
}catch(Exception e){ }catch(Exception e){
throw e; throw e;
} }
@@ -242,13 +176,13 @@ public class XMeansWrapper extends DBScan {
String columnsNames = FeaturesColumnNames + "," + clusterColumn + "," + outliersColumn; String columnsNames = FeaturesColumnNames + "," + clusterColumn + "," + outliersColumn;
int minpoints = Integer.parseInt(minPoints); int minpoints = Integer.parseInt(minPoints);
AnalysisLogger.getLogger().debug("Analyzing Cluster ->" + " minpoints " + minpoints); logger.debug("Analyzing Cluster ->" + " minpoints " + minpoints);
StringBuffer bufferRows = new StringBuffer(); StringBuffer bufferRows = new StringBuffer();
int nrows = samplesVector.length; int nrows = samplesVector.length;
int ncols = samplesVector[0].length; int ncols = samplesVector[0].length;
AnalysisLogger.getLogger().debug("Analyzing Cluster ->" + "Building Rows to Insert"); logger.debug("Analyzing Cluster ->" + "Building Rows to Insert");
for (int k = 0; k < nrows; k++) { for (int k = 0; k < nrows; k++) {
bufferRows.append("("); bufferRows.append("(");
@@ -269,18 +203,18 @@ public class XMeansWrapper extends DBScan {
//TO-DO: insert row at chunks //TO-DO: insert row at chunks
AnalysisLogger.getLogger().debug("Analyzing Cluster ->" + "Inserting rows"); logger.debug("Analyzing Cluster ->" + "Inserting rows");
if (bufferRows.length() > 0) { if (bufferRows.length() > 0) {
AnalysisLogger.getLogger().debug("XMeans: Writing into DB"); logger.debug("XMeans: Writing into DB");
AnalysisLogger.getLogger().debug(DatabaseUtils.insertFromBuffer(OccurrencePointsClusterTable, columnsNames, bufferRows)); logger.debug(DatabaseUtils.insertFromBuffer(OccurrencePointsClusterTable, columnsNames, bufferRows));
DatabaseFactory.executeSQLUpdate(DatabaseUtils.insertFromBuffer(OccurrencePointsClusterTable, columnsNames, bufferRows), dbHibConnection); DatabaseFactory.executeSQLUpdate(DatabaseUtils.insertFromBuffer(OccurrencePointsClusterTable, columnsNames, bufferRows), dbHibConnection);
AnalysisLogger.getLogger().debug("XMeans: Finished with writing into DB"); logger.debug("XMeans: Finished with writing into DB");
} else } else
AnalysisLogger.getLogger().debug("XMeans: Nothing to write in the buffer"); logger.debug("XMeans: Nothing to write in the buffer");
status = 95f; status = 95f;
AnalysisLogger.getLogger().debug("XMeans: Status: " + status); logger.debug("XMeans: Status: " + status);
} }

View File

@@ -7,14 +7,17 @@ import java.util.List;
import java.util.Properties; import java.util.Properties;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.rapidminer.RapidMiner; import com.rapidminer.RapidMiner;
public class AlgorithmConfiguration extends LexicalEngineConfiguration implements Serializable{ public class AlgorithmConfiguration extends LexicalEngineConfiguration implements Serializable{
private static Logger logger = LoggerFactory.getLogger(AlgorithmConfiguration.class);
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
public static Properties getProperties(String absoluteFilePath) { public static Properties getProperties(String absoluteFilePath) {
@@ -24,7 +27,7 @@ public class AlgorithmConfiguration extends LexicalEngineConfiguration implement
fis = new FileInputStream(absoluteFilePath); fis = new FileInputStream(absoluteFilePath);
props.load(fis); props.load(fis);
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().error("error getting properties",e); logger.error("error getting properties",e);
} finally { } finally {
try { try {
fis.close(); fis.close();
@@ -179,7 +182,7 @@ public class AlgorithmConfiguration extends LexicalEngineConfiguration implement
try { try {
connection = DatabaseFactory.initDBConnection(defaultDatabaseFile, Input); connection = DatabaseFactory.initDBConnection(defaultDatabaseFile, Input);
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().error("ERROR initializing connection",e); logger.error("ERROR initializing connection",e);
} }
return connection; return connection;
} }
@@ -188,8 +191,7 @@ public class AlgorithmConfiguration extends LexicalEngineConfiguration implement
public void initRapidMiner(){ public void initRapidMiner(){
System.setProperty("rapidminer.init.operators", configPath+ AlgorithmConfiguration.RapidMinerOperatorsFile); System.setProperty("rapidminer.init.operators", configPath+ AlgorithmConfiguration.RapidMinerOperatorsFile);
RapidMiner.init(); RapidMiner.init();
AnalysisLogger.setLogger(getConfigPath() + AlgorithmConfiguration.defaultLoggerFile); logger.info("Rapid Miner initialized");
AnalysisLogger.getLogger().info("Rapid Miner initialized");
} }
public String getGcubeUserName() { public String getGcubeUserName() {

View File

@@ -1,10 +1,13 @@
package org.gcube.dataanalysis.ecoengine.connectors; package org.gcube.dataanalysis.ecoengine.connectors;
import org.gcube.contentmanagement.graphtools.utils.HttpRequest; import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class RemoteGenerationManager { public class RemoteGenerationManager {
private static Logger logger = LoggerFactory.getLogger(RemoteGenerationManager.class);
private final String submissionMethod = "submit"; private final String submissionMethod = "submit";
private final String statusMethod = "status/"; private final String statusMethod = "status/";
private String submissionID; private String submissionID;
@@ -20,24 +23,24 @@ public class RemoteGenerationManager {
public void submitJob(RemoteHspecInputObject rhio) throws Exception{ public void submitJob(RemoteHspecInputObject rhio) throws Exception{
AnalysisLogger.getLogger().warn("RemoteGenerationManager: retrieving job information"); logger.warn("RemoteGenerationManager: retrieving job information");
RemoteHspecOutputObject rhoo = null; RemoteHspecOutputObject rhoo = null;
username = rhio.userName; username = rhio.userName;
try{ try{
rhoo = (RemoteHspecOutputObject)HttpRequest.postJSonData(endpoint+submissionMethod, rhio, RemoteHspecOutputObject.class); rhoo = (RemoteHspecOutputObject)HttpRequest.postJSonData(endpoint+submissionMethod, rhio, RemoteHspecOutputObject.class);
AnalysisLogger.getLogger().trace("RemoteGenerationManager: job information retrieved"); logger.trace("RemoteGenerationManager: job information retrieved");
}catch(Exception e){ }catch(Exception e){
e.printStackTrace(); e.printStackTrace();
AnalysisLogger.getLogger().trace("RemoteGenerationManager: ERROR - job information NOT retrieved"); logger.trace("RemoteGenerationManager: ERROR - job information NOT retrieved");
throw e; throw e;
} }
if ((rhoo!=null) && (rhoo.id!=null)){ if ((rhoo!=null) && (rhoo.id!=null)){
AnalysisLogger.getLogger().warn("RemoteGenerationManager: job ID retrieved "); logger.warn("RemoteGenerationManager: job ID retrieved ");
submissionID = rhoo.id; submissionID = rhoo.id;
} }
else{ else{
AnalysisLogger.getLogger().warn("RemoteGenerationManager: ERROR - job ID NOT retrieved "+rhoo.error); logger.warn("RemoteGenerationManager: ERROR - job ID NOT retrieved "+rhoo.error);
throw new Exception(); throw new Exception("RemoteGenerationManager: ERROR - job ID NOT retrieved "+rhoo.error);
} }
} }
@@ -48,8 +51,7 @@ public class RemoteGenerationManager {
double completion = Double.parseDouble(rhoo.completion); double completion = Double.parseDouble(rhoo.completion);
return completion; return completion;
}catch(Exception e){ }catch(Exception e){
e.printStackTrace(); logger.warn("RemoteGenerationManager: ERROR - cannot retrieve information from remote site ",e);
AnalysisLogger.getLogger().warn("RemoteGenerationManager: ERROR - cannot retrieve information from remote site ",e);
} }
return 0; return 0;
} }
@@ -60,8 +62,7 @@ public class RemoteGenerationManager {
try{ try{
rhoo = (RemoteHspecOutputObject)HttpRequest.getJSonData(endpoint+statusMethod+submissionID, null ,RemoteHspecOutputObject.class); rhoo = (RemoteHspecOutputObject)HttpRequest.getJSonData(endpoint+statusMethod+submissionID, null ,RemoteHspecOutputObject.class);
}catch(Exception e){ }catch(Exception e){
e.printStackTrace(); logger.warn("RemoteGenerationManager: ERROR - cannot retrieve information from remote site ",e);
AnalysisLogger.getLogger().warn("RemoteGenerationManager: ERROR - cannot retrieve information from remote site ",e);
} }
return rhoo; return rhoo;
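Besides swapping the logger, the hunks above replace e.printStackTrace() with a logged warning that carries the context and give the rethrown Exception a message instead of leaving it empty. A compact sketch of that error-handling shape follows; RemoteCallSketch and fetchJobId() are illustrative stand-ins, not code from the commit.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class RemoteCallSketch {

    private static final Logger logger = LoggerFactory.getLogger(RemoteCallSketch.class);

    public String submit() throws Exception {
        String id = fetchJobId();
        if (id == null) {
            // log the failure and rethrow with a message, instead of a bare "throw new Exception()"
            logger.warn("RemoteCallSketch: ERROR - job ID NOT retrieved");
            throw new Exception("RemoteCallSketch: ERROR - job ID NOT retrieved");
        }
        return id;
    }

    private String fetchJobId() {
        return null; // placeholder for the real HTTP call
    }
}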

View File

@@ -5,7 +5,6 @@ import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions; import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType; import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType; import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable; import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
@@ -15,6 +14,8 @@ import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis; import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DiscrepancyAnalysis extends DataAnalysis { public class DiscrepancyAnalysis extends DataAnalysis {
@@ -29,6 +30,8 @@ public class DiscrepancyAnalysis extends DataAnalysis {
"on a.%1$s=b.%2$s) as sel where firstprob<>secondprob"; "on a.%1$s=b.%2$s) as sel where firstprob<>secondprob";
*/ */
private static Logger logger = LoggerFactory.getLogger(DiscrepancyAnalysis.class);
static String discrepancyQuery = "select * from (select distinct a.%1$s as csquareone,b.%2$s as csquaretwo,a.%3$s as firstprob,b.%4$s as secondprob from " + static String discrepancyQuery = "select * from (select distinct a.%1$s as csquareone,b.%2$s as csquaretwo,a.%3$s as firstprob,b.%4$s as secondprob from " +
"(select %1$s , avg(%3$s) as %3$s from (select distinct * from %5$s order by %1$s limit %7$s) as aa group by %1$s) as a " + "(select %1$s , avg(%3$s) as %3$s from (select distinct * from %5$s order by %1$s limit %7$s) as aa group by %1$s) as a " +
"left join " + "left join " +
@@ -110,7 +113,7 @@ public class DiscrepancyAnalysis extends DataAnalysis {
kthreshold = Double.parseDouble(kthresholdString); kthreshold = Double.parseDouble(kthresholdString);
}catch(Exception e){} }catch(Exception e){}
AnalysisLogger.getLogger().trace("Using Cohen's Kappa Threshold: "+kthreshold); logger.trace("Using Cohen's Kappa Threshold: "+kthreshold);
int maxCompElements = maxElements; int maxCompElements = maxElements;
if (maxSamples!=null && maxSamples.length()>0){ if (maxSamples!=null && maxSamples.length()>0){
@@ -143,10 +146,10 @@ public class DiscrepancyAnalysis extends DataAnalysis {
return output; return output;
} }
AnalysisLogger.getLogger().trace("Number Of Elements to take: "+numberofvectors); logger.trace("Number Of Elements to take: "+numberofvectors);
String query = String.format(discrepancyQuery, FirstTableCsquareColumn, SecondTableCsquareColumn, FirstTableProbabilityColumn, SecondTableProbabilityColumn, FirstTable, SecondTable,""+numberofvectors); String query = String.format(discrepancyQuery, FirstTableCsquareColumn, SecondTableCsquareColumn, FirstTableProbabilityColumn, SecondTableProbabilityColumn, FirstTable, SecondTable,""+numberofvectors);
AnalysisLogger.getLogger().debug("Discrepancy Calculation - Query to perform :" + query); logger.debug("Discrepancy Calculation - Query to perform :" + query);
List<Object> takePoints = DatabaseFactory.executeSQLQuery(query, connection); List<Object> takePoints = DatabaseFactory.executeSQLQuery(query, connection);
super.processedRecords = 0; super.processedRecords = 0;
@@ -164,9 +167,9 @@ public class DiscrepancyAnalysis extends DataAnalysis {
if (maxdiscrepancyPoint==null) if (maxdiscrepancyPoint==null)
maxdiscrepancyPoint="-"; maxdiscrepancyPoint="-";
AnalysisLogger.getLogger().debug("Discrepancy Calculation - Kappa values: " + "agreementA1B1 "+agreementA1B1 +" agreementA1B0 " + agreementA1B0 +" agreementA0B1 "+agreementA0B1+" agreementA0B0 "+agreementA0B0); logger.debug("Discrepancy Calculation - Kappa values: " + "agreementA1B1 "+agreementA1B1 +" agreementA1B0 " + agreementA1B0 +" agreementA0B1 "+agreementA0B1+" agreementA0B0 "+agreementA0B0);
double kappa = MathFunctions.cohensKappaForDichotomy(agreementA1B1, agreementA1B0, agreementA0B1, agreementA0B0); double kappa = MathFunctions.cohensKappaForDichotomy(agreementA1B1, agreementA1B0, agreementA0B1, agreementA0B0);
AnalysisLogger.getLogger().debug("Discrepancy Calculation - Calculated Cohen's Kappa:" + kappa); logger.debug("Discrepancy Calculation - Calculated Cohen's Kappa:" + kappa);
output = new LinkedHashMap<String, String>(); output = new LinkedHashMap<String, String>();
output.put("MEAN", "" + MathFunctions.roundDecimal(mean,2)); output.put("MEAN", "" + MathFunctions.roundDecimal(mean,2));

View File

@@ -8,8 +8,6 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions; import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType; import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType; import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable; import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
@@ -19,6 +17,8 @@ import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis; import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.rapidminer.example.Attribute; import com.rapidminer.example.Attribute;
import com.rapidminer.example.Attributes; import com.rapidminer.example.Attributes;
@@ -33,6 +33,8 @@ import com.rapidminer.tools.math.ROCDataGenerator;
public class DistributionQualityAnalysis extends DataAnalysis { public class DistributionQualityAnalysis extends DataAnalysis {
private static Logger logger = LoggerFactory.getLogger(DistributionQualityAnalysis.class);
static String getProbabilititesQuery = "select count(*) from (select distinct * from %1$s as a join %2$s as b on a.%3$s=b.%4$s and b.%5$s %6$s %7$s) as aa"; static String getProbabilititesQuery = "select count(*) from (select distinct * from %1$s as a join %2$s as b on a.%3$s=b.%4$s and b.%5$s %6$s %7$s) as aa";
static String getNumberOfElementsQuery = "select count(*) from %1$s"; static String getNumberOfElementsQuery = "select count(*) from %1$s";
static String getValuesQuery = "select %5$s as distribprob (select distinct * from %1$s as a join %2$s as b on a.%3$s=b.%4$s) as b"; static String getValuesQuery = "select %5$s as distribprob (select distinct * from %1$s as a join %2$s as b on a.%3$s=b.%4$s) as b";
@@ -110,7 +112,7 @@ public class DistributionQualityAnalysis extends DataAnalysis {
private int calculateCaughtPoints(String casesTable, String distributionTable, String casesTableKeyColumn, String distributionTableKeyColumn, String distributionTableProbabilityColumn, String operator, String threshold) { private int calculateCaughtPoints(String casesTable, String distributionTable, String casesTableKeyColumn, String distributionTableKeyColumn, String distributionTableProbabilityColumn, String operator, String threshold) {
String query = String.format(getProbabilititesQuery, casesTable, distributionTable, casesTableKeyColumn, distributionTableKeyColumn, distributionTableProbabilityColumn, operator, threshold); String query = String.format(getProbabilititesQuery, casesTable, distributionTable, casesTableKeyColumn, distributionTableKeyColumn, distributionTableProbabilityColumn, operator, threshold);
AnalysisLogger.getLogger().trace("Compare - Query to perform for caught cases:" + query); logger.trace("Compare - Query to perform for caught cases:" + query);
List<Object> caughtpoints = DatabaseFactory.executeSQLQuery(query, connection); List<Object> caughtpoints = DatabaseFactory.executeSQLQuery(query, connection);
int points = Integer.parseInt("" + caughtpoints.get(0)); int points = Integer.parseInt("" + caughtpoints.get(0));
return points; return points;
@@ -120,7 +122,7 @@ public class DistributionQualityAnalysis extends DataAnalysis {
String query = String.format(getValuesQuery, casesTable, distributionTable, casesTableKeyColumn, distributionTableKeyColumn, distributionTableProbabilityColumn); String query = String.format(getValuesQuery, casesTable, distributionTable, casesTableKeyColumn, distributionTableKeyColumn, distributionTableProbabilityColumn);
AnalysisLogger.getLogger().trace("Compare - Query to perform for caught cases:" + query); logger.trace("Compare - Query to perform for caught cases:" + query);
List<Object> caughtpoints = DatabaseFactory.executeSQLQuery(query, connection); List<Object> caughtpoints = DatabaseFactory.executeSQLQuery(query, connection);
int size = 0; int size = 0;
if (caughtpoints != null) if (caughtpoints != null)
@@ -143,12 +145,12 @@ public class DistributionQualityAnalysis extends DataAnalysis {
try { try {
acceptanceThreshold = Float.parseFloat(config.getParam("PositiveThreshold")); acceptanceThreshold = Float.parseFloat(config.getParam("PositiveThreshold"));
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().debug("ERROR : " + e.getLocalizedMessage()); logger.debug("ERROR : " + e.getLocalizedMessage());
} }
try { try {
rejectionThreshold = Float.parseFloat(config.getParam("NegativeThreshold")); rejectionThreshold = Float.parseFloat(config.getParam("NegativeThreshold"));
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().debug("ERROR : " + e.getLocalizedMessage()); logger.debug("ERROR : " + e.getLocalizedMessage());
} }
String positiveCasesTable = config.getParam("PositiveCasesTable"); String positiveCasesTable = config.getParam("PositiveCasesTable");
@ -270,60 +272,6 @@ public class DistributionQualityAnalysis extends DataAnalysis {
System.out.println(key + ":" + results.get(key)); System.out.println(key + ":" + results.get(key));
} }
} }
public static void main(String[] args) {
AnalysisLogger.setLogger("./cfg/" + AlgorithmConfiguration.defaultLoggerFile);
/*
* double [] pos = new double [4]; // pos[0] = 1d; pos[1] = 0.8d;pos[2]=0.7;pos[3]=0.9; // pos[0] = 1d; pos[1] = 1d;pos[2]=1;pos[3]=1; // pos[0] = 0.3d; pos[1] = 0.7d;pos[2]=0.1;pos[3]=0.9;
*
*
* double [] neg = new double [4]; // neg[0] = 0d; neg[1] = 0.3d;neg[2]=0.4;neg[3]=0.6; // neg[0] = 0d; neg[1] = 0.0d;neg[2]=0.0;neg[3]=0.0; // neg[0] = 0.7d; neg[1] = 0.3d;neg[2]=0.9;neg[3]=0.1;
*
* DistributionQualityAnalysis quality = new DistributionQualityAnalysis(); double auc = quality.calculateAUC(pos, neg); System.out.println("AUC: "+auc);
*
* int n = 100; double[] posRandom = new double[n]; double[] negRandom = new double[n];
*
* for (int i=0;i<n;i++){ posRandom[i] = Math.random(); negRandom[i] = Math.random(); }
*
*
* quality = new DistributionQualityAnalysis(); auc = quality.calculateAUC(posRandom, negRandom); System.out.println("AUC: "+auc);
*
* for (int i=0;i<n;i++){ posRandom[i] = 1; negRandom[i] = 0; }
*
* quality = new DistributionQualityAnalysis(); auc = quality.calculateAUC(posRandom, negRandom); System.out.println("AUC: "+auc);
*/
int n = 100;
double[] posRandom = new double[n];
double[] negRandom = new double[n];
for (int i = 0; i < n; i++) {
posRandom[i] = Math.random();
negRandom[i] = Math.random();
}
DistributionQualityAnalysis quality = new DistributionQualityAnalysis();
double auc = quality.calculateAUC(posRandom, negRandom, true);
for (int i = 0; i < n; i++) {
posRandom[i] = 1;
negRandom[i] = 0;
}
quality = new DistributionQualityAnalysis();
auc = quality.calculateAUC(posRandom, negRandom, true);
// System.out.println("AUC: " + auc);
double[] po = { 0.16, 0.12, 0.12, 0.16, 0.58, 0.36, 0.32, 0.5, 0.65, 0.59, 0.65, 0.65, 0.65, 0.38, 0.18, 0.64, 0.28, 0.64, 0.52, 0.72, 0.74, 0.23, 0.23, 0.23, 0.21, 0.21, 0.22, 0.22, 0.24, 0.32, 0.32, 0.32, 0.32, 0.55, 0.78, 0.37, 0.87, 0.87, 0.87, 0.98, 0.98, 0.76, 0.76, 0.9, 0.88, 0.97, 0.97, 0.97, 1.0, 1.0, 0.45, 0.45, 0.19, 0.89, 0.17, 0.16, 0.1, 0.25, 0.89, 0.89, 0.9, 0.9, 0.87, 1.0, 0.48, 0.88, 0.9, 0.93, 1.0, 1.0, 0.17, 0.87, 1.0, 0.24, 0.86, 0.15, 0.74, 0.32, 1.0, 0.95, 0.52, 0.66, 0.39, 0.31, 0.47, 0.57, 0.73, 0.83, 0.86, 0.98, 0.99, 1.0, 1.0, 1.0, 1.0, 0.86, 0.43, 0.67, 0.66, 0.41, 0.52, 0.46, 0.34, 1.0, 1.0, 1.0, 0.68, 1.0, 0.98, 0.89, 0.79, 1.0, 0.88, 0.99, 1.0, 0.95, 0.95, 0.95, 0.95, 0.88, 0.96, 0.95, 0.96, 0.99, 1.0, 0.98, 0.6, 0.36, 0.15, 0.87, 0.43, 0.86, 0.34, 0.21, 0.41, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.94, 0.98, 0.71, 0.85, 0.49, 0.91, 0.75, 0.74, 0.42, 0.99, 0.43, 0.22, 0.23, 1.0, 1.0, 1.0, 1.0, 0.4, 1.0, 1.0, 1.0, 0.94, 0.95, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.94, 0.98, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.97, 0.11, 0.12, 0.19, 0.2, 0.46, 0.9, 0.84, 0.64, 1.0, 0.77, 0.56, 0.66, 0.17, 0.54, 0.2, 0.27, 0.24, 0.52, 0.74, 0.23, 0.78, 0.69, 0.46, 0.65, 0.18, 0.28, 0.66, 0.66, 0.6, 0.16, 0.24, 0.4, 0.79, 0.69, 0.81, 0.49, 0.29, 0.5, 0.46, 0.15, 0.29, 0.54, 0.29, 0.37, 0.12, 0.24, 0.16, 0.4, 0.24, 0.55, 0.68, 0.6, 0.14, 0.56, 0.17, 0.73, 0.73, 0.43, 0.72, 0.72, 0.49, 0.13, 0.37, 0.11, 0.25, 0.11, 0.74, 0.59, 0.35, 0.67, 0.83, 0.71, 0.48, 0.86, 0.94, 0.17, 0.19, 0.13, 0.27, 0.77, 0.38, 0.47, 0.49, 0.13, 0.27, 0.14, 0.4, 0.45, 0.15, 0.68, 0.37, 0.2, 0.2, 0.63, 0.35, 0.13, 0.17, 0.24, 0.85, 0.58, 0.44, 1.0, 1.0, 0.94, 0.58, 0.28, 0.36, 0.25, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 };
double[] ne = { 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 };
quality = new DistributionQualityAnalysis();
auc = quality.calculateAUC(po, ne, true);
System.out.println("AUC: " + auc);
}
@Override @Override
public StatisticalType getOutput() { public StatisticalType getOutput() {
View File
@ -6,7 +6,6 @@ import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions; import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList; import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
@ -21,9 +20,13 @@ import org.gcube.dataanalysis.ecoengine.models.cores.pca.PrincipalComponentAnaly
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.Operations; import org.gcube.dataanalysis.ecoengine.utils.Operations;
import org.gcube.dataanalysis.ecoengine.utils.Transformations; import org.gcube.dataanalysis.ecoengine.utils.Transformations;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class HabitatRepresentativeness extends DataAnalysis { public class HabitatRepresentativeness extends DataAnalysis {
private static Logger logger = LoggerFactory.getLogger(HabitatRepresentativeness.class);
static String getNumberOfElementsQuery = "select count(*) from %1$s #OPTIONAL#"; static String getNumberOfElementsQuery = "select count(*) from %1$s #OPTIONAL#";
static String getRandomVectors = "select %1$s from %2$s #OPTIONAL# order by RANDOM() limit %3$s"; static String getRandomVectors = "select %1$s from %2$s #OPTIONAL# order by RANDOM() limit %3$s";
// static String getRandomVectors = "select %1$s from %2$s #OPTIONAL# limit %3$s"; // static String getRandomVectors = "select %1$s from %2$s #OPTIONAL# limit %3$s";
@ -91,7 +94,7 @@ public class HabitatRepresentativeness extends DataAnalysis {
String query = String.format(getRandomVectors, features, table, "" + numberOfElemsToTake); String query = String.format(getRandomVectors, features, table, "" + numberOfElemsToTake);
query = query.replace("#OPTIONAL#", (option != null) ? option : ""); query = query.replace("#OPTIONAL#", (option != null) ? option : "");
AnalysisLogger.getLogger().trace("Compare - Query to perform for points:" + query); logger.trace("Compare - Query to perform for points:" + query);
List<Object> caughtpoints = DatabaseFactory.executeSQLQuery(query, connection); List<Object> caughtpoints = DatabaseFactory.executeSQLQuery(query, connection);
int size = 0; int size = 0;
if (caughtpoints != null) if (caughtpoints != null)
@ -122,28 +125,28 @@ public class HabitatRepresentativeness extends DataAnalysis {
private void calcHRS(String projectingAreaTable, String projectingAreaFeaturesOptionalCondition, String FeaturesColumns, String positiveCasesTable, String negativeCasesTable,int numberOfElements) throws Exception{ private void calcHRS(String projectingAreaTable, String projectingAreaFeaturesOptionalCondition, String FeaturesColumns, String positiveCasesTable, String negativeCasesTable,int numberOfElements) throws Exception{
innerstatus = 0f; innerstatus = 0f;
int numberOfElementsToTake = Operations.calcNumOfRepresentativeElements(numberOfElements, minimumNumberToTake); int numberOfElementsToTake = Operations.calcNumOfRepresentativeElements(numberOfElements, minimumNumberToTake);
AnalysisLogger.getLogger().trace("HRS: TAKING "+numberOfElementsToTake+" POINTS ON "+numberOfElements+" FROM THE AREA UNDER ANALYSIS"); logger.trace("HRS: TAKING "+numberOfElementsToTake+" POINTS ON "+numberOfElements+" FROM THE AREA UNDER ANALYSIS");
// 1 - take the right number of points // 1 - take the right number of points
double[][] areaPoints = getPoints(projectingAreaTable, projectingAreaFeaturesOptionalCondition, FeaturesColumns, numberOfElementsToTake); double[][] areaPoints = getPoints(projectingAreaTable, projectingAreaFeaturesOptionalCondition, FeaturesColumns, numberOfElementsToTake);
AnalysisLogger.getLogger().trace("HRS: AREA POINTS MATRIX GENERATED"); logger.trace("HRS: AREA POINTS MATRIX GENERATED");
innerstatus = 10f; innerstatus = 10f;
Operations operations = new Operations(); Operations operations = new Operations();
// 2 - standardize the matrix // 2 - standardize the matrix
areaPoints = operations.standardize(areaPoints); areaPoints = operations.standardize(areaPoints);
AnalysisLogger.getLogger().trace("HRS: MATRIX HAS BEEN STANDARDIZED"); logger.trace("HRS: MATRIX HAS BEEN STANDARDIZED");
innerstatus = 20f; innerstatus = 20f;
// 3 - calculate PCA // 3 - calculate PCA
PrincipalComponentAnalysis pca = new PrincipalComponentAnalysis(); PrincipalComponentAnalysis pca = new PrincipalComponentAnalysis();
pca.calcPCA(areaPoints); pca.calcPCA(areaPoints);
AnalysisLogger.getLogger().trace("HRS: PCA HAS BEEN TRAINED"); logger.trace("HRS: PCA HAS BEEN TRAINED");
innerstatus = 30f; innerstatus = 30f;
// 4 - get the pca components for all the vector // 4 - get the pca components for all the vector
double[][] pcaComponents = pca.getComponentsMatrix(areaPoints); double[][] pcaComponents = pca.getComponentsMatrix(areaPoints);
AnalysisLogger.getLogger().trace("HRS: PCA COMPONENT CALCULATED"); logger.trace("HRS: PCA COMPONENT CALCULATED");
innerstatus = 40f; innerstatus = 40f;
// 5 - calculate the frequency distributions for all the pca: each row will be a frequency distribution for a pca component associated to uniform divisions of the range // 5 - calculate the frequency distributions for all the pca: each row will be a frequency distribution for a pca component associated to uniform divisions of the range
calcFrequenciesDistributionsForComponents(pcaComponents); calcFrequenciesDistributionsForComponents(pcaComponents);
AnalysisLogger.getLogger().trace("HRS: FREQUENCIES FOR COMPONENTS CALCULATED"); logger.trace("HRS: FREQUENCIES FOR COMPONENTS CALCULATED");
innerstatus = 50f; innerstatus = 50f;
// 6 - take positive points and negative points - eventually merge them // 6 - take positive points and negative points - eventually merge them
double[][] positivePoints = null; double[][] positivePoints = null;
@ -153,14 +156,14 @@ public class HabitatRepresentativeness extends DataAnalysis {
if ((negativeCasesTable!=null) && (negativeCasesTable.length()>0)) if ((negativeCasesTable!=null) && (negativeCasesTable.length()>0))
negativePoints = getPoints(negativeCasesTable, "", FeaturesColumns, numberOfElementsToTake); negativePoints = getPoints(negativeCasesTable, "", FeaturesColumns, numberOfElementsToTake);
double[][] habitatPoints = Transformations.mergeMatrixes(positivePoints, negativePoints); double[][] habitatPoints = Transformations.mergeMatrixes(positivePoints, negativePoints);
AnalysisLogger.getLogger().trace("HRS: HABITAT POINTS BUILT FROM POSITIVE AND NEGATIVE POINTS"); logger.trace("HRS: HABITAT POINTS BUILT FROM POSITIVE AND NEGATIVE POINTS");
innerstatus = 60f; innerstatus = 60f;
// 7 - Standardize the points respect to previous means and variances // 7 - Standardize the points respect to previous means and variances
habitatPoints = operations.standardize(habitatPoints, operations.means, operations.variances); habitatPoints = operations.standardize(habitatPoints, operations.means, operations.variances);
AnalysisLogger.getLogger().trace("HRS: HABITAT POINTS HAVE BEEN STANDARDIZED RESPECT TO PREVIOUS MEANS AND VARIANCES"); logger.trace("HRS: HABITAT POINTS HAVE BEEN STANDARDIZED RESPECT TO PREVIOUS MEANS AND VARIANCES");
// 8 - calculate the pca components for habitat // 8 - calculate the pca components for habitat
double[][] habitatPcaComponents = pca.getComponentsMatrix(habitatPoints); double[][] habitatPcaComponents = pca.getComponentsMatrix(habitatPoints);
AnalysisLogger.getLogger().trace("HRS: HABITAT POINTS HAVE BEEN TRANSFORMED BY PCA"); logger.trace("HRS: HABITAT POINTS HAVE BEEN TRANSFORMED BY PCA");
innerstatus = 70f; innerstatus = 70f;
// 9 - calculate frequencies distributions for each component, respect to previous intervals // 9 - calculate frequencies distributions for each component, respect to previous intervals
int components = habitatPcaComponents[0].length; int components = habitatPcaComponents[0].length;
@ -177,13 +180,13 @@ public class HabitatRepresentativeness extends DataAnalysis {
currentHRSVector[i] = Operations.sumVector(absdifference); currentHRSVector[i] = Operations.sumVector(absdifference);
} }
AnalysisLogger.getLogger().trace("HRS: HRS VECTOR HAS BEEN CALCULATED"); logger.trace("HRS: HRS VECTOR HAS BEEN CALCULATED");
innerstatus = 90f; innerstatus = 90f;
// 11 - obtain hrsScore by weighted sum of hrs respect to inverse eigenvalues - too variable, substituted with the sum of the scores // 11 - obtain hrsScore by weighted sum of hrs respect to inverse eigenvalues - too variable, substituted with the sum of the scores
// currentHRSScore = Operations.scalarProduct(currentHRSVector, pca.getInverseNormalizedEigenvalues()); // currentHRSScore = Operations.scalarProduct(currentHRSVector, pca.getInverseNormalizedEigenvalues());
currentHRSScore = Operations.sumVector(currentHRSVector); currentHRSScore = Operations.sumVector(currentHRSVector);
AnalysisLogger.getLogger().trace("HRS: HRS SCORE HAS BEEN CALCULATED"); logger.trace("HRS: HRS SCORE HAS BEEN CALCULATED");
innerstatus = 100f; innerstatus = 100f;
} }
private double meanHRS ; private double meanHRS ;
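Steps 10 and 11 above collapse the per-component absolute differences into a single score. Reconstructed from the visible code (so a reading of it, not a quotation of the algorithm's reference), with f_i^area and f_i^hab the frequency distributions of the i-th PCA component over the same intervals:

HRS_i = \sum_k \left| f_i^{area}(k) - f_i^{hab}(k) \right|, \qquad HRS = \sum_i HRS_i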
@ -206,7 +209,7 @@ public class HabitatRepresentativeness extends DataAnalysis {
for (int i=0;i<maxTests;i++){ for (int i=0;i<maxTests;i++){
currentIterationStep = i; currentIterationStep = i;
AnalysisLogger.getLogger().trace("ITERATION NUMBER "+(i+1)); logger.trace("ITERATION NUMBER "+(i+1));
calcHRS(projectingAreaTable, projectingAreaFeaturesOptionalCondition, FeaturesColumns, positiveCasesTable, negativeCasesTable, numberOfElements); calcHRS(projectingAreaTable, projectingAreaFeaturesOptionalCondition, FeaturesColumns, positiveCasesTable, negativeCasesTable, numberOfElements);
meanHRS = MathFunctions.incrementAvg(meanHRS, currentHRSScore, i); meanHRS = MathFunctions.incrementAvg(meanHRS, currentHRSScore, i);
if (meanHRSVector==null) if (meanHRSVector==null)
@ -216,7 +219,7 @@ public class HabitatRepresentativeness extends DataAnalysis {
meanHRSVector[j]=org.gcube.contentmanagement.graphtools.utils.MathFunctions.roundDecimal(MathFunctions.incrementAvg(meanHRSVector[j],currentHRSVector[j],i),2); meanHRSVector[j]=org.gcube.contentmanagement.graphtools.utils.MathFunctions.roundDecimal(MathFunctions.incrementAvg(meanHRSVector[j],currentHRSVector[j],i),2);
} }
AnalysisLogger.getLogger().trace("ITERATION FINISHED "+meanHRS); logger.trace("ITERATION FINISHED "+meanHRS);
status=Math.min(status+100f/maxTests,99f); status=Math.min(status+100f/maxTests,99f);
} }
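The per-iteration scores are folded into a running mean through MathFunctions.incrementAvg. Assuming it implements the standard incremental mean update (the signature mirrors the call sites above, the body is an assumption rather than the library source):

// running mean: 'avg' is the mean of the first i samples,
// 'x' is the new sample, the result is the mean of i+1 samples
static double incrementAvg(double avg, double x, int i) {
    return (avg * i + x) / (i + 1);
}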
@ -227,11 +230,11 @@ public class HabitatRepresentativeness extends DataAnalysis {
return output; return output;
} catch (Exception e) { } catch (Exception e) {
e.printStackTrace(); e.printStackTrace();
AnalysisLogger.getLogger().error("ALERT: AN ERROR OCCURRED DURING HRS CALCULATION : " + e.getLocalizedMessage()); logger.error("ALERT: AN ERROR OCCURRED DURING HRS CALCULATION : " + e.getLocalizedMessage());
throw e; throw e;
} finally { } finally {
status=100; status=100;
AnalysisLogger.getLogger().trace("COMPUTATION FINISHED "); logger.trace("COMPUTATION FINISHED ");
} }
} }
@ -264,56 +267,6 @@ public class HabitatRepresentativeness extends DataAnalysis {
} }
} }
public static void main(String[] args) throws Exception {
AnalysisLogger.setLogger("./cfg/" + AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setParam("ProjectingAreaTable", "hcaf_d");
// config.setParam("ProjectingAreaTable", "absence_data_baskingshark_random");
// config.setParam("ProjectingAreaTable", "absence_data_baskingshark2");
config.setParam("OptionalCondition", "where oceanarea>0");
config.setParam("FeaturesColumns", "depthmean,depthmax,depthmin, sstanmean,sbtanmean,salinitymean,salinitybmean, primprodmean,iceconann,landdist,oceanarea");
config.setParam("PositiveCasesTable", "presence_data_baskingshark");
config.setParam("NegativeCasesTable", "absence_data_baskingshark_random");
// config.setParam("NegativeCasesTable", "absence_data_baskingshark2");
HabitatRepresentativeness hsrcalc = new HabitatRepresentativeness();
hsrcalc.setConfiguration(config);
hsrcalc.init();
HashMap<String,String> output = hsrcalc.analyze();
for (String param:output.keySet()){
System.out.println(param+":"+output.get(param));
}
/*
double[][] matrix = new double[7][2];
double[] row1 = { 2d, 3d };
double[] row2 = { 3d, 4d };
double[] row3 = { 4d, 5d };
double[] row4 = { 5d, 6d };
double[] row5 = { 2d, 3d };
double[] row6 = { 2d, 5d };
double[] row7 = { 3d, 4d };
matrix[0] = row1;
matrix[1] = row2;
matrix[2] = row3;
matrix[3] = row4;
matrix[4] = row5;
matrix[5] = row6;
matrix[6] = row7;
*/
// Operations operations = new Operations();
// matrix = operations.standardize(matrix);
// hsrcalc.calcFrequenciesDistributionsForComponents(matrix);
// double[][] bigmat = Transformations.mergeMatrixes(null, matrix);
System.out.println("FINISHED");
}
@Override @Override
public StatisticalType getOutput() { public StatisticalType getOutput() {
PrimitiveType p = new PrimitiveType(Map.class.getName(), PrimitiveType.stringMap2StatisticalMap(output), PrimitiveTypes.MAP, "AnalysisResult","Habitat Representativeness Score"); PrimitiveType p = new PrimitiveType(Map.class.getName(), PrimitiveType.stringMap2StatisticalMap(output), PrimitiveTypes.MAP, "AnalysisResult","Habitat Representativeness Score");
View File
@ -8,25 +8,22 @@ import java.util.UUID;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions; import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent; import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.Hspen;
import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory; import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils; import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.Operations; import org.gcube.dataanalysis.ecoengine.utils.Operations;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
import org.jfree.data.category.DefaultCategoryDataset; import org.jfree.data.category.DefaultCategoryDataset;
import org.slf4j.Logger;
import com.mchange.v1.util.ArrayUtils; import org.slf4j.LoggerFactory;
public class BioClimateAnalysis { public class BioClimateAnalysis {
private static Logger logger = LoggerFactory.getLogger(BioClimateAnalysis.class);
private static String countHighProbabilityCells = "select count(*) from %1$s where probability>%2$s"; private static String countHighProbabilityCells = "select count(*) from %1$s where probability>%2$s";
private static String countSeaCells = "select count(*) from %1$s as a join %2$s as b on a.oceanarea>0 and a.csquarecode=b.csquarecode and ((a.iceconann<b.iceconann-%3$s or a.iceconann>b.iceconann+%3$s) or " + "(a.salinitymean<b.salinitymean-%3$s or a.salinitymean>b.salinitymean+%3$s) or (a.sstanmean<b.sstanmean-%3$s or a.sstanmean>b.sstanmean+%3$s))"; private static String countSeaCells = "select count(*) from %1$s as a join %2$s as b on a.oceanarea>0 and a.csquarecode=b.csquarecode and ((a.iceconann<b.iceconann-%3$s or a.iceconann>b.iceconann+%3$s) or " + "(a.salinitymean<b.salinitymean-%3$s or a.salinitymean>b.salinitymean+%3$s) or (a.sstanmean<b.sstanmean-%3$s or a.sstanmean>b.sstanmean+%3$s))";
private static String iceLeakage = "select count(*) from %1$s as a join %2$s as b on a.csquarecode=b.csquarecode and (a.iceconann<b.iceconann) and a.oceanarea>0"; private static String iceLeakage = "select count(*) from %1$s as a join %2$s as b on a.csquarecode=b.csquarecode and (a.iceconann<b.iceconann) and a.oceanarea>0";
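These templates use java.util.Formatter positional specifiers (%1$s, %2$s, ...), which lets the same argument be reused several times, as %3$s is in countSeaCells. A minimal usage sketch; the table name and threshold are placeholders:

String countHighProbabilityCells = "select count(*) from %1$s where probability>%2$s";
// %1$s takes the first argument, %2$s the second
String q = String.format(countHighProbabilityCells, "hspec_example", 0.8);
// q -> "select count(*) from hspec_example where probability>0.8"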
@ -180,7 +177,7 @@ public class BioClimateAnalysis {
} }
AnalysisLogger.getLogger().trace("Produced All Images"); logger.trace("Produced All Images");
} }
@ -251,7 +248,7 @@ public class BioClimateAnalysis {
public void speciesEvolutionAnalysis(String[] hspenTables, String[] hspenTableNames, String parameterName, String condition) throws Exception { public void speciesEvolutionAnalysis(String[] hspenTables, String[] hspenTableNames, String parameterName, String condition) throws Exception {
try { try {
referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config); referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config);
AnalysisLogger.getLogger().debug("ReferenceDB initialized"); logger.debug("ReferenceDB initialized");
status = 0f; status = 0f;
int numbOfTables = (hspenTables != null) ? hspenTables.length : 0; int numbOfTables = (hspenTables != null) ? hspenTables.length : 0;
@ -265,7 +262,7 @@ public class BioClimateAnalysis {
if (condition != null && (condition.length() > 0)) if (condition != null && (condition.length() > 0))
condition = "and " + condition; condition = "and " + condition;
// take the spectrum of depths // take the spectrum of depths
AnalysisLogger.getLogger().trace("Range query: " + String.format(takeRangeOfParam, parameterName, hspenTables[0], condition)); logger.trace("Range query: " + String.format(takeRangeOfParam, parameterName, hspenTables[0], condition));
List<Object> paramrange = DatabaseFactory.executeSQLQuery(String.format(takeRangeOfParam, parameterName, hspenTables[0], condition), referencedbConnection); List<Object> paramrange = DatabaseFactory.executeSQLQuery(String.format(takeRangeOfParam, parameterName, hspenTables[0], condition), referencedbConnection);
@ -301,12 +298,12 @@ public class BioClimateAnalysis {
if (prevpmax != pmax) { if (prevpmax != pmax) {
// take the number of elements for this range // take the number of elements for this range
String countSpeciesQuery = String.format(countNumberOfSpeciesPerRange, hspenTables[i], prevpmax, pmax, parameterName); String countSpeciesQuery = String.format(countNumberOfSpeciesPerRange, hspenTables[i], prevpmax, pmax, parameterName);
AnalysisLogger.getLogger().trace("count elements query: " + countSpeciesQuery); logger.trace("count elements query: " + countSpeciesQuery);
List<Object> elementsInRange = DatabaseFactory.executeSQLQuery(countSpeciesQuery, referencedbConnection); List<Object> elementsInRange = DatabaseFactory.executeSQLQuery(countSpeciesQuery, referencedbConnection);
int nelements = (elementsInRange == null) ? 0 : Integer.parseInt("" + elementsInRange.get(0)); int nelements = (elementsInRange == null) ? 0 : Integer.parseInt("" + elementsInRange.get(0));
AnalysisLogger.getLogger().trace("Number of elements for " + hspenTables[i] + " in (" + prevpmax + " - " + pmax + ")" + " : " + nelements); logger.trace("Number of elements for " + hspenTables[i] + " in (" + prevpmax + " - " + pmax + ")" + " : " + nelements);
// take the chart for this range // take the chart for this range
String chartName = parameterName + " envelope for interval (" + prevpmax + " ; " + pmax + ")"; String chartName = parameterName + " envelope for interval (" + prevpmax + " ; " + pmax + ")";
@ -355,7 +352,7 @@ public class BioClimateAnalysis {
public void speciesGeographicEvolutionAnalysis2(String[] hspecTables, String[] hspecTablesNames, float threshold) throws Exception { public void speciesGeographicEvolutionAnalysis2(String[] hspecTables, String[] hspecTablesNames, float threshold) throws Exception {
try { try {
referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config); referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config);
AnalysisLogger.getLogger().debug("ReferenceDB initialized"); logger.debug("ReferenceDB initialized");
status = 0f; status = 0f;
int numbOfTables = (hspecTables != null) ? hspecTables.length : 0; int numbOfTables = (hspecTables != null) ? hspecTables.length : 0;
@ -375,9 +372,9 @@ public class BioClimateAnalysis {
String code$ = "" + code; String code$ = "" + code;
String query = String.format(countProbabilityPerArea, hspecTables[i], selectionCriteria[j], code$); String query = String.format(countProbabilityPerArea, hspecTables[i], selectionCriteria[j], code$);
query = query.replace("#THRESHOLD#", "" + threshold); query = query.replace("#THRESHOLD#", "" + threshold);
AnalysisLogger.getLogger().trace("Executing query for counting probabilities: " + query); logger.trace("Executing query for counting probabilities: " + query);
List<Object> counts = DatabaseFactory.executeSQLQuery(query, referencedbConnection); List<Object> counts = DatabaseFactory.executeSQLQuery(query, referencedbConnection);
AnalysisLogger.getLogger().trace("Query Executed"); logger.trace("Query Executed");
int countPerArea = (counts == null) ? 0 : Integer.parseInt("" + counts.get(0)); int countPerArea = (counts == null) ? 0 : Integer.parseInt("" + counts.get(0));
String chartName = "Hspec (prob>0.8) for " + criteriaNames[j] + "_" + code$; // put the code and the value in the timeseries associated to the feature name String chartName = "Hspec (prob>0.8) for " + criteriaNames[j] + "_" + code$; // put the code and the value in the timeseries associated to the feature name
@ -417,7 +414,7 @@ public class BioClimateAnalysis {
public void speciesGeographicEvolutionAnalysis(String[] hspecTables, String[] hspecTablesNames, float threshold) throws Exception { public void speciesGeographicEvolutionAnalysis(String[] hspecTables, String[] hspecTablesNames, float threshold) throws Exception {
try { try {
referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config); referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config);
AnalysisLogger.getLogger().debug("ReferenceDB initialized"); logger.debug("ReferenceDB initialized");
status = 0f; status = 0f;
int numbOfTables = (hspecTables != null) ? hspecTables.length : 0; int numbOfTables = (hspecTables != null) ? hspecTables.length : 0;
@ -434,26 +431,26 @@ public class BioClimateAnalysis {
try { try {
DatabaseFactory.executeSQLUpdate("drop table " + tmpanalysisTable, referencedbConnection); DatabaseFactory.executeSQLUpdate("drop table " + tmpanalysisTable, referencedbConnection);
} catch (Exception ee) { } catch (Exception ee) {
AnalysisLogger.getLogger().trace("table " + tmpanalysisTable + " does not exist"); logger.trace("table " + tmpanalysisTable + " does not exist");
} }
String preparationQuery = "create table " + tmpanalysisTable + " as select a.faoaream, lme,count(*) from %1$s as a where a.probability > #THRESHOLD# group by faoaream,lme;"; String preparationQuery = "create table " + tmpanalysisTable + " as select a.faoaream, lme,count(*) from %1$s as a where a.probability > #THRESHOLD# group by faoaream,lme;";
preparationQuery = String.format(preparationQuery, hspecTables[i]); preparationQuery = String.format(preparationQuery, hspecTables[i]);
preparationQuery = preparationQuery.replace("#THRESHOLD#", "" + threshold); preparationQuery = preparationQuery.replace("#THRESHOLD#", "" + threshold);
AnalysisLogger.getLogger().trace("Executing query for counting probabilities: " + preparationQuery); logger.trace("Executing query for counting probabilities: " + preparationQuery);
DatabaseFactory.executeSQLUpdate(preparationQuery, referencedbConnection); DatabaseFactory.executeSQLUpdate(preparationQuery, referencedbConnection);
AnalysisLogger.getLogger().trace("Query Executed"); logger.trace("Query Executed");
// for each criterion to apply: fao area, lme etc. // for each criterion to apply: fao area, lme etc.
for (int j = 0; j < criteriaNames.length; j++) { for (int j = 0; j < criteriaNames.length; j++) {
String criteriaQuery = String.format("select %1$s,sum(count) from " + tmpanalysisTable + " %2$s group by %1$s;", selectionCriteria[j], criteriaFilters[j]); String criteriaQuery = String.format("select %1$s,sum(count) from " + tmpanalysisTable + " %2$s group by %1$s;", selectionCriteria[j], criteriaFilters[j]);
AnalysisLogger.getLogger().trace("Executing query for counting probabilities: " + criteriaQuery); logger.trace("Executing query for counting probabilities: " + criteriaQuery);
List<Object> codeSums = DatabaseFactory.executeSQLQuery(criteriaQuery, referencedbConnection); List<Object> codeSums = DatabaseFactory.executeSQLQuery(criteriaQuery, referencedbConnection);
for (Object codeSum : codeSums) { for (Object codeSum : codeSums) {
String code$ = "" + ((Object[]) codeSum)[0]; String code$ = "" + ((Object[]) codeSum)[0];
int countPerArea = (((Object[]) codeSum)[1] == null) ? 0 : Integer.parseInt("" + ((Object[]) codeSum)[1]); int countPerArea = (((Object[]) codeSum)[1] == null) ? 0 : Integer.parseInt("" + ((Object[]) codeSum)[1]);
AnalysisLogger.getLogger().trace("Analyzing " + selectionCriteria[j] + " with code " + code$ + " count " + countPerArea); logger.trace("Analyzing " + selectionCriteria[j] + " with code " + code$ + " count " + countPerArea);
String chartName = "Hspec (prob>0.8) for " + criteriaNames[j] + "_" + code$; String chartName = "Hspec (prob>0.8) for " + criteriaNames[j] + "_" + code$;
// put the code and the value in the timeseries associated to the feature name // put the code and the value in the timeseries associated to the feature name
@ -479,7 +476,7 @@ public class BioClimateAnalysis {
DatabaseFactory.executeSQLUpdate("drop table " + tmpanalysisTable, referencedbConnection); DatabaseFactory.executeSQLUpdate("drop table " + tmpanalysisTable, referencedbConnection);
} catch (Exception ee) { } catch (Exception ee) {
ee.printStackTrace(); ee.printStackTrace();
AnalysisLogger.getLogger().trace("table " + tmpanalysisTable + " does not exist"); logger.trace("table " + tmpanalysisTable + " does not exist");
} }
status = status + statusstep; status = status + statusstep;
@ -507,7 +504,7 @@ public class BioClimateAnalysis {
public void geographicEvolutionAnalysis(String[] hcafTable, String[] hcafTableNames) throws Exception { public void geographicEvolutionAnalysis(String[] hcafTable, String[] hcafTableNames) throws Exception {
try { try {
referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config); referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config);
AnalysisLogger.getLogger().debug("ReferenceDB initialized"); logger.debug("ReferenceDB initialized");
doHcafAn = true; doHcafAn = true;
doHspecAn = true; doHspecAn = true;
status = 0f; status = 0f;
@ -528,7 +525,7 @@ public class BioClimateAnalysis {
// for each quantity to display: ice concentration // for each quantity to display: ice concentration
for (int k = 0; k < quantitiesNames.length; k++) { for (int k = 0; k < quantitiesNames.length; k++) {
String query = String.format(takeAvgSelection, quantitiesNames[k], selectionCriteria[j], hcafTable[i], criteriaFilters[j]); String query = String.format(takeAvgSelection, quantitiesNames[k], selectionCriteria[j], hcafTable[i], criteriaFilters[j]);
AnalysisLogger.getLogger().debug("Query to be executed : " + query); logger.debug("Query to be executed : " + query);
// take couples (avg,code) // take couples (avg,code)
List<Object> quantityCriterion = DatabaseFactory.executeSQLQuery(query, referencedbConnection); List<Object> quantityCriterion = DatabaseFactory.executeSQLQuery(query, referencedbConnection);
// for each row // for each row
@ -586,7 +583,7 @@ public class BioClimateAnalysis {
public void globalEvolutionAnalysis(String[] hcafTable, String[] hspecTables, String[] hcafTablesNames, String[] hspecTableNames, String probabilityColumn, String csquareColumn, float threshold) throws Exception { public void globalEvolutionAnalysis(String[] hcafTable, String[] hspecTables, String[] hcafTablesNames, String[] hspecTableNames, String probabilityColumn, String csquareColumn, float threshold) throws Exception {
try { try {
referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config); referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config);
AnalysisLogger.getLogger().debug("ReferenceDB initialized"); logger.debug("ReferenceDB initialized");
doHcafAn = true; doHcafAn = true;
doHspecAn = true; doHspecAn = true;
@ -620,7 +617,7 @@ public class BioClimateAnalysis {
avgIce[i] = avgValue(hcafTable[i], FIELD.iceconann.name()); avgIce[i] = avgValue(hcafTable[i], FIELD.iceconann.name());
avgSST[i] = avgValue(hcafTable[i], FIELD.sstanmean.name()); avgSST[i] = avgValue(hcafTable[i], FIELD.sstanmean.name());
avgSalinity[i] = avgValue(hcafTable[i], FIELD.salinitymean.name()); avgSalinity[i] = avgValue(hcafTable[i], FIELD.salinitymean.name());
AnalysisLogger.getLogger().trace("(" + hcafTable[i] + "): " + " ICE " + avgIce[i] + " SST " + avgSST[i] + " SAL " + avgSalinity[i]); logger.trace("(" + hcafTable[i] + "): " + " ICE " + avgIce[i] + " SST " + avgSST[i] + " SAL " + avgSalinity[i]);
} }
if (doHspecAn) { if (doHspecAn) {
@ -631,10 +628,10 @@ public class BioClimateAnalysis {
// discrepancies[i] = MathFunctions.roundDecimal(calcOverDiscrepancy(configPath, temporaryDirectory, hspecTables[i], hspecTables[i - 1], probabilityColumn, csquareColumn, 0.1f), 5); // discrepancies[i] = MathFunctions.roundDecimal(calcOverDiscrepancy(configPath, temporaryDirectory, hspecTables[i], hspecTables[i - 1], probabilityColumn, csquareColumn, 0.1f), 5);
discrepancies[i] = highProbabilityCells[i]-highProbabilityCells[i-1]; discrepancies[i] = highProbabilityCells[i]-highProbabilityCells[i-1];
} }
AnalysisLogger.getLogger().trace("(" + hspecTables[i] + "): DISCREPANCY " + discrepancies[i] + " HIGH PROB CELLS " + highProbabilityCells[i]); logger.trace("(" + hspecTables[i] + "): DISCREPANCY " + discrepancies[i] + " HIGH PROB CELLS " + highProbabilityCells[i]);
} }
// AnalysisLogger.getLogger().trace("(" + hcafTable[i] + "," + hspecTables[i] + "): HIGH PROB CELLS " + highProbabilityCells[i] + " DISCREPANCY " + discrepancies[i] + " ICE " + avgIce[i] + " SST " + avgSST[i] + " SAL " + avgSalinity[i]); // logger.trace("(" + hcafTable[i] + "," + hspecTables[i] + "): HIGH PROB CELLS " + highProbabilityCells[i] + " DISCREPANCY " + discrepancies[i] + " ICE " + avgIce[i] + " SST " + avgSST[i] + " SAL " + avgSalinity[i]);
status = status + statusstep; status = status + statusstep;
} }
@ -661,7 +658,6 @@ public class BioClimateAnalysis {
if (!persistencePath.endsWith("/")) if (!persistencePath.endsWith("/"))
this.temporaryDirectory += "/"; this.temporaryDirectory += "/";
AnalysisLogger.setLogger(configPath + AlgorithmConfiguration.defaultLoggerFile);
config = new LexicalEngineConfiguration(); config = new LexicalEngineConfiguration();
config.setDatabaseURL(databaseURL); config.setDatabaseURL(databaseURL);
config.setDatabaseUserName(databaseUserName); config.setDatabaseUserName(databaseUserName);
@ -671,10 +667,10 @@ public class BioClimateAnalysis {
} }
public int calcHighProbabilityCells(String hspec, double probabilty) throws Exception { public int calcHighProbabilityCells(String hspec, double probabilty) throws Exception {
AnalysisLogger.getLogger().trace("Calculating High Prob Cells: "+String.format(countHighProbabilityCells, hspec, probabilty)); logger.trace("Calculating High Prob Cells: "+String.format(countHighProbabilityCells, hspec, probabilty));
List<Object> countage = DatabaseFactory.executeSQLQuery(String.format(countHighProbabilityCells, hspec, probabilty), referencedbConnection); List<Object> countage = DatabaseFactory.executeSQLQuery(String.format(countHighProbabilityCells, hspec, probabilty), referencedbConnection);
int count = Integer.parseInt("" + countage.get(0)); int count = Integer.parseInt("" + countage.get(0));
AnalysisLogger.getLogger().trace("Calc High Prob Cells: " + count); logger.trace("Calc High Prob Cells: " + count);
return count; return count;
} }
@ -710,18 +706,18 @@ public class BioClimateAnalysis {
List<Object> nelementsQ1 = DatabaseFactory.executeSQLQuery(DatabaseUtils.countElementsStatement(firstTable), referencedbConnection); List<Object> nelementsQ1 = DatabaseFactory.executeSQLQuery(DatabaseUtils.countElementsStatement(firstTable), referencedbConnection);
int nelements = Integer.parseInt("" + nelementsQ1.get(0)); int nelements = Integer.parseInt("" + nelementsQ1.get(0));
AnalysisLogger.getLogger().trace("Number Of elements1: " + nelementsQ1); logger.trace("Number Of elements1: " + nelementsQ1);
List<Object> nelementsQ2 = DatabaseFactory.executeSQLQuery(DatabaseUtils.countElementsStatement(secondTable), referencedbConnection); List<Object> nelementsQ2 = DatabaseFactory.executeSQLQuery(DatabaseUtils.countElementsStatement(secondTable), referencedbConnection);
int nelements2 = Integer.parseInt("" + nelementsQ2.get(0)); int nelements2 = Integer.parseInt("" + nelementsQ2.get(0));
AnalysisLogger.getLogger().trace("Number Of elements2: " + nelementsQ1); logger.trace("Number Of elements2: " + nelementsQ1);
List<Object> sumFirst = DatabaseFactory.executeSQLQuery(DatabaseUtils.sumElementsStatement(firstTable, probabilityColumnName), referencedbConnection); List<Object> sumFirst = DatabaseFactory.executeSQLQuery(DatabaseUtils.sumElementsStatement(firstTable, probabilityColumnName), referencedbConnection);
double sum1 = Double.parseDouble("" + sumFirst.get(0)); double sum1 = Double.parseDouble("" + sumFirst.get(0));
AnalysisLogger.getLogger().trace("Sum1: " + sum1); logger.trace("Sum1: " + sum1);
List<Object> sumSecond = DatabaseFactory.executeSQLQuery(DatabaseUtils.sumElementsStatement(secondTable, probabilityColumnName), referencedbConnection); List<Object> sumSecond = DatabaseFactory.executeSQLQuery(DatabaseUtils.sumElementsStatement(secondTable, probabilityColumnName), referencedbConnection);
double sum2 = Double.parseDouble("" + sumSecond.get(0)); double sum2 = Double.parseDouble("" + sumSecond.get(0));
AnalysisLogger.getLogger().trace("Sum2: " + sum1); logger.trace("Sum2: " + sum1);
double d = (double) (sum2 - sum1) / (double) (nelements + nelements2); double d = (double) (sum2 - sum1) / (double) (nelements + nelements2);
return d; return d;
View File
@ -4,18 +4,18 @@ import java.io.File;
import java.math.BigDecimal; import java.math.BigDecimal;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.UUID;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions; import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.DataTypeRecognizer; import org.gcube.contentmanagement.lexicalmatcher.analysis.core.DataTypeRecognizer;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools; import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils; import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.Operations; import org.gcube.dataanalysis.ecoengine.utils.Operations;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* checks if two tables are equal checks numbers at the second decimal position * checks if two tables are equal checks numbers at the second decimal position
@ -24,6 +24,8 @@ public class InterpolateTables {
// connection setup // connection setup
private static Logger logger = LoggerFactory.getLogger(InterpolateTables.class);
protected String temporaryDirectory; protected String temporaryDirectory;
// selection query // selection query
public static String selectElementsQuery = "select %1$s from %2$s order by %3$s"; public static String selectElementsQuery = "select %1$s from %2$s order by %3$s";
@ -53,9 +55,8 @@ public class InterpolateTables {
if (!persistencePath.endsWith("/")) if (!persistencePath.endsWith("/"))
this.temporaryDirectory += "/"; this.temporaryDirectory += "/";
AnalysisLogger.setLogger(configPath + AlgorithmConfiguration.defaultLoggerFile);
logger.debug("Initialization complete: persistence path " + persistencePath);
AnalysisLogger.getLogger().debug("Initialization complete: persistence path " + persistencePath);
config = new LexicalEngineConfiguration(); config = new LexicalEngineConfiguration();
config.setDatabaseURL(databaseURL); config.setDatabaseURL(databaseURL);
@ -71,16 +72,16 @@ public class InterpolateTables {
interpolatedTables = new String[2]; interpolatedTables = new String[2];
interpolatedTables[0] = table1; interpolatedTables[0] = table1;
interpolatedTables[1] = table2; interpolatedTables[1] = table2;
AnalysisLogger.getLogger().debug("NO TABLES TO PRODUCE"); logger.debug("NO TABLES TO PRODUCE");
} else { } else {
referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config); referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config);
AnalysisLogger.getLogger().debug("ReferenceDB initialized"); logger.debug("ReferenceDB initialized");
status = 0f; status = 0f;
AnalysisLogger.getLogger().debug("Interpolating from " + table1 + " to " + table2); logger.debug("Interpolating from " + table1 + " to " + table2);
DatabaseUtils utils = new DatabaseUtils(referencedbConnection); DatabaseUtils utils = new DatabaseUtils(referencedbConnection);
// analyze table and take information about it // analyze table and take information about it
String createTableStatement = utils.buildCreateStatement(table1, "%1$s"); String createTableStatement = utils.buildCreateStatement(table1, "%1$s");
AnalysisLogger.getLogger().debug("Create Statement for table " + table1 + ": " + createTableStatement); logger.debug("Create Statement for table " + table1 + ": " + createTableStatement);
int numberOfColumns = utils.getColumnDecriptions().size(); int numberOfColumns = utils.getColumnDecriptions().size();
// initialize the map of columns to write // initialize the map of columns to write
List<List<StringBuffer>> outputFiles = new ArrayList<List<StringBuffer>>(); List<List<StringBuffer>> outputFiles = new ArrayList<List<StringBuffer>>();
@ -97,19 +98,19 @@ public class InterpolateTables {
String javatype = DataTypeRecognizer.transformTypeFromDB(gotColumnType); String javatype = DataTypeRecognizer.transformTypeFromDB(gotColumnType);
String takeF = DatabaseUtils.getOrderedElements(table1, utils.getPrimaryKey(), gotColumn); String takeF = DatabaseUtils.getOrderedElements(table1, utils.getPrimaryKey(), gotColumn);
String takeS = DatabaseUtils.getOrderedElements(table2, utils.getPrimaryKey(), gotColumn); String takeS = DatabaseUtils.getOrderedElements(table2, utils.getPrimaryKey(), gotColumn);
AnalysisLogger.getLogger().debug("Taking First column->" + takeF); logger.debug("Taking First column->" + takeF);
AnalysisLogger.getLogger().debug("Taking Second column->" + takeS); logger.debug("Taking Second column->" + takeS);
List<Object> takeFirstColumn = DatabaseFactory.executeSQLQuery(takeF, referencedbConnection); List<Object> takeFirstColumn = DatabaseFactory.executeSQLQuery(takeF, referencedbConnection);
List<Object> takeSecondColumn = DatabaseFactory.executeSQLQuery(takeS, referencedbConnection); List<Object> takeSecondColumn = DatabaseFactory.executeSQLQuery(takeS, referencedbConnection);
AnalysisLogger.getLogger().debug("First column elements size->" + takeFirstColumn.size()); logger.debug("First column elements size->" + takeFirstColumn.size());
AnalysisLogger.getLogger().debug("Second column elements size->" + takeSecondColumn.size()); logger.debug("Second column elements size->" + takeSecondColumn.size());
// only if data are of numeric type, perform calculation // only if data are of numeric type, perform calculation
if (javatype.equals(BigDecimal.class.getName())) { if (javatype.equals(BigDecimal.class.getName())) {
AnalysisLogger.getLogger().debug("interpolating -> " + gotColumn); logger.debug("interpolating -> " + gotColumn);
List<List<Object>> interpolations = interpolateColumns(takeFirstColumn, takeSecondColumn, intervals, gotColumnType, function); List<List<Object>> interpolations = interpolateColumns(takeFirstColumn, takeSecondColumn, intervals, gotColumnType, function);
@ -119,10 +120,10 @@ public class InterpolateTables {
// for each column to substitute // for each column to substitute
List<Object> columnToSub = interpolations.get(i); List<Object> columnToSub = interpolations.get(i);
if (columnToSub.size() > 0) { if (columnToSub.size() > 0) {
AnalysisLogger.getLogger().debug("UPDATE TABLE " + tableInterp + " ON COLUMN " + gotColumn); logger.debug("UPDATE TABLE " + tableInterp + " ON COLUMN " + gotColumn);
addColumnToTable(outputFiles.get(i - 1), columnToSub, true); addColumnToTable(outputFiles.get(i - 1), columnToSub, true);
} else { } else {
AnalysisLogger.getLogger().debug("DOESN'T CHANGE TABLE " + tableInterp + " COLUMN " + gotColumn); logger.debug("DOESN'T CHANGE TABLE " + tableInterp + " COLUMN " + gotColumn);
addColumnToTable(outputFiles.get(i - 1), takeFirstColumn, true); addColumnToTable(outputFiles.get(i - 1), takeFirstColumn, true);
} }
} }
@ -138,7 +139,7 @@ public class InterpolateTables {
} }
status = 60f; status = 60f;
AnalysisLogger.getLogger().debug("WRITING ALL THE BUFFERS"); logger.debug("WRITING ALL THE BUFFERS");
writeAllStringBuffersToFiles(table1, outputFiles, function, startYear, endYear); writeAllStringBuffersToFiles(table1, outputFiles, function, startYear, endYear);
statusstep = 40f / (float) producedfiles.length; statusstep = 40f / (float) producedfiles.length;
@ -153,27 +154,27 @@ public class InterpolateTables {
/*create Table from file*/ /*create Table from file*/
/* OLD CODE FOR LOCAL DB /* OLD CODE FOR LOCAL DB
String copyFileQuery = DatabaseUtils.copyFileToTableStatement(temporaryDirectory + producedfiles[i].getName(), filename); String copyFileQuery = DatabaseUtils.copyFileToTableStatement(temporaryDirectory + producedfiles[i].getName(), filename);
AnalysisLogger.getLogger().debug("CREATING TABLE->" + filename); logger.debug("CREATING TABLE->" + filename);
DatabaseFactory.executeSQLUpdate(String.format(createTableStatement, filename), referencedbConnection); DatabaseFactory.executeSQLUpdate(String.format(createTableStatement, filename), referencedbConnection);
AnalysisLogger.getLogger().debug("FULFILLING TABLE->" + filename + ": " + copyFileQuery); logger.debug("FULFILLING TABLE->" + filename + ": " + copyFileQuery);
DatabaseFactory.executeSQLUpdate(copyFileQuery, referencedbConnection); DatabaseFactory.executeSQLUpdate(copyFileQuery, referencedbConnection);
*/ */
AnalysisLogger.getLogger().debug("CREATING TABLE->" + filename); logger.debug("CREATING TABLE->" + filename);
DatabaseFactory.executeSQLUpdate(String.format(createTableStatement, filename), referencedbConnection); DatabaseFactory.executeSQLUpdate(String.format(createTableStatement, filename), referencedbConnection);
AnalysisLogger.getLogger().debug("COPYING TABLE->" + filename); logger.debug("COPYING TABLE->" + filename);
DatabaseUtils.createRemoteTableFromFile(producedfiles[i].getAbsolutePath(),filename,";",false,config.getDatabaseUserName(),config.getDatabasePassword(),config.getDatabaseURL()); DatabaseUtils.createRemoteTableFromFile(producedfiles[i].getAbsolutePath(),filename,";",false,config.getDatabaseUserName(),config.getDatabasePassword(),config.getDatabaseURL());
status = Math.min(status + statusstep, 99); status = Math.min(status + statusstep, 99);
} }
AnalysisLogger.getLogger().debug("DELETING ALL TEMPORARY FILES"); logger.debug("DELETING ALL TEMPORARY FILES");
for (int i = 0; i < producedfiles.length; i++) { for (int i = 0; i < producedfiles.length; i++) {
producedfiles[i].delete(); producedfiles[i].delete();
} }
interpolatedTables[interpolatedTables.length - 1] = table2; interpolatedTables[interpolatedTables.length - 1] = table2;
AnalysisLogger.getLogger().debug("ALL TABLES HAVE BEEN PRODUCED"); logger.debug("ALL TABLES HAVE BEEN PRODUCED");
}//end else control on the number of intervals }//end else control on the number of intervals
} catch (Exception e) { } catch (Exception e) {
e.printStackTrace(); e.printStackTrace();
@ -253,7 +254,7 @@ public class InterpolateTables {
double[] interpolation = null; double[] interpolation = null;
if (firstNum != secondNum) { if (firstNum != secondNum) {
if (interping) { if (interping) {
AnalysisLogger.getLogger().debug("Interpolating ... "); logger.debug("Interpolating ... ");
interping = false; interping = false;
} }
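The interpolation step generates, for every numeric cell, a chain of intermediate values between the value in table1 and the value in table2. Assuming the default 'function' is linear (the parameter suggests other curves are supported), one cell interpolated over n intervals behaves like the sketch below; the method name is illustrative, not one of the class's helpers:

// linear interpolation of a single cell across n intervals:
// step 0 equals 'first', step n equals 'second'
static double[] interpolateCell(double first, double second, int n) {
    double[] steps = new double[n + 1];
    for (int k = 0; k <= n; k++) {
        steps[k] = first + (second - first) * ((double) k / n);
    }
    return steps;
}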
View File
@ -4,7 +4,6 @@ import java.util.HashMap;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.Map; import java.util.Map;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
@ -12,6 +11,8 @@ import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory; import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* Implements a mono-thread data analysis process * Implements a mono-thread data analysis process
@ -21,6 +22,8 @@ import org.hibernate.SessionFactory;
*/ */
public abstract class DataAnalysis implements Evaluator{ public abstract class DataAnalysis implements Evaluator{
private static Logger logger = LoggerFactory.getLogger(DataAnalysis.class);
protected ResourceFactory resourceManager; protected ResourceFactory resourceManager;
protected int processedRecords; protected int processedRecords;
protected float status; protected float status;
@ -112,7 +115,6 @@ public abstract class DataAnalysis implements Evaluator{
} }
public void init(boolean initRapidMiner) throws Exception { public void init(boolean initRapidMiner) throws Exception {
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
// init db connection // init db connection
connection = AlgorithmConfiguration.getConnectionFromConfig(config); connection = AlgorithmConfiguration.getConnectionFromConfig(config);
if (initRapidMiner) if (initRapidMiner)
@ -126,10 +128,10 @@ public abstract class DataAnalysis implements Evaluator{
public void shutdown() { public void shutdown() {
try { try {
AnalysisLogger.getLogger().debug("Closing DB connections"); logger.debug("Closing DB connections");
connection.close(); connection.close();
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().debug("Error in closing DB connections "+e.getLocalizedMessage()); logger.debug("Error in closing DB connections "+e.getLocalizedMessage());
} }
} }
View File
@ -4,7 +4,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
- import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
@ -21,9 +20,12 @@ import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
public class FeedForwardNN extends ModelAquamapsNN{
+ private static Logger logger = LoggerFactory.getLogger(FeedForwardNN.class);
@Override
public String getName() {
@ -95,13 +97,12 @@ public class FeedForwardNN extends ModelAquamapsNN{
@Override
public void init(AlgorithmConfiguration config, Model previousModel) {
- AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
// init the database
try {
connection = DatabaseUtils.initDBSession(config);
} catch (Exception e) {
- e.printStackTrace();
- AnalysisLogger.getLogger().trace("ERROR initializing connection");
+ logger.trace("ERROR initializing connection", e);
}
fileName = config.getPersistencePath()+Neural_Network.generateNNName(config.getParam(Reference), config.getParam(UserName), config.getParam(ModelName));
trainingDataSet = config.getParam(TrainingDataSet);
@ -147,7 +148,7 @@ public class FeedForwardNN extends ModelAquamapsNN{
try {
// take all features input vectors
String query = String.format(takeElementsQuery, trainingColumn+","+dbcolumns,trainingDataSet,trainingColumn);
- AnalysisLogger.getLogger().debug("Query to execute: "+query);
+ logger.debug("Query to execute: "+query);
List<Object> features = DatabaseFactory.executeSQLQuery(query, connection);
int numbOfFeatures = features.size();
@ -155,12 +156,12 @@ public class FeedForwardNN extends ModelAquamapsNN{
List<Object> maxmin = DatabaseFactory.executeSQLQuery("select max("+trainingColumn+"), min("+trainingColumn+") from "+trainingDataSet, connection);
maxfactor = Double.parseDouble(""+((Object[])maxmin.get(0))[0]);
minfactor = Double.parseDouble(""+((Object[])maxmin.get(0))[1]);
- AnalysisLogger.getLogger().debug("Calculated max: "+maxfactor+" min: "+minfactor);
+ logger.debug("Calculated max: "+maxfactor+" min: "+minfactor);
// setup Neural Network
int numberOfInputNodes = dbcolumnsList.length;
int numberOfOutputNodes = 1;
- AnalysisLogger.getLogger().debug("Training the ANN with "+numbOfFeatures+" training data and "+numberOfInputNodes+" inputs");
+ logger.debug("Training the ANN with "+numbOfFeatures+" training data and "+numberOfInputNodes+" inputs");
if (layersNeurons!=null){
int[] innerLayers = Neural_Network.setupInnerLayers(layersNeurons);
nn = new Neural_Network(numberOfInputNodes, numberOfOutputNodes, innerLayers, Neural_Network.ACTIVATIONFUNCTION.SIGMOID);
@ -172,10 +173,10 @@ public class FeedForwardNN extends ModelAquamapsNN{
nn.minfactor=minfactor;
nn.setThreshold(learningThr);
nn.setCycles(maxiter);
- AnalysisLogger.getLogger().debug("network parameters: M: "+maxfactor+", m: "+minfactor+", lt: "+learningThr+", it: "+maxiter);
+ logger.debug("network parameters: M: "+maxfactor+", m: "+minfactor+", lt: "+learningThr+", it: "+maxiter);
- AnalysisLogger.getLogger().debug("topology: "+nn.griglia.length+"X"+nn.griglia[0].length);
+ logger.debug("topology: "+nn.griglia.length+"X"+nn.griglia[0].length);
- AnalysisLogger.getLogger().debug("Features preprocessing");
+ logger.debug("Features preprocessing");
double[][] in = new double[numbOfFeatures][];
double[][] out = new double[numbOfFeatures][];
// build NN input
@ -185,21 +186,20 @@ public class FeedForwardNN extends ModelAquamapsNN{
in[i] = Neural_Network.preprocessObjects(Arrays.copyOfRange((Object[]) features.get(i), 1, feats.length));
out[i] = Neural_Network.preprocessObjects(Arrays.copyOfRange((Object[]) features.get(i), 0, 1));
//apply reduction factor
- // AnalysisLogger.getLogger().debug("Output Transformed from "+out[i][0]);
+ // logger.debug("Output Transformed from "+out[i][0]);
out[i][0] =nn.getCorrectValueForOutput(out[i][0]);
- // AnalysisLogger.getLogger().debug("To "+out[i][0]);
+ // logger.debug("To "+out[i][0]);
}
- AnalysisLogger.getLogger().debug("Features were correctly preprocessed - Training");
+ logger.debug("Features were correctly preprocessed - Training");
// train the NN
nn.train(in, out);
learningscore=nn.en;
- AnalysisLogger.getLogger().error("Final learning error: "+nn.en);
+ logger.error("Final learning error: "+nn.en);
- AnalysisLogger.getLogger().debug("Saving Network");
+ logger.debug("Saving Network");
save(fileName, nn);
- AnalysisLogger.getLogger().debug("Done");
+ logger.debug("Done");
} catch (Exception e) {
- e.printStackTrace();
- AnalysisLogger.getLogger().error("ERROR during training");
+ logger.error("ERROR during training",e);
}
status = 100f;
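
Review note on the exception handling above: where the old code paired e.printStackTrace() with a separate bare error message, the new code passes the exception as the last argument of logger.error/logger.trace, so the message and stack trace end up together in the configured appender instead of partly on stderr. A small self-contained sketch of the idiom (class name and message are illustrative):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TrainingErrorLoggingSketch {

	private static final Logger logger = LoggerFactory.getLogger(TrainingErrorLoggingSketch.class);

	public static void main(String[] args) {
		try {
			throw new IllegalStateException("simulated training failure");
		} catch (Exception e) {
			// the throwable passed as the final argument is rendered with its full stack trace
			logger.error("ERROR during training", e);
		}
	}
}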

@ -5,12 +5,9 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
- import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
- import java.util.Map;
- import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
@ -27,9 +24,13 @@ import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.hibernate.SessionFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
public class ModelAquamapsNN implements Model {
+ private static Logger logger = LoggerFactory.getLogger(ModelAquamapsNN.class);
@Override
public ALG_PROPS[] getProperties() {
ALG_PROPS[] props = { ALG_PROPS.SPECIES_MODEL };
@ -99,8 +100,7 @@ public class ModelAquamapsNN implements Model {
@Override
public void init(AlgorithmConfiguration Input, Model previousModel) {
- AnalysisLogger.setLogger(Input.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
// init the database
String defaultDatabaseFile = Input.getConfigPath() + AlgorithmConfiguration.defaultConnectionFile;
@ -112,8 +112,7 @@ public class ModelAquamapsNN implements Model {
try {
connection = DatabaseFactory.initDBConnection(defaultDatabaseFile, Input);
} catch (Exception e) {
- e.printStackTrace();
- AnalysisLogger.getLogger().trace("ERROR initializing connection");
+ logger.trace("ERROR initializing connection",e);
}
fileName = Input.getPersistencePath() + "neuralnetwork_" + Input.getParam("SpeciesName") + "_" + Input.getParam("UserName")+"_"+Input.getParam("NeuralNetworkName").replace(" ", "");
@ -154,11 +153,11 @@ public class ModelAquamapsNN implements Model {
@Override
public void postprocess(AlgorithmConfiguration Input, Model previousModel) {
- AnalysisLogger.getLogger().debug("Closing DB Connection");
+ logger.debug("Closing DB Connection");
try{
connection.close();
}catch(Exception e){
- AnalysisLogger.getLogger().debug("Error in Closing DB Connection "+e.getLocalizedMessage());
+ logger.debug("Error in Closing DB Connection "+e.getLocalizedMessage());
}
}
@ -173,8 +172,8 @@ public class ModelAquamapsNN implements Model {
// take all presence inputs
List<Object> presences = DatabaseFactory.executeSQLQuery(String.format(takeElementsQuery, presenceTable), connection);
// take all absence inputs
- // AnalysisLogger.getLogger().trace("presence "+String.format(takeElementsQuery, presenceTable));
+ // logger.trace("presence "+String.format(takeElementsQuery, presenceTable));
- // AnalysisLogger.getLogger().trace("absence "+String.format(takeElementsQuery, absenceTable));
+ // logger.trace("absence "+String.format(takeElementsQuery, absenceTable));
List<Object> absences = DatabaseFactory.executeSQLQuery(String.format(takeElementsQuery, absenceTable), connection);
int numbOfPresence = presences.size();
int numbOfAbsence = absences.size();
@ -202,12 +201,12 @@ public class ModelAquamapsNN implements Model {
// train the NN
nn.train(in, out);
learningscore=nn.en;
- AnalysisLogger.getLogger().error("Final learning error: "+nn.en);
+ logger.error("Final learning error: "+nn.en);
save(fileName, nn);
} catch (Exception e) {
e.printStackTrace();
- AnalysisLogger.getLogger().error("ERROR during training");
+ logger.error("ERROR during training");
}
status = 100f;
}
@ -239,15 +238,14 @@ public class ModelAquamapsNN implements Model {
ObjectOutputStream oos = new ObjectOutputStream(stream);
oos.writeObject(nn);
} catch (Exception e) {
- e.printStackTrace();
- AnalysisLogger.getLogger().error("ERROR in writing object on file: " + nomeFile);
+ logger.error("ERROR in writing object on file: " + nomeFile,e);
} finally {
try {
stream.close();
} catch (IOException e) {
}
}
- AnalysisLogger.getLogger().trace("OK in writing object on file: " + nomeFile);
+ logger.trace("OK in writing object on file: " + nomeFile);
}
}

@ -7,7 +7,6 @@ import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.List;
- import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
@ -24,9 +23,13 @@ import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.neurosolutio
import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.neurosolutions.Pattern;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.hibernate.SessionFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
public class ModelAquamapsNNNS implements Model {
+ private static Logger logger = LoggerFactory.getLogger(ModelAquamapsNN.class);
@Override
public ALG_PROPS[] getProperties() {
ALG_PROPS[] props = { ALG_PROPS.SPECIES_MODEL };
@ -91,7 +94,6 @@ public class ModelAquamapsNNNS implements Model {
@Override
public void init(AlgorithmConfiguration Input, Model previousModel) {
- AnalysisLogger.setLogger(Input.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
// init the database
String defaultDatabaseFile = Input.getConfigPath() + AlgorithmConfiguration.defaultConnectionFile;
@ -104,8 +106,7 @@ public class ModelAquamapsNNNS implements Model {
try {
connection = DatabaseFactory.initDBConnection(defaultDatabaseFile, Input);
} catch (Exception e) {
- e.printStackTrace();
- AnalysisLogger.getLogger().trace("ERROR initializing connection");
+ logger.trace("ERROR initializing connection");
}
fileName = Input.getPersistencePath() + "neuralnetwork_" + Input.getParam("SpeciesName") + "_" + Input.getParam("UserName");
@ -169,14 +170,14 @@ public class ModelAquamapsNNNS implements Model {
out[i] = nn.getPositiveCase();
Pattern pattern = new Pattern(in[i], out[i]);
nn.IncrementalTrain(.2, pattern);
- AnalysisLogger.getLogger().debug("-> "+i);
+ logger.debug("-> "+i);
}
for (int i = numbOfPresence; i < numberOfInputs; i++) {
in[i] = NeuralNet.preprocessObjects((Object[]) absences.get(i-numbOfPresence));
out[i] = nn.getNegativeCase();
Pattern pattern = new Pattern(in[i], out[i]);
nn.IncrementalTrain(.2, pattern);
- AnalysisLogger.getLogger().debug("-> "+i);
+ logger.debug("-> "+i);
}
@ -196,7 +197,7 @@ public class ModelAquamapsNNNS implements Model {
} catch (Exception e) {
e.printStackTrace();
- AnalysisLogger.getLogger().error("ERROR during training");
+ logger.error("ERROR during training");
}
status = 100f;
}
@ -223,15 +224,14 @@ public class ModelAquamapsNNNS implements Model {
ObjectOutputStream oos = new ObjectOutputStream(stream);
oos.writeObject(nn);
} catch (Exception e) {
- e.printStackTrace();
- AnalysisLogger.getLogger().error("ERROR in writing object on file: " + nomeFile);
+ logger.error("ERROR in writing object on file: " + nomeFile,e);
} finally {
try {
stream.close();
} catch (IOException e) {
}
}
- AnalysisLogger.getLogger().trace("OK in writing object on file: " + nomeFile);
+ logger.trace("OK in writing object on file: " + nomeFile);
}
}
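
Review note: the logger added to ModelAquamapsNNNS above is obtained with LoggerFactory.getLogger(ModelAquamapsNN.class), so its output will be attributed to the parent class, and the init() catch block here drops the exception that the ModelAquamapsNN variant of the same hunk still logs. If neither is intentional, a per-class sketch of the usual form (hypothetical class name, not part of this commit):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ModelAquamapsNNNSLoggerSketch {

	// hypothetical: mirrors what a per-class declaration inside ModelAquamapsNNNS would look like
	private static final Logger logger = LoggerFactory.getLogger(ModelAquamapsNNNSLoggerSketch.class);

	static void handleInitFailure(Exception e) {
		// passing the exception keeps the stack trace, as the ModelAquamapsNN version of this change does
		logger.trace("ERROR initializing connection", e);
	}
}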

@ -8,7 +8,6 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
- import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
@ -28,9 +27,13 @@ import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.AquamapsEnvelopeAl
import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.EnvelopeSet;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.hibernate.SessionFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
public class ModelHSPEN implements Model {
+ private static Logger logger = LoggerFactory.getLogger(ModelHSPEN.class);
private float version;
// DB SESSION
protected SessionFactory connection;
@ -83,7 +86,6 @@ public class ModelHSPEN implements Model {
outconfig = setup;
defaultDatabaseFile = setup.getConfigPath() + defaultDatabaseFile;
- AnalysisLogger.setLogger(setup.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
try {
String defaultDatabaseFile = setup.getConfigPath() + AlgorithmConfiguration.defaultConnectionFile;
@ -94,7 +96,7 @@
connection = DatabaseFactory.initDBConnection(defaultDatabaseFile, setup);
} catch (Exception e) {
- AnalysisLogger.getLogger().error("error intitializing",e);
+ logger.error("error intitializing",e);
}
outputTable = outconfig.getParam("OuputEnvelopeTable");
@ -119,7 +121,7 @@
// populates the selectedSpecies variable by getting species from db
private List<Object> populateSpecies() {
- AnalysisLogger.getLogger().trace("Distribution Generator ->getting all species list from DB");
+ logger.trace("Distribution Generator ->getting all species list from DB");
List<Object> allspecies = DatabaseFactory.executeSQLQuery(dynamicSpeciesListQuery, connection);
return allspecies;
}
@ -174,20 +176,20 @@
AlgorithmConfiguration config = (AlgorithmConfiguration) Input;
// create and populate the novel table
if (config.getParam("CreateTable").equalsIgnoreCase("true")){
- AnalysisLogger.getLogger().trace("Distribution Generator->recreating new table " + dynamicCreateTable);
+ logger.trace("Distribution Generator->recreating new table " + dynamicCreateTable);
try{
DatabaseFactory.executeSQLUpdate(String.format(dynamicDropTable, config.getDatabaseUserName()), connection);
}catch(Exception e){
- AnalysisLogger.getLogger().trace("Impossible to drop table - maybe not existing");
+ logger.trace("Impossible to drop table - maybe not existing");
}
try{
DatabaseFactory.executeSQLUpdate(String.format(dynamicCreateTable, config.getDatabaseUserName()), connection);
}catch(Exception e){
- AnalysisLogger.getLogger().trace("Impossible to create table - maybe yet existing");
+ logger.trace("Impossible to create table - maybe yet existing");
}
}
- AnalysisLogger.getLogger().trace("Distribution Generator->populating new table " + dynamicPopulateNewHspen);
+ logger.trace("Distribution Generator->populating new table " + dynamicPopulateNewHspen);
DatabaseFactory.executeSQLUpdate(dynamicPopulateNewHspen, connection);
}
@ -196,11 +198,11 @@
long tstart = System.currentTimeMillis();
// INITIALIZATION
try {
- AnalysisLogger.getLogger().trace("ModelHSPENr->populating species");
+ logger.trace("ModelHSPENr->populating species");
List<Object> allspecies = populateSpecies();
allSpeciesHspen = populateHspen();
- AnalysisLogger.getLogger().trace("ModelHSPENr->ENVELOPES GENERATION STARTED");
+ logger.trace("ModelHSPENr->ENVELOPES GENERATION STARTED");
// initialize threads
initializeThreads(numberOfthreads);
@ -225,7 +227,7 @@
String speciesid = (String) species;
if (speciesid.length() > 0) {
// calculation on multiple threads
- AnalysisLogger.getLogger().trace("ModelHSPENr->ANALIZING SPECIES: " + speciesid);
+ logger.trace("ModelHSPENr->ANALIZING SPECIES: " + speciesid);
// wait for thread to be free
wait4Thread(currentThread);
// start species information calculation on the thread
@ -238,11 +240,11 @@
// report probability
float s = (float) ((int) (((float) globalcounter * 100f / (numberOfSpecies)) * 100f)) / 100f;
status = (s == 100) ? 99 : s;
- AnalysisLogger.getLogger().trace("STATUS->" + status + "%");
+ logger.trace("STATUS->" + status + "%");
// increment global counter index
globalcounter++;
- AnalysisLogger.getLogger().warn("Number of Found Differences: " + countDifferences);
+ logger.warn("Number of Found Differences: " + countDifferences);
}
if (interruptProcessing)
@ -258,10 +260,10 @@
}
long computationT1 = System.currentTimeMillis();
- AnalysisLogger.getLogger().warn("All Envelopes Computation Finished in " + (computationT1 - computationT0) + " ms");
+ logger.warn("All Envelopes Computation Finished in " + (computationT1 - computationT0) + " ms");
- AnalysisLogger.getLogger().warn("Number of Overall Found Differences: " + countDifferences);
+ logger.warn("Number of Overall Found Differences: " + countDifferences);
} catch (Exception e) {
- AnalysisLogger.getLogger().trace("Computation traminate prematurely: ", e);
+ logger.trace("Computation traminate prematurely: ", e);
} finally {
// shutdown threads
executorService.shutdown();
@ -270,7 +272,7 @@
// set completeness
status = 100.0f;
long tstop = System.currentTimeMillis();
- AnalysisLogger.getLogger().warn("All Envelopes Computation Finished in " + (tstop - tstart) + " ms");
+ logger.warn("All Envelopes Computation Finished in " + (tstop - tstart) + " ms");
}
}
@ -291,7 +293,7 @@
try {
calcEnvelopes(species);
} catch (Exception e) {
- AnalysisLogger.getLogger().trace("" + e);
+ logger.trace("" + e);
e.printStackTrace();
}
threadActivity[index] = false;
@ -321,24 +323,24 @@
String instruction = envSet.getEnvelopeString();
// take the result of the calculation
long t1 = System.currentTimeMillis();
- AnalysisLogger.getLogger().trace("Computation for species " + species + " finished in " + (t1 - t0) + " ms");
+ logger.trace("Computation for species " + species + " finished in " + (t1 - t0) + " ms");
if (instruction.length() > 0) {
countDifferences++;
// write results on the DB
String query = String.format(dynamicAlterQuery, instruction, species);
try {
- AnalysisLogger.getLogger().trace("Envelope Generated - executing query: " + query);
+ logger.trace("Envelope Generated - executing query: " + query);
DatabaseFactory.executeSQLUpdate(query, connection);
} catch (Exception e) {
- AnalysisLogger.getLogger().trace("could not execute update");
+ logger.trace("could not execute update");
e.printStackTrace();
// System.exit(0);
}
}
} catch (Exception ex) {
- AnalysisLogger.getLogger().trace("Computation traminated prematurely: ", ex);
+ logger.trace("Computation traminated prematurely: ", ex);
}
numbOfProcessedSpecies++;
// take ending time

@ -3,12 +3,14 @@ package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps;
import java.util.ArrayList;
import java.util.List;
- import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.hibernate.SessionFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
public class AquamapsEnvelopeAlgorithm {
+ private static Logger logger = LoggerFactory.getLogger(AquamapsEnvelopeAlgorithm.class);
private static final String selectValues = "SELECT DISTINCT %OCCURRENCEPOINTS%.CsquareCode, %OCCURRENCEPOINTS%.SpeciesID, %HCAF%.%1$s FROM %OCCURRENCEPOINTS% INNER JOIN %HCAF% ON %OCCURRENCEPOINTS%.CsquareCode = %HCAF%.CsquareCode WHERE %OCCURRENCEPOINTS%.SpeciesID = '%2$s' AND %HCAF%.%1$s <> -9999 AND %HCAF%.%1$s is not null AND %HCAF%.OceanArea > 0 AND %OCCURRENCEPOINTS%.goodcell = '1' ORDER BY %HCAF%.%1$s";
@ -72,8 +74,8 @@ public class AquamapsEnvelopeAlgorithm {
//the core of the procedure
public static EnvelopeSet calcEnv(String species, Object[] singleSpeciesValues, List<Object> tempvalues,List<Object> salinityvalues,List<Object> primprodvalues,List<Object> icevalues,List<Object> landdistvalues){
if (tempvalues.size()<10){
- AnalysisLogger.getLogger().warn("WARNING: NOT ENOUGH OCCURRENCES FOR SPECIES: "+species);
+ logger.warn("WARNING: NOT ENOUGH OCCURRENCES FOR SPECIES: "+species);
- AnalysisLogger.getLogger().warn("Leaving the hspen as is");
+ logger.warn("Leaving the hspen as is");
return new EnvelopeSet();
}
//take previousValues
@ -120,13 +122,13 @@
int countchunks = 0;
if (!tempEnv.checkPrevious(prevTempMin,prevTempMax,prevTempPMin,prevTempPMax))
{
- AnalysisLogger.getLogger().warn("DIFFERENCE ON SPECIES: "+species+" - "+prevTempMin+","+prevTempPMin+","+prevTempPMax+","+prevTempMax+" vs "+tempEnv.toString());
+ logger.warn("DIFFERENCE ON SPECIES: "+species+" - "+prevTempMin+","+prevTempPMin+","+prevTempPMax+","+prevTempMax+" vs "+tempEnv.toString());
addingElements+=tempEnv.toString();
countchunks++;
}
if (!salinityEnv.checkPrevious(prevSalinityMin,prevSalinityMax,prevSalinityPMin,prevSalinityPMax))
{
- AnalysisLogger.getLogger().warn("DIFFERENCE ON SPECIES: "+species+" - "+prevSalinityMin+","+prevSalinityPMin+","+prevSalinityPMax+","+prevSalinityMax+" vs "+salinityEnv.toString());
+ logger.warn("DIFFERENCE ON SPECIES: "+species+" - "+prevSalinityMin+","+prevSalinityPMin+","+prevSalinityPMax+","+prevSalinityMax+" vs "+salinityEnv.toString());
if (countchunks>0)
addingElements+=",";
addingElements+=salinityEnv.toString();
@ -134,7 +136,7 @@
}
if (!primprodEnv.checkPrevious(prevPrimProdMin,prevPrimProdMax,prevPrimProdPMin,prevPrimProdPMax))
{
- AnalysisLogger.getLogger().warn("DIFFERENCE ON SPECIES: "+species+" - "+prevPrimProdMin+","+prevPrimProdPMin+","+prevPrimProdPMax+","+prevPrimProdMax+" vs "+primprodEnv.toString());
+ logger.warn("DIFFERENCE ON SPECIES: "+species+" - "+prevPrimProdMin+","+prevPrimProdPMin+","+prevPrimProdPMax+","+prevPrimProdMax+" vs "+primprodEnv.toString());
if (countchunks>0)
addingElements+=",";
addingElements+=primprodEnv.toString();
@ -142,7 +144,7 @@
}
if (!seaiceEnv.checkPrevious(prevIceMin,prevIceMax,prevIcePMin,prevIcePMax))
{
- AnalysisLogger.getLogger().warn("DIFFERENCE ON SPECIES: "+species+" - "+prevIceMin+","+prevIcePMin+","+prevIcePMax+","+prevIceMax+" vs "+seaiceEnv.toString());
+ logger.warn("DIFFERENCE ON SPECIES: "+species+" - "+prevIceMin+","+prevIcePMin+","+prevIcePMax+","+prevIceMax+" vs "+seaiceEnv.toString());
if (countchunks>0)
addingElements+=",";
addingElements+=seaiceEnv.toString();
@ -150,7 +152,7 @@
}
if (!landdistEnv.checkPrevious(prevLanddistMin,prevLanddistMax,prevLanddistPMin,prevLanddistPMax))
{
- AnalysisLogger.getLogger().warn("DIFFERENCE ON SPECIES: "+species+" - "+prevLanddistMin+","+prevLanddistPMin+","+prevLanddistPMax+","+prevLanddistPMax+" vs "+landdistEnv.toString());
+ logger.warn("DIFFERENCE ON SPECIES: "+species+" - "+prevLanddistMin+","+prevLanddistPMin+","+prevLanddistPMax+","+prevLanddistPMax+" vs "+landdistEnv.toString());
if (countchunks>0)
addingElements+=",";
addingElements+=landdistEnv.toString();

@ -4,12 +4,15 @@ import java.util.ArrayList;
import java.util.List;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
- import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory;
import org.hibernate.SessionFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
public class MaxMinGenerator {
+ private static Logger logger = LoggerFactory.getLogger(MaxMinGenerator.class);
String selectionQuery = "SELECT DISTINCT Max(hcaf_s.CenterLat) AS maxCLat, Min(hcaf_s.CenterLat) AS minCLat,speciesid FROM #occurrencecells# INNER JOIN hcaf_s ON #occurrencecells#.CsquareCode = hcaf_s.CsquareCode WHERE (((hcaf_s.oceanarea > 0))) AND #occurrencecells#.SpeciesID in (%1$s) AND #occurrencecells#.GoodCell <> 0 group by speciesid; ";
String insertionQuery = "insert into maxminlat_%4$s values ('%1$s','%2$s','%3$s'); ";
@ -78,7 +81,7 @@ public class MaxMinGenerator {
}
try {
- AnalysisLogger.getLogger().debug("inserting...");
+ logger.debug("inserting...");
// DatabaseFactory.executeSQLUpdate(insert, vreConnection);
DatabaseFactory.executeSQLUpdate(buffer.toString(), vreConnection);
} catch (Exception e) {
@ -100,7 +103,7 @@
if (counter % 100 == 0) {
try {
- AnalysisLogger.getLogger().debug("inserting...");
+ logger.debug("inserting...");
// DatabaseFactory.executeSQLUpdate(insert, vreConnection);
DatabaseFactory.executeSQLUpdate(buffer.toString(), vreConnection);
} catch (Exception e) {
@ -114,11 +117,11 @@
public void getAllSpecies(String hspentable) {
// populates the selectedSpecies variable by getting species from db
String query = String.format(SpeciesListQuery, hspentable);
- AnalysisLogger.getLogger().warn("Distribution Generator ->getting all species list from DB");
+ logger.warn("Distribution Generator ->getting all species list from DB");
- AnalysisLogger.getLogger().warn("Distribution Generator ->" + query);
+ logger.warn("Distribution Generator ->" + query);
List<Object> allspecies = DatabaseFactory.executeSQLQuery(query, vreConnection);
selectedSpecies = allspecies;
- // AnalysisLogger.getLogger().warn("Distribution Generator -> SIZE: " + selectedSpecies.size());
+ // logger.warn("Distribution Generator -> SIZE: " + selectedSpecies.size());
}
protected void deleteDestinationTable(String table) {
@ -126,9 +129,9 @@
// clean the corresponding table on destination db
try {
DatabaseFactory.executeSQLUpdate(deleteQuery, vreConnection);
- AnalysisLogger.getLogger().debug("destination table dropped");
+ logger.debug("destination table dropped");
} catch (Exception e) {
- AnalysisLogger.getLogger().debug("destination table NOT dropped");
+ logger.debug("destination table NOT dropped");
}
}
@ -136,12 +139,12 @@
protected void buildDestinationTable(String destinationTable) {
String createQuery = String.format(creationQuery, destinationTable);
- AnalysisLogger.getLogger().debug("Creating new table or destination schema: " + destinationTable);
+ logger.debug("Creating new table or destination schema: " + destinationTable);
try {
DatabaseFactory.executeSQLUpdate(String.format(createQuery, destinationTable), vreConnection);
- AnalysisLogger.getLogger().debug("Table created");
+ logger.debug("Table created");
} catch (Exception e) {
- AnalysisLogger.getLogger().debug("Table NOT created");
+ logger.debug("Table NOT created");
}
}
@ -155,7 +158,7 @@
deleteDestinationTable("maxminlat_" + hspenTable);
buildDestinationTable("maxminlat_" + hspenTable);
int size = selectedSpecies.size();
- AnalysisLogger.getLogger().warn("Distribution Generator -> SIZE: " + size);
+ logger.warn("Distribution Generator -> SIZE: " + size);
for (Object species : selectedSpecies) {
String speciesid = (String) species;
executeQuery(speciesid, hspenTable);
@ -164,10 +167,10 @@
status = MathFunctions.roundDecimal(((double) i *100/ (double) size), 2);
// status = (double) i / (double) size;
if (i%10==0)
- AnalysisLogger.getLogger().debug("status " + status);
+ logger.debug("status " + status);
}
long t1 = System.currentTimeMillis();
- AnalysisLogger.getLogger().debug("elapsed Time: " + (t1 - t0));
+ logger.debug("elapsed Time: " + (t1 - t0));
} catch (Exception e) {
e.printStackTrace();
@ -189,7 +192,7 @@
deleteDestinationTable(destinationTable);
buildDestinationTable(destinationTable);
int size = selectedSpecies.size();
- AnalysisLogger.getLogger().warn("Distribution Generator -> SIZE: " + size);
+ logger.warn("Distribution Generator -> SIZE: " + size);
List<String> specieslist = new ArrayList<String>();
for (Object species : selectedSpecies) {
String speciesid = (String) species;
@ -203,24 +206,24 @@
// status = (double) i / (double) size;
if (i%100==0){
- AnalysisLogger.getLogger().debug("status " + status+" species processed "+i);
+ logger.debug("status " + status+" species processed "+i);
insertQuery(specieslist, hspenTable);
specieslist = null;
specieslist = new ArrayList<String>();
//long t1 = System.currentTimeMillis();
- //AnalysisLogger.getLogger().debug("elapsed Time: " + (t1 - t0));
+ //logger.debug("elapsed Time: " + (t1 - t0));
}
}
if (specieslist.size()>0){
- AnalysisLogger.getLogger().debug("final status " + status+" species processed "+i);
+ logger.debug("final status " + status+" species processed "+i);
insertQuery(specieslist, hspenTable);
}
long t1 = System.currentTimeMillis();
- AnalysisLogger.getLogger().debug("overall elapsed Time: " + (t1 - t0));
+ logger.debug("overall elapsed Time: " + (t1 - t0));
} catch (Exception e) {
e.printStackTrace();

@ -5,9 +5,13 @@ import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
- import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
public class Neural_Network implements Serializable {
+ private static Logger logger = LoggerFactory.getLogger(Neural_Network.class);
public Neuron[][] griglia;
static final long serialVersionUID = 1;
// originale = 1.2
@ -340,7 +344,7 @@
public void train(double[][] inputvet, double[][] correctoutputvet) {
if (griglia[griglia.length - 1].length != correctoutputvet[0].length)
- AnalysisLogger.getLogger().debug("Error : the vector of outputs has not a lenght equal to the output of the network");
+ logger.debug("Error : the vector of outputs has not a lenght equal to the output of the network");
else {
en = 2;
int counter = 0;
@ -353,7 +357,7 @@
en += energy(this.propagate(inputvet[i]), correctoutputvet[i]);
}
- AnalysisLogger.getLogger().debug("Learning Score: " + en);
+ logger.debug("Learning Score: " + en);
counter++;
status = (float)counter/(float)maxcycle;
@ -365,9 +369,9 @@
}
System.out.println("Final Error: " + en);
if (counter >= maxcycle)
- AnalysisLogger.getLogger().debug("training incomplete: didn't manage to reduce the error under the thr!");
+ logger.debug("training incomplete: didn't manage to reduce the error under the thr!");
else
- AnalysisLogger.getLogger().debug("training complete!");
+ logger.debug("training complete!");
status = 100;
}
}
@ -411,7 +415,7 @@
nn = (Neural_Network) ois.readObject();
} catch (Exception ex) {
ex.printStackTrace();
- AnalysisLogger.getLogger().debug("Error in reading the object from file " + nomeFile + " .");
+ logger.debug("Error in reading the object from file " + nomeFile + " .");
} finally {
try {
stream.close();

@ -1,15 +1,14 @@
package org.gcube.dataanalysis.ecoengine.models.cores.pca;
- import org.gcube.contentmanagement.graphtools.data.BigSamplesTable;
- import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.Operations;
import org.gcube.dataanalysis.ecoengine.utils.Transformations;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import com.rapidminer.example.ExampleSet;
import com.rapidminer.operator.IOContainer;
import com.rapidminer.operator.IOObject;
- import com.rapidminer.operator.ModelApplier;
import com.rapidminer.operator.features.transformation.PCA;
import com.rapidminer.operator.features.transformation.PCAModel;
import com.rapidminer.tools.OperatorService;
@ -17,67 +16,7 @@ import com.rapidminer.tools.OperatorService;
public class PrincipalComponentAnalysis {
+ private static Logger logger = LoggerFactory.getLogger(PrincipalComponentAnalysis.class);
- public static void main(String[] args) throws Exception{
- AlgorithmConfiguration config = new AlgorithmConfiguration();
- config.setConfigPath("./cfg/");
- config.setPersistencePath("./cfg/");
- config.setNumberOfResources(1);
- config.setAgent("QUALITY_ANALYSIS");
- AnalysisLogger.setLogger(config.getConfigPath()+AlgorithmConfiguration.defaultLoggerFile);
- config.initRapidMiner();
- PrincipalComponentAnalysis pca = new PrincipalComponentAnalysis();
- /*random example
- int m= 20;
- int n = 10;
- double values[][] = new double[m][n];
- for (int i=0;i<m;i++){
- for (int j=0;j<n;j++)
- if ((i==0)||(i==1))
- values[i][j]=1d;
- else
- values[i][j]=Math.random();
- }
- */
- int m= 5;
- int n = 5;
- double values[][] = new double[m][n];
- double val2[] = {1.000d,0.451d,0.511d,0.197d,0.162d};
- double val1[] = {0.451d,1.000d,0.445d,0.252d,0.238d};
- double val3[] = {0.511d,0.445d,1.000d,0.301d,0.227d};
- double val5[] = {0.197d,0.252d,0.301d,1.000d,0.620d};
- double val4[] = {0.162d,0.238d,0.227d,0.620d,1.000d};
- values[0] = val1;
- values[1] = val2;
- values[2] = val3;
- values[3] = val4;
- values[4] = val5;
- //calculates the PCA
- pca.calcPCA(values);
- PCAModel model = pca.getModel();
- int components = model.getMaximumNumberOfComponents();
- for (int i=0;i<components;i++){
- AnalysisLogger.getLogger().debug((i+1)+"->"+model.getEigenvalue(i));
- double[] eigen = model.getEigenvector(i);
- for (int j=0;j<eigen.length;j++)
- System.out.print(eigen[j]+" ");
- System.out.println();
- }
- double [][] componentsMatrix = pca.getComponentsMatrix(values);
- System.exit(0);
- }
public void init(AlgorithmConfiguration config){
config.initRapidMiner();
@ -177,7 +116,7 @@ public class PrincipalComponentAnalysis {
public void calcPCA(double [][] sampleVectors) throws Exception{
- AnalysisLogger.getLogger().debug("STARTING PCA COMPUTATION");
+ logger.debug("STARTING PCA COMPUTATION");
PCA pca = (PCA) OperatorService.createOperator("PCA");
pca.setParameter("variance_threshold", "0.95");
@ -196,7 +135,7 @@
innermodel = (PCAModel) outputvector[1];
numberOfComponents = innermodel.getMaximumNumberOfComponents();
- AnalysisLogger.getLogger().debug("MODEL APPLIED");
+ logger.debug("MODEL APPLIED");
}
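
The hunk above removes the main() method that doubled as a quick manual test of calcPCA. If an equivalent smoke test is still wanted after the migration, a sketch under the same assumptions (the ./cfg/ paths and the 5x5 correlation matrix are taken from the deleted code; only the logging changes, and the wrapper class name is hypothetical):

import java.util.Arrays;

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.models.cores.pca.PrincipalComponentAnalysis;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.rapidminer.operator.features.transformation.PCAModel;

public class PrincipalComponentAnalysisSmokeTest {

	private static final Logger logger = LoggerFactory.getLogger(PrincipalComponentAnalysisSmokeTest.class);

	public static void main(String[] args) throws Exception {
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setConfigPath("./cfg/");
		config.setPersistencePath("./cfg/");
		config.setNumberOfResources(1);
		config.setAgent("QUALITY_ANALYSIS");
		config.initRapidMiner();

		PrincipalComponentAnalysis pca = new PrincipalComponentAnalysis();
		// same 5x5 correlation matrix used by the deleted main()
		double[][] values = {
				{ 0.451d, 1.000d, 0.445d, 0.252d, 0.238d },
				{ 1.000d, 0.451d, 0.511d, 0.197d, 0.162d },
				{ 0.511d, 0.445d, 1.000d, 0.301d, 0.227d },
				{ 0.162d, 0.238d, 0.227d, 0.620d, 1.000d },
				{ 0.197d, 0.252d, 0.301d, 1.000d, 0.620d }
		};

		pca.calcPCA(values);
		PCAModel model = pca.getModel();
		for (int i = 0; i < model.getMaximumNumberOfComponents(); i++) {
			// eigenvalues and eigenvectors go through SLF4J instead of AnalysisLogger/System.out
			logger.debug("{} -> {}", i + 1, model.getEigenvalue(i));
			logger.debug("eigenvector: {}", Arrays.toString(model.getEigenvector(i)));
		}
	}
}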

@ -9,7 +9,6 @@ import java.util.HashMap;
import java.util.List;
import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
- import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
@ -35,9 +34,13 @@ import org.jfree.chart.renderer.xy.XYSplineRenderer;
import org.jfree.data.xy.XYDataset;
import org.jfree.data.xy.XYSeriesCollection;
import org.jfree.ui.RectangleInsets;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
public class FeedForwardNNFile extends ModelAquamapsNN {
+ private static Logger logger = LoggerFactory.getLogger(FeedForwardNNFile.class);
final float frequency = 0.3f;// 1f;
int samplingRate = 32;
float timeShift = 1f / (float) samplingRate;
@ -173,13 +176,11 @@
@Override
public void init(AlgorithmConfiguration config, Model previousModel) {
- AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
// init the database
try {
connection = DatabaseUtils.initDBSession(config);
} catch (Exception e) {
- e.printStackTrace();
- AnalysisLogger.getLogger().trace("ERROR initializing connection");
+ logger.trace("ERROR initializing connection");
}
fileName = config.getPersistencePath() + Neural_Network.generateNNName(config.getParam(Reference), config.getParam(UserName), config.getParam(ModelName));
trainingDataSet = config.getParam(TrainingDataSet);
@ -187,7 +188,7 @@
trainingColumn = config.getParam(TrainingDataSetTargetColumn);
File previousfile = new File(config.getParam("PreviousNeuralNetwork"));
- AnalysisLogger.getLogger().info("Previous File is " + previousfile.getAbsolutePath());
+ logger.info("Previous File is " + previousfile.getAbsolutePath());
learningThr = Float.parseFloat(config.getParam(LearningThreshold));
maxiter = Integer.parseInt(config.getParam(MaxIterations));
@ -228,7 +229,7 @@
// take all features input vectors
String query = String.format(takeElementsQuery, trainingColumn + "," + dbcolumns, trainingDataSet, trainingColumn);
- AnalysisLogger.getLogger().debug("Query to execute: " + query);
+ logger.debug("Query to execute: " + query);
List<Object> features = DatabaseFactory.executeSQLQuery(query, connection);
int numbOfFeatures = features.size();
@ -236,12 +237,12 @@
List<Object> maxmin = DatabaseFactory.executeSQLQuery("select max(" + trainingColumn + "), min(" + trainingColumn + ") from " + trainingDataSet, connection);
maxfactor = Double.parseDouble("" + ((Object[]) maxmin.get(0))[0]);
minfactor = Double.parseDouble("" + ((Object[]) maxmin.get(0))[1]);
- AnalysisLogger.getLogger().debug("Calculated max: " + maxfactor + " min: " + minfactor);
+ logger.debug("Calculated max: " + maxfactor + " min: " + minfactor);
// setup Neural Network
int numberOfInputNodes = dbcolumnsList.length;
int numberOfOutputNodes = 1;
- AnalysisLogger.getLogger().debug("Training the ANN with " + numbOfFeatures + " training data and " + numberOfInputNodes + " inputs");
+ logger.debug("Training the ANN with " + numbOfFeatures + " training data and " + numberOfInputNodes + " inputs");
if (layersNeurons != null) {
int[] innerLayers = Neural_Network.setupInnerLayers(layersNeurons);
nn = new Neural_Network(numberOfInputNodes, numberOfOutputNodes, innerLayers, Neural_Network.ACTIVATIONFUNCTION.SIGMOID);
@ -252,10 +253,10 @@
nn.minfactor = minfactor;
nn.setThreshold(learningThr);
nn.setCycles(maxiter);
- AnalysisLogger.getLogger().debug("network parameters: M: " + maxfactor + ", m: " + minfactor + ", lt: " + learningThr + ", it: " + maxiter);
+ logger.debug("network parameters: M: " + maxfactor + ", m: " + minfactor + ", lt: " + learningThr + ", it: " + maxiter);
- AnalysisLogger.getLogger().debug("topology: " + nn.griglia.length + "X" + nn.griglia[0].length);
+ logger.debug("topology: " + nn.griglia.length + "X" + nn.griglia[0].length);
- AnalysisLogger.getLogger().debug("Features preprocessing");
+ logger.debug("Features preprocessing");
double[][] in = new double[numbOfFeatures][];
double[][] out = new double[numbOfFeatures][];
// build NN input
@ -265,20 +266,19 @@
in[i] = Neural_Network.preprocessObjects(Arrays.copyOfRange((Object[]) features.get(i), 1, feats.length));
out[i] = Neural_Network.preprocessObjects(Arrays.copyOfRange((Object[]) features.get(i), 0, 1));
// apply reduction factor
- // AnalysisLogger.getLogger().debug("Output Transformed from "+out[i][0]);
+ // logger.debug("Output Transformed from "+out[i][0]);
out[i][0] = nn.getCorrectValueForOutput(out[i][0]);
- // AnalysisLogger.getLogger().debug("To "+out[i][0]);
+ // logger.debug("To "+out[i][0]);
}
- AnalysisLogger.getLogger().debug("Features were correctly preprocessed - Training");
+ logger.debug("Features were correctly preprocessed - Training");
// train the NN
nn.train(in, out);
- AnalysisLogger.getLogger().debug("Saving Network");
+ logger.debug("Saving Network");
save(fileName, nn);
- AnalysisLogger.getLogger().debug("Done");
+ logger.debug("Done");
} catch (Exception e) {
- e.printStackTrace();
- AnalysisLogger.getLogger().error("ERROR during training");
+ logger.error("ERROR during training",e);
}
status = 100f;

@ -11,7 +11,6 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors; import java.util.concurrent.Executors;
import org.gcube.contentmanagement.graphtools.utils.HttpRequest; import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
@ -21,9 +20,13 @@ import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator; import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.GenericAlgorithm; import org.gcube.dataanalysis.ecoengine.interfaces.GenericAlgorithm;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric; import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class LocalSimpleSplitGenerator implements Generator { public class LocalSimpleSplitGenerator implements Generator {
private static Logger logger = LoggerFactory.getLogger(LocalSimpleSplitGenerator.class);
protected AlgorithmConfiguration config; protected AlgorithmConfiguration config;
protected ExecutorService executorService; protected ExecutorService executorService;
protected int numberOfThreadsToUse; protected int numberOfThreadsToUse;
@ -91,13 +94,12 @@ public class LocalSimpleSplitGenerator implements Generator {
@Override @Override
public void init() { public void init() {
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
stopInterrupt = false; stopInterrupt = false;
completeDistribution = new ConcurrentHashMap<Object, Map<Object,Float>>(); completeDistribution = new ConcurrentHashMap<Object, Map<Object,Float>>();
try { try {
initModel(); initModel();
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().error("error",e); logger.error("error",e);
} }
// probabilityBuffer = new Vector<String>(); // probabilityBuffer = new Vector<String>();
probabilityBuffer = new ConcurrentLinkedQueue<String>(); probabilityBuffer = new ConcurrentLinkedQueue<String>();
@ -156,19 +158,19 @@ public class LocalSimpleSplitGenerator implements Generator {
// INITIALIZATION // INITIALIZATION
long tstart = System.currentTimeMillis(); long tstart = System.currentTimeMillis();
try { try {
AnalysisLogger.getLogger().trace("generate->Take features reference"); logger.trace("generate->Take features reference");
// take the area reference vectors // take the area reference vectors
environmentVectors = distributionModel.getGeographicalInfoObjects(); environmentVectors = distributionModel.getGeographicalInfoObjects();
if ((environmentVectors ==null) ||(environmentVectors.size()==0)) if ((environmentVectors ==null) ||(environmentVectors.size()==0))
throw new Exception("Empty Features Set"); throw new Exception("Empty Features Set");
// calculate the number of chunks needed // calculate the number of chunks needed
spaceVectorsNumber = environmentVectors.size(); spaceVectorsNumber = environmentVectors.size();
AnalysisLogger.getLogger().trace("generate->Features to calc: "+spaceVectorsNumber); logger.trace("generate->Features to calc: "+spaceVectorsNumber);
AnalysisLogger.getLogger().trace("generate->Take groups references"); logger.trace("generate->Take groups references");
List<Object> speciesVectors = distributionModel.getMainInfoObjects(); List<Object> speciesVectors = distributionModel.getMainInfoObjects();
int speciesVectorNumber = speciesVectors.size(); int speciesVectorNumber = speciesVectors.size();
AnalysisLogger.getLogger().trace("generate->Number of groups of features: "+speciesVectorNumber); logger.trace("generate->Number of groups of features: "+speciesVectorNumber);
// calculate number of chunks to take into account // calculate number of chunks to take into account
chunksize = spaceVectorsNumber / numberOfThreadsToUse; chunksize = spaceVectorsNumber / numberOfThreadsToUse;
@ -178,7 +180,7 @@ public class LocalSimpleSplitGenerator implements Generator {
if ((spaceVectorsNumber % chunksize) != 0) if ((spaceVectorsNumber % chunksize) != 0)
numOfChunks += 1; numOfChunks += 1;
AnalysisLogger.getLogger().trace("generate->Calculation Started with " + numOfChunks + " chunks and " + speciesVectorNumber + " groups - chunk size will be "+chunksize); logger.trace("generate->Calculation Started with " + numOfChunks + " chunks and " + speciesVectorNumber + " groups - chunk size will be "+chunksize);
// initialize threads // initialize threads
initializeThreads(); initializeThreads();
@ -198,7 +200,7 @@ public class LocalSimpleSplitGenerator implements Generator {
long computationT0 = System.currentTimeMillis(); long computationT0 = System.currentTimeMillis();
// pre process for single species // pre process for single species
distributionModel.singleStepPreprocess(species, spaceVectorsNumber); distributionModel.singleStepPreprocess(species, spaceVectorsNumber);
AnalysisLogger.getLogger().trace("-> species " + distributionModel.getMainInfoID(species) + " - n. " + (processedSpeciesCounter + 1)); logger.trace("-> species " + distributionModel.getMainInfoID(species) + " - n. " + (processedSpeciesCounter + 1));
// CALCULATION CORE // CALCULATION CORE
for (int k = 0; k < numOfChunks; k++) { for (int k = 0; k < numOfChunks; k++) {
// get the starting index // get the starting index
@ -217,7 +219,7 @@ public class LocalSimpleSplitGenerator implements Generator {
status = ((float) overallcounter / ((float) (speciesVectorNumber * numOfChunks))) * 100f; status = ((float) overallcounter / ((float) (speciesVectorNumber * numOfChunks))) * 100f;
if (status == 100) if (status == 100)
status = 99f; status = 99f;
// AnalysisLogger.getLogger().trace("STATUS->"+status+"%"); // logger.trace("STATUS->"+status+"%");
// increment global counter index // increment global counter index
overallcounter++; overallcounter++;
} }
@ -231,7 +233,7 @@ public class LocalSimpleSplitGenerator implements Generator {
long computationT1 = System.currentTimeMillis(); long computationT1 = System.currentTimeMillis();
// flushBuffer(); // flushBuffer();
AnalysisLogger.getLogger().trace("generate->Species Computation Finished in " + (computationT1 - computationT0) + " ms"); logger.trace("generate->Species Computation Finished in " + (computationT1 - computationT0) + " ms");
// perform overall insert // perform overall insert
// insertCriteria(); // insertCriteria();
// increment the count of processed species // increment the count of processed species
@ -246,18 +248,18 @@ public class LocalSimpleSplitGenerator implements Generator {
long computationT2 = System.currentTimeMillis(); long computationT2 = System.currentTimeMillis();
// flushInterrupt = true; // flushInterrupt = true;
AnalysisLogger.getLogger().trace("generate->All Species Computed in " + (computationT2 - tstart) + " ms"); logger.trace("generate->All Species Computed in " + (computationT2 - tstart) + " ms");
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().error("error",e); logger.error("error",e);
throw e; throw e;
} finally { } finally {
// REPORT OVERALL ELAPSED TIME // REPORT OVERALL ELAPSED TIME
AnalysisLogger.getLogger().trace("generate-> Storing Probability Distribution"); logger.trace("generate-> Storing Probability Distribution");
try{ try{
distributionModel.storeDistribution(completeDistribution); distributionModel.storeDistribution(completeDistribution);
}catch(Exception ee){ }catch(Exception ee){
AnalysisLogger.getLogger().trace("generate-> Error Storing Probability Distribution ",ee); logger.trace("generate-> Error Storing Probability Distribution ",ee);
} }
try{ try{
distributionModel.postProcess(); distributionModel.postProcess();
@ -268,7 +270,7 @@ public class LocalSimpleSplitGenerator implements Generator {
}catch(Exception eeee){} }catch(Exception eeee){}
long tend = System.currentTimeMillis(); long tend = System.currentTimeMillis();
long ttotal = tend - tstart; long ttotal = tend - tstart;
AnalysisLogger.getLogger().warn("generate->Distribution Generator->Algorithm finished in: " + ((double) ttotal / (double) 60000) + " min\n"); logger.warn("generate->Distribution Generator->Algorithm finished in: " + ((double) ttotal / (double) 60000) + " min\n");
status = 100f; status = 100f;
} }
} }
@ -296,10 +298,10 @@ public class LocalSimpleSplitGenerator implements Generator {
} }
public Integer call() { public Integer call() {
AnalysisLogger.getLogger().trace("threadCalculation->" + (threadIndex+1)); logger.trace("threadCalculation->" + (threadIndex+1));
int max = Math.min(spaceindex + chunksize, spaceVectorsNumber); int max = Math.min(spaceindex + chunksize, spaceVectorsNumber);
String speciesID = distributionModel.getMainInfoID(speciesVector); String speciesID = distributionModel.getMainInfoID(speciesVector);
AnalysisLogger.getLogger().trace("threadCalculation-> calculating elements from "+spaceindex+" to " + max +" for species "+speciesID); logger.trace("threadCalculation-> calculating elements from "+spaceindex+" to " + max +" for species "+speciesID);
Map<Object,Float> geoDistrib = completeDistribution.get(speciesID); Map<Object,Float> geoDistrib = completeDistribution.get(speciesID);
//if the map is null then generate a new map, otherwise update it //if the map is null then generate a new map, otherwise update it
if (geoDistrib==null){ if (geoDistrib==null){
@ -317,7 +319,7 @@ public class LocalSimpleSplitGenerator implements Generator {
processedRecordsCounter++; processedRecordsCounter++;
} }
AnalysisLogger.getLogger().trace("FINISHED"); logger.trace("FINISHED");
threadActivity[threadIndex] = false; threadActivity[threadIndex] = false;
return 0; return 0;
} }

@ -13,7 +13,6 @@ import java.util.concurrent.Executors;
import org.gcube.contentmanagement.graphtools.utils.HttpRequest; import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions; import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
@ -25,9 +24,13 @@ import org.gcube.dataanalysis.ecoengine.interfaces.GenericAlgorithm;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionTable; import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionTable;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class LocalSplitGenerator implements Generator { public class LocalSplitGenerator implements Generator {
private static Logger logger = LoggerFactory.getLogger(LocalSplitGenerator.class);
private AlgorithmConfiguration config; private AlgorithmConfiguration config;
private ExecutorService executorService; private ExecutorService executorService;
private int numberOfThreadsToUse; private int numberOfThreadsToUse;
@ -105,7 +108,6 @@ public class LocalSplitGenerator implements Generator {
@Override @Override
public void init() { public void init() {
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
stopInterrupt = false; stopInterrupt = false;
flushInterrupt = false; flushInterrupt = false;
forceflush=false; forceflush=false;
@ -113,7 +115,7 @@ public class LocalSplitGenerator implements Generator {
try { try {
initModel(); initModel();
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().error("error",e); logger.error("error",e);
} }
// probabilityBuffer = new Vector<String>(); // probabilityBuffer = new Vector<String>();
probabilityBuffer = new ConcurrentLinkedQueue<String>(); probabilityBuffer = new ConcurrentLinkedQueue<String>();
@ -127,7 +129,7 @@ public class LocalSplitGenerator implements Generator {
probabilityInsertionStatement = probabilityInsertionStatement.replace("%ADDEDINFORMATION%", addedinfo); probabilityInsertionStatement = probabilityInsertionStatement.replace("%ADDEDINFORMATION%", addedinfo);
if (!distributionModel.isSynchronousProbabilityWrite()) { if (!distributionModel.isSynchronousProbabilityWrite()) {
AnalysisLogger.getLogger().trace("init()->insertion scheduler initialized"); logger.trace("init()->insertion scheduler initialized");
// inizialize the scheduler for the insertions // inizialize the scheduler for the insertions
writerScheduler = new Timer(); writerScheduler = new Timer();
writerScheduler.schedule(new DatabaseWriter(), 0, AlgorithmConfiguration.refreshResourcesTime); writerScheduler.schedule(new DatabaseWriter(), 0, AlgorithmConfiguration.refreshResourcesTime);
@ -174,7 +176,7 @@ public class LocalSplitGenerator implements Generator {
dbHibConnection = DatabaseFactory.initDBConnection(defaultDatabaseFile, config); dbHibConnection = DatabaseFactory.initDBConnection(defaultDatabaseFile, config);
} }
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().warn("error initializing db session",e); logger.warn("error initializing db session",e);
} }
} }
@ -182,17 +184,17 @@ public class LocalSplitGenerator implements Generator {
private void createTable() throws Exception { private void createTable() throws Exception {
if (config.getParam("CreateTable") != null && config.getParam("CreateTable").equalsIgnoreCase("true")) { if (config.getParam("CreateTable") != null && config.getParam("CreateTable").equalsIgnoreCase("true")) {
try { try {
AnalysisLogger.getLogger().trace("recreating table: " + "drop table " + config.getParam("DistributionTable")); logger.trace("recreating table: " + "drop table " + config.getParam("DistributionTable"));
DatabaseFactory.executeSQLUpdate("drop table " + config.getParam("DistributionTable"), dbHibConnection); DatabaseFactory.executeSQLUpdate("drop table " + config.getParam("DistributionTable"), dbHibConnection);
AnalysisLogger.getLogger().trace("recreating table->OK"); logger.trace("recreating table->OK");
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().trace("recreating table->" + e.getLocalizedMessage()); logger.trace("recreating table->" + e.getLocalizedMessage());
} }
// DatabaseFactory.executeUpdateNoTransaction(distributionModel.getDistributionTableStatement(), config.getParam("DatabaseDriver"), config.getParam("DatabaseUserName"), config.getParam("DatabasePassword"), config.getParam("DatabaseURL"), true); // DatabaseFactory.executeUpdateNoTransaction(distributionModel.getDistributionTableStatement(), config.getParam("DatabaseDriver"), config.getParam("DatabaseUserName"), config.getParam("DatabasePassword"), config.getParam("DatabaseURL"), true);
DatabaseFactory.executeUpdateNoTransaction(distributionModel.getDistributionTableStatement(), config.getDatabaseDriver(), config.getDatabaseUserName(), config.getDatabasePassword(), config.getDatabaseURL(), true); DatabaseFactory.executeUpdateNoTransaction(distributionModel.getDistributionTableStatement(), config.getDatabaseDriver(), config.getDatabaseUserName(), config.getDatabasePassword(), config.getDatabaseURL(), true);
AnalysisLogger.getLogger().trace("createTable()->OK!"); logger.trace("createTable()->OK!");
} }
} }
@ -217,7 +219,7 @@ public class LocalSplitGenerator implements Generator {
} catch (Exception e) { } catch (Exception e) {
} }
} }
AnalysisLogger.getLogger().trace("CLOSING CONNECTIONS"); logger.trace("CLOSING CONNECTIONS");
try{ try{
dbHibConnection.close(); dbHibConnection.close();
}catch(Exception eee){} }catch(Exception eee){}
@ -240,16 +242,16 @@ public class LocalSplitGenerator implements Generator {
// INITIALIZATION // INITIALIZATION
long tstart = System.currentTimeMillis(); long tstart = System.currentTimeMillis();
try { try {
AnalysisLogger.getLogger().trace("generate->Using Local Computation algorithm " + distributionModel.getName()); logger.trace("generate->Using Local Computation algorithm " + distributionModel.getName());
AnalysisLogger.getLogger().trace("generate->Check for table creation"); logger.trace("generate->Check for table creation");
createTable(); createTable();
AnalysisLogger.getLogger().trace("generate->Take area reference"); logger.trace("generate->Take area reference");
// take the area reference vectors // take the area reference vectors
environmentVectors = DatabaseFactory.executeSQLQuery(distributionModel.getGeographicalInfoQuery(), dbHibConnection); environmentVectors = DatabaseFactory.executeSQLQuery(distributionModel.getGeographicalInfoQuery(), dbHibConnection);
AnalysisLogger.getLogger().trace("generate->Take species reference"); logger.trace("generate->Take species reference");
List<Object> speciesVectors = DatabaseFactory.executeSQLQuery(distributionModel.getMainInfoQuery(), dbHibConnection); List<Object> speciesVectors = DatabaseFactory.executeSQLQuery(distributionModel.getMainInfoQuery(), dbHibConnection);
AnalysisLogger.getLogger().trace("generate->got all information"); logger.trace("generate->got all information");
// calculate the number of chunks needed // calculate the number of chunks needed
spaceVectorsNumber = environmentVectors.size(); spaceVectorsNumber = environmentVectors.size();
@ -263,7 +265,7 @@ public class LocalSplitGenerator implements Generator {
if ((spaceVectorsNumber % chunksize) != 0) if ((spaceVectorsNumber % chunksize) != 0)
numOfChunks += 1; numOfChunks += 1;
AnalysisLogger.getLogger().trace("generate->Calculation Started with " + numOfChunks + " chunks and " + speciesVectorNumber + " species"); logger.trace("generate->Calculation Started with " + numOfChunks + " chunks and " + speciesVectorNumber + " species");
// initialize threads // initialize threads
initializeThreads(); initializeThreads();
@ -283,7 +285,7 @@ public class LocalSplitGenerator implements Generator {
long computationT0 = System.currentTimeMillis(); long computationT0 = System.currentTimeMillis();
// pre process for single species // pre process for single species
distributionModel.singleStepPreprocess(species, spaceVectorsNumber); distributionModel.singleStepPreprocess(species, spaceVectorsNumber);
AnalysisLogger.getLogger().trace("-------------------------------------------------> species " + distributionModel.getMainInfoID(species) + " - n. " + (processedSpeciesCounter + 1)); logger.trace("-------------------------------------------------> species " + distributionModel.getMainInfoID(species) + " - n. " + (processedSpeciesCounter + 1));
// CALCULATION CORE // CALCULATION CORE
for (int k = 0; k < numOfChunks; k++) { for (int k = 0; k < numOfChunks; k++) {
// get the starting index // get the starting index
@ -302,7 +304,7 @@ public class LocalSplitGenerator implements Generator {
status = ((float) overallcounter / ((float) (speciesVectorNumber * numOfChunks))) * 100f; status = ((float) overallcounter / ((float) (speciesVectorNumber * numOfChunks))) * 100f;
if (status == 100) if (status == 100)
status = 99f; status = 99f;
// AnalysisLogger.getLogger().trace("STATUS->"+status+"%"); // logger.trace("STATUS->"+status+"%");
// increment global counter index // increment global counter index
overallcounter++; overallcounter++;
} }
@ -322,7 +324,7 @@ public class LocalSplitGenerator implements Generator {
long computationT1 = System.currentTimeMillis(); long computationT1 = System.currentTimeMillis();
// flushBuffer(); // flushBuffer();
AnalysisLogger.getLogger().trace("generate->Species Computation Finished in " + (computationT1 - computationT0) + " ms"); logger.trace("generate->Species Computation Finished in " + (computationT1 - computationT0) + " ms");
// perform overall insert // perform overall insert
// insertCriteria(); // insertCriteria();
// increment the count of processed species // increment the count of processed species
@ -338,10 +340,10 @@ public class LocalSplitGenerator implements Generator {
long computationT2 = System.currentTimeMillis(); long computationT2 = System.currentTimeMillis();
// flushInterrupt = true; // flushInterrupt = true;
AnalysisLogger.getLogger().trace("generate->All Species Computed in " + (computationT2 - tstart) + " ms"); logger.trace("generate->All Species Computed in " + (computationT2 - tstart) + " ms");
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().error("error",e); logger.error("error",e);
throw e; throw e;
} finally { } finally {
try { try {
@ -353,7 +355,7 @@ public class LocalSplitGenerator implements Generator {
} }
long tend = System.currentTimeMillis(); long tend = System.currentTimeMillis();
long ttotal = tend - tstart; long ttotal = tend - tstart;
AnalysisLogger.getLogger().warn("generate->Distribution Generator->Algorithm finished in: " + ((double) ttotal / (double) 60000) + " min\n"); logger.warn("generate->Distribution Generator->Algorithm finished in: " + ((double) ttotal / (double) 60000) + " min\n");
status = 100f; status = 100f;
} }
} }
@ -381,7 +383,7 @@ public class LocalSplitGenerator implements Generator {
} }
public Integer call() { public Integer call() {
// AnalysisLogger.getLogger().trace("threadCalculation->" + (threadIndex+1)); // logger.trace("threadCalculation->" + (threadIndex+1));
int max = Math.min(spaceindex + chunksize, spaceVectorsNumber); int max = Math.min(spaceindex + chunksize, spaceVectorsNumber);
String speciesID = distributionModel.getMainInfoID(speciesVector); String speciesID = distributionModel.getMainInfoID(speciesVector);
@ -417,26 +419,26 @@ public class LocalSplitGenerator implements Generator {
public void run() { public void run() {
try { try {
if (forceflush){ if (forceflush){
AnalysisLogger.getLogger().trace("\t...flushing on db"); logger.trace("\t...flushing on db");
// flush the objects // flush the objects
flushBuffer(); flushBuffer();
AnalysisLogger.getLogger().trace("\t...finished flushing on db"); logger.trace("\t...finished flushing on db");
forceflush=false; forceflush=false;
} }
if (stopInterrupt) { if (stopInterrupt) {
AnalysisLogger.getLogger().trace("\t...finally flushing on db"); logger.trace("\t...finally flushing on db");
// flush the objects // flush the objects
flushBuffer(); flushBuffer();
AnalysisLogger.getLogger().trace("\t...finished finally flushing on db"); logger.trace("\t...finished finally flushing on db");
flushInterrupt = true; flushInterrupt = true;
this.cancel(); this.cancel();
} else if ((probabilityBuffer != null) && (probabilityBuffer.size() > AlgorithmConfiguration.chunkSize)) { } else if ((probabilityBuffer != null) && (probabilityBuffer.size() > AlgorithmConfiguration.chunkSize)) {
// AnalysisLogger.getLogger().trace("\t...writing on db"); // logger.trace("\t...writing on db");
writeOnDB(AlgorithmConfiguration.chunkSize); writeOnDB(AlgorithmConfiguration.chunkSize);
// AnalysisLogger.getLogger().trace("\t...finished writing on db"); // logger.trace("\t...finished writing on db");
} }
} catch (Throwable e) { } catch (Throwable e) {
AnalysisLogger.getLogger().error("error",e); logger.error("error",e);
flushInterrupt = true; flushInterrupt = true;
} }
@ -468,13 +470,13 @@ public class LocalSplitGenerator implements Generator {
String insertionString = String.format(probabilityInsertionStatement, config.getParam("DistributionTable"), sb.toString()); String insertionString = String.format(probabilityInsertionStatement, config.getParam("DistributionTable"), sb.toString());
try { try {
// AnalysisLogger.getLogger().debug("->"+insertionString); // logger.debug("->"+insertionString);
DatabaseFactory.executeSQLUpdate(insertionString, dbHibConnection); DatabaseFactory.executeSQLUpdate(insertionString, dbHibConnection);
} catch (Exception e) { } catch (Exception e) {
e.printStackTrace(); e.printStackTrace();
} }
AnalysisLogger.getLogger().trace("writeOnDB()->PROBABILITIES BUFFER REMAINING:" + probabilityBuffer.size()); logger.trace("writeOnDB()->PROBABILITIES BUFFER REMAINING:" + probabilityBuffer.size());
sb = null; sb = null;
} }

@ -1,10 +1,6 @@
package org.gcube.dataanalysis.ecoengine.processing; package org.gcube.dataanalysis.ecoengine.processing;
import java.util.HashMap;
import java.util.List;
import org.gcube.contentmanagement.graphtools.utils.HttpRequest; import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
@ -13,12 +9,14 @@ import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecInputObject;
import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecOutputObject; import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecOutputObject;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad; import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources; import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.slf4j.Logger;
import org.gcube.dataanalysis.ecoengine.interfaces.GenericAlgorithm; import org.slf4j.LoggerFactory;
//deprecated //deprecated
public class RainyCloudGenerator { public class RainyCloudGenerator {
private static Logger logger = LoggerFactory.getLogger(RainyCloudGenerator.class);
AlgorithmConfiguration config; AlgorithmConfiguration config;
private boolean interruptProcessing; private boolean interruptProcessing;
RemoteGenerationManager remoteGenerationManager; RemoteGenerationManager remoteGenerationManager;
@ -46,7 +44,7 @@ public class RainyCloudGenerator {
} }
public void init() { public void init() {
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
interruptProcessing = false; interruptProcessing = false;
rhio = new RemoteHspecInputObject(); rhio = new RemoteHspecInputObject();
rhio.userName = config.getParam("ServiceUserName"); rhio.userName = config.getParam("ServiceUserName");
@ -145,18 +143,18 @@ public class RainyCloudGenerator {
e.printStackTrace(); e.printStackTrace();
} }
AnalysisLogger.getLogger().trace("REMOTE PROCESSING STARTED"); logger.trace("REMOTE PROCESSING STARTED");
boolean finish = false; boolean finish = false;
while (!finish && !interruptProcessing) { while (!finish && !interruptProcessing) {
float status = getStatus(); float status = getStatus();
// AnalysisLogger.getLogger().trace("Status "+status); // logger.trace("Status "+status);
if (status == 100) if (status == 100)
finish = true; finish = true;
Thread.sleep(500); Thread.sleep(500);
} }
AnalysisLogger.getLogger().trace("REMOTE PROCESSING ENDED"); logger.trace("REMOTE PROCESSING ENDED");
} }

@ -5,15 +5,18 @@ import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.DynamicTransducer; import org.gcube.dataanalysis.ecoengine.interfaces.DynamicTransducer;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer; import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DynamicTransducerersFactory { public class DynamicTransducerersFactory {
private static Logger logger = LoggerFactory.getLogger(DynamicTransducerersFactory.class);
public DynamicTransducerersFactory (){ public DynamicTransducerersFactory (){
} }
@ -22,7 +25,7 @@ public class DynamicTransducerersFactory {
public static Transducerer getTransducerer(AlgorithmConfiguration config) throws Exception { public static Transducerer getTransducerer(AlgorithmConfiguration config) throws Exception {
String agent = config.getAgent(); String agent = config.getAgent();
Map<String,Transducerer> subTransducerers = getAllSubTransducerers(config); Map<String,Transducerer> subTransducerers = getAllSubTransducerers(config);
AnalysisLogger.getLogger().debug("DynamicTransducerFactory: Getting the following Agent:"+agent+" From the list of N Transducers: "+subTransducerers.size()); logger.debug("DynamicTransducerFactory: Getting the following Agent:"+agent+" From the list of N Transducers: "+subTransducerers.size());
Transducerer trans = subTransducerers.get(agent); Transducerer trans = subTransducerers.get(agent);
return trans; return trans;
} }
@ -41,9 +44,9 @@ public class DynamicTransducerersFactory {
List<String> trans = ProcessorsFactory.getClasses(config.getConfigPath() + AlgorithmConfiguration.dynamicTransducerersFile); List<String> trans = ProcessorsFactory.getClasses(config.getConfigPath() + AlgorithmConfiguration.dynamicTransducerersFile);
/* /*
if (trans!=null && trans.size()>0) if (trans!=null && trans.size()>0)
AnalysisLogger.getLogger().debug("Dynamic Transducerers Factory: Found "+trans.size()+" external connectors"); logger.debug("Dynamic Transducerers Factory: Found "+trans.size()+" external connectors");
else else
AnalysisLogger.getLogger().debug("Dynamic Transducerers Factory: No external connectors found!"); logger.debug("Dynamic Transducerers Factory: No external connectors found!");
*/ */
return trans; return trans;
} }

@ -4,7 +4,6 @@ import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Properties; import java.util.Properties;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
@ -14,6 +13,7 @@ import org.gcube.dataanalysis.ecoengine.interfaces.GenericAlgorithm;
public class GeneratorsFactory { public class GeneratorsFactory {
public GeneratorsFactory(){ public GeneratorsFactory(){
} }
@ -56,7 +56,6 @@ public class GeneratorsFactory {
List<ComputationalAgent> generators = new ArrayList<ComputationalAgent>(); List<ComputationalAgent> generators = new ArrayList<ComputationalAgent>();
try { try {
//initialize the logger //initialize the logger
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
//take the algorithm //take the algorithm
String algorithm = config.getModel(); String algorithm = config.getModel();
//take the algorithms list //take the algorithms list

@ -4,7 +4,6 @@ import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Properties; import java.util.Properties;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
@ -50,8 +49,6 @@ public class ModelersFactory {
//modify this class in order to manage generators weight and match algorithm vs generators //modify this class in order to manage generators weight and match algorithm vs generators
List<ComputationalAgent> modelers = new ArrayList<ComputationalAgent>(); List<ComputationalAgent> modelers = new ArrayList<ComputationalAgent>();
try { try {
//initialize the logger
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
//take the algorithm //take the algorithm
String algorithm = config.getModel(); String algorithm = config.getModel();
//take the algorithms list //take the algorithms list

@ -10,7 +10,6 @@ import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import java.util.Properties; import java.util.Properties;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer; import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer;
@ -22,9 +21,13 @@ import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler; import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer; import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ProcessorsFactory { public class ProcessorsFactory {
private static Logger logger = LoggerFactory.getLogger(ProcessorsFactory.class);
public static List<StatisticalType> getDefaultDatabaseConfiguration(String cfgPath) { public static List<StatisticalType> getDefaultDatabaseConfiguration(String cfgPath) {
String databasecfgfile = cfgPath + AlgorithmConfiguration.defaultConnectionFile; String databasecfgfile = cfgPath + AlgorithmConfiguration.defaultConnectionFile;
try { try {
@ -141,8 +144,6 @@ public class ProcessorsFactory {
public static ComputationalAgent getProcessor(AlgorithmConfiguration config, String file,String explicitAlgorithm, ClassLoader cl) throws Exception { public static ComputationalAgent getProcessor(AlgorithmConfiguration config, String file,String explicitAlgorithm, ClassLoader cl) throws Exception {
// modify this class in order to take the right generator algorithm // modify this class in order to take the right generator algorithm
try { try {
// initialize the logger
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
// take the algorithm // take the algorithm
String algorithm = explicitAlgorithm; String algorithm = explicitAlgorithm;
if (explicitAlgorithm==null) if (explicitAlgorithm==null)
@ -171,7 +172,7 @@ public class ProcessorsFactory {
} }
else if (algclass instanceof Clusterer) { else if (algclass instanceof Clusterer) {
Clusterer m = (Clusterer) algclass; Clusterer m = (Clusterer) algclass;
AnalysisLogger.getLogger().debug("algoritm is null !!!! "+(m==null)); logger.debug("algoritm is null !!!! "+(m==null));
m.setConfiguration(config); m.setConfiguration(config);
return m; return m;
} }

@ -3,14 +3,17 @@ package org.gcube.dataanalysis.ecoengine.processing.factories;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent; import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer; import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TransducerersFactory { public class TransducerersFactory {
private static Logger logger = LoggerFactory.getLogger(TransducerersFactory.class);
public TransducerersFactory() { public TransducerersFactory() {
} }
@ -39,7 +42,7 @@ public class TransducerersFactory {
else { else {
config.setAgent(algorithmName); config.setAgent(algorithmName);
inputs = DynamicTransducerersFactory.getTransducerer(config).getInputParameters(); inputs = DynamicTransducerersFactory.getTransducerer(config).getInputParameters();
AnalysisLogger.getLogger().debug("Dynamic INPUTS:"+inputs); logger.debug("Dynamic INPUTS:"+inputs);
return inputs; return inputs;
} }
} }
@ -52,7 +55,7 @@ public class TransducerersFactory {
else { else {
config.setAgent(algorithmName); config.setAgent(algorithmName);
output = DynamicTransducerersFactory.getTransducerer(config).getOutput(); output = DynamicTransducerersFactory.getTransducerer(config).getOutput();
AnalysisLogger.getLogger().debug("Dynamic Output:"+output); logger.debug("Dynamic Output:"+output);
return output; return output;
} }
} }
@ -70,7 +73,7 @@ public class TransducerersFactory {
else{ else{
config.setAgent(algorithmName); config.setAgent(algorithmName);
input = DynamicTransducerersFactory.getTransducerer(config).getDescription(); input = DynamicTransducerersFactory.getTransducerer(config).getDescription();
AnalysisLogger.getLogger().debug("Dynamic DESCRIPTION:"+input); logger.debug("Dynamic DESCRIPTION:"+input);
return input; return input;
} }
} }

@ -4,10 +4,13 @@ import java.util.ArrayList;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions; import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class PeriodicityDetector { public class PeriodicityDetector {
private static Logger logger = LoggerFactory.getLogger(PeriodicityDetector.class);
/* /*
* static int defaultSamplingRate = 1000;// Hz static float defaultSignalLengthTimeinSec = 5;// s static float defaultHiddenFrequency = 100f;// Hz static float defaultMinPossibleFreq = 0; // Hz static float defaultMaxPossibleFreq = 200; // Hz static float defaultSNratio = 2; static float defaultFreqError = 1f; * static int defaultSamplingRate = 1000;// Hz static float defaultSignalLengthTimeinSec = 5;// s static float defaultHiddenFrequency = 100f;// Hz static float defaultMinPossibleFreq = 0; // Hz static float defaultMaxPossibleFreq = 200; // Hz static float defaultSNratio = 2; static float defaultFreqError = 1f;
*/ */
@ -71,9 +74,9 @@ public class PeriodicityDetector {
public void demo() throws Exception { public void demo() throws Exception {
double[] signal = produceNoisySignal(defaultSignalLengthTimeinSec, defaultSamplingRate, defaultHiddenFrequency, defaultSNratio); double[] signal = produceNoisySignal(defaultSignalLengthTimeinSec, defaultSamplingRate, defaultHiddenFrequency, defaultSNratio);
AnalysisLogger.getLogger().debug("Signal samples: " + signal.length); logger.debug("Signal samples: " + signal.length);
double F = detectFrequency(signal, defaultSamplingRate, defaultMinPossibleFreq, defaultMaxPossibleFreq, defaultFreqError, -1, true); double F = detectFrequency(signal, defaultSamplingRate, defaultMinPossibleFreq, defaultMaxPossibleFreq, defaultFreqError, -1, true);
AnalysisLogger.getLogger().debug("Detected F:" + F + " indecision [" + lowermeanF + " , " + uppermeanF + "]"); logger.debug("Detected F:" + F + " indecision [" + lowermeanF + " , " + uppermeanF + "]");
} }
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
@ -115,7 +118,7 @@ public class PeriodicityDetector {
if (pow <= 1) if (pow <= 1)
pow = Math.round(Math.log((float) signal.length / (float) ("" + signal.length).length()) / Math.log(2)); pow = Math.round(Math.log((float) signal.length / (float) ("" + signal.length).length()) / Math.log(2));
AnalysisLogger.getLogger().debug("Suggested pow for window length=" + pow); logger.debug("Suggested pow for window length=" + pow);
} }
// adjust FFT Samples to be even // adjust FFT Samples to be even
@ -130,8 +133,8 @@ public class PeriodicityDetector {
wLength = (int) Math.pow(2, pow); wLength = (int) Math.pow(2, pow);
AnalysisLogger.getLogger().debug("Suggested windows length (samples)=" + wLength); logger.debug("Suggested windows length (samples)=" + wLength);
AnalysisLogger.getLogger().debug("Suggested windows length (s)=" + ((float) wLength / (float) samplingRate) + " s"); logger.debug("Suggested windows length (s)=" + ((float) wLength / (float) samplingRate) + " s");
int windowAnalysisSamples = (int) Math.pow(2, 14);// (int) int windowAnalysisSamples = (int) Math.pow(2, 14);// (int)
windowAnalysisSamples = wLength; windowAnalysisSamples = wLength;
int windowShiftSamples = (int) Math.round((float) windowAnalysisSamples / 2f); int windowShiftSamples = (int) Math.round((float) windowAnalysisSamples / 2f);
@ -139,9 +142,9 @@ public class PeriodicityDetector {
float error = ((float) samplingRate / (float) windowAnalysisSamples); float error = ((float) samplingRate / (float) windowAnalysisSamples);
AnalysisLogger.getLogger().debug("Error in the Measure will be: " + error + " Hz"); logger.debug("Error in the Measure will be: " + error + " Hz");
AnalysisLogger.getLogger().debug("A priori Min Freq: " + minPossibleFreq + " s"); logger.debug("A priori Min Freq: " + minPossibleFreq + " s");
AnalysisLogger.getLogger().debug("A priori Max Freq: " + maxPossibleFreq + " s"); logger.debug("A priori Max Freq: " + maxPossibleFreq + " s");
if (maxPossibleFreq >= samplingRate) if (maxPossibleFreq >= samplingRate)
maxPossibleFreq = (float) (samplingRate / 2f) - (0.1f * samplingRate / 2f); maxPossibleFreq = (float) (samplingRate / 2f) - (0.1f * samplingRate / 2f);
@ -173,7 +176,7 @@ public class PeriodicityDetector {
double[] powers = signalMaximumAnalyzer.averagepower; double[] powers = signalMaximumAnalyzer.averagepower;
currentspectrum = spectrum; currentspectrum = spectrum;
// display the maximum freqs // display the maximum freqs
AnalysisLogger.getLogger().debug("Number of frequency peaks " + maxfrequencies.length); logger.debug("Number of frequency peaks " + maxfrequencies.length);
// take the longest stable sequence of frequencies // take the longest stable sequence of frequencies
SignalConverter signalconverter = new SignalConverter(); SignalConverter signalconverter = new SignalConverter();
maxfrequencies = signalconverter.takeLongestStableTract(maxfrequencies, 0.01); maxfrequencies = signalconverter.takeLongestStableTract(maxfrequencies, 0.01);
@ -202,13 +205,13 @@ public class PeriodicityDetector {
double meanF = MathFunctions.mean(maxfrequencies); double meanF = MathFunctions.mean(maxfrequencies);
// we consider a complete cycle // we consider a complete cycle
double possibleperiod = 2d / meanF; double possibleperiod = 2d / meanF;
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Frequency " + meanF); logger.debug("TimeSeriesAnalysis->Frequency " + meanF);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Periodicity " + possibleperiod); logger.debug("TimeSeriesAnalysis->Periodicity " + possibleperiod);
double maxperiod = Math.min(signal.length, currentWindowAnalysisSamples); double maxperiod = Math.min(signal.length, currentWindowAnalysisSamples);
if ((meanF <= minPossibleFreq) || (meanF >= maxPossibleFreq) || (possibleperiod == 0) || (possibleperiod > (maxperiod))) { if ((meanF <= minPossibleFreq) || (meanF >= maxPossibleFreq) || (possibleperiod == 0) || (possibleperiod > (maxperiod))) {
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Invalid periodicity " + (meanF <= minPossibleFreq) + " , " + (meanF >= maxPossibleFreq) + " , " + (possibleperiod == 0) + " , " + (possibleperiod > (maxperiod))); logger.debug("TimeSeriesAnalysis->Invalid periodicity " + (meanF <= minPossibleFreq) + " , " + (meanF >= maxPossibleFreq) + " , " + (possibleperiod == 0) + " , " + (possibleperiod > (maxperiod)));
meanF = 0; meanF = 0;
this.meanF = 0; this.meanF = 0;
@ -225,7 +228,7 @@ public class PeriodicityDetector {
this.endPeriodSampleIndex = 0; this.endPeriodSampleIndex = 0;
} else { } else {
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->periodicity is valid " + possibleperiod); logger.debug("TimeSeriesAnalysis->periodicity is valid " + possibleperiod);
this.meanF = meanF; this.meanF = meanF;
this.lowermeanF = Math.max(meanF - error, minPossibleFreq); this.lowermeanF = Math.max(meanF - error, minPossibleFreq);
this.uppermeanF = Math.min(meanF + error, maxFrequency); this.uppermeanF = Math.min(meanF + error, maxFrequency);
@ -247,7 +250,7 @@ public class PeriodicityDetector {
if (pow <= 1) if (pow <= 1)
pow = Math.round(Math.log((float) signal.length / (float) ("" + signal.length).length()) / Math.log(2)); pow = Math.round(Math.log((float) signal.length / (float) ("" + signal.length).length()) / Math.log(2));
AnalysisLogger.getLogger().debug("Suggested pow for window length=" + pow); logger.debug("Suggested pow for window length=" + pow);
} }
// adjust FFT Samples to be even // adjust FFT Samples to be even
@ -262,8 +265,8 @@ public class PeriodicityDetector {
wLength = (int) Math.pow(2, pow); wLength = (int) Math.pow(2, pow);
AnalysisLogger.getLogger().debug("Suggested windows length (samples)=" + wLength); logger.debug("Suggested windows length (samples)=" + wLength);
AnalysisLogger.getLogger().debug("Suggested windows length (s)=" + ((float) wLength / (float) samplingRate) + " s"); logger.debug("Suggested windows length (s)=" + ((float) wLength / (float) samplingRate) + " s");
int windowAnalysisSamples = (int) Math.pow(2, 14);// (int) int windowAnalysisSamples = (int) Math.pow(2, 14);// (int)
windowAnalysisSamples = wLength; windowAnalysisSamples = wLength;
int windowShiftSamples = (int) Math.round((float) windowAnalysisSamples / 2f); int windowShiftSamples = (int) Math.round((float) windowAnalysisSamples / 2f);
@ -271,9 +274,9 @@ public class PeriodicityDetector {
float error = ((float) samplingRate / (float) windowAnalysisSamples); float error = ((float) samplingRate / (float) windowAnalysisSamples);
AnalysisLogger.getLogger().debug("Error in the Measure will be: " + error + " Hz"); logger.debug("Error in the Measure will be: " + error + " Hz");
AnalysisLogger.getLogger().debug("A priori Min Freq: " + minPossibleFreq + " s"); logger.debug("A priori Min Freq: " + minPossibleFreq + " s");
AnalysisLogger.getLogger().debug("A priori Max Freq: " + maxPossibleFreq + " s"); logger.debug("A priori Max Freq: " + maxPossibleFreq + " s");
if (maxPossibleFreq >= samplingRate) if (maxPossibleFreq >= samplingRate)
maxPossibleFreq = (float) (samplingRate / 2f) - (0.1f * samplingRate / 2f); maxPossibleFreq = (float) (samplingRate / 2f) - (0.1f * samplingRate / 2f);
@ -296,7 +299,7 @@ public class PeriodicityDetector {
adjustParameters(signal, samplingRate, minPossibleFreq, maxPossibleFreq, wantedFreqError, FFTnsamples); adjustParameters(signal, samplingRate, minPossibleFreq, maxPossibleFreq, wantedFreqError, FFTnsamples);
//evaluate the minimum frequency resolution //evaluate the minimum frequency resolution
double frequencyRes = ((double)samplingRate/2d)/ (double)currentWindowAnalysisSamples; double frequencyRes = ((double)samplingRate/2d)/ (double)currentWindowAnalysisSamples;
AnalysisLogger.getLogger().debug("Frequency Resolution: "+frequencyRes); logger.debug("Frequency Resolution: "+frequencyRes);
// trace spectrum // trace spectrum
double[][] spectrum = SignalConverter.spectrogram("spectrogram", signal, samplingRate, currentWindowShiftSamples, currentWindowAnalysisSamples, false); double[][] spectrum = SignalConverter.spectrogram("spectrogram", signal, samplingRate, currentWindowShiftSamples, currentWindowAnalysisSamples, false);
if (display) if (display)
@ -351,10 +354,10 @@ public class PeriodicityDetector {
&& (period < maxperiod*0.55f) && (period < maxperiod*0.55f)
&& (!getPowerSpectrumStregthInterpretation(power).equalsIgnoreCase("None"))) && (!getPowerSpectrumStregthInterpretation(power).equalsIgnoreCase("None")))
{ {
AnalysisLogger.getLogger().debug("DISCREPANCY WITH RESPECT TO THE PREVIOUS FREQ:"+(peakFreq-previousFreq)); logger.debug("DISCREPANCY WITH RESPECT TO THE PREVIOUS FREQ:"+(peakFreq-previousFreq));
AnalysisLogger.getLogger().debug("RATIO WITH RESPECT TO THE PREVIOUS FREQ:"+((peakFreq-previousFreq)/error)); logger.debug("RATIO WITH RESPECT TO THE PREVIOUS FREQ:"+((peakFreq-previousFreq)/error));
if (counter == 0) { if (counter == 0) {
AnalysisLogger.getLogger().debug("Section "+(i+1)); logger.debug("Section "+(i+1));
peaks.put("*StartTime_In_Spectrogram"+prefix, "" + startTime); peaks.put("*StartTime_In_Spectrogram"+prefix, "" + startTime);
peaks.put("*EndTime_In_Spectrogram" + prefix, "" + endTime); peaks.put("*EndTime_In_Spectrogram" + prefix, "" + endTime);
} }
@ -377,16 +380,16 @@ public class PeriodicityDetector {
int minFidx = SignalConverter.frequencyIndex(minPossibleFreq, currentWindowAnalysisSamples, samplingRate); int minFidx = SignalConverter.frequencyIndex(minPossibleFreq, currentWindowAnalysisSamples, samplingRate);
double spectrogramidx = SignalConverter.spectrumFreq2Idx(peakFreq.floatValue(), samplingRate, currentWindowAnalysisSamples)-minFidx; double spectrogramidx = SignalConverter.spectrumFreq2Idx(peakFreq.floatValue(), samplingRate, currentWindowAnalysisSamples)-minFidx;
AnalysisLogger.getLogger().debug("SpectorgramIdx_"+(counter+1)+":" + spectrogramidx); logger.debug("SpectorgramIdx_"+(counter+1)+":" + spectrogramidx);
AnalysisLogger.getLogger().debug("Strength_of_Periodicity_"+(counter+1)+":" + signalMaximumAnalyzer.currentSpikesPowerSpectra[i].get(freqCounter)); logger.debug("Strength_of_Periodicity_"+(counter+1)+":" + signalMaximumAnalyzer.currentSpikesPowerSpectra[i].get(freqCounter));
AnalysisLogger.getLogger().debug("Strength_of_Periodicity_Interpretation"+(counter+1)+":" + getPowerSpectrumStregthInterpretation(signalMaximumAnalyzer.currentSpikesPowerSpectra[i].get(freqCounter))); logger.debug("Strength_of_Periodicity_Interpretation"+(counter+1)+":" + getPowerSpectrumStregthInterpretation(signalMaximumAnalyzer.currentSpikesPowerSpectra[i].get(freqCounter)));
AnalysisLogger.getLogger().debug("Frequency_"+(counter+1)+":" + peakFreq); logger.debug("Frequency_"+(counter+1)+":" + peakFreq);
AnalysisLogger.getLogger().debug("UpperFrequencyConfidence_"+(counter+1)+":" + uppermeanF); logger.debug("UpperFrequencyConfidence_"+(counter+1)+":" + uppermeanF);
AnalysisLogger.getLogger().debug("LowerFrequencyConfidence_"+(counter+1)+":" + lowermeanF); logger.debug("LowerFrequencyConfidence_"+(counter+1)+":" + lowermeanF);
AnalysisLogger.getLogger().debug("Period"+":" + period); logger.debug("Period"+":" + period);
AnalysisLogger.getLogger().debug("UpperFrequencyPeriod_"+(counter+1)+":" + (1d / lowermeanF)); logger.debug("UpperFrequencyPeriod_"+(counter+1)+":" + (1d / lowermeanF));
AnalysisLogger.getLogger().debug("LowerFrequencyPeriod_"+(counter+1)+":"+ (1d / uppermeanF)); logger.debug("LowerFrequencyPeriod_"+(counter+1)+":"+ (1d / uppermeanF));
AnalysisLogger.getLogger().debug(""); logger.debug("");
counter++; counter++;
previousFreq=peakFreq; previousFreq=peakFreq;
previousPeriod=period; previousPeriod=period;

@ -7,20 +7,22 @@ import java.util.Arrays;
import javax.swing.JPanel; import javax.swing.JPanel;
import marytts.signalproc.display.SpectrogramCustom;
import marytts.signalproc.window.Window;
import org.gcube.contentmanagement.graphtools.data.BigSamplesTable; import org.gcube.contentmanagement.graphtools.data.BigSamplesTable;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions; import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
import org.gcube.dataanalysis.ecoengine.transducers.TimeSeriesAnalysis; import org.gcube.dataanalysis.ecoengine.transducers.TimeSeriesAnalysis;
import org.gcube.dataanalysis.ecoengine.utils.Operations; import org.gcube.dataanalysis.ecoengine.utils.Operations;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.rapidminer.example.Attribute; import com.rapidminer.example.Attribute;
import com.rapidminer.example.Example; import com.rapidminer.example.Example;
import com.rapidminer.example.ExampleSet; import com.rapidminer.example.ExampleSet;
import com.rapidminer.example.table.MemoryExampleTable; import com.rapidminer.example.table.MemoryExampleTable;
import marytts.signalproc.display.SpectrogramCustom;
import marytts.signalproc.window.Window;
/** /**
* includes tools for basic signal transformations: delta + double delta center frequency cepstral coefficients calculation spectrum frequency cut transformation to and from Rapid Miner Example Set filterbanks fequency to mel frequency to index in fft sinusoid signal generation inverse mel log10 mel filterbanks sample to time and time to sample signal timeline generation index to time in spectrogram spectrogram calculation and display time to index in spectrogram * includes tools for basic signal transformations: delta + double delta center frequency cepstral coefficients calculation spectrum frequency cut transformation to and from Rapid Miner Example Set filterbanks fequency to mel frequency to index in fft sinusoid signal generation inverse mel log10 mel filterbanks sample to time and time to sample signal timeline generation index to time in spectrogram spectrogram calculation and display time to index in spectrogram
* *
@ -29,6 +31,8 @@ import com.rapidminer.example.table.MemoryExampleTable;
*/ */
public class SignalConverter { public class SignalConverter {
private static Logger logger = LoggerFactory.getLogger(SignalConverter.class); private static Logger logger = LoggerFactory.getLogger(SignalConverter.class);
public static double[][] addDeltaDouble(double[][] features) throws Exception { public static double[][] addDeltaDouble(double[][] features) throws Exception {
int vectorL = features[0].length; int vectorL = features[0].length;
double[][] delta = new double[features.length][features[0].length * 3]; double[][] delta = new double[features.length][features[0].length * 3];
@ -120,17 +124,17 @@ public class SignalConverter {
public static int[] fftBinIndices(double samplingRate, int frameSize, int numMelFilters, int numFequencies, float lowerFilterFreq, float upperFilterFreq) { public static int[] fftBinIndices(double samplingRate, int frameSize, int numMelFilters, int numFequencies, float lowerFilterFreq, float upperFilterFreq) {
int cbin[] = new int[numFequencies + 2]; int cbin[] = new int[numFequencies + 2];
AnalysisLogger.getLogger().debug("New Filter banks: " + numFequencies); logger.debug("New Filter banks: " + numFequencies);
cbin[0] = (int) Math.round(lowerFilterFreq / samplingRate * frameSize); cbin[0] = (int) Math.round(lowerFilterFreq / samplingRate * frameSize);
cbin[cbin.length - 1] = frequencyIndex(upperFilterFreq, frameSize, (float) samplingRate); cbin[cbin.length - 1] = frequencyIndex(upperFilterFreq, frameSize, (float) samplingRate);
AnalysisLogger.getLogger().debug("F0: " + lowerFilterFreq); logger.debug("F0: " + lowerFilterFreq);
for (int i = 1; i <= numFequencies; i++) { for (int i = 1; i <= numFequencies; i++) {
double fc = centerFreq(i, samplingRate, lowerFilterFreq, numMelFilters); double fc = centerFreq(i, samplingRate, lowerFilterFreq, numMelFilters);
AnalysisLogger.getLogger().debug("F" + (i) + ": " + fc); logger.debug("F" + (i) + ": " + fc);
cbin[i] = (int) Math.round(fc / samplingRate * frameSize); cbin[i] = (int) Math.round(fc / samplingRate * frameSize);
} }
AnalysisLogger.getLogger().debug("F" + (cbin.length - 1) + ": " + upperFilterFreq); logger.debug("F" + (cbin.length - 1) + ": " + upperFilterFreq);
return cbin; return cbin;
} }
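centerFreq is referenced above but not shown in this hunk; in common MFCC implementations the filter centres are points spaced uniformly on the mel scale and mapped back to Hertz. A sketch of that standard convention, assumed here rather than taken from this class:

public final class MelScale {

    private MelScale() {}

    // Hertz to mel, using the usual 2595 * log10(1 + f / 700) mapping.
    public static double freqToMel(double freq) {
        return 2595.0 * Math.log10(1.0 + freq / 700.0);
    }

    // Inverse mapping: mel back to Hertz.
    public static double melToFreq(double mel) {
        return 700.0 * (Math.pow(10.0, mel / 2595.0) - 1.0);
    }

    // Centre frequency in Hz of the i-th of numMelFilters triangular filters
    // spaced uniformly on the mel scale between lowerFreq and samplingRate / 2.
    public static double centerFreq(int i, double samplingRate, double lowerFreq, int numMelFilters) {
        double melLow = freqToMel(lowerFreq);
        double melHigh = freqToMel(samplingRate / 2.0);
        double melStep = (melHigh - melLow) / (numMelFilters + 1);
        return melToFreq(melLow + melStep * i);
    }
}

fftBinIndices above then only has to round each centre frequency to an FFT bin position, as the hunk shows.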
@ -209,7 +213,7 @@ public class SignalConverter {
int bestIndex = 1; int bestIndex = 1;
for (int i = 1; i <= numMelFilters; i++) { for (int i = 1; i <= numMelFilters; i++) {
double fc = centerFreq(i, samplingRate, lowerFilterFreq, numMelFilters); double fc = centerFreq(i, samplingRate, lowerFilterFreq, numMelFilters);
AnalysisLogger.getLogger().debug("fc " + fc); logger.debug("fc " + fc);
if (fc > maxFilterFreq) { if (fc > maxFilterFreq) {
bestIndex = i; bestIndex = i;
break; break;
@ -230,7 +234,7 @@ public class SignalConverter {
for (int i = 0; i < signalLength; i++) { for (int i = 0; i < signalLength; i++) {
time[i] = (double) i / (double) samplingRate; time[i] = (double) i / (double) samplingRate;
} }
AnalysisLogger.getLogger().debug("time " + time[signalLength - 1] * samplingRate + " vs " + signalLength); logger.debug("time " + time[signalLength - 1] * samplingRate + " vs " + signalLength);
return time; return time;
} }
@ -243,7 +247,7 @@ public class SignalConverter {
for (int k = 0; k < signal.length; k++) { for (int k = 0; k < signal.length; k++) {
samples.addSampleRow("" + k, signal[k]); samples.addSampleRow("" + k, signal[k]);
} }
AnalysisLogger.getLogger().debug("Example Set Created"); logger.debug("Example Set Created");
return samples.generateExampleSet(); return samples.generateExampleSet();
} }
@ -306,12 +310,12 @@ public class SignalConverter {
for (int k = 1; k < slice.length; k++) { for (int k = 1; k < slice.length; k++) {
double ele = slice[k]; double ele = slice[k];
if (!overfirstmin && (slice[k]>slice[k-1])){ if (!overfirstmin && (slice[k]>slice[k-1])){
AnalysisLogger.getLogger().debug("First minimum in spectrum is at idx "+k); logger.debug("First minimum in spectrum is at idx "+k);
overfirstmin=true; overfirstmin=true;
} }
if (overfirstmin) { if (overfirstmin) {
if (ele > (max + (Math.abs(max) * tolerance))) { if (ele > (max + (Math.abs(max) * tolerance))) {
// AnalysisLogger.getLogger().debug(">max up:"+ele +">" +(max + (Math.abs(max) * tolerance))+" at idx "+k); // logger.debug(">max up:"+ele +">" +(max + (Math.abs(max) * tolerance))+" at idx "+k);
max = ele; max = ele;
bestidx = k; bestidx = k;
} }
@ -328,7 +332,7 @@ public class SignalConverter {
// System.out.println("min f idx: "+minFidx); // System.out.println("min f idx: "+minFidx);
maxs[j] = spectrumIdx2Frequency(minFidx + bestidx, samplingRate, windowSamples); maxs[j] = spectrumIdx2Frequency(minFidx + bestidx, samplingRate, windowSamples);
double mean = org.gcube.contentmanagement.graphtools.utils.MathFunctions.mean(slice); double mean = org.gcube.contentmanagement.graphtools.utils.MathFunctions.mean(slice);
AnalysisLogger.getLogger().debug("max freq in spec: " + maxs[j]+" index "+minFidx + bestidx); logger.debug("max freq in spec: " + maxs[j]+" index "+minFidx + bestidx);
if (min == Double.MAX_VALUE) { if (min == Double.MAX_VALUE) {
min = max; min = max;
} }
@ -343,7 +347,7 @@ public class SignalConverter {
averagepower[j] = Math.abs((max - mean) / max); averagepower[j] = Math.abs((max - mean) / max);
} }
AnalysisLogger.getLogger().debug("max power : " + max + " min power: " + min + " mean " + mean + " power " + averagepower[j]); logger.debug("max power : " + max + " min power: " + min + " mean " + mean + " power " + averagepower[j]);
j++; j++;
} }
@ -382,7 +386,7 @@ public class SignalConverter {
for (int i=0;i<spikes.length;i++){ for (int i=0;i<spikes.length;i++){
if (spikes[i]){ if (spikes[i]){
// AnalysisLogger.getLogger().debug("Spike at "+i); // logger.debug("Spike at "+i);
maxFreqs.add((double)spectrumIdx2Frequency(minFidx + i, samplingRate, windowSamples)); maxFreqs.add((double)spectrumIdx2Frequency(minFidx + i, samplingRate, windowSamples));
//make the min correspond to y=0 //make the min correspond to y=0
//take few samples around the spike and evaluate the amplitude with respect to the samples around //take few samples around the spike and evaluate the amplitude with respect to the samples around
@ -470,7 +474,7 @@ public class SignalConverter {
if (bestcouple[1]==bestcouple[0]) if (bestcouple[1]==bestcouple[0])
bestcouple[1]=bestcouple[0]+1; bestcouple[1]=bestcouple[0]+1;
double[] subsignal = new double[bestcouple[1] - bestcouple[0]]; double[] subsignal = new double[bestcouple[1] - bestcouple[0]];
AnalysisLogger.getLogger().debug("Longest range: from " + bestcouple[0] + " to " + bestcouple[1]); logger.debug("Longest range: from " + bestcouple[0] + " to " + bestcouple[1]);
startStableTractIdx = bestcouple[0]; startStableTractIdx = bestcouple[0];
endStableTractIdx = bestcouple[1]; endStableTractIdx = bestcouple[1];

View File

@ -10,29 +10,32 @@ import java.util.Arrays;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
import marytts.signalproc.display.SpectrogramCustom;
import marytts.signalproc.window.Window;
import org.gcube.contentmanagement.graphtools.data.BigSamplesTable; import org.gcube.contentmanagement.graphtools.data.BigSamplesTable;
import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools; import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
import org.gcube.contentmanagement.graphtools.plotting.graphs.NumericSeriesGraph; import org.gcube.contentmanagement.graphtools.plotting.graphs.NumericSeriesGraph;
import org.gcube.contentmanagement.graphtools.plotting.graphs.SpectrumPlot2; import org.gcube.contentmanagement.graphtools.plotting.graphs.SpectrumPlot2;
import org.gcube.contentmanagement.graphtools.plotting.graphs.TimeSeriesGraph; import org.gcube.contentmanagement.graphtools.plotting.graphs.TimeSeriesGraph;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions; import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.jfree.chart.JFreeChart; import org.jfree.chart.JFreeChart;
import org.jfree.data.time.FixedMillisecond; import org.jfree.data.time.FixedMillisecond;
import org.jfree.data.time.TimeSeriesCollection; import org.jfree.data.time.TimeSeriesCollection;
import org.jfree.data.xy.XYSeriesCollection; import org.jfree.data.xy.XYSeriesCollection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.rapidminer.example.ExampleSet; import com.rapidminer.example.ExampleSet;
import com.rapidminer.operator.preprocessing.sampling.AbsoluteSampling; import com.rapidminer.operator.preprocessing.sampling.AbsoluteSampling;
import com.rapidminer.operator.preprocessing.series.filter.SeriesMissingValueReplenishment; import com.rapidminer.operator.preprocessing.series.filter.SeriesMissingValueReplenishment;
import com.rapidminer.tools.OperatorService; import com.rapidminer.tools.OperatorService;
import marytts.signalproc.display.SpectrogramCustom;
import marytts.signalproc.window.Window;
public class SignalProcessing { public class SignalProcessing {
private static Logger logger = LoggerFactory.getLogger(SignalProcessing.class);
public static double[][] applyFilterBank(double[][] feature, int numCepstra, int numMelFilters, int samplingRate, int frameLength, float minCutFequency, float maxCutFrequency) throws Exception { public static double[][] applyFilterBank(double[][] feature, int numCepstra, int numMelFilters, int samplingRate, int frameLength, float minCutFequency, float maxCutFrequency) throws Exception {
// recalculate Mel filters on the basis of the maxFrequency // recalculate Mel filters on the basis of the maxFrequency
int recalcMelFilters = SignalConverter.recalculateMaxMelFilters(samplingRate, numMelFilters, minCutFequency, maxCutFrequency); int recalcMelFilters = SignalConverter.recalculateMaxMelFilters(samplingRate, numMelFilters, minCutFequency, maxCutFrequency);
@ -63,20 +66,20 @@ public class SignalProcessing {
public static double[][] calculateSumSpectrum(List<double[]> signals, int windowShiftSamples, int frameLength, int samplingRate) throws Exception { public static double[][] calculateSumSpectrum(List<double[]> signals, int windowShiftSamples, int frameLength, int samplingRate) throws Exception {
int signalLenght = signals.get(0).length; int signalLenght = signals.get(0).length;
AnalysisLogger.getLogger().debug("TRIALS LENGHT " + signalLenght); logger.debug("TRIALS LENGHT " + signalLenght);
List<double[][]> spectrograms = new ArrayList<double[][]>(); List<double[][]> spectrograms = new ArrayList<double[][]>();
AnalysisLogger.getLogger().debug("Getting Spectra"); logger.debug("Getting Spectra");
int j = 0; int j = 0;
// get all spectrograms // get all spectrograms
for (double[] signal : signals) { for (double[] signal : signals) {
double[][] spectro = SignalConverter.spectrogram("Spectrogram", signal, samplingRate, windowShiftSamples, frameLength, false); double[][] spectro = SignalConverter.spectrogram("Spectrogram", signal, samplingRate, windowShiftSamples, frameLength, false);
AnalysisLogger.getLogger().debug("Signal Number " + (j + 1) + " spectrum lenght " + ((spectro.length * windowShiftSamples) / samplingRate)); logger.debug("Signal Number " + (j + 1) + " spectrum lenght " + ((spectro.length * windowShiftSamples) / samplingRate));
spectrograms.add(spectro); spectrograms.add(spectro);
j++; j++;
} }
AnalysisLogger.getLogger().debug("Summing Spectra"); logger.debug("Summing Spectra");
// sum all spectrograms // sum all spectrograms
double[][] sumSpectro = SignalProcessing.sumSpectra(spectrograms); double[][] sumSpectro = SignalProcessing.sumSpectra(spectrograms);
spectrograms = null; spectrograms = null;
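sumSpectra is called above but not shown in these hunks; a plausible element-wise accumulation over equally sized spectrogram matrices, written only as an illustration:

import java.util.List;

public final class SpectraUtils {

    private SpectraUtils() {}

    // Sums spectrograms element by element. All matrices are assumed to share the
    // same [frames][bins] shape, as produced by one window shift and frame length.
    public static double[][] sumSpectra(List<double[][]> spectrograms) {
        double[][] first = spectrograms.get(0);
        double[][] sum = new double[first.length][first[0].length];
        for (double[][] spectro : spectrograms) {
            for (int frame = 0; frame < sum.length; frame++) {
                for (int bin = 0; bin < sum[frame].length; bin++) {
                    sum[frame][bin] += spectro[frame][bin];
                }
            }
        }
        return sum;
    }
}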
@ -120,7 +123,7 @@ public class SignalProcessing {
public static Image renderSignalWithGenericTime(double[] signal, float t0, float timeshift, String name) { public static Image renderSignalWithGenericTime(double[] signal, float t0, float timeshift, String name) {
if (signal.length > 20000) { if (signal.length > 20000) {
AnalysisLogger.getLogger().debug("Too many points to display: " + signal.length); logger.debug("Too many points to display: " + signal.length);
return null; return null;
} }
org.jfree.data.xy.XYSeries xyseries = new org.jfree.data.xy.XYSeries(name); org.jfree.data.xy.XYSeries xyseries = new org.jfree.data.xy.XYSeries(name);
@ -137,7 +140,7 @@ public class SignalProcessing {
public static Image renderSignalWithGenericTime(double[] signal, double[] timeline, String name) { public static Image renderSignalWithGenericTime(double[] signal, double[] timeline, String name) {
if (signal.length > 20000) { if (signal.length > 20000) {
AnalysisLogger.getLogger().debug("Too many points to display: " + signal.length); logger.debug("Too many points to display: " + signal.length);
return null; return null;
} }
@ -156,7 +159,7 @@ public class SignalProcessing {
public static Image renderSignalWithTime(double[] signal, Date[] dates, String name, String format) { public static Image renderSignalWithTime(double[] signal, Date[] dates, String name, String format) {
org.jfree.data.time.TimeSeries series = new org.jfree.data.time.TimeSeries(name); org.jfree.data.time.TimeSeries series = new org.jfree.data.time.TimeSeries(name);
if (signal.length > 20000) { if (signal.length > 20000) {
AnalysisLogger.getLogger().debug("Too many points to display: " + signal.length); logger.debug("Too many points to display: " + signal.length);
return null; return null;
} }
for (int i = 0; i < signal.length; i++) { for (int i = 0; i < signal.length; i++) {
@ -164,7 +167,7 @@ public class SignalProcessing {
FixedMillisecond ms = new FixedMillisecond(dates[i]); FixedMillisecond ms = new FixedMillisecond(dates[i]);
series.add(ms, signal[i]); series.add(ms, signal[i]);
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().debug("Skipping value yet present: " + dates[i]); logger.debug("Skipping value yet present: " + dates[i]);
} }
} }
TimeSeriesCollection dataset = new TimeSeriesCollection(); TimeSeriesCollection dataset = new TimeSeriesCollection();
@ -183,7 +186,7 @@ public class SignalProcessing {
org.jfree.data.time.TimeSeries series = new org.jfree.data.time.TimeSeries(name); org.jfree.data.time.TimeSeries series = new org.jfree.data.time.TimeSeries(name);
if (signal.length > 20000) { if (signal.length > 20000) {
AnalysisLogger.getLogger().debug("Too many points to display: " + signal.length); logger.debug("Too many points to display: " + signal.length);
return null; return null;
} }
int offset = 0; int offset = 0;
@ -195,7 +198,7 @@ public class SignalProcessing {
FixedMillisecond ms = new FixedMillisecond(dates[i]); FixedMillisecond ms = new FixedMillisecond(dates[i]);
series.add(ms, signal[i-offset]); series.add(ms, signal[i-offset]);
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().debug("Skipping value yet present: " + dates[i]); logger.debug("Skipping value yet present: " + dates[i]);
} }
} }
dataset.addSeries(series); dataset.addSeries(series);
@ -216,8 +219,8 @@ public class SignalProcessing {
public static Image renderSignalSpectrogram2(double[][] spectrogram) { public static Image renderSignalSpectrogram2(double[][] spectrogram) {
SpectrumPlot2 spectrumPlot = new SpectrumPlot2(spectrogram); SpectrumPlot2 spectrumPlot = new SpectrumPlot2(spectrogram);
AnalysisLogger.getLogger().debug("Spectrum W:" + spectrumPlot.width); logger.debug("Spectrum W:" + spectrumPlot.width);
AnalysisLogger.getLogger().debug("Spectrum H:" + spectrumPlot.height); logger.debug("Spectrum H:" + spectrumPlot.height);
// spectrumPlot.hzoomSet(2f); // spectrumPlot.hzoomSet(2f);
spectrumPlot.hzoomSet(640f / (float) spectrumPlot.width); spectrumPlot.hzoomSet(640f / (float) spectrumPlot.width);
spectrumPlot.vzoomSet(480f / (float) spectrumPlot.height); spectrumPlot.vzoomSet(480f / (float) spectrumPlot.height);
@ -232,7 +235,7 @@ public class SignalProcessing {
public static void displaySignalWithTime(double[] signal, Date[] dates, String name, String format) { public static void displaySignalWithTime(double[] signal, Date[] dates, String name, String format) {
org.jfree.data.time.TimeSeries series = new org.jfree.data.time.TimeSeries(name); org.jfree.data.time.TimeSeries series = new org.jfree.data.time.TimeSeries(name);
if (signal.length > 20000) { if (signal.length > 20000) {
AnalysisLogger.getLogger().debug("Too many points to display: " + signal.length); logger.debug("Too many points to display: " + signal.length);
return; return;
} }
for (int i = 0; i < signal.length; i++) { for (int i = 0; i < signal.length; i++) {
@ -240,7 +243,7 @@ public class SignalProcessing {
FixedMillisecond ms = new FixedMillisecond(dates[i]); FixedMillisecond ms = new FixedMillisecond(dates[i]);
series.add(ms, signal[i]); series.add(ms, signal[i]);
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().debug("Skipping value yet present: " + dates[i]); logger.debug("Skipping value yet present: " + dates[i]);
} }
} }
TimeSeriesCollection dataset = new TimeSeriesCollection(); TimeSeriesCollection dataset = new TimeSeriesCollection();
@ -260,7 +263,7 @@ public class SignalProcessing {
org.jfree.data.time.TimeSeries series = new org.jfree.data.time.TimeSeries(name); org.jfree.data.time.TimeSeries series = new org.jfree.data.time.TimeSeries(name);
if (signal.length > 20000) { if (signal.length > 20000) {
AnalysisLogger.getLogger().debug("Too many points to display: " + signal.length); logger.debug("Too many points to display: " + signal.length);
return ; return ;
} }
int offset = 0; int offset = 0;
@ -272,7 +275,7 @@ public class SignalProcessing {
FixedMillisecond ms = new FixedMillisecond(dates[i]); FixedMillisecond ms = new FixedMillisecond(dates[i]);
series.add(ms, signal[i-offset]); series.add(ms, signal[i-offset]);
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().debug("Skipping value yet present: " + dates[i]); logger.debug("Skipping value yet present: " + dates[i]);
} }
} }
dataset.addSeries(series); dataset.addSeries(series);
@ -294,13 +297,13 @@ public class SignalProcessing {
} }
ExampleSet es = samples.generateExampleSet(); ExampleSet es = samples.generateExampleSet();
AnalysisLogger.getLogger().debug("Example Set Created"); logger.debug("Example Set Created");
AbsoluteSampling sampler = (AbsoluteSampling) OperatorService.createOperator("AbsoluteSampling"); AbsoluteSampling sampler = (AbsoluteSampling) OperatorService.createOperator("AbsoluteSampling");
sampler.setParameter("sample_size", "" + numElements); sampler.setParameter("sample_size", "" + numElements);
sampler.setParameter("local_random_seed", "-1"); sampler.setParameter("local_random_seed", "-1");
es = sampler.apply(es); es = sampler.apply(es);
AnalysisLogger.getLogger().debug("Finished"); logger.debug("Finished");
SignalConverter.exampleSet2Signal(rebuiltSignal, es); SignalConverter.exampleSet2Signal(rebuiltSignal, es);
@ -315,17 +318,17 @@ public class SignalProcessing {
List<double[][]> sumspectrograms = new ArrayList<double[][]>(); List<double[][]> sumspectrograms = new ArrayList<double[][]>();
List<double[][]> spectrograms = new ArrayList<double[][]>(); List<double[][]> spectrograms = new ArrayList<double[][]>();
AnalysisLogger.getLogger().debug("Getting Spectra"); logger.debug("Getting Spectra");
int j = 0; int j = 0;
// get all spectrograms // get all spectrograms
for (double[] signal : signals) { for (double[] signal : signals) {
AnalysisLogger.getLogger().debug("Signal Number " + (j + 1)); logger.debug("Signal Number " + (j + 1));
double[][] spectro = SignalConverter.spectrogram("Spectrogram", signal, samplingRate, windowShiftSamples, frameLength, false); double[][] spectro = SignalConverter.spectrogram("Spectrogram", signal, samplingRate, windowShiftSamples, frameLength, false);
spectrograms.add(spectro); spectrograms.add(spectro);
j++; j++;
} }
AnalysisLogger.getLogger().debug("Summing Spectra"); logger.debug("Summing Spectra");
// sum all spectrograms // sum all spectrograms
double[][] sumSpectro = SignalProcessing.sumSpectra(spectrograms); double[][] sumSpectro = SignalProcessing.sumSpectra(spectrograms);
spectrograms = null; spectrograms = null;
@ -348,7 +351,7 @@ public class SignalProcessing {
sampler.setParameter("attribute_name", "att0"); sampler.setParameter("attribute_name", "att0");
sampler.setParameter("replacement", "3"); sampler.setParameter("replacement", "3");
es = sampler.apply(es); es = sampler.apply(es);
AnalysisLogger.getLogger().debug("Finished"); logger.debug("Finished");
double[] rebuiltSignal = new double[signal.length]; double[] rebuiltSignal = new double[signal.length];
SignalConverter.exampleSet2Signal(rebuiltSignal, es, 0d); SignalConverter.exampleSet2Signal(rebuiltSignal, es, 0d);
@ -360,7 +363,7 @@ public class SignalProcessing {
double t0 = timeseconds[0]; double t0 = timeseconds[0];
double t1 = timeseconds[timeseconds.length - 1]; double t1 = timeseconds[timeseconds.length - 1];
int signalength = Math.abs((int) ((t1 - t0) * samplingRate) + 1); int signalength = Math.abs((int) ((t1 - t0) * samplingRate) + 1);
AnalysisLogger.getLogger().debug("SignalProcessing->Old Time Series had: " + values.length + " samples. New Time Series will have: " + signalength + " samples"); logger.debug("SignalProcessing->Old Time Series had: " + values.length + " samples. New Time Series will have: " + signalength + " samples");
if (values.length == signalength) if (values.length == signalength)
return values; return values;
@ -381,13 +384,13 @@ public class SignalProcessing {
} }
ExampleSet es = samples.generateExampleSet(); ExampleSet es = samples.generateExampleSet();
AnalysisLogger.getLogger().debug("Example Set Created"); logger.debug("Example Set Created");
SeriesMissingValueReplenishment sampler = (SeriesMissingValueReplenishment) OperatorService.createOperator("SeriesMissingValueReplenishment"); SeriesMissingValueReplenishment sampler = (SeriesMissingValueReplenishment) OperatorService.createOperator("SeriesMissingValueReplenishment");
sampler.setParameter("attribute_name", "att0"); sampler.setParameter("attribute_name", "att0");
sampler.setParameter("replacement", "3"); sampler.setParameter("replacement", "3");
es = sampler.apply(es); es = sampler.apply(es);
AnalysisLogger.getLogger().debug("Finished"); logger.debug("Finished");
SignalConverter.exampleSet2Signal(rebuiltSignal, es); SignalConverter.exampleSet2Signal(rebuiltSignal, es);
@ -452,15 +455,15 @@ public class SignalProcessing {
float centralTime = (maxTime / (2f * numOfCentralSeconds)); float centralTime = (maxTime / (2f * numOfCentralSeconds));
AnalysisLogger.getLogger().debug("Max Time in the Spectrum " + maxTime + " Central time " + centralTime); logger.debug("Max Time in the Spectrum " + maxTime + " Central time " + centralTime);
int startIndex = (int) (centralTime / windowShiftTime); int startIndex = (int) (centralTime / windowShiftTime);
int endIndex = (int) ((centralTime + numOfCentralSeconds) / windowShiftTime); int endIndex = (int) ((centralTime + numOfCentralSeconds) / windowShiftTime);
AnalysisLogger.getLogger().debug("Recalculated lenght " + maxTime + " sec"); logger.debug("Recalculated lenght " + maxTime + " sec");
AnalysisLogger.getLogger().debug("Lenght " + spectrum.length); logger.debug("Lenght " + spectrum.length);
AnalysisLogger.getLogger().debug("Start " + startIndex + " End " + endIndex + " max " + spectrum.length + " Cut lenght " + (endIndex - startIndex + 1) * windowShiftTime); logger.debug("Start " + startIndex + " End " + endIndex + " max " + spectrum.length + " Cut lenght " + (endIndex - startIndex + 1) * windowShiftTime);
double[][] cutSpectrum = new double[endIndex - startIndex + 1][spectrum[0].length]; double[][] cutSpectrum = new double[endIndex - startIndex + 1][spectrum[0].length];

View File

@ -7,13 +7,16 @@ import java.util.List;
import java.util.Locale; import java.util.Locale;
import org.gcube.contentmanagement.graphtools.utils.DateGuesser; import org.gcube.contentmanagement.graphtools.utils.DateGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.Operations; import org.gcube.dataanalysis.ecoengine.utils.Operations;
import org.gcube.dataanalysis.ecoengine.utils.Tuple; import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TimeSeries { public class TimeSeries {
private static Logger logger = LoggerFactory.getLogger(TimeSeries.class);
private double[] values; private double[] values;
private Date[] times; private Date[] times;
private Date[] unsortedtimes; private Date[] unsortedtimes;
@ -159,19 +162,19 @@ public class TimeSeries {
if (counter == 0) { if (counter == 0) {
timepattern = DateGuesser.getPattern(timel); timepattern = DateGuesser.getPattern(timel);
ts.setTimepattern(timepattern); ts.setTimepattern(timepattern);
AnalysisLogger.getLogger().debug("Time pattern: " + timepattern); logger.debug("Time pattern: " + timepattern);
sdf = new SimpleDateFormat(timepattern, Locale.ENGLISH); sdf = new SimpleDateFormat(timepattern, Locale.ENGLISH);
} }
try{ try{
time = (Date) sdf.parse(timel); time = (Date) sdf.parse(timel);
}catch(Exception e){ }catch(Exception e){
AnalysisLogger.getLogger().debug("Error in parsing...adjusting "+timel); logger.debug("Error in parsing...adjusting "+timel);
time = DateGuesser.convertDate(timel).getTime(); time = DateGuesser.convertDate(timel).getTime();
AnalysisLogger.getLogger().debug("Error in parsing...adjusting "+timel+" in "+time); logger.debug("Error in parsing...adjusting "+timel+" in "+time);
} }
if (counter == 0) { if (counter == 0) {
AnalysisLogger.getLogger().debug("Date detection: input " + timel + " output " + time); logger.debug("Date detection: input " + timel + " output " + time);
} }
ts.addElement(quantity, time, timel, counter); ts.addElement(quantity, time, timel, counter);
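The parsing above guesses one date pattern from the first label and falls back to a more tolerant conversion when a row does not match it. A minimal sketch of that parse-then-fallback approach using only the JDK; the fallback pattern list is illustrative, not the one used by DateGuesser:

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

public final class TimeLabelParser {

    // Candidate patterns tried in order when the guessed pattern fails.
    private static final String[] FALLBACK_PATTERNS = {
            "dd/MM/yyyy HH:mm", "yyyy-MM-dd", "MM/dd/yyyy", "yyyy"
    };

    private TimeLabelParser() {}

    // Parses a time label with a preferred pattern, then tries the fallbacks.
    public static Date parse(String label, String preferredPattern) throws ParseException {
        try {
            return new SimpleDateFormat(preferredPattern, Locale.ENGLISH).parse(label);
        } catch (ParseException primaryFailure) {
            for (String pattern : FALLBACK_PATTERNS) {
                try {
                    return new SimpleDateFormat(pattern, Locale.ENGLISH).parse(label);
                } catch (ParseException ignored) {
                    // try the next candidate
                }
            }
            throw primaryFailure;
        }
    }
}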
@ -192,16 +195,16 @@ public class TimeSeries {
samplingrate = 1d / (double) minimumtimegap; samplingrate = 1d / (double) minimumtimegap;
} }
AnalysisLogger.getLogger().debug("TimeSeries->Samplig rate: " + samplingrate + " minimum gap in time: " + minimumtimegap); logger.debug("TimeSeries->Samplig rate: " + samplingrate + " minimum gap in time: " + minimumtimegap);
if (samplingrate == 0) if (samplingrate == 0)
return; return;
double[] timeline = getMillisecondsTimeline(); double[] timeline = getMillisecondsTimeline();
AnalysisLogger.getLogger().debug("TimeSeries->filling gaps"); logger.debug("TimeSeries->filling gaps");
double[] newvalues = SignalProcessing.fillTimeSeries(values, timeline, samplingrate, config); double[] newvalues = SignalProcessing.fillTimeSeries(values, timeline, samplingrate, config);
if (newvalues.length != values.length) { if (newvalues.length != values.length) {
AnalysisLogger.getLogger().debug("TimeSeries->filling also time values"); logger.debug("TimeSeries->filling also time values");
Date[] newtimeline = SignalProcessing.fillTimeLine(timeline, samplingrate, config); Date[] newtimeline = SignalProcessing.fillTimeLine(timeline, samplingrate, config);
values = null; values = null;
@ -213,7 +216,7 @@ public class TimeSeries {
timeLabels = new String[times.length]; timeLabels = new String[times.length];
} }
AnalysisLogger.getLogger().debug("TimeSeries->Returning values"); logger.debug("TimeSeries->Returning values");
timeLabels = new String[times.length]; timeLabels = new String[times.length];
SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm", Locale.ROOT); SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm", Locale.ROOT);
for (int i = 0; i < times.length; i++) { for (int i = 0; i < times.length; i++) {

View File

@ -3,11 +3,13 @@ package org.gcube.dataanalysis.ecoengine.signals.ssa;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.slf4j.Logger;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing; import org.slf4j.LoggerFactory;
public class SSAWorkflow { public class SSAWorkflow {
private static Logger logger = LoggerFactory.getLogger(SSAWorkflow.class);
public static SSADataset applyCompleteWorkflow(List<Double> timeseries, int analysisWindowLength, float eigenValuesPercentageThreshold, int nPointsToForecast, boolean reportReconstructedSignal){ public static SSADataset applyCompleteWorkflow(List<Double> timeseries, int analysisWindowLength, float eigenValuesPercentageThreshold, int nPointsToForecast, boolean reportReconstructedSignal){
SSADataset data = new SSADataset(); SSADataset data = new SSADataset();
@ -34,10 +36,10 @@ public class SSAWorkflow {
//build groups of indices //build groups of indices
List<SSAGroupList> groupsModel = new ArrayList<SSAGroupList>(); List<SSAGroupList> groupsModel = new ArrayList<SSAGroupList>();
List<SSAUnselectList> groups = new ArrayList<SSAUnselectList>(); List<SSAUnselectList> groups = new ArrayList<SSAUnselectList>();
AnalysisLogger.getLogger().debug("Listing All the Eigenvalues"); logger.debug("Listing All the Eigenvalues");
for (int i = 0; i < data.getPercentList().size(); i++) { for (int i = 0; i < data.getPercentList().size(); i++) {
double currentperc = data.getPercentList().get(i); double currentperc = data.getPercentList().get(i);
AnalysisLogger.getLogger().debug("Eigenvalue: Number: "+i+" Percentage: "+currentperc); logger.debug("Eigenvalue: Number: "+i+" Percentage: "+currentperc);
if (currentperc>eigenValuesPercentageThreshold) if (currentperc>eigenValuesPercentageThreshold)
groups.add(new SSAUnselectList(i, currentperc)); groups.add(new SSAUnselectList(i, currentperc));
} }
@ -61,7 +63,7 @@ public class SSAWorkflow {
// SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal"); // SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
// SignalProcessing.displaySignalWithGenericTime(rsignal, 0, 1, "reconstructed signal"); // SignalProcessing.displaySignalWithGenericTime(rsignal, 0, 1, "reconstructed signal");
AnalysisLogger.getLogger().debug("SSA workflow DONE"); logger.debug("SSA workflow DONE");
return data; return data;
} }

View File

@ -1,17 +1,20 @@
package org.gcube.dataanalysis.ecoengine.signals.ssa; package org.gcube.dataanalysis.ecoengine.signals.ssa;
import Jama.EigenvalueDecomposition;
import Jama.Matrix;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.Comparator; import java.util.Comparator;
import java.util.List; import java.util.List;
import javax.swing.DefaultListModel;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import Jama.EigenvalueDecomposition;
import Jama.Matrix;
public class SingularSpectrumAnalysis { public class SingularSpectrumAnalysis {
private static Logger logger = LoggerFactory.getLogger(SingularSpectrumAnalysis.class);
/** /**
* translation of the original time series into a sequence of multidimensional * translation of the original time series into a sequence of multidimensional
* vectors * vectors
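The comment above refers to the SSA embedding step: a series of length N is turned into L-dimensional lagged vectors whose columns form the trajectory (Hankel) matrix. A sketch of that construction, not taken from this class:

public final class SsaEmbedding {

    private SsaEmbedding() {}

    // Builds the L x K trajectory matrix of a series of length N, with K = N - L + 1;
    // column j holds the lagged window (x[j], x[j+1], ..., x[j+L-1]).
    public static double[][] trajectoryMatrix(double[] series, int windowLength) {
        int k = series.length - windowLength + 1;
        if (k < 1) {
            throw new IllegalArgumentException("window length must not exceed the series length");
        }
        double[][] trajectory = new double[windowLength][k];
        for (int i = 0; i < windowLength; i++) {
            for (int j = 0; j < k; j++) {
                trajectory[i][j] = series[i + j];
            }
        }
        return trajectory;
    }
}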
@ -240,7 +243,7 @@ public class SingularSpectrumAnalysis {
* @return the resulting matrix * @return the resulting matrix
*/ */
private static double[][] transpositionMatrix(double matrix[][]) { private static double[][] transpositionMatrix(double matrix[][]) {
AnalysisLogger.getLogger().debug("SSA->Building a matrix with dimensions: "+matrix[0].length+" X "+matrix.length); logger.debug("SSA->Building a matrix with dimensions: "+matrix[0].length+" X "+matrix.length);
double transpMatrix[][] = new double[matrix[0].length][matrix.length]; double transpMatrix[][] = new double[matrix[0].length][matrix.length];
for (int i = 0; i < matrix.length; i++) { for (int i = 0; i < matrix.length; i++) {
for (int j = 0; j < matrix[i].length; j++) { for (int j = 0; j < matrix[i].length; j++) {
@ -350,9 +353,9 @@ public class SingularSpectrumAnalysis {
List eigenvectors = data.getEigenVectors().subList(0, bestEigenVectors); List eigenvectors = data.getEigenVectors().subList(0, bestEigenVectors);
int L = data.getL(); int L = data.getL();
int lastcoordinate = L-1; int lastcoordinate = L-1;
AnalysisLogger.getLogger().debug("SSA: value for L: "+L); logger.debug("SSA: value for L: "+L);
int nEigenVectors = eigenvectors.size(); int nEigenVectors = eigenvectors.size();
AnalysisLogger.getLogger().debug("Number of Selected Eigenvectors For Reconstruction: "+nEigenVectors); logger.debug("Number of Selected Eigenvectors For Reconstruction: "+nEigenVectors);
double[] p = new double[nEigenVectors]; double[] p = new double[nEigenVectors];
for (int i = 0;i<nEigenVectors;i++){ for (int i = 0;i<nEigenVectors;i++){
p[i] = (Double)((List)eigenvectors.get(i)).get(lastcoordinate); p[i] = (Double)((List)eigenvectors.get(i)).get(lastcoordinate);
@ -405,7 +408,7 @@ public class SingularSpectrumAnalysis {
} }
} }
AnalysisLogger.getLogger().debug("Length of the original signal: "+signalSize+" Length of the reconstructed signal: "+y.size()); logger.debug("Length of the original signal: "+signalSize+" Length of the reconstructed signal: "+y.size());
data.setForecastList(y); data.setForecastList(y);

View File

@ -6,7 +6,6 @@ import java.io.IOException;
import java.io.ObjectInputStream; import java.io.ObjectInputStream;
import java.util.List; import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
@ -16,9 +15,13 @@ import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network; import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AquamapsNN extends AquamapsNative{ public class AquamapsNN extends AquamapsNative{
private static Logger logger = LoggerFactory.getLogger(AquamapsNN.class);
private Neural_Network neuralnet; private Neural_Network neuralnet;
@Override @Override
@ -56,14 +59,14 @@ public class AquamapsNN extends AquamapsNative{
@Override @Override
public void init(AlgorithmConfiguration config, SessionFactory dbHibConnection) { public void init(AlgorithmConfiguration config, SessionFactory dbHibConnection) {
super.init(config,dbHibConnection); super.init(config,dbHibConnection);
AnalysisLogger.getLogger().debug("Initializing ANNs"); logger.debug("Initializing ANNs");
String persistencePath = config.getPersistencePath(); String persistencePath = config.getPersistencePath();
// String filename = persistencePath + "neuralnetwork_" + config.getParam("SpeciesName") + "_" + config.getParam("UserName")+"_"+config.getParam("NeuralNetworkName").replace(" ", ""); // String filename = persistencePath + "neuralnetwork_" + config.getParam("SpeciesName") + "_" + config.getParam("UserName")+"_"+config.getParam("NeuralNetworkName").replace(" ", "");
String nnname = config.getParam("NeuralNetworkName"); String nnname = config.getParam("NeuralNetworkName");
AnalysisLogger.getLogger().debug("Init ANN in projection mode with filename: "+nnname); logger.debug("Init ANN in projection mode with filename: "+nnname);
String filename = new File(nnname).getAbsolutePath(); String filename = new File(nnname).getAbsolutePath();
AnalysisLogger.getLogger().debug("ANN: using file name: "+filename); logger.debug("ANN: using file name: "+filename);
if (filename!=null) if (filename!=null)
neuralnet = loadNN(filename); neuralnet = loadNN(filename);
} }
@ -76,9 +79,9 @@ public class AquamapsNN extends AquamapsNative{
Object[] inputvector = new Object[wholevector.length - 6]; Object[] inputvector = new Object[wholevector.length - 6];
for (int i = 0; i < inputvector.length; i++) { for (int i = 0; i < inputvector.length; i++) {
inputvector[i] = wholevector[i + 1]; inputvector[i] = wholevector[i + 1];
// AnalysisLogger.getLogger().debug(i+": "+inputvector[i]); // logger.debug(i+": "+inputvector[i]);
} }
// AnalysisLogger.getLogger().debug("species vs csquare:" + species + " , " + csquarecode); // logger.debug("species vs csquare:" + species + " , " + csquarecode);
float probability = 0; float probability = 0;
// if (csquarecode.equals("1000:102:2")) // if (csquarecode.equals("1000:102:2"))
@ -99,7 +102,7 @@ public class AquamapsNN extends AquamapsNative{
float probability = (float) output[0]; float probability = (float) output[0];
/* /*
if (probability>0.1) if (probability>0.1)
AnalysisLogger.getLogger().debug(" Probability " + probability); logger.debug(" Probability " + probability);
*/ */
// System.exit(0); // System.exit(0);
return probability; return probability;
@ -120,7 +123,7 @@ public class AquamapsNN extends AquamapsNative{
nn = (Neural_Network) ois.readObject(); nn = (Neural_Network) ois.readObject();
} catch (Exception ex) { } catch (Exception ex) {
ex.printStackTrace(); ex.printStackTrace();
AnalysisLogger.getLogger().debug("Error in reading the object from file " + nomeFile + " ."); logger.debug("Error in reading the object from file " + nomeFile + " .");
} finally { } finally {
try { try {
stream.close(); stream.close();
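The same read-object-from-file pattern appears in the three neural-network classes touched by this commit; a compact equivalent using try-with-resources (the generic loader and its name are illustrative, only the stream handling is the point):

import java.io.FileInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;

public final class ModelLoader {

    private ModelLoader() {}

    // Deserializes a previously persisted model; try-with-resources closes the
    // streams even when reading fails, replacing the manual finally block.
    @SuppressWarnings("unchecked")
    public static <T> T load(String fileName) throws IOException, ClassNotFoundException {
        try (ObjectInputStream in = new ObjectInputStream(new FileInputStream(fileName))) {
            return (T) in.readObject();
        }
    }
}

For example, loadNN could then reduce to a single call such as Neural_Network nn = ModelLoader.load(filename), with the exception handling kept in the caller.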

View File

@ -5,7 +5,6 @@ import java.io.IOException;
import java.io.ObjectInputStream; import java.io.ObjectInputStream;
import java.util.List; import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType; import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
@ -14,9 +13,13 @@ import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.neurosolutions.NeuralNet; import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.neurosolutions.NeuralNet;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AquamapsNNNS extends AquamapsNative{ public class AquamapsNNNS extends AquamapsNative{
private static Logger logger = LoggerFactory.getLogger(AquamapsNNNS.class);
private NeuralNet neuralnet; private NeuralNet neuralnet;
@Override @Override
@ -83,7 +86,7 @@ public class AquamapsNNNS extends AquamapsNative{
if (probability>0.1) if (probability>0.1)
// if (probability<0) // if (probability<0)
AnalysisLogger.getLogger().debug(" Probability " + probability); logger.debug(" Probability " + probability);
// System.exit(0); // System.exit(0);
return probability; return probability;
@ -104,7 +107,7 @@ public class AquamapsNNNS extends AquamapsNative{
nn = (NeuralNet) ois.readObject(); nn = (NeuralNet) ois.readObject();
} catch (Exception ex) { } catch (Exception ex) {
ex.printStackTrace(); ex.printStackTrace();
AnalysisLogger.getLogger().debug("Error in reading the object from file " + nomeFile + " ."); logger.debug("Error in reading the object from file " + nomeFile + " .");
} finally { } finally {
try { try {
stream.close(); stream.close();

View File

@ -6,7 +6,6 @@ import java.io.IOException;
import java.io.ObjectInputStream; import java.io.ObjectInputStream;
import java.util.List; import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
@ -16,9 +15,13 @@ import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network; import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AquamapsNNSuitable extends AquamapsSuitable{ public class AquamapsNNSuitable extends AquamapsSuitable{
private static Logger logger = LoggerFactory.getLogger(AquamapsNNSuitable.class);
private Neural_Network neuralnet; private Neural_Network neuralnet;
@Override @Override
@ -55,15 +58,15 @@ public class AquamapsNNSuitable extends AquamapsSuitable{
@Override @Override
public void init(AlgorithmConfiguration config, SessionFactory dbHibConnection) { public void init(AlgorithmConfiguration config, SessionFactory dbHibConnection) {
AnalysisLogger.getLogger().debug("Initializing ANNs"); logger.debug("Initializing ANNs");
super.init(config,dbHibConnection); super.init(config,dbHibConnection);
String persistencePath = config.getPersistencePath(); String persistencePath = config.getPersistencePath();
// String filename = persistencePath + "neuralnetwork_" + config.getParam("SpeciesName") + "_" + config.getParam("UserName")+"_"+config.getParam("NeuralNetworkName").replace(" ", ""); // String filename = persistencePath + "neuralnetwork_" + config.getParam("SpeciesName") + "_" + config.getParam("UserName")+"_"+config.getParam("NeuralNetworkName").replace(" ", "");
String nnname = config.getParam("NeuralNetworkName"); String nnname = config.getParam("NeuralNetworkName");
AnalysisLogger.getLogger().debug("Init ANN in projection mode with filename: "+nnname); logger.debug("Init ANN in projection mode with filename: "+nnname);
String filename = new File(nnname).getAbsolutePath(); String filename = new File(nnname).getAbsolutePath();
AnalysisLogger.getLogger().debug("ANN: using file name: "+filename); logger.debug("ANN: using file name: "+filename);
if (filename!=null) if (filename!=null)
neuralnet = loadNN(filename); neuralnet = loadNN(filename);
} }
@ -76,9 +79,9 @@ public class AquamapsNNSuitable extends AquamapsSuitable{
Object[] inputvector = new Object[wholevector.length - 6]; Object[] inputvector = new Object[wholevector.length - 6];
for (int i = 0; i < inputvector.length; i++) { for (int i = 0; i < inputvector.length; i++) {
inputvector[i] = wholevector[i + 1]; inputvector[i] = wholevector[i + 1];
// AnalysisLogger.getLogger().debug(i+": "+inputvector[i]); // logger.debug(i+": "+inputvector[i]);
} }
// AnalysisLogger.getLogger().debug("species vs csquare:" + species + " , " + csquarecode); // logger.debug("species vs csquare:" + species + " , " + csquarecode);
float probability = 0; float probability = 0;
// if (csquarecode.equals("1000:102:2")) // if (csquarecode.equals("1000:102:2"))
@ -99,7 +102,7 @@ public class AquamapsNNSuitable extends AquamapsSuitable{
float probability = (float) output[0]; float probability = (float) output[0];
/* /*
if (probability>0.1) if (probability>0.1)
AnalysisLogger.getLogger().debug(" Probability " + probability); logger.debug(" Probability " + probability);
*/ */
// System.exit(0); // System.exit(0);
return probability; return probability;
@ -120,7 +123,7 @@ public class AquamapsNNSuitable extends AquamapsSuitable{
nn = (Neural_Network) ois.readObject(); nn = (Neural_Network) ois.readObject();
} catch (Exception ex) { } catch (Exception ex) {
ex.printStackTrace(); ex.printStackTrace();
AnalysisLogger.getLogger().debug("Error in reading the object from file " + nomeFile + " ."); logger.debug("Error in reading the object from file " + nomeFile + " .");
} finally { } finally {
try { try {
stream.close(); stream.close();

View File

@ -5,7 +5,6 @@ import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Queue; import java.util.Queue;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType; import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
@ -14,7 +13,6 @@ import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType; import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
@ -22,9 +20,14 @@ import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributio
import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.MaxMinGenerator; import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.MaxMinGenerator;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AquamapsSuitable implements SpatialProbabilityDistributionTable{ public class AquamapsSuitable implements SpatialProbabilityDistributionTable{
private static Logger logger = LoggerFactory.getLogger(AquamapsSuitable.class);
String selectAllSpeciesQuery = "select depthmin,meandepth,depthprefmin,pelagic,depthprefmax,depthmax,tempmin,layer,tempprefmin,tempprefmax,tempmax,salinitymin,salinityprefmin,salinityprefmax,salinitymax,primprodmin,primprodprefmin,primprodprefmax,primprodmax,iceconmin,iceconprefmin,iceconprefmax,iceconmax,landdistyn,landdistmin,landdistprefmin,landdistprefmax,landdistmax,nmostlat,smostlat,wmostlong,emostlong,faoareas,speciesid from %1$s;"; String selectAllSpeciesQuery = "select depthmin,meandepth,depthprefmin,pelagic,depthprefmax,depthmax,tempmin,layer,tempprefmin,tempprefmax,tempmax,salinitymin,salinityprefmin,salinityprefmax,salinitymax,primprodmin,primprodprefmin,primprodprefmax,primprodmax,iceconmin,iceconprefmin,iceconprefmax,iceconmax,landdistyn,landdistmin,landdistprefmin,landdistprefmax,landdistmax,nmostlat,smostlat,wmostlong,emostlong,faoareas,speciesid from %1$s;";
String csquareCodeQuery = "select csquarecode,depthmean,depthmax,depthmin, sstanmean,sbtanmean,salinitymean,salinitybmean, primprodmean,iceconann,landdist,oceanarea,centerlat,centerlong,faoaream,eezall,lme from %1$s d where oceanarea>0"; String csquareCodeQuery = "select csquarecode,depthmean,depthmax,depthmin, sstanmean,sbtanmean,salinitymean,salinitybmean, primprodmean,iceconann,landdist,oceanarea,centerlat,centerlong,faoaream,eezall,lme from %1$s d where oceanarea>0";
String createTableStatement = "CREATE TABLE %1$s ( speciesid character varying, csquarecode character varying, probability real, boundboxyn smallint, faoareayn smallint, faoaream integer, eezall character varying, lme integer) WITH (OIDS=FALSE ); CREATE INDEX CONCURRENTLY %1$s_idx ON %1$s USING btree (speciesid, csquarecode, faoaream, eezall, lme);"; String createTableStatement = "CREATE TABLE %1$s ( speciesid character varying, csquarecode character varying, probability real, boundboxyn smallint, faoareayn smallint, faoaream integer, eezall character varying, lme integer) WITH (OIDS=FALSE ); CREATE INDEX CONCURRENTLY %1$s_idx ON %1$s USING btree (speciesid, csquarecode, faoaream, eezall, lme);";
@ -74,18 +77,18 @@ public class AquamapsSuitable implements SpatialProbabilityDistributionTable{
//if it exists then set the table name //if it exists then set the table name
if (select!=null){ if (select!=null){
hspenMinMaxLat = supposedminmaxlattable; hspenMinMaxLat = supposedminmaxlattable;
AnalysisLogger.getLogger().debug("Aquamaps Algorithm Init ->the min max latitudes table yet exists "+hspenMinMaxLat); logger.debug("Aquamaps Algorithm Init ->the min max latitudes table yet exists "+hspenMinMaxLat);
} }
else{ else{
//otherwise create it by calling the creator //otherwise create it by calling the creator
AnalysisLogger.getLogger().debug("Aquamaps Algorithm Init ->the min max latitudes table does not exist! - generating"); logger.debug("Aquamaps Algorithm Init ->the min max latitudes table does not exist! - generating");
hspenMinMaxLat = generateMaxMinHspec(supposedminmaxlattable, hspenTable,config.getParam("OccurrencePointsTable"), dbHibConnection); hspenMinMaxLat = generateMaxMinHspec(supposedminmaxlattable, hspenTable,config.getParam("OccurrencePointsTable"), dbHibConnection);
AnalysisLogger.getLogger().debug("Aquamaps Algorithm Init ->min max latitudes table created in "+hspenMinMaxLat); logger.debug("Aquamaps Algorithm Init ->min max latitudes table created in "+hspenMinMaxLat);
} }
} }
AnalysisLogger.getLogger().trace("Aquamaps Algorithm Init ->getting min max latitudes from "+hspenMinMaxLat); logger.trace("Aquamaps Algorithm Init ->getting min max latitudes from "+hspenMinMaxLat);
allSpeciesObservations = new HashMap<String, List<Object>>(); allSpeciesObservations = new HashMap<String, List<Object>>();
List<Object> SpeciesObservations = DatabaseFactory.executeSQLQuery(String.format(selectAllSpeciesObservationQuery, hspenMinMaxLat), dbHibConnection); List<Object> SpeciesObservations = DatabaseFactory.executeSQLQuery(String.format(selectAllSpeciesObservationQuery, hspenMinMaxLat), dbHibConnection);
@ -98,7 +101,7 @@ public class AquamapsSuitable implements SpatialProbabilityDistributionTable{
allSpeciesObservations.put((String)speciesid, maxminInfo); allSpeciesObservations.put((String)speciesid, maxminInfo);
} }
AnalysisLogger.getLogger().trace("Aquamaps Algorithm Init ->init finished"); logger.trace("Aquamaps Algorithm Init ->init finished");
} }
@Override @Override
@ -185,7 +188,7 @@ public class AquamapsSuitable implements SpatialProbabilityDistributionTable{
//adjust FAO areas //adjust FAO areas
currentFAOAreas = core.procFAO_2050(currentFAOAreas); currentFAOAreas = core.procFAO_2050(currentFAOAreas);
//get Bounding Box Information //get Bounding Box Information
// AnalysisLogger.getLogger().trace("TYPE:"+type); // logger.trace("TYPE:"+type);
currentSpeciesBoundingBoxInfo = core.getBoundingBoxInfo($paramData_NMostLat, $paramData_SMostLat, $paramData_WMostLong, $paramData_EMostLong, speciesObservations,type); currentSpeciesBoundingBoxInfo = core.getBoundingBoxInfo($paramData_NMostLat, $paramData_SMostLat, $paramData_WMostLong, $paramData_EMostLong, speciesObservations,type);
//end of get BoundingBoxInformation //end of get BoundingBoxInformation
} }

View File

@ -5,7 +5,6 @@ import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList; import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
@ -22,9 +21,13 @@ import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributio
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils; import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class BayesianDistribution implements SpatialProbabilityDistributionGeneric { public abstract class BayesianDistribution implements SpatialProbabilityDistributionGeneric {
private static Logger logger = LoggerFactory.getLogger(BayesianDistribution.class);
protected static String FeaturesTableP = "FeaturesTable"; protected static String FeaturesTableP = "FeaturesTable";
protected static String FeaturesTableColumnsP = "FeaturesColumnNames"; protected static String FeaturesTableColumnsP = "FeaturesColumnNames";
protected static String FinalTableLabel = "FinalTableLabel"; protected static String FinalTableLabel = "FinalTableLabel";
@ -109,10 +112,10 @@ public abstract class BayesianDistribution implements SpatialProbabilityDistribu
if (groupingFactor.trim().length()>0) if (groupingFactor.trim().length()>0)
insertBuffer = DatabaseUtils.insertFromBuffer(finalTableName, groupingFactor + "," + featuresTableColumns + "," + FinalTableValue, sb); insertBuffer = DatabaseUtils.insertFromBuffer(finalTableName, groupingFactor + "," + featuresTableColumns + "," + FinalTableValue, sb);
// AnalysisLogger.getLogger().debug("Insertion Query " + insertBuffer); // logger.debug("Insertion Query " + insertBuffer);
AnalysisLogger.getLogger().debug("Writing Distribution into the DB "); logger.debug("Writing Distribution into the DB ");
DatabaseFactory.executeSQLUpdate(insertBuffer, dbConnection); DatabaseFactory.executeSQLUpdate(insertBuffer, dbConnection);
AnalysisLogger.getLogger().debug("Done!"); logger.debug("Done!");
} }
@Override @Override
@ -187,7 +190,7 @@ public abstract class BayesianDistribution implements SpatialProbabilityDistribu
@Override @Override
public void init(AlgorithmConfiguration config) throws Exception { public void init(AlgorithmConfiguration config) throws Exception {
status = 0; status = 0;
AnalysisLogger.getLogger().debug("Initializing Database Connection"); logger.debug("Initializing Database Connection");
this.config = config; this.config = config;
// initialization of the variables // initialization of the variables
finalTableLabel = config.getParam(FinalTableLabel); finalTableLabel = config.getParam(FinalTableLabel);
@ -206,12 +209,12 @@ public abstract class BayesianDistribution implements SpatialProbabilityDistribu
// create a new table // create a new table
dbConnection = DatabaseUtils.initDBSession(config); dbConnection = DatabaseUtils.initDBSession(config);
try { try {
AnalysisLogger.getLogger().debug("Dropping table " + finalTableName); logger.debug("Dropping table " + finalTableName);
String dropStatement = DatabaseUtils.dropTableStatement(finalTableName); String dropStatement = DatabaseUtils.dropTableStatement(finalTableName);
DatabaseFactory.executeSQLUpdate(dropStatement, dbConnection); DatabaseFactory.executeSQLUpdate(dropStatement, dbConnection);
AnalysisLogger.getLogger().debug("Table " + finalTableName + " dropped"); logger.debug("Table " + finalTableName + " dropped");
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().debug("Could not drop table " + finalTableName); logger.debug("Could not drop table " + finalTableName);
} }
// create Table // create Table
@ -233,11 +236,11 @@ public abstract class BayesianDistribution implements SpatialProbabilityDistribu
if (groupingFactor.length()>0){ if (groupingFactor.length()>0){
createStatement = "create table "+finalTableName+" ( "+groupingFactor+" character varying "+columns+", "+FinalTableValue+" "+FinalTableValueType+")"; createStatement = "create table "+finalTableName+" ( "+groupingFactor+" character varying "+columns+", "+FinalTableValue+" "+FinalTableValueType+")";
} }
AnalysisLogger.getLogger().debug("Creating table: " + finalTableName + " by statement: " + createStatement); logger.debug("Creating table: " + finalTableName + " by statement: " + createStatement);
DatabaseFactory.executeSQLUpdate(createStatement, dbConnection); DatabaseFactory.executeSQLUpdate(createStatement, dbConnection);
AnalysisLogger.getLogger().debug("Table: " + finalTableName + " created"); logger.debug("Table: " + finalTableName + " created");
/* /*
AnalysisLogger.getLogger().debug("Adding a new column to "+finalTableName); logger.debug("Adding a new column to "+finalTableName);
DatabaseFactory.executeSQLUpdate(DatabaseUtils.addColumnStatement(finalTableLabel, FinalTableValue, FinalTableValueType), dbConnection); DatabaseFactory.executeSQLUpdate(DatabaseUtils.addColumnStatement(finalTableLabel, FinalTableValue, FinalTableValueType), dbConnection);
*/ */
status = 10f; status = 10f;
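The edit above follows the pattern applied throughout this commit: the shared AnalysisLogger singleton, together with the programmatic AnalysisLogger.setLogger(...) bootstrap, is replaced by a per-class SLF4J logger, and the choice of logging backend is left to whichever binding is on the classpath. A minimal sketch of the resulting idiom, assuming a hypothetical class name LoggingSketch and illustrative messages:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingSketch {

    // One static logger per class, named after the class itself, so log levels
    // can be tuned per class or package in the backend configuration.
    private static final Logger logger = LoggerFactory.getLogger(LoggingSketch.class);

    public void run() {
        // Replaces AnalysisLogger.getLogger().debug(...) / trace(...) / error(...)
        logger.debug("Writing Distribution into the DB");
        try {
            throw new IllegalStateException("simulated failure");
        } catch (Exception e) {
            // Passing the throwable as the last argument keeps the stack trace.
            logger.error("Error in writing files", e);
        }
    }
}

No AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile) call is needed in init() any more: the SLF4J binding (logback, log4j, etc.) typically reads its own configuration at startup.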

View File

@ -9,27 +9,24 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.UUID; import java.util.UUID;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric; import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DummyAlgorithm implements SpatialProbabilityDistributionGeneric{ public class DummyAlgorithm implements SpatialProbabilityDistributionGeneric{
private static Logger logger = LoggerFactory.getLogger(DummyAlgorithm.class);
List<String> randomElements; List<String> randomElements;
String persistence; String persistence;
private String filename; private String filename;
static String persistedFilePrefix = "dummyfile"; static String persistedFilePrefix = "dummyfile";
public static void main (String[] args){
String s = toString(330.6499f);
System.out.println(s);
System.out.println(fromString(s));
}
private static String toString(float number){ private static String toString(float number){
String s = ""+number; String s = ""+number;
@ -64,7 +61,7 @@ public class DummyAlgorithm implements SpatialProbabilityDistributionGeneric{
@Override @Override
public void init(AlgorithmConfiguration config) { public void init(AlgorithmConfiguration config) {
AnalysisLogger.getLogger().trace("Dummy INIT"); logger.trace("Dummy INIT");
randomElements = new ArrayList<String>(); randomElements = new ArrayList<String>();
for (int i=0;i<170000;i++) for (int i=0;i<170000;i++)
{ {
@ -96,7 +93,7 @@ public class DummyAlgorithm implements SpatialProbabilityDistributionGeneric{
@Override @Override
public List<Object> getGeographicalInfoObjects() { public List<Object> getGeographicalInfoObjects() {
AnalysisLogger.getLogger().trace("Dummy TAKING RANDOMS"); logger.trace("Dummy TAKING RANDOMS");
List<Object> randomElements = new ArrayList<Object>(); List<Object> randomElements = new ArrayList<Object>();
for (int i=0;i<170000;i++) for (int i=0;i<170000;i++)
{ {
@ -107,7 +104,7 @@ public class DummyAlgorithm implements SpatialProbabilityDistributionGeneric{
@Override @Override
public float calcProb(Object mainInfo, Object area) { public float calcProb(Object mainInfo, Object area) {
// AnalysisLogger.getLogger().debug("Calculation Probability"); // logger.debug("Calculation Probability");
Float f1 = fromString((String) mainInfo); Float f1 = fromString((String) mainInfo);
Float f2 = Float.valueOf((String) area); Float f2 = Float.valueOf((String) area);
return (float) 100f*f1*f2; return (float) 100f*f1*f2;
@ -115,17 +112,17 @@ public class DummyAlgorithm implements SpatialProbabilityDistributionGeneric{
@Override @Override
public void singleStepPreprocess(Object mainInfo, Object area) { public void singleStepPreprocess(Object mainInfo, Object area) {
AnalysisLogger.getLogger().trace("Dummy SINGLE PREPROCESSING Step"); logger.trace("Dummy SINGLE PREPROCESSING Step");
} }
@Override @Override
public void singleStepPostprocess(Object mainInfo, Object allAreasInformation) { public void singleStepPostprocess(Object mainInfo, Object allAreasInformation) {
AnalysisLogger.getLogger().trace("Dummy SINGLE POSTPROCESSING Step"); logger.trace("Dummy SINGLE POSTPROCESSING Step");
} }
@Override @Override
public void postProcess() { public void postProcess() {
AnalysisLogger.getLogger().trace("Dummy POSTPROCESS"); logger.trace("Dummy POSTPROCESS");
} }
@Override @Override
@ -138,14 +135,14 @@ public class DummyAlgorithm implements SpatialProbabilityDistributionGeneric{
ysize = distribution.get(s).size(); ysize = distribution.get(s).size();
break; break;
} }
AnalysisLogger.getLogger().debug("Dummy overall dimension of the distribution: "+distribution.size()+" X "+ysize); logger.debug("Dummy overall dimension of the distribution: "+distribution.size()+" X "+ysize);
//Construct the LineNumberReader object //Construct the LineNumberReader object
filename = persistence+persistedFilePrefix+UUID.randomUUID(); filename = persistence+persistedFilePrefix+UUID.randomUUID();
AnalysisLogger.getLogger().debug("Dummy Storing in "+filename); logger.debug("Dummy Storing in "+filename);
outputStream = new ObjectOutputStream(new FileOutputStream(persistence+persistedFilePrefix+"_"+UUID.randomUUID())); outputStream = new ObjectOutputStream(new FileOutputStream(persistence+persistedFilePrefix+"_"+UUID.randomUUID()));
outputStream.writeObject(distribution); outputStream.writeObject(distribution);
AnalysisLogger.getLogger().debug("Dummy Stored"); logger.debug("Dummy Stored");
} catch (Exception ex) { } catch (Exception ex) {
ex.printStackTrace(); ex.printStackTrace();
} finally { } finally {

View File

@ -1,10 +1,13 @@
package org.gcube.dataanalysis.ecoengine.spatialdistributions; package org.gcube.dataanalysis.ecoengine.spatialdistributions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network; import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class FeedForwardNeuralNetworkDistribution extends BayesianDistribution{ public class FeedForwardNeuralNetworkDistribution extends BayesianDistribution{
private static Logger logger = LoggerFactory.getLogger(FeedForwardNeuralNetworkDistribution.class);
private Neural_Network neuralnet; private Neural_Network neuralnet;
@Override @Override
@ -32,7 +35,7 @@ public class FeedForwardNeuralNetworkDistribution extends BayesianDistribution{
// String filename = persistencePath + Neural_Network.generateNNName(""+mainInfo, userName, modelName); // String filename = persistencePath + Neural_Network.generateNNName(""+mainInfo, userName, modelName);
String filename = modelFile.getAbsolutePath(); String filename = modelFile.getAbsolutePath();
neuralnet = Neural_Network.loadNN(filename); neuralnet = Neural_Network.loadNN(filename);
AnalysisLogger.getLogger().debug("Using neural network with emission range: ("+neuralnet.minfactor+" ; "+neuralnet.maxfactor+"" ); logger.debug("Using neural network with emission range: ("+neuralnet.minfactor+" ; "+neuralnet.maxfactor+"" );
} }
@Override @Override

View File

@ -9,16 +9,19 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.UUID; import java.util.UUID;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric; import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestAlgorithm implements SpatialProbabilityDistributionGeneric{ public class TestAlgorithm implements SpatialProbabilityDistributionGeneric{
private static Logger logger = LoggerFactory.getLogger(TestAlgorithm.class);
@Override @Override
public ALG_PROPS[] getProperties() { public ALG_PROPS[] getProperties() {
ALG_PROPS[] p = {ALG_PROPS.PHENOMENON_VS_GEOINFO}; ALG_PROPS[] p = {ALG_PROPS.PHENOMENON_VS_GEOINFO};
@ -91,14 +94,14 @@ public class TestAlgorithm implements SpatialProbabilityDistributionGeneric{
ysize = distribution.get(s).size(); ysize = distribution.get(s).size();
break; break;
} }
AnalysisLogger.getLogger().debug("overall dimension of the distribution: "+distribution.size()+" X "+ysize); logger.debug("overall dimension of the distribution: "+distribution.size()+" X "+ysize);
//Construct the LineNumberReader object //Construct the LineNumberReader object
filename = pers+"testProb"+UUID.randomUUID(); filename = pers+"testProb"+UUID.randomUUID();
AnalysisLogger.getLogger().debug(" Storing in "+filename); logger.debug(" Storing in "+filename);
outputStream = new ObjectOutputStream(new FileOutputStream(filename)); outputStream = new ObjectOutputStream(new FileOutputStream(filename));
outputStream.writeObject(distribution); outputStream.writeObject(distribution);
AnalysisLogger.getLogger().debug("Stored"); logger.debug("Stored");
} catch (Exception ex) { } catch (Exception ex) {
ex.printStackTrace(); ex.printStackTrace();
} finally { } finally {

View File

@ -1,12 +1,10 @@
package org.gcube.dataanalysis.ecoengine.test.regression; package org.gcube.dataanalysis.ecoengine.test.regression;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent; import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
@Deprecated @Deprecated
public class Regressor { public class Regressor {
public static void process(ComputationalAgent agent) throws Exception { public static void process(ComputationalAgent agent) throws Exception {
@ -61,7 +59,6 @@ public class Regressor {
config.setParam("DatabasePassword","cdec755ed6633ba"); config.setParam("DatabasePassword","cdec755ed6633ba");
config.setParam("DatabaseURL","jdbc:postgresql://postgresql-srv-dev.d4science.org/dmalgorithms"); config.setParam("DatabaseURL","jdbc:postgresql://postgresql-srv-dev.d4science.org/dmalgorithms");
config.setParam("DatabaseDriver","org.postgresql.Driver"); config.setParam("DatabaseDriver","org.postgresql.Driver");
AnalysisLogger.setLogger(config.getConfigPath()+AlgorithmConfiguration.defaultLoggerFile);
return config; return config;
} }
} }

View File

@ -3,9 +3,7 @@ package org.gcube.dataanalysis.ecoengine.transducers;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory; import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType; import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType; import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable; import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
@ -15,10 +13,15 @@ import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils; import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class OccurrencePointsDuplicatesDeleter extends OccurrencePointsMerger { public class OccurrencePointsDuplicatesDeleter extends OccurrencePointsMerger {
private static Logger logger = LoggerFactory.getLogger(OccurrencePointsDuplicatesDeleter.class);
String tableName; String tableName;
List<String> records = new ArrayList<String>(); List<String> records = new ArrayList<String>();
@ -68,7 +71,6 @@ public class OccurrencePointsDuplicatesDeleter extends OccurrencePointsMerger {
@Override @Override
public void init() throws Exception { public void init() throws Exception {
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
lonFld = config.getParam(longitudeColumn); lonFld = config.getParam(longitudeColumn);
latFld = config.getParam(latitudeColumn); latFld = config.getParam(latitudeColumn);
recordedByFld = config.getParam(recordedByColumn); recordedByFld = config.getParam(recordedByColumn);
@ -105,20 +107,20 @@ public class OccurrencePointsDuplicatesDeleter extends OccurrencePointsMerger {
public void takeFullRanges() { public void takeFullRanges() {
// take the elements from sx table // take the elements from sx table
AnalysisLogger.getLogger().info("Taking elements from left table: " + leftTableName); logger.info("Taking elements from left table: " + leftTableName);
leftRows = DatabaseFactory.executeSQLQuery(DatabaseUtils.getDinstictElements(tableName, columns.toString(),"")+" limit 100000", dbconnection); leftRows = DatabaseFactory.executeSQLQuery(DatabaseUtils.getDinstictElements(tableName, columns.toString(),"")+" limit 100000", dbconnection);
} }
public void takeRange(int offsetLeft, int numLeft, int offsetRight, int numRight) { public void takeRange(int offsetLeft, int numLeft, int offsetRight, int numRight) {
// take the elements from sx table // take the elements from sx table
AnalysisLogger.getLogger().info("Taking elements from left table: " + leftTableName); logger.info("Taking elements from left table: " + leftTableName);
leftRows = DatabaseFactory.executeSQLQuery(DatabaseUtils.getDinstictElements(leftTableName, columns.toString(), "offset " + offsetLeft + " limit " + numLeft), dbconnection); leftRows = DatabaseFactory.executeSQLQuery(DatabaseUtils.getDinstictElements(leftTableName, columns.toString(), "offset " + offsetLeft + " limit " + numLeft), dbconnection);
} }
public void computeRange() throws Exception { public void computeRange() throws Exception {
try { try {
// for each element in dx // for each element in dx
AnalysisLogger.getLogger().trace("Processing"); logger.trace("Processing");
status = 10; status = 10;
int similaritiesCounter = 0; int similaritiesCounter = 0;
int allrows = 0; int allrows = 0;
@ -141,7 +143,7 @@ public class OccurrencePointsDuplicatesDeleter extends OccurrencePointsMerger {
if (prob >= confidenceValue) { if (prob >= confidenceValue) {
similaritiesCounter++; similaritiesCounter++;
if (isBetterThan(testOcc, yetInserted)) { if (isBetterThan(testOcc, yetInserted)) {
AnalysisLogger.getLogger().trace("Found a similarity with P=" + prob + " between (" + "\"" + testOcc.scientificName + "\"" + "," + testOcc.x + "\"" + "," + "\"" + testOcc.y + "\"" + "," + "\"" + testOcc.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(testOcc.eventdate) + "\"" + ") VS " + "(" + "\"" + yetInserted.scientificName + "\"" + "," + "\"" + yetInserted.x + "\"" + "," + "\"" + yetInserted.y + "\"" + "," + "\"" + yetInserted.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(yetInserted.eventdate) + "\"" + ")"); logger.trace("Found a similarity with P=" + prob + " between (" + "\"" + testOcc.scientificName + "\"" + "," + testOcc.x + "\"" + "," + "\"" + testOcc.y + "\"" + "," + "\"" + testOcc.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(testOcc.eventdate) + "\"" + ") VS " + "(" + "\"" + yetInserted.scientificName + "\"" + "," + "\"" + yetInserted.x + "\"" + "," + "\"" + yetInserted.y + "\"" + "," + "\"" + yetInserted.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(yetInserted.eventdate) + "\"" + ")");
objectstoinsert.remove(k); objectstoinsert.remove(k);
k--; k--;
insertedSize--; insertedSize--;
@ -164,19 +166,19 @@ public class OccurrencePointsDuplicatesDeleter extends OccurrencePointsMerger {
rowcounter++; rowcounter++;
} }
AnalysisLogger.getLogger().trace("Found " + similaritiesCounter + " similarities on " + allrows + " distinct elements"); logger.trace("Found " + similaritiesCounter + " similarities on " + allrows + " distinct elements");
status = 90; status = 90;
// transform the complete list into a table // transform the complete list into a table
persist(); persist();
// close DB connection // close DB connection
} }
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().error("error",e); logger.error("error",e);
throw e; throw e;
} finally { } finally {
shutdown(); shutdown();
status = 100; status = 100;
AnalysisLogger.getLogger().trace("Occ Points Processing Finished and db closed"); logger.trace("Occ Points Processing Finished and db closed");
} }
} }
@ -185,25 +187,25 @@ public class OccurrencePointsDuplicatesDeleter extends OccurrencePointsMerger {
try { try {
// init DB connection // init DB connection
AnalysisLogger.getLogger().trace("Initializing DB Connection"); logger.trace("Initializing DB Connection");
dbconnection = DatabaseUtils.initDBSession(config); dbconnection = DatabaseUtils.initDBSession(config);
AnalysisLogger.getLogger().trace("Taking Table Description"); logger.trace("Taking Table Description");
AnalysisLogger.getLogger().trace("Creating final table: " + finalTableName); logger.trace("Creating final table: " + finalTableName);
// create new merged table // create new merged table
try { try {
DatabaseFactory.executeSQLUpdate(DatabaseUtils.dropTableStatement(finalTableName), dbconnection); DatabaseFactory.executeSQLUpdate(DatabaseUtils.dropTableStatement(finalTableName), dbconnection);
} catch (Exception e1) { } catch (Exception e1) {
} }
AnalysisLogger.getLogger().trace("Preparing table: " + finalTableName); logger.trace("Preparing table: " + finalTableName);
prepareFinalTable(); prepareFinalTable();
AnalysisLogger.getLogger().trace("Extracting columns from: " + finalTableName); logger.trace("Extracting columns from: " + finalTableName);
extractColumnNames(); extractColumnNames();
AnalysisLogger.getLogger().trace("Taken Table Description: " + columns); logger.trace("Taken Table Description: " + columns);
// take distinct elements from table // take distinct elements from table
AnalysisLogger.getLogger().trace("Taking elements from table: " + tableName); logger.trace("Taking elements from table: " + tableName);
List<Object> rows = DatabaseFactory.executeSQLQuery(DatabaseUtils.getDinstictElements(tableName, columns.toString(), ""), dbconnection); List<Object> rows = DatabaseFactory.executeSQLQuery(DatabaseUtils.getDinstictElements(tableName, columns.toString(), ""), dbconnection);
// for each element in dx // for each element in dx
AnalysisLogger.getLogger().trace("Processing"); logger.trace("Processing");
status = 10; status = 10;
int similaritiesCounter = 0; int similaritiesCounter = 0;
int allrows = rows.size(); int allrows = rows.size();
@ -224,7 +226,7 @@ public class OccurrencePointsDuplicatesDeleter extends OccurrencePointsMerger {
if (prob >= confidenceValue) { if (prob >= confidenceValue) {
similaritiesCounter++; similaritiesCounter++;
if (isBetterThan(testOcc, yetInserted)) { if (isBetterThan(testOcc, yetInserted)) {
AnalysisLogger.getLogger().trace("Found a similarity with P=" + prob + " between (" + "\"" + testOcc.scientificName + "\"" + "," + testOcc.x + "\"" + "," + "\"" + testOcc.y + "\"" + "," + "\"" + testOcc.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(testOcc.eventdate) + "\"" + ") VS " + "(" + "\"" + yetInserted.scientificName + "\"" + "," + "\"" + yetInserted.x + "\"" + "," + "\"" + yetInserted.y + "\"" + "," + "\"" + yetInserted.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(yetInserted.eventdate) + "\"" + ")"); logger.trace("Found a similarity with P=" + prob + " between (" + "\"" + testOcc.scientificName + "\"" + "," + testOcc.x + "\"" + "," + "\"" + testOcc.y + "\"" + "," + "\"" + testOcc.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(testOcc.eventdate) + "\"" + ") VS " + "(" + "\"" + yetInserted.scientificName + "\"" + "," + "\"" + yetInserted.x + "\"" + "," + "\"" + yetInserted.y + "\"" + "," + "\"" + yetInserted.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(yetInserted.eventdate) + "\"" + ")");
objectstoinsert.remove(k); objectstoinsert.remove(k);
k--; k--;
insertedSize--; insertedSize--;
@ -247,19 +249,19 @@ public class OccurrencePointsDuplicatesDeleter extends OccurrencePointsMerger {
rowcounter++; rowcounter++;
} }
AnalysisLogger.getLogger().trace("Found " + similaritiesCounter + " similarities on " + allrows + " distinct elements"); logger.trace("Found " + similaritiesCounter + " similarities on " + allrows + " distinct elements");
status = 90; status = 90;
// transform the complete list into a table // transform the complete list into a table
persist(); persist();
// close DB connection // close DB connection
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().trace("An error occurred " + e.getLocalizedMessage()); logger.trace("An error occurred " + e.getLocalizedMessage());
throw e; throw e;
} finally { } finally {
if (dbconnection != null) if (dbconnection != null)
dbconnection.close(); dbconnection.close();
status = 100; status = 100;
AnalysisLogger.getLogger().trace("Occ Points Processing Finished and db closed"); logger.trace("Occ Points Processing Finished and db closed");
} }
} }
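One detail worth noting in the class above: the catch block in computeRange() logs the full exception (logger.error("error", e)), while the later catch block only concatenates e.getLocalizedMessage() into a trace message and so discards the stack trace. SLF4J accepts a throwable as the final argument at every level. A small sketch of the difference, with hypothetical class, method, and message names:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExceptionLoggingSketch {

    private static final Logger logger = LoggerFactory.getLogger(ExceptionLoggingSketch.class);

    void process() {
        try {
            throw new RuntimeException("simulated DB failure");
        } catch (Exception e) {
            // Records only the message text; the stack trace is lost.
            logger.trace("An error occurred " + e.getLocalizedMessage());

            // Records the message together with the full stack trace.
            logger.error("An error occurred while processing occurrence points", e);
        }
    }
}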

View File

@ -3,9 +3,7 @@ package org.gcube.dataanalysis.ecoengine.transducers;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory; import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType; import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType; import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable; import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
@ -16,9 +14,13 @@ import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils; import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class OccurrencePointsInSeaOnEarth extends OccurrencePointsMerger{ public class OccurrencePointsInSeaOnEarth extends OccurrencePointsMerger{
private static Logger logger = LoggerFactory.getLogger(OccurrencePointsInSeaOnEarth.class);
//NOTE: 0.125 is the diagonal of a csquare, which is the maximum extent to which a point can lie in a csquare //NOTE: 0.125 is the diagonal of a csquare, which is the maximum extent to which a point can lie in a csquare
private static String inthesea="select * into %1$s from (select distinct a.* from %2$s as a join hcaf_d as b on ((b.centerlat-a.%3$s)*(b.centerlat-a.%3$s)+(b.centerlong-a.%4$s)*(b.centerlong-a.%4$s)<= 0.125) and b.oceanarea>0) as t limit 10000"; private static String inthesea="select * into %1$s from (select distinct a.* from %2$s as a join hcaf_d as b on ((b.centerlat-a.%3$s)*(b.centerlat-a.%3$s)+(b.centerlong-a.%4$s)*(b.centerlong-a.%4$s)<= 0.125) and b.oceanarea>0) as t limit 10000";
private static String onearth="select * into %1$s from (select distinct a.* from %2$s as a join hcaf_d as b on ((b.centerlat-a.%3$s)*(b.centerlat-a.%3$s)+(b.centerlong-a.%4$s)*(b.centerlong-a.%4$s)<= 0.125) and b.landdist<=0.3) as t limit 10000"; private static String onearth="select * into %1$s from (select distinct a.* from %2$s as a join hcaf_d as b on ((b.centerlat-a.%3$s)*(b.centerlat-a.%3$s)+(b.centerlong-a.%4$s)*(b.centerlong-a.%4$s)<= 0.125) and b.landdist<=0.3) as t limit 10000";
@ -57,7 +59,6 @@ public class OccurrencePointsInSeaOnEarth extends OccurrencePointsMerger{
@Override @Override
public void init() throws Exception { public void init() throws Exception {
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
lonFld = config.getParam(longitudeColumn); lonFld = config.getParam(longitudeColumn);
latFld = config.getParam(latitudeColumn); latFld = config.getParam(latitudeColumn);
tableName = config.getParam(tableNameF); tableName = config.getParam(tableNameF);
@ -82,10 +83,10 @@ public class OccurrencePointsInSeaOnEarth extends OccurrencePointsMerger{
try { try {
// init DB connection // init DB connection
AnalysisLogger.getLogger().trace("Initializing DB Connection"); logger.trace("Initializing DB Connection");
dbconnection = DatabaseUtils.initDBSession(config); dbconnection = DatabaseUtils.initDBSession(config);
AnalysisLogger.getLogger().trace("Taking Table Description"); logger.trace("Taking Table Description");
AnalysisLogger.getLogger().trace("Creating merged table: " + finalTableName); logger.trace("Creating merged table: " + finalTableName);
// create new merged table // create new merged table
try{ try{
DatabaseFactory.executeSQLUpdate(DatabaseUtils.dropTableStatement(finalTableName), dbconnection); DatabaseFactory.executeSQLUpdate(DatabaseUtils.dropTableStatement(finalTableName), dbconnection);
@ -98,10 +99,10 @@ public class OccurrencePointsInSeaOnEarth extends OccurrencePointsMerger{
else else
generationquery = String.format(onearth,finalTableName,tableName,latFld,lonFld); generationquery = String.format(onearth,finalTableName,tableName,latFld,lonFld);
AnalysisLogger.getLogger().trace("Applying filter " + filter.name()); logger.trace("Applying filter " + filter.name());
AnalysisLogger.getLogger().trace("Applying query " + generationquery); logger.trace("Applying query " + generationquery);
DatabaseFactory.executeSQLUpdate(generationquery, dbconnection); DatabaseFactory.executeSQLUpdate(generationquery, dbconnection);
AnalysisLogger.getLogger().trace("Final Table created!"); logger.trace("Final Table created!");
} catch (Exception e) { } catch (Exception e) {
throw e; throw e;
@ -111,7 +112,7 @@ public class OccurrencePointsInSeaOnEarth extends OccurrencePointsMerger{
dbconnection.close(); dbconnection.close();
}catch(Exception e2){} }catch(Exception e2){}
status = 100; status = 100;
AnalysisLogger.getLogger().trace("Occ Points Processing Finished and db closed"); logger.trace("Occ Points Processing Finished and db closed");
} }
} }

View File

@ -7,7 +7,6 @@ import java.util.Date;
import java.util.List; import java.util.List;
import org.gcube.contentmanagement.graphtools.utils.DateGuesser; import org.gcube.contentmanagement.graphtools.utils.DateGuesser;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory; import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory;
import org.gcube.contentmanagement.lexicalmatcher.utils.DistanceCalculator; import org.gcube.contentmanagement.lexicalmatcher.utils.DistanceCalculator;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
@ -26,9 +25,13 @@ import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils; import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory; import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class OccurrencePointsMerger implements Transducerer { public class OccurrencePointsMerger implements Transducerer {
private static Logger logger = LoggerFactory.getLogger(OccurrencePointsMerger.class);
static protected String finalTableNameL = "final_Table_Name"; static protected String finalTableNameL = "final_Table_Name";
static protected String longitudeColumn = "longitudeColumn"; static protected String longitudeColumn = "longitudeColumn";
static protected String latitudeColumn = "latitudeColumn"; static protected String latitudeColumn = "latitudeColumn";
@ -132,7 +135,7 @@ public class OccurrencePointsMerger implements Transducerer {
*/ */
record.eventdate = DateGuesser.convertDate(value$); record.eventdate = DateGuesser.convertDate(value$);
if (displaydateconvert) { if (displaydateconvert) {
AnalysisLogger.getLogger().info("From " + value$ + "->" + convert2conventionalFormat(record.eventdate) + " pattern " + DateGuesser.getPattern(value$)); logger.info("From " + value$ + "->" + convert2conventionalFormat(record.eventdate) + " pattern " + DateGuesser.getPattern(value$));
displaydateconvert = false; displaydateconvert = false;
} }
@ -385,7 +388,7 @@ public class OccurrencePointsMerger implements Transducerer {
int todel = objectstodelete.size(); int todel = objectstodelete.size();
int counter = 0; int counter = 0;
StringBuffer buffer = new StringBuffer(); StringBuffer buffer = new StringBuffer();
AnalysisLogger.getLogger().info("Deleting " + todel + " objects"); logger.info("Deleting " + todel + " objects");
if (todel > 0) { if (todel > 0) {
for (OccurrenceRecord record : objectstodelete) { for (OccurrenceRecord record : objectstodelete) {
buffer.append("("); buffer.append("(");
@ -434,9 +437,9 @@ public class OccurrencePointsMerger implements Transducerer {
if ((counter>0)&&(counter%500==0)){ if ((counter>0)&&(counter%500==0)){
String updateQ = DatabaseUtils.deleteFromBuffer(finalTableName, buffer); String updateQ = DatabaseUtils.deleteFromBuffer(finalTableName, buffer);
// AnalysisLogger.getLogger().debug("Update:\n"+updateQ); // logger.debug("Update:\n"+updateQ);
DatabaseFactory.executeSQLUpdate(updateQ, dbconnection); DatabaseFactory.executeSQLUpdate(updateQ, dbconnection);
AnalysisLogger.getLogger().info("Partial Objects deleted"); logger.info("Partial Objects deleted");
buffer = new StringBuffer(); buffer = new StringBuffer();
} }
else else
@ -448,15 +451,15 @@ public class OccurrencePointsMerger implements Transducerer {
} }
String updateQ = DatabaseUtils.deleteFromBuffer(finalTableName, buffer); String updateQ = DatabaseUtils.deleteFromBuffer(finalTableName, buffer);
// AnalysisLogger.getLogger().debug("Update:\n"+updateQ); // logger.debug("Update:\n"+updateQ);
DatabaseFactory.executeSQLUpdate(updateQ, dbconnection); DatabaseFactory.executeSQLUpdate(updateQ, dbconnection);
AnalysisLogger.getLogger().info("All Objects deleted"); logger.info("All Objects deleted");
} }
buffer = new StringBuffer(); buffer = new StringBuffer();
ArrayList<String> insertedStrings = new ArrayList<String>(); ArrayList<String> insertedStrings = new ArrayList<String>();
int toins = objectstoinsert.size(); int toins = objectstoinsert.size();
AnalysisLogger.getLogger().info("Inserting " + toins + " objects"); logger.info("Inserting " + toins + " objects");
counter = 0; counter = 0;
if (toins > 0) { if (toins > 0) {
for (OccurrenceRecord record : objectstoinsert) { for (OccurrenceRecord record : objectstoinsert) {
@ -470,7 +473,7 @@ public class OccurrencePointsMerger implements Transducerer {
if ((counter>0)&&(counter%500==0)){ if ((counter>0)&&(counter%500==0)){
insertBuffer(buffer); insertBuffer(buffer);
AnalysisLogger.getLogger().info("Partial Objects inserted"); logger.info("Partial Objects inserted");
buffer = new StringBuffer(); buffer = new StringBuffer();
} }
else else
@ -481,9 +484,9 @@ public class OccurrencePointsMerger implements Transducerer {
} }
insertBuffer(buffer); insertBuffer(buffer);
AnalysisLogger.getLogger().info("Objects inserted"); logger.info("Objects inserted");
AnalysisLogger.getLogger().info("Inserted " + counter + " objects"); logger.info("Inserted " + counter + " objects");
} }
objectstoinsert = null; objectstoinsert = null;
@ -503,7 +506,7 @@ public class OccurrencePointsMerger implements Transducerer {
String updateQ = "SET datestyle = \"ISO, MDY\"; "+DatabaseUtils.insertFromString(finalTableName, columns.toString(), subBuffer); String updateQ = "SET datestyle = \"ISO, MDY\"; "+DatabaseUtils.insertFromString(finalTableName, columns.toString(), subBuffer);
// System.out.println("Update:\n"+updateQ); // System.out.println("Update:\n"+updateQ);
AnalysisLogger.getLogger().debug("Update:\n"+updateQ); logger.debug("Update:\n"+updateQ);
DatabaseFactory.executeSQLUpdate(updateQ, dbconnection); DatabaseFactory.executeSQLUpdate(updateQ, dbconnection);
} }
@ -527,14 +530,14 @@ public class OccurrencePointsMerger implements Transducerer {
public void initDB(boolean buildTable) throws Exception { public void initDB(boolean buildTable) throws Exception {
// init DB connection // init DB connection
AnalysisLogger.getLogger().info("Initializing DB Connection"); logger.info("Initializing DB Connection");
dbconnection = DatabaseUtils.initDBSession(config); dbconnection = DatabaseUtils.initDBSession(config);
AnalysisLogger.getLogger().info("Taking Table Description"); logger.info("Taking Table Description");
extractColumnNames(); extractColumnNames();
if (buildTable) { if (buildTable) {
AnalysisLogger.getLogger().info("Taken Table Description: " + columns); logger.info("Taken Table Description: " + columns);
AnalysisLogger.getLogger().info("Creating final table: " + finalTableName); logger.info("Creating final table: " + finalTableName);
// create new merged table // create new merged table
try { try {
@ -574,36 +577,36 @@ public class OccurrencePointsMerger implements Transducerer {
public void takeFullRanges() { public void takeFullRanges() {
// take the elements from sx table // take the elements from sx table
AnalysisLogger.getLogger().info("Taking elements from left table: " + leftTableName); logger.info("Taking elements from left table: " + leftTableName);
leftRows = DatabaseFactory.executeSQLQuery(DatabaseUtils.getColumnsElementsStatement(leftTableName, columns.toString(), " limit 10000"), dbconnection); leftRows = DatabaseFactory.executeSQLQuery(DatabaseUtils.getColumnsElementsStatement(leftTableName, columns.toString(), " limit 10000"), dbconnection);
// take the elements from dx table // take the elements from dx table
AnalysisLogger.getLogger().info("Taking elements from right table: " + rightTableName); logger.info("Taking elements from right table: " + rightTableName);
rightRows = DatabaseFactory.executeSQLQuery(DatabaseUtils.getColumnsElementsStatement(rightTableName, columns.toString(), " limit 10000"), dbconnection); rightRows = DatabaseFactory.executeSQLQuery(DatabaseUtils.getColumnsElementsStatement(rightTableName, columns.toString(), " limit 10000"), dbconnection);
} }
public void takeRange(int offsetLeft, int numLeft, int offsetRight, int numRight) { public void takeRange(int offsetLeft, int numLeft, int offsetRight, int numRight) {
// take the elements from sx table // take the elements from sx table
AnalysisLogger.getLogger().info("Taking elements from left table: " + leftTableName); logger.info("Taking elements from left table: " + leftTableName);
leftRows = DatabaseFactory.executeSQLQuery(DatabaseUtils.getColumnsElementsStatement(leftTableName, columns.toString(), "order by "+columns.toString()+" offset " + offsetLeft + " limit " + numLeft), dbconnection); leftRows = DatabaseFactory.executeSQLQuery(DatabaseUtils.getColumnsElementsStatement(leftTableName, columns.toString(), "order by "+columns.toString()+" offset " + offsetLeft + " limit " + numLeft), dbconnection);
// take the elements from dx table // take the elements from dx table
AnalysisLogger.getLogger().info("Taking elements from right table: " + rightTableName); logger.info("Taking elements from right table: " + rightTableName);
rightRows = DatabaseFactory.executeSQLQuery(DatabaseUtils.getColumnsElementsStatement(rightTableName, columns.toString(), "order by "+columns.toString()+" offset " + offsetRight + " limit " + numRight), dbconnection); rightRows = DatabaseFactory.executeSQLQuery(DatabaseUtils.getColumnsElementsStatement(rightTableName, columns.toString(), "order by "+columns.toString()+" offset " + offsetRight + " limit " + numRight), dbconnection);
} }
public void computeRange() throws Exception { public void computeRange() throws Exception {
try { try {
AnalysisLogger.getLogger().info("Processing " + leftTableName + " vs " + rightTableName); logger.info("Processing " + leftTableName + " vs " + rightTableName);
// AnalysisLogger.getLogger().info("ELEMENTS " + getNumLeftObjects() + " vs " + getNumRightObjects()); // logger.info("ELEMENTS " + getNumLeftObjects() + " vs " + getNumRightObjects());
status = 10; status = 10;
int rightCounter = 0; int rightCounter = 0;
int similaritiesCounter = 0; int similaritiesCounter = 0;
int allrightrows = rightRows.size(); int allrightrows = rightRows.size();
if ((allrightrows > 0) && (getNumLeftObjects() > 0)) { if ((allrightrows > 0) && (getNumLeftObjects() > 0)) {
for (Object rRow : rightRows) { for (Object rRow : rightRows) {
// AnalysisLogger.getLogger().info("RR CONV"); // logger.info("RR CONV");
// transform into an occurrence object // transform into an occurrence object
OccurrenceRecord rightOcc = row2OccurrenceRecord((Object[]) rRow); OccurrenceRecord rightOcc = row2OccurrenceRecord((Object[]) rRow);
// AnalysisLogger.getLogger().info("RR CONV - OK"); // logger.info("RR CONV - OK");
// for each element in sx // for each element in sx
int k = 0; int k = 0;
boolean found = false; boolean found = false;
@ -611,15 +614,15 @@ public class OccurrencePointsMerger implements Transducerer {
OccurrenceRecord bestleftOcc = null; OccurrenceRecord bestleftOcc = null;
for (Object lRow : leftRows) { for (Object lRow : leftRows) {
OccurrenceRecord leftOcc = null; OccurrenceRecord leftOcc = null;
// AnalysisLogger.getLogger().info("LL CONV"); // logger.info("LL CONV");
leftOcc = row2OccurrenceRecord((Object[]) lRow); leftOcc = row2OccurrenceRecord((Object[]) lRow);
p = extProb(leftOcc, rightOcc); p = extProb(leftOcc, rightOcc);
// AnalysisLogger.getLogger().info("P"); // logger.info("P");
if (p >= confidenceValue) { if (p >= confidenceValue) {
bestleftOcc = leftOcc; bestleftOcc = leftOcc;
found = true; found = true;
similaritiesCounter++; similaritiesCounter++;
AnalysisLogger.getLogger().info("Found a similarity with P=" + p + " between (" + "\"" + leftOcc.scientificName + "\"" + ",\"" + leftOcc.x + "\"" + "," + "\"" + leftOcc.y + "\"" + "," + "\"" + leftOcc.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(leftOcc.eventdate) + "\"" + ") VS " + "(" + "\"" + rightOcc.scientificName + "\"" + "," + "\"" + rightOcc.x + "\"" + "," + "\"" + rightOcc.y + "\"" + "," + "\"" + rightOcc.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(rightOcc.eventdate) + "\"" + ")"); logger.info("Found a similarity with P=" + p + " between (" + "\"" + leftOcc.scientificName + "\"" + ",\"" + leftOcc.x + "\"" + "," + "\"" + leftOcc.y + "\"" + "," + "\"" + leftOcc.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(leftOcc.eventdate) + "\"" + ") VS " + "(" + "\"" + rightOcc.scientificName + "\"" + "," + "\"" + rightOcc.x + "\"" + "," + "\"" + rightOcc.y + "\"" + "," + "\"" + rightOcc.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(rightOcc.eventdate) + "\"" + ")");
// break; // break;
if (!firstbest) if (!firstbest)
manageHighProbability(p, bestleftOcc, rightOcc); manageHighProbability(p, bestleftOcc, rightOcc);
@ -646,23 +649,23 @@ public class OccurrencePointsMerger implements Transducerer {
status = Math.min(90, 10f + (80 * ((float) rightCounter) / ((float) allrightrows))); status = Math.min(90, 10f + (80 * ((float) rightCounter) / ((float) allrightrows)));
if (rightCounter % 500 == 0) { if (rightCounter % 500 == 0) {
AnalysisLogger.getLogger().info("Persisting ... " + rightCounter + " over " + allrightrows); logger.info("Persisting ... " + rightCounter + " over " + allrightrows);
persist(); persist();
} }
} }
} }
AnalysisLogger.getLogger().info("Found " + similaritiesCounter + " similarities on " + rightCounter + " elements"); logger.info("Found " + similaritiesCounter + " similarities on " + rightCounter + " elements");
status = 90; status = 90;
// transform the complete list into a table // transform the complete list into a table
persist(); persist();
// close DB connection // close DB connection
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().error("error in computation",e); logger.error("error in computation",e);
throw e; throw e;
} finally { } finally {
shutdown(); shutdown();
status = 100; status = 100;
AnalysisLogger.getLogger().info("Occ Points Processing Finished and db closed"); logger.info("Occ Points Processing Finished and db closed");
} }
} }
@ -679,7 +682,7 @@ public class OccurrencePointsMerger implements Transducerer {
public void postProcess() throws Exception{ public void postProcess() throws Exception{
/* /*
AnalysisLogger.getLogger().info("Post processing ... Deleting duplicates"); logger.info("Post processing ... Deleting duplicates");
OccurrencePointsDuplicatesDeleter opdd = new OccurrencePointsDuplicatesDeleter(); OccurrencePointsDuplicatesDeleter opdd = new OccurrencePointsDuplicatesDeleter();
opdd.setConfiguration(config); opdd.setConfiguration(config);
@ -687,7 +690,7 @@ public class OccurrencePointsMerger implements Transducerer {
opdd.initDB(false); opdd.initDB(false);
opdd.takeFullRanges(); opdd.takeFullRanges();
opdd.computeRange(); opdd.computeRange();
AnalysisLogger.getLogger().info("Post processing ... Finished"); logger.info("Post processing ... Finished");
*/ */
} }
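Most of the converted calls in this class keep the original string concatenation (for example the long "Found a similarity with P=..." message), which is functionally equivalent to the old AnalysisLogger code. SLF4J also supports parameterized messages, where the string is built only when the level is enabled; that alternative is not part of this commit, but a minimal sketch with illustrative variable names would be:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ParameterizedLoggingSketch {

    private static final Logger logger = LoggerFactory.getLogger(ParameterizedLoggingSketch.class);

    void report(double p, String leftRecord, String rightRecord) {
        // Concatenation builds the message string even when INFO is disabled.
        logger.info("Found a similarity with P=" + p + " between " + leftRecord + " and " + rightRecord);

        // Placeholders defer formatting until after the level check has passed.
        logger.info("Found a similarity with P={} between {} and {}", p, leftRecord, rightRecord);
    }
}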

View File

@ -2,21 +2,25 @@ package org.gcube.dataanalysis.ecoengine.transducers;
import java.util.List; import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory; import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer; import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils; import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory; import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
//implements a creator of tables which simply executes a query //implements a creator of tables which simply executes a query
public class QueryExecutor implements Transducerer { public class QueryExecutor implements Transducerer {
private static Logger logger = LoggerFactory.getLogger(QueryExecutor.class);
protected String query = ""; protected String query = "";
protected String finalTableName = ""; protected String finalTableName = "";
protected String finalTableLabel = ""; protected String finalTableLabel = "";
@ -85,21 +89,21 @@ public class QueryExecutor implements Transducerer {
SessionFactory dbconnection = null; SessionFactory dbconnection = null;
try{ try{
AnalysisLogger.getLogger().trace("Initializing DB Connection"); logger.trace("Initializing DB Connection");
dbconnection = DatabaseUtils.initDBSession(config); dbconnection = DatabaseUtils.initDBSession(config);
AnalysisLogger.getLogger().trace("Deleting Previous Table "+DatabaseUtils.dropTableStatement(finalTableName)); logger.trace("Deleting Previous Table "+DatabaseUtils.dropTableStatement(finalTableName));
try{ try{
DatabaseFactory.executeSQLUpdate(DatabaseUtils.dropTableStatement(finalTableName), dbconnection); DatabaseFactory.executeSQLUpdate(DatabaseUtils.dropTableStatement(finalTableName), dbconnection);
}catch(Exception ee){ }catch(Exception ee){
} }
status = 10; status = 10;
AnalysisLogger.getLogger().trace("Deleted"); logger.trace("Deleted");
AnalysisLogger.getLogger().trace("Executing query: "+query); logger.trace("Executing query: "+query);
DatabaseFactory.executeSQLUpdate(query, dbconnection); DatabaseFactory.executeSQLUpdate(query, dbconnection);
AnalysisLogger.getLogger().trace("Executed!"); logger.trace("Executed!");
} catch (Exception e) { } catch (Exception e) {
AnalysisLogger.getLogger().trace("ERROR:",e); logger.trace("ERROR:",e);
throw e; throw e;
} finally { } finally {
if (dbconnection != null) if (dbconnection != null)
@ -107,7 +111,7 @@ public class QueryExecutor implements Transducerer {
dbconnection.close(); dbconnection.close();
}catch(Exception e2){} }catch(Exception e2){}
status = 100; status = 100;
AnalysisLogger.getLogger().trace("Processing Finished and db closed"); logger.trace("Processing Finished and db closed");
} }
} }

View File

@ -13,7 +13,6 @@ import java.util.List;
import org.gcube.contentmanagement.graphtools.utils.DateGuesser; import org.gcube.contentmanagement.graphtools.utils.DateGuesser;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions; import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType; import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType; import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable; import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
@ -21,6 +20,7 @@ import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm; import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector; import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing; import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
@ -33,6 +33,8 @@ import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.Operations; import org.gcube.dataanalysis.ecoengine.utils.Operations;
import org.gcube.dataanalysis.ecoengine.utils.Tuple; import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm { public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
private static String timeSeriesTable = "TimeSeriesTable"; private static String timeSeriesTable = "TimeSeriesTable";
@ -58,6 +60,8 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
LOW, NORMAL, HIGH LOW, NORMAL, HIGH
} }
private static Logger logger = LoggerFactory.getLogger(TimeSeriesAnalysis.class);
@Override @Override
public void init() throws Exception { public void init() throws Exception {
@ -112,18 +116,18 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
} }
} }
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Table Name: " + tablename); logger.debug("TimeSeriesAnalysis->Table Name: " + tablename);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Time Column: " + timecolumn); logger.debug("TimeSeriesAnalysis->Time Column: " + timecolumn);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Values Column: " + valuescolum); logger.debug("TimeSeriesAnalysis->Values Column: " + valuescolum);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Aggregation: " + aggregationFunc); logger.debug("TimeSeriesAnalysis->Aggregation: " + aggregationFunc);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->FFT Window Samples: " + fftWindowSamplesDouble); logger.debug("TimeSeriesAnalysis->FFT Window Samples: " + fftWindowSamplesDouble);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->SSA Window Samples: " + windowLength); logger.debug("TimeSeriesAnalysis->SSA Window Samples: " + windowLength);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->SSA Eigenvalues threshold: " + eigenvaluespercthr); logger.debug("TimeSeriesAnalysis->SSA Eigenvalues threshold: " + eigenvaluespercthr);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->SSA Points to Reconstruct: " + pointsToReconstruct); logger.debug("TimeSeriesAnalysis->SSA Points to Reconstruct: " + pointsToReconstruct);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Extracting Points..."); logger.debug("TimeSeriesAnalysis->Extracting Points...");
String query = "select * from (select " + aggregationFunc + "( CAST ( " + valuescolum + " as real))," + timecolumn + " from " + tablename + " group by " + timecolumn + ") as a"; String query = "select * from (select " + aggregationFunc + "( CAST ( " + valuescolum + " as real))," + timecolumn + " from " + tablename + " group by " + timecolumn + ") as a";
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Query to execute: " + query); logger.debug("TimeSeriesAnalysis->Query to execute: " + query);
List<Object> results = DatabaseFactory.executeSQLQuery(query, dbconnection); List<Object> results = DatabaseFactory.executeSQLQuery(query, dbconnection);
status = 10; status = 10;
@ -132,9 +136,9 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
else if (results.size() > maxpoints) else if (results.size() > maxpoints)
throw new Exception("Too long Time Series: a maximum of distinct " + maxpoints + " in time is allowed"); throw new Exception("Too long Time Series: a maximum of distinct " + maxpoints + " in time is allowed");
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Points Extracted!"); logger.debug("TimeSeriesAnalysis->Points Extracted!");
// build signal // build signal
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Building signal"); logger.debug("TimeSeriesAnalysis->Building signal");
List<Tuple<String>> signal = new ArrayList<Tuple<String>>(); List<Tuple<String>> signal = new ArrayList<Tuple<String>>();
int sizesignal = 0; int sizesignal = 0;
for (Object row : results) { for (Object row : results) {
@ -146,28 +150,28 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
} }
status = 20; status = 20;
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Signal built with success. Size: " + sizesignal); logger.debug("TimeSeriesAnalysis->Signal built with success. Size: " + sizesignal);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Building Time Series"); logger.debug("TimeSeriesAnalysis->Building Time Series");
TimeSeries ts = TimeSeries.buildFromSignal(signal, config); TimeSeries ts = TimeSeries.buildFromSignal(signal, config);
String timepattern = ts.getTimepattern(); String timepattern = ts.getTimepattern();
String chartpattern = "MM-dd-yy"; String chartpattern = "MM-dd-yy";
if (timepattern.equals("s") || (DateGuesser.isJavaDateOrigin(ts.getTime()[0]) && DateGuesser.isJavaDateOrigin(ts.getTime()[ts.getTime().length - 1]))) { if (timepattern.equals("s") || (DateGuesser.isJavaDateOrigin(ts.getTime()[0]) && DateGuesser.isJavaDateOrigin(ts.getTime()[ts.getTime().length - 1]))) {
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Changing chart pattern to Seconds!"); logger.debug("TimeSeriesAnalysis->Changing chart pattern to Seconds!");
chartpattern = "HH:mm:ss:SS"; chartpattern = "HH:mm:ss:SS";
} else } else
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Chart pattern remains " + chartpattern); logger.debug("TimeSeriesAnalysis->Chart pattern remains " + chartpattern);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Uniformly sampling the signal"); logger.debug("TimeSeriesAnalysis->Uniformly sampling the signal");
if (display) if (display)
SignalProcessing.displaySignalWithTime(ts.getValues(), ts.getTime(), "Time Series", chartpattern); SignalProcessing.displaySignalWithTime(ts.getValues(), ts.getTime(), "Time Series", chartpattern);
signalImg = SignalProcessing.renderSignalWithTime(ts.getValues(), ts.getTime(), "Original Time Series", chartpattern); signalImg = SignalProcessing.renderSignalWithTime(ts.getValues(), ts.getTime(), "Original Time Series", chartpattern);
int originalSignalLength = ts.getValues().length; int originalSignalLength = ts.getValues().length;
ts.convertToUniformSignal(0); ts.convertToUniformSignal(0);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Uniform sampling finished"); logger.debug("TimeSeriesAnalysis->Uniform sampling finished");
status = 30; status = 30;
// spectrum and signal processing // spectrum and signal processing
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Detecting periodicities"); logger.debug("TimeSeriesAnalysis->Detecting periodicities");
PeriodicityDetector pd = new PeriodicityDetector(); PeriodicityDetector pd = new PeriodicityDetector();
LinkedHashMap<String, String> frequencies = pd.detectAllFrequencies(ts.getValues(), 1, 0.01f, 0.5f, -1, fftWindowSamplesDouble, sensitivity, display); LinkedHashMap<String, String> frequencies = pd.detectAllFrequencies(ts.getValues(), 1, 0.01f, 0.5f, -1, fftWindowSamplesDouble, sensitivity, display);
outputParameters.put("Original Time Series Length", "" + originalSignalLength); outputParameters.put("Original Time Series Length", "" + originalSignalLength);
@ -187,11 +191,11 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
* outputParameters.put("Detected Frequency (samples^-1)", ""+MathFunctions.roundDecimal(F,2)); outputParameters.put("Indecision on Frequency", "["+MathFunctions.roundDecimal(pd.lowermeanF,2)+" , "+MathFunctions.roundDecimal(pd.uppermeanF,2) + "]"); outputParameters.put("Average detected Period (samples)", ""+MathFunctions.roundDecimal(pd.meanPeriod,2)); outputParameters.put("Indecision on Average Period", "["+MathFunctions.roundDecimal(pd.lowermeanPeriod,2)+" , "+MathFunctions.roundDecimal(pd.uppermeanPeriod,2) + "]"); outputParameters.put("Samples range in which periodicity was detected", "from "+pd.startPeriodSampleIndex+" to "+pd.endPeriodSampleIndex); outputParameters.put("Period Strength with interpretation", ""+MathFunctions.roundDecimal(pd.periodicityStrength,2)+" ("+pd.getPeriodicityStregthInterpretation()+")"); * outputParameters.put("Detected Frequency (samples^-1)", ""+MathFunctions.roundDecimal(F,2)); outputParameters.put("Indecision on Frequency", "["+MathFunctions.roundDecimal(pd.lowermeanF,2)+" , "+MathFunctions.roundDecimal(pd.uppermeanF,2) + "]"); outputParameters.put("Average detected Period (samples)", ""+MathFunctions.roundDecimal(pd.meanPeriod,2)); outputParameters.put("Indecision on Average Period", "["+MathFunctions.roundDecimal(pd.lowermeanPeriod,2)+" , "+MathFunctions.roundDecimal(pd.uppermeanPeriod,2) + "]"); outputParameters.put("Samples range in which periodicity was detected", "from "+pd.startPeriodSampleIndex+" to "+pd.endPeriodSampleIndex); outputParameters.put("Period Strength with interpretation", ""+MathFunctions.roundDecimal(pd.periodicityStrength,2)+" ("+pd.getPeriodicityStregthInterpretation()+")");
*/ */
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Periodicity Detected!"); logger.debug("TimeSeriesAnalysis->Periodicity Detected!");
status = 60; status = 60;
System.gc(); System.gc();
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Executing SSA analysis"); logger.debug("TimeSeriesAnalysis->Executing SSA analysis");
List<Double> values = new ArrayList<Double>(); List<Double> values = new ArrayList<Double>();
for (double v : ts.getValues()) { for (double v : ts.getValues()) {
values.add(v); values.add(v);
@ -202,14 +206,14 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
if (windowLength < ts.getValues().length) if (windowLength < ts.getValues().length)
ssa = SSAWorkflow.applyCompleteWorkflow(values, windowLength, eigenvaluespercthr, pointsToReconstruct, false); ssa = SSAWorkflow.applyCompleteWorkflow(values, windowLength, eigenvaluespercthr, pointsToReconstruct, false);
else { else {
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->SSA analysis impossible to complete"); logger.debug("TimeSeriesAnalysis->SSA analysis impossible to complete");
outputParameters.put("SSA Note:", "The window length is higher than the signal length. Please reduce the value to less than the signal length."); outputParameters.put("SSA Note:", "The window length is higher than the signal length. Please reduce the value to less than the signal length.");
return; return;
} }
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->SSA analysis completed"); logger.debug("TimeSeriesAnalysis->SSA analysis completed");
status = 70; status = 70;
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Rendering Images"); logger.debug("TimeSeriesAnalysis->Rendering Images");
uniformSignalImg = SignalProcessing.renderSignalWithTime(ts.getValues(), ts.getTime(), "Uniformly Sampled Time Series", chartpattern); uniformSignalImg = SignalProcessing.renderSignalWithTime(ts.getValues(), ts.getTime(), "Uniformly Sampled Time Series", chartpattern);
if (uniformSignalImg == null) if (uniformSignalImg == null)
outputParameters.put("Note:", "The charts for uniformly sampled and forecasted signals contain too many points and will not be displayed. The values will be only reported in the output file."); outputParameters.put("Note:", "The charts for uniformly sampled and forecasted signals contain too many points and will not be displayed. The values will be only reported in the output file.");
@ -235,9 +239,9 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
eigenValues[i] = ssa.getPercentList().get(i); eigenValues[i] = ssa.getPercentList().get(i);
} }
eigenValuesImg = SignalProcessing.renderSignalWithGenericTime(eigenValues, 0f, 1, "SSA Eigenvalues"); eigenValuesImg = SignalProcessing.renderSignalWithGenericTime(eigenValues, 0f, 1, "SSA Eigenvalues");
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Images Rendered"); logger.debug("TimeSeriesAnalysis->Images Rendered");
System.gc(); System.gc();
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Producing Files"); logger.debug("TimeSeriesAnalysis->Producing Files");
outputfilename = new File(config.getPersistencePath(), valuescolum + "_SignalProcessing.csv"); outputfilename = new File(config.getPersistencePath(), valuescolum + "_SignalProcessing.csv");
BufferedWriter bw = new BufferedWriter(new FileWriter(outputfilename)); BufferedWriter bw = new BufferedWriter(new FileWriter(outputfilename));
bw.write("Uniformly Sampled Time Series,Time Line,Forecasted Time Series,SSA Eigenvalues\n"); bw.write("Uniformly Sampled Time Series,Time Line,Forecasted Time Series,SSA Eigenvalues\n");
@ -263,15 +267,15 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
bw.write("\n"); bw.write("\n");
} }
bw.close(); bw.close();
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Files Produced"); logger.debug("TimeSeriesAnalysis->Files Produced");
if (display) { if (display) {
SignalProcessing.displaySignalWithTime(ts.getValues(), ts.getTime(), "Uniformly Sampled Time Series", chartpattern); SignalProcessing.displaySignalWithTime(ts.getValues(), ts.getTime(), "Uniformly Sampled Time Series", chartpattern);
SignalProcessing.displaySignalWithGenericTime(ts.getValues(), 0, 1, "Uniformly Sampled Time Series in Samples"); SignalProcessing.displaySignalWithGenericTime(ts.getValues(), 0, 1, "Uniformly Sampled Time Series in Samples");
SignalProcessing.displaySignalWithTime(ssa.getForecastSignal(), newtimes, "Forecasted Time Series", chartpattern); SignalProcessing.displaySignalWithTime(ssa.getForecastSignal(), newtimes, "Forecasted Time Series", chartpattern);
SignalProcessing.displaySignalWithGenericTime(eigenValues, 0f, 1, "SSA Eigenvalues"); SignalProcessing.displaySignalWithGenericTime(eigenValues, 0f, 1, "SSA Eigenvalues");
} }
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->" + outputParameters); logger.debug("TimeSeriesAnalysis->" + outputParameters);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Computation has finished"); logger.debug("TimeSeriesAnalysis->Computation has finished");
} catch (Throwable e) { } catch (Throwable e) {
e.printStackTrace(); e.printStackTrace();
throw new Exception(e.getLocalizedMessage()); throw new Exception(e.getLocalizedMessage());
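The recurring change in this commit swaps the shared AnalysisLogger singleton for a per-class SLF4J logger. A minimal sketch of the new pattern as it appears in the migrated classes (superclass and method body omitted here for brevity; the process() content is illustrative, not taken from the source):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TimeSeriesAnalysis {

	// one static logger per class; the concrete backend is chosen by the SLF4J binding on the classpath
	private static Logger logger = LoggerFactory.getLogger(TimeSeriesAnalysis.class);

	protected void process() {
		// log messages keep the existing "ClassName->message" prefix convention
		logger.debug("TimeSeriesAnalysis->Computation has finished");
	}
}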
View File
@@ -8,7 +8,6 @@ import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
-import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
@@ -18,9 +17,13 @@ import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.hibernate.SessionFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public abstract class AbstractChartsProducer extends StandardLocalExternalAlgorithm {
+private static Logger logger = LoggerFactory.getLogger(AbstractChartsProducer.class);
protected static String inputTableParameter = "InputTable";
protected static String attributesParameter = "Attributes";
protected static String quantitiesParameter = "Quantities";
@@ -57,7 +60,7 @@ public abstract class AbstractChartsProducer extends StandardLocalExternalAlgori
@Override
public void init() throws Exception {
-AnalysisLogger.getLogger().debug("ChartsProducer Initialized");
+logger.debug("ChartsProducer Initialized");
}
@Override
@@ -107,20 +110,19 @@ public abstract class AbstractChartsProducer extends StandardLocalExternalAlgori
@Override
protected void process() throws Exception {
status = 10;
-AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
try {
-AnalysisLogger.getLogger().debug("ChartsProducer started");
+logger.debug("ChartsProducer started");
String driver = config.getParam("DatabaseDriver");
String username = config.getParam("DatabaseUserName");
String pwd = config.getParam("DatabasePassword");
String url = config.getParam("DatabaseURL");
String table = IOHelper.getInputParameter(config, inputTableParameter);
-AnalysisLogger.getLogger().debug("ChartsProducer: Driver: " + driver + " User " + username + " URL " + url + " Table: " + table);
+logger.debug("ChartsProducer: Driver: " + driver + " User " + username + " URL " + url + " Table: " + table);
connection = DatabaseUtils.initDBSession(config);
-AnalysisLogger.getLogger().debug("ChartsProducer: Connection initialized");
+logger.debug("ChartsProducer: Connection initialized");
LexicalEngineConfiguration conf = new LexicalEngineConfiguration();
conf.setDatabaseUserName(username);
@@ -140,7 +142,7 @@ public abstract class AbstractChartsProducer extends StandardLocalExternalAlgori
for (String quantity : quantities) {
//produce chart with dimensions,quantity, time
String query = InfoRetrievalQuery(table,dimensions,quantity,time);
-AnalysisLogger.getLogger().debug("ChartsProducer: Query for retrieving information "+query);
+logger.debug("ChartsProducer: Query for retrieving information "+query);
List<Object> values = DatabaseFactory.executeSQLQuery(query, connection);
if (values==null)
throw new Exception("There are issued in managing selected attributes and quantities");
@@ -166,7 +168,7 @@ public abstract class AbstractChartsProducer extends StandardLocalExternalAlgori
if (noCharts)
throw new Exception("Error - no chart was produced because of incompatibility with the selected input parameters");
-AnalysisLogger.getLogger().debug("ChartsProducer: finished");
+logger.debug("ChartsProducer: finished");
} catch (Throwable e) {
e.printStackTrace();
@@ -182,7 +184,7 @@ public abstract class AbstractChartsProducer extends StandardLocalExternalAlgori
@Override
public void shutdown() {
-AnalysisLogger.getLogger().debug("ChartsProducer shutdown");
+logger.debug("ChartsProducer shutdown");
DatabaseUtils.closeDBConnection(connection);
}
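The migrated calls keep the original string concatenation when composing messages (for example the driver/URL/table debug line above). SLF4J also accepts parameterized messages, which defer string building until the DEBUG level is actually enabled; a small sketch of the equivalent calls, not part of this commit:

// same message as the concatenated form, but only built when DEBUG is enabled
logger.debug("ChartsProducer: Driver: {} User {} URL {} Table: {}", driver, username, url, table);
// a Throwable passed as the last argument is logged together with its stack trace
logger.error("ChartsProducer failed", e);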
View File
@@ -11,7 +11,6 @@ import org.gcube.contentmanagement.graphtools.plotting.graphs.HistogramGraph;
import org.gcube.contentmanagement.graphtools.plotting.graphs.RadarGraph;
import org.gcube.contentmanagement.graphtools.plotting.graphs.ScatterGraphGeneric;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
-import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
@@ -22,9 +21,12 @@ import org.jfree.data.category.DefaultCategoryDataset;
import org.jfree.data.function.NormalDistributionFunction2D;
import org.jfree.data.general.DatasetUtilities;
import org.jfree.data.xy.XYSeriesCollection;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class QuantitiesAttributesChartsTransducerer extends AbstractChartsProducer {
+private static Logger logger = LoggerFactory.getLogger(QuantitiesAttributesChartsTransducerer.class);
@Override
protected void setInputParameters() {
@@ -78,7 +80,7 @@ public class QuantitiesAttributesChartsTransducerer extends AbstractChartsProduc
// List<Object> meanvar = DatabaseFactory.executeSQLQuery("select avg("+"( CAST ( " + quantity + " as real))"+"), variance("+"( CAST ( " + quantity + " as real))"+") from "+IOHelper.getInputParameter(config, inputTableParameter), connection);
String selectMeanVar = "select avg(( CAST ( "+quantity+" as real))), variance(( CAST ( "+quantity+" as real))) from (select "+quantity+" from "+IOHelper.getInputParameter(config, inputTableParameter)+" where "+quantity+" IS NOT NULL and CAST("+quantity+" as character varying) <> '' ) as a";
-AnalysisLogger.getLogger().debug("QuantitiesAttributesCharts: select for mean and variance: "+selectMeanVar);
+logger.debug("QuantitiesAttributesCharts: select for mean and variance: "+selectMeanVar);
List<Object> meanvar = DatabaseFactory.executeSQLQuery(selectMeanVar, connection);
@@ -86,13 +88,13 @@ public class QuantitiesAttributesChartsTransducerer extends AbstractChartsProduc
try{
meanvarsrow = (Object[]) meanvar.get(0);}catch(Exception e){
-AnalysisLogger.getLogger().debug("QuantitiesAttributesCharts: cannot detect mean and variance for "+quantity);
+logger.debug("QuantitiesAttributesCharts: cannot detect mean and variance for "+quantity);
}
double mean = MathFunctions.roundDecimal(Double.parseDouble(""+meanvarsrow[0]),2);
double variance = MathFunctions.roundDecimal(Math.sqrt(Double.parseDouble(""+meanvarsrow[1])),2);
-AnalysisLogger.getLogger().debug("QuantitiesAttributesCharts: " + mean + " and variance:" + variance);
+logger.debug("QuantitiesAttributesCharts: " + mean + " and variance:" + variance);
NormalDistributionFunction2D normaldistributionfunction2d = new NormalDistributionFunction2D(mean, variance);
org.jfree.data.xy.XYSeries gaussianxyseries = DatasetUtilities.sampleFunction2DToSeries(normaldistributionfunction2d, (mean - (2 * variance)), (mean + (2 * variance)), 121, "Distribution of "+quantity);
View File
@@ -13,7 +13,6 @@ import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
import org.gcube.contentmanagement.graphtools.plotting.graphs.TimeSeriesGraph;
import org.gcube.contentmanagement.graphtools.plotting.graphs.TransectLineGraph;
import org.gcube.contentmanagement.graphtools.utils.DateGuesser;
-import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
@@ -26,9 +25,13 @@ import org.jfree.data.time.FixedMillisecond;
import org.jfree.data.time.TimeSeries;
import org.jfree.data.time.TimeSeriesCollection;
import org.jfree.data.time.TimeSeriesDataItem;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class TimeSeriesChartsTransducerer extends QuantitiesAttributesChartsTransducerer {
+private static Logger logger = LoggerFactory.getLogger(TimeSeriesChartsTransducerer.class);
@Override
protected void setInputParameters() {
List<TableTemplates> templates = new ArrayList<TableTemplates>();
@@ -60,13 +63,13 @@ public class TimeSeriesChartsTransducerer extends QuantitiesAttributesChartsTran
TimeAnalyzer analyzer = new TimeAnalyzer();
sampleTime = analyzer.string2Date(timel);
timepattern = analyzer.getPattern();
-AnalysisLogger.getLogger().debug("TimeSeriesChart: Time pattern: " + timepattern);
+logger.debug("TimeSeriesChart: Time pattern: " + timepattern);
formatter = new SimpleDateFormat(timepattern);
}
return formatter.parse(timel);
} catch (ParseException e) {
-AnalysisLogger.getLogger().debug("Error parsing date " + timel + " using pattern " + timepattern);
+logger.debug("Error parsing date " + timel + " using pattern " + timepattern);
return null;
}
}
@@ -108,12 +111,12 @@ public class TimeSeriesChartsTransducerer extends QuantitiesAttributesChartsTran
else
indicesToTake = Operations.uniformIntegerSampling(0, nrows - 1, nrows);
-AnalysisLogger.getLogger().debug("TimeSeriesChartsTransducerer: uniform sampling - taking " + indicesToTake.length + " over " + nrows);
+logger.debug("TimeSeriesChartsTransducerer: uniform sampling - taking " + indicesToTake.length + " over " + nrows);
for (int i = 0; i < indicesToTake.length; i++) {
Object row = rows.get(indicesToTake[i]);
Object[] array = (Object[]) row;
-// AnalysisLogger.getLogger().debug("TimeSeriesChartsTransducerer: "+Arrays.toString(array));
+// logger.debug("TimeSeriesChartsTransducerer: "+Arrays.toString(array));
Double q = null;
Date timeD = null;
String timel = "" + array[array.length - 1];
@@ -121,7 +124,7 @@ public class TimeSeriesChartsTransducerer extends QuantitiesAttributesChartsTran
q = Double.parseDouble("" + array[array.length - 2]);
timeD = getTime(timel);
} catch (Exception e) {
-AnalysisLogger.getLogger().debug("TimeSeriesChartsTransducerer: warning skipping value " + q + "," + timel);
+logger.debug("TimeSeriesChartsTransducerer: warning skipping value " + q + "," + timel);
}
if (q != null && timeD != null) {
@@ -130,7 +133,7 @@ public class TimeSeriesChartsTransducerer extends QuantitiesAttributesChartsTran
if (item != null) {
double prevquant = (Double) item.getValue();
q = prevquant + q;
-AnalysisLogger.getLogger().debug("TimeSeriesChartsTransducerer: a previous quantity was found for time " + timel + " : " + prevquant + " setting to " + (prevquant + q));
+logger.debug("TimeSeriesChartsTransducerer: a previous quantity was found for time " + timel + " : " + prevquant + " setting to " + (prevquant + q));
item.setValue(q);
} else
series.add(ms, q);
View File
@@ -3,9 +3,10 @@ package org.gcube.dataanalysis.ecoengine.utils;
import java.math.BigInteger;
import java.util.List;
-import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.hibernate.SessionFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -14,6 +15,8 @@ import org.hibernate.SessionFactory;
*/
public class HspecDiscrepanciesCalculator {
+private static Logger logger = LoggerFactory.getLogger(HspecDiscrepanciesCalculator.class);
private BigInteger numOfElements;
private int errorCounter;
//connection setup
@@ -42,11 +45,10 @@ public class HspecDiscrepanciesCalculator {
//init connections
public HspecDiscrepanciesCalculator(AlgorithmConfiguration config) throws Exception {
-AnalysisLogger.setLogger(config.getConfigPath() + LogFile);
referencedbConnection = DatabaseFactory.initDBConnection(config.getConfigPath() + AlgorithmConfiguration.defaultConnectionFile,config);
-AnalysisLogger.getLogger().debug("ReferenceDB initialized");
+logger.debug("ReferenceDB initialized");
destinationdbConnection = DatabaseFactory.initDBConnection(config.getConfigPath() + AlgorithmConfiguration.defaultConnectionFile,config);
-AnalysisLogger.getLogger().debug("OriginalDB initialized");
+logger.debug("OriginalDB initialized");
}
//counts the elements in a table
@@ -54,7 +56,7 @@ public class HspecDiscrepanciesCalculator {
{
BigInteger count = BigInteger.ZERO;
String countingQuery = "select count(*) from "+tablename;
-AnalysisLogger.getLogger().debug("Getting DB elements by this query: "+countingQuery);
+logger.debug("Getting DB elements by this query: "+countingQuery);
List<Object> result = DatabaseFactory.executeSQLQuery(countingQuery, session);
count = (BigInteger) result.get(0);
return count;
@@ -64,7 +66,7 @@ public class HspecDiscrepanciesCalculator {
//takes a chunk of elements from the database, belonging to the set of 170 selected species
public List<Object> takeChunkOfDestElements(String tablename,String selectedColumns,String criteria, SessionFactory session,String whereclause) {
String query = String.format(selectDestElementsQuery,selectedColumns,tablename,whereclause);
-// AnalysisLogger.getLogger().debug("takeChunkOfDestinationElements-> executing query on DB: " + query);
+// logger.debug("takeChunkOfDestinationElements-> executing query on DB: " + query);
List<Object> results = DatabaseFactory.executeSQLQuery(query, session);
return results;
}
@@ -72,7 +74,7 @@ public class HspecDiscrepanciesCalculator {
//takes a chunk of elements from the database, belonging to the set of 170 selected species
public List<Object> takeChunkOfElements(String tablename,String selectedColumns,String criteria, int limit, int offset, SessionFactory session) {
String query = String.format(selectElementsQuery,selectedColumns,tablename,criteria)+ " limit " + limit + " offset " + offset;
-AnalysisLogger.getLogger().debug("takeChunkOfElements-> executing query on DB: " + query);
+logger.debug("takeChunkOfElements-> executing query on DB: " + query);
List<Object> results = DatabaseFactory.executeSQLQuery(query, session);
return results;
}
@@ -120,7 +122,7 @@ public class HspecDiscrepanciesCalculator {
// take the number of elements
numOfElements = countElements(analyzedTable, destinationdbConnection);
-AnalysisLogger.getLogger().debug("Remote DB contains " + numOfElements + " elements.");
+logger.debug("Remote DB contains " + numOfElements + " elements.");
int maxNumber = numOfElements.intValue();
int numOfChunks = maxNumber / chunkSize;
if ((maxNumber % chunkSize) > 0) {
@@ -157,9 +159,9 @@ public class HspecDiscrepanciesCalculator {
columns = destrow.length;
}
else{
-AnalysisLogger.getLogger().debug("ERROR - COULD NOT FIND "+refrow+" ON DESTINATION TABLE");
+logger.debug("ERROR - COULD NOT FIND "+refrow+" ON DESTINATION TABLE");
for (int k=0;k<refcolumns.length;k++){
-AnalysisLogger.getLogger().debug("-"+refrow[k]);
+logger.debug("-"+refrow[k]);
}
error = true;
break;
@@ -174,13 +176,13 @@ public class HspecDiscrepanciesCalculator {
if (Math.abs(d-isNumber(destelem))>Threshold){
errorCounter++;
equal = false;
-AnalysisLogger.getLogger().debug("ERROR - DISCREPANCY AT NUMBERS COMPARISON: "+refelem+" vs "+destelem);
+logger.debug("ERROR - DISCREPANCY AT NUMBERS COMPARISON: "+refelem+" vs "+destelem);
}
}
else if (!refelem.equals(destelem)){
errorCounter++;
equal = false;
-AnalysisLogger.getLogger().debug("ERROR - DISCREPANCY AT STRING COMPARISON: "+refelem+" vs "+destelem);
+logger.debug("ERROR - DISCREPANCY AT STRING COMPARISON: "+refelem+" vs "+destelem);
}
if (!equal)
break;
@@ -195,11 +197,11 @@ public class HspecDiscrepanciesCalculator {
if (!equal)
break;
else
-AnalysisLogger.getLogger().debug("CHUNK NUMBER "+i+" of "+numOfChunks+" OK!");
+logger.debug("CHUNK NUMBER "+i+" of "+numOfChunks+" OK!");
}
long t1 = System.currentTimeMillis();
-AnalysisLogger.getLogger().debug("ELAPSED TIME: " + (t1-t0) + " ms");
+logger.debug("ELAPSED TIME: " + (t1-t0) + " ms");
//close connections
referencedbConnection.close();
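Note that the discrepancy reports above are still emitted at DEBUG level, exactly as before the migration; only the logging facade changed. If those "ERROR - DISCREPANCY ..." messages should remain visible when DEBUG output is turned off, a follow-up change could raise them to a higher level, for example (hypothetical, not part of this commit):

// WARN survives the typical production configuration where DEBUG is disabled
logger.warn("DISCREPANCY AT NUMBERS COMPARISON: " + refelem + " vs " + destelem);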
View File
@@ -3,9 +3,10 @@ package org.gcube.dataanalysis.ecoengine.utils;
import java.math.BigInteger;
import java.util.List;
-import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.hibernate.SessionFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -14,6 +15,8 @@ import org.hibernate.SessionFactory;
*/
public class TablesDiscrepanciesCalculator {
+private static Logger logger = LoggerFactory.getLogger(TablesDiscrepanciesCalculator.class);
private BigInteger numOfElements;
private int errorCounter;
//connection setup
@@ -41,11 +44,10 @@ public class TablesDiscrepanciesCalculator {
//init connections
public TablesDiscrepanciesCalculator(AlgorithmConfiguration config) throws Exception {
-AnalysisLogger.setLogger(config.getConfigPath() + LogFile);
referencedbConnection = DatabaseFactory.initDBConnection(config.getConfigPath() + AlgorithmConfiguration.defaultConnectionFile,config);
-AnalysisLogger.getLogger().debug("ReferenceDB initialized");
+logger.debug("ReferenceDB initialized");
destinationdbConnection = DatabaseFactory.initDBConnection(config.getConfigPath() + AlgorithmConfiguration.defaultConnectionFile,config);
-AnalysisLogger.getLogger().debug("OriginalDB initialized");
+logger.debug("OriginalDB initialized");
}
//counts the elements in a table
@@ -53,7 +55,7 @@ public class TablesDiscrepanciesCalculator {
{
BigInteger count = BigInteger.ZERO;
String countingQuery = "select count(*) from "+tablename;
-AnalysisLogger.getLogger().debug("Getting DB elements by this query: "+countingQuery);
+logger.debug("Getting DB elements by this query: "+countingQuery);
List<Object> result = DatabaseFactory.executeSQLQuery(countingQuery, session);
count = (BigInteger) result.get(0);
return count;
@@ -63,7 +65,7 @@ public class TablesDiscrepanciesCalculator {
//takes a chunk of elements from the database, belonging to the set of 170 selected species
public List<Object> takeChunkOfElements(String tablename,String selectedColumns,String criteria, int limit, int offset, SessionFactory session) {
String query = String.format(selectElementsQuery,selectedColumns,tablename,criteria)+ " limit " + limit + " offset " + offset;
-AnalysisLogger.getLogger().debug("takeChunkOfElements-> executing query on DB: " + query);
+logger.debug("takeChunkOfElements-> executing query on DB: " + query);
List<Object> results = DatabaseFactory.executeSQLQuery(query, session);
return results;
}
@@ -110,7 +112,7 @@ public class TablesDiscrepanciesCalculator {
// take the number of elements
numOfElements = countElements(analyzedTable, destinationdbConnection);
-AnalysisLogger.getLogger().debug("Remote DB contains " + numOfElements + " elements.");
+logger.debug("Remote DB contains " + numOfElements + " elements.");
int maxNumber = numOfElements.intValue();
int numOfChunks = maxNumber / chunkSize;
if ((maxNumber % chunkSize) > 0) {
@@ -141,13 +143,13 @@ public class TablesDiscrepanciesCalculator {
if (Math.abs(d-isNumber(destelem))>Threshold){
errorCounter++;
equal = false;
-AnalysisLogger.getLogger().debug("ERROR - DISCREPANCY AT NUMBERS COMPARISON: "+refelem+" vs "+destelem);
+logger.debug("ERROR - DISCREPANCY AT NUMBERS COMPARISON: "+refelem+" vs "+destelem);
}
}
else if (!refelem.equals(destelem)){
errorCounter++;
equal = false;
-AnalysisLogger.getLogger().debug("ERROR - DISCREPANCY AT STRING COMPARISON: "+refelem+" vs "+destelem);
+logger.debug("ERROR - DISCREPANCY AT STRING COMPARISON: "+refelem+" vs "+destelem);
}
if (!equal)
break;
@@ -160,11 +162,11 @@ public class TablesDiscrepanciesCalculator {
if (!equal)
break;
else
-AnalysisLogger.getLogger().debug("CHUNK NUMBER "+i+" OK!");
+logger.debug("CHUNK NUMBER "+i+" OK!");
}
long t1 = System.currentTimeMillis();
-AnalysisLogger.getLogger().debug("ELAPSED TIME: " + (t1-t0) + " ms");
+logger.debug("ELAPSED TIME: " + (t1-t0) + " ms");
//close connections
referencedbConnection.close();
Some files were not shown because too many files have changed in this diff.