Gianpaolo Coro 2012-03-02 08:41:16 +00:00
parent 03a37a04c7
commit 872c1528fa
2 changed files with 211 additions and 106 deletions


@@ -1,4 +1,4 @@
name = EcologicalEngine
package = org.gcube
package = org.gcube.dataanalysis.ecologicalengine
# Where the jar will be stored
lib.dir = Dependencies/org.gcube.dataanalysis.ecologicalengine.jar


@@ -22,6 +22,24 @@ public class BioClimateAnalysis {
private static String countHighProbabilityCells = "select count(*) from %1$s where probability>%2$s";
private static String countSeaCells = "select count(*) from %1$s as a join %2$s as b on a.oceanarea>0 and a.csquarecode=b.csquarecode and ((a.iceconann<b.iceconann-%3$s or a.iceconann>b.iceconann+%3$s) or " + "(a.salinitymean<b.salinitymean-%3$s or a.salinitymean>b.salinitymean+%3$s) or (a.sstanmean<b.sstanmean-%3$s or a.sstanmean>b.sstanmean+%3$s))";
private static String iceLeakage = "select count(*) from %1$s as a join %2$s as b on a.csquarecode=b.csquarecode and (a.iceconann<b.iceconann) and a.oceanarea>0";
private static String takeRangeOfDepths= "select distinct depthmin, max(depthmax) from %1$s group by depthmin order by depthmin";
private static String countNumberOfSpecies= "count(*) from %1$s where depthmin<%2$s and depthmin>=%3$s and depthmax<%4$s and depthmax>=%5$s";
private static enum FIELD {
iceconann, sstanmean, salinitymean
};
private static String takeAvgSelection = "select avg(%1$s),%2$s from %3$s %4$s group by %2$s order by %2$s";
private static String [] selectionCriteria = {"faoaream", "lme"};
private static String [] timeseriesNames = {"Ice Conc.", "Sea Surface Temperature","Salinity"};
private static String [] quantitiesNames= {FIELD.iceconann.name(), FIELD.sstanmean.name(),FIELD.salinitymean.name()};
private static String [] selectionFilters= {"where faoaream>0", "where lme>0"};
private static String [] criteriaNames = {"FaoArea", "LME"};
// private static String takeSubHspec = "select %1$s from #CLAUSE# order by %1$s";
private static String meanVal = "select avg(%2$s) from %1$s where oceanarea>0";
// private static String meanVal = "select %2$s from %1$s where csquarecode = '1311:478:4'";
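For reference, a minimal sketch of how the takeAvgSelection template above gets filled through String.format positional arguments; the table name hcaf_d_2050 is a hypothetical example, while the other values come from the arrays defined in this class:

    // hypothetical expansion of the takeAvgSelection template
    String takeAvgSelection = "select avg(%1$s),%2$s from %3$s %4$s group by %2$s order by %2$s";
    String query = String.format(takeAvgSelection,
            "iceconann",          // %1$s: quantity to average (FIELD.iceconann)
            "faoaream",           // %2$s: grouping criterion (selectionCriteria[0])
            "hcaf_d_2050",        // %3$s: HCAF table name (placeholder)
            "where faoaream>0");  // %4$s: selection filter (selectionFilters[0])
    // query -> "select avg(iceconann),faoaream from hcaf_d_2050 where faoaream>0 group by faoaream order by faoaream"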
@@ -51,11 +69,8 @@ public class BioClimateAnalysis {
private LexicalEngineConfiguration config;
private static enum FIELD {
iceconann, sstanmean, salinitymean
};
static int width = 680;
static int height = 420;
public static void main(String[] args) throws Exception {
String configPath = "./cfg/";
@@ -89,7 +104,7 @@ public class BioClimateAnalysis {
return producedImages;
}
private void produceGraphs() throws Exception {
private void produceGraphs(String [] csquareTableNames,String[] hspecTableNames) throws Exception {
producedImages = new ArrayList<Image>();
int numberOfTrends = highProbabilityCells.length;
@@ -103,27 +118,24 @@ public class BioClimateAnalysis {
for (int i = 0; i < numberOfTrends; i++) {
if (doHcafAn) {
avgIceD.addValue(avgIce[i], "Ice Conc.", csquareTable[i]);
avgSSTD.addValue(avgSST[i], "SST", csquareTable[i]);
avgSalinityD.addValue(avgSalinity[i], "Salinity", csquareTable[i]);
avgIceD.addValue(avgIce[i], "Ice Conc.", csquareTableNames[i]);
avgSSTD.addValue(avgSST[i], "SST", csquareTableNames[i]);
avgSalinityD.addValue(avgSalinity[i], "Salinity", csquareTableNames[i]);
}
if (doHspecAn) {
probabilityTrend.addValue(highProbabilityCells[i], "Number Of Cells", finalDistributionTable[i]);
probabilityTrend.addValue(highProbabilityCells[i], "Number Of Cells", hspecTableNames[i]);
if (i > 0) {
discrepanciesTrend.addValue(discrepancies[i], "Mean Discrepancy Respect to Prev. Distrib.", finalDistributionTable[i]);
}
}
}
int width = 680;
int height = 420;
if (doHspecAn) {
double min = Operations.getMin(discrepancies);
discrepancies[0] = min;
if (liveRender) {
BioClimateGraph lineg1 = new BioClimateGraph(SERIES[0], Operations.getMax(highProbabilityCells), Operations.getMin(highProbabilityCells));
BioClimateGraph lineg4 = new BioClimateGraph(SERIES[3], Operations.getMax(discrepancies), min);
@@ -156,17 +168,112 @@ public class BioClimateAnalysis {
}
public void hcafEvolutionAnalysis(String[] hcafTable) throws Exception{
evolutionAnalysis(hcafTable, null, null, null);
public void hcafEvolutionAnalysis(String[] hcafTable,String[] hcafTableNames) throws Exception {
globalEvolutionAnalysis(hcafTable, null, hcafTableNames, null, null, null);
}
public void hspecEvolutionAnalysis(String[] hspecTables, String probabilityColumn, String csquareColumn) throws Exception{
evolutionAnalysis(null, hspecTables, probabilityColumn, csquareColumn);
public void hspecEvolutionAnalysis(String[] hspecTables, String[] hspecTableNames, String probabilityColumn, String csquareColumn) throws Exception {
globalEvolutionAnalysis(null, hspecTables, null, hspecTableNames, probabilityColumn, csquareColumn);
}
public void evolutionAnalysis(String[] hcafTable, String[] hspecTables, String probabilityColumn, String csquareColumn) throws Exception {
private List<Image> producedCharts;
public void produceCharts(HashMap<String,HashMap<String,double []>> GeoMap,String[] hcafTablesNames){
//produce a chart for each feature
producedCharts = new ArrayList<Image>();
for (String featurename:GeoMap.keySet()){
DefaultCategoryDataset chart = new DefaultCategoryDataset();
HashMap<String,double []> timeseries = GeoMap.get(featurename);
double absmax = -Double.MAX_VALUE;
double absmin = Double.MAX_VALUE;
for (String timeserie:timeseries.keySet()){
double[] points = timeseries.get(timeserie);
for (int i=0;i<points.length;i++){
if (points [i]>absmax)
absmax = points[i];
if (points [i]<absmin)
absmin = points[i];
chart.addValue(points[i], timeserie,hcafTablesNames[i]);
}
}
if (liveRender) {
BioClimateGraph lineg1 = new BioClimateGraph(featurename, absmax, absmin);
lineg1.render(chart);
}
producedCharts.add(BioClimateGraph.renderStaticImgObject(width, height, chart, featurename, absmax, absmin));
}
}
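A hedged usage sketch of produceCharts, assuming analysis is an already constructed BioClimateAnalysis and java.util.HashMap is imported; the chart key, timeseries code, values and table names are invented for illustration and follow the naming scheme used in geographicEvolutionAnalysis below (quantity name + " for " + criterion_code):

    // hypothetical input: one chart with a single timeseries holding one value per HCAF table
    HashMap<String, HashMap<String, double[]>> geoMap = new HashMap<String, HashMap<String, double[]>>();
    HashMap<String, double[]> submap = new HashMap<String, double[]>();
    submap.put("FaoArea_27", new double[] { 0.12, 0.09 });            // placeholder averages, one per period
    geoMap.put("Ice Conc. for FaoArea_27", submap);                   // chart name = quantity + " for " + criterion_code
    analysis.produceCharts(geoMap, new String[] { "hcaf_2015", "hcaf_2050" }); // placeholder table names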
public void geographicEvolutionAnalysis(String[] hcafTable,String []hcafTableNames) throws Exception {
try {
referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config);
AnalysisLogger.getLogger().debug("ReferenceDB initialized");
doHcafAn = true;
doHspecAn = true;
status = 0f;
this.csquareTable = hcafTable;
int numbOfTables = (hcafTable != null) ? hcafTable.length : 0;
if (numbOfTables > 0) {
//a map for each feature; each sub-map contains a trend per criterion (faoaream, lme, etc.)
HashMap<String,HashMap<String,double []>> GeoMap= new HashMap<String, HashMap<String,double[]>>();
float statusstep = 80f / (float) numbOfTables;
//for each table
for (int i = 0; i < numbOfTables; i++) {
//for each criterion to apply: fao area, lme etc.
for (int j=0;j<criteriaNames.length;j++){
//for each quantity to display: ice concentration, SST, salinity
for (int k=0;k<quantitiesNames.length;k++){
String query = String.format(takeAvgSelection, quantitiesNames[k],selectionCriteria[j],hcafTable[i],selectionFilters[j]);
AnalysisLogger.getLogger().debug("Query to be executed : "+query);
//take pairs (avg value, criterion code)
List<Object> quantityCriterion = DatabaseFactory.executeSQLQuery(query, referencedbConnection);
//for each row
for (Object element: quantityCriterion){
Object [] row = (Object[]) element;
//take avg value
double value = (row[0]==null)?0:Double.parseDouble(""+row[0]);
//take code for criterion
String code = ""+row[1];
String chartName = timeseriesNames[k]+" for "+criteriaNames[j]+"_"+code;
//put the code and the value in the timeseries associated with the feature name
HashMap<String,double []> submap = GeoMap.get(chartName);
if (submap==null){
submap = new HashMap<String, double[]>();
GeoMap.put(chartName, submap);
}
String timeseries = criteriaNames[j]+"_"+code;
double [] elements = submap.get(timeseries);
if (elements==null){
elements = new double[numbOfTables];
submap.put(timeseries, elements);
}
elements[i] = value;
}
}
}
status = status + statusstep;
}
status = 80f;
produceCharts(GeoMap,hcafTableNames);
}
} catch (Exception e) {
e.printStackTrace();
throw e;
} finally {
status = 100f;
referencedbConnection.close();
}
}
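A minimal driver sketch for the new geographic analysis, using the constructor shown further below; the paths, database credentials and table names are placeholders, not values taken from the project:

    // hypothetical driver code; every literal below is a placeholder
    BioClimateAnalysis analysis = new BioClimateAnalysis("./cfg/", "./persistence/",
            "jdbc:postgresql://localhost/aquamapsdb", "user", "password", false);
    analysis.geographicEvolutionAnalysis(
            new String[] { "hcaf_d", "hcaf_d_2050" },    // HCAF tables, one per time period
            new String[] { "HCAF 2015", "HCAF 2050" });  // labels used on the chart x axis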
public void globalEvolutionAnalysis(String[] hcafTable, String[] hspecTables,String[] hcafTablesNames, String [] hspecTableNames, String probabilityColumn, String csquareColumn) throws Exception {
try {
referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config);
AnalysisLogger.getLogger().debug("ReferenceDB initialized");
@@ -192,7 +299,6 @@ public class BioClimateAnalysis {
float statusstep = 80f / (float) numbOfPoints;
for (int i = 0; i < numbOfPoints; i++) {
if (doHspecAn)
highProbabilityCells[i] = calcHighProbabilityCells(hspecTables[i], 0.8);
@@ -204,7 +310,6 @@ public class BioClimateAnalysis {
AnalysisLogger.getLogger().trace("(" + hcafTable[i] + "): " + " ICE " + avgIce[i] + " SST " + avgSST[i] + " SAL " + avgSalinity[i]);
}
if (doHspecAn) {
if (i == 0) {
discrepancies[i] = 1.0;
@@ -220,7 +325,7 @@ public class BioClimateAnalysis {
status = status + statusstep;
}
status = 80f;
produceGraphs();
produceGraphs(hcafTablesNames,hspecTableNames);
} catch (Exception e) {
e.printStackTrace();
@@ -231,7 +336,6 @@ public class BioClimateAnalysis {
}
}
// init connections
public BioClimateAnalysis(String configPath, String persistencePath, String databaseURL, String databaseUserName, String databasePassword, boolean liveRender) throws Exception {
this.configPath = configPath;
@@ -258,6 +362,8 @@ public class BioClimateAnalysis {
return count;
}
public double avgValue(String hcaf1, String field) throws Exception {
List<Object> countage = DatabaseFactory.executeSQLQuery(String.format(meanVal, hcaf1, field), referencedbConnection);
@@ -336,5 +442,4 @@ public class BioClimateAnalysis {
return d;
}
}
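Along the same lines, a hedged sketch of the updated hspecEvolutionAnalysis call, which now pairs every HSPEC table with a display name; the table names, labels and column names below are assumptions for illustration only:

    // continuing the driver sketch above; all names are placeholders
    analysis.hspecEvolutionAnalysis(
            new String[] { "hspec_native", "hspec_2050" },  // HSPEC tables to compare
            new String[] { "HSPEC 2015", "HSPEC 2050" },    // labels for the probability and discrepancy trends
            "probability",                                  // probability column (assumed name)
            "csquarecode");                                 // c-square code column (assumed name)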