// VCS export artifact (commit timestamp): 2013-07-10 18:36:37 +02:00
package org.gcube.dataanalysis.geo.algorithms ;
import java.awt.Image ;
import java.io.File ;
import java.util.ArrayList ;
import java.util.HashMap ;
import java.util.LinkedHashMap ;
import java.util.List ;
import java.util.Map ;
import org.gcube.common.scope.api.ScopeProvider ;
import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools ;
import org.gcube.contentmanagement.graphtools.plotting.graphs.GaussianDistributionGraph ;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions ;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger ;
import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory ;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration ;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType ;
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable ;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType ;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType ;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes ;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates ;
import org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis ;
import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis ;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils ;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper ;
import org.gcube.dataanalysis.geo.insertion.RasterTable ;
// VCS export artifact (commit timestamp): 2013-07-19 11:57:05 +02:00
import org.gcube.dataanalysis.geo.meta.OGCFormatter ;
// VCS export artifact (commit timestamp): 2013-07-10 18:36:37 +02:00
import org.gcube.dataanalysis.geo.meta.features.FeaturesManager ;
import org.gcube.dataanalysis.geo.retrieval.GeoIntersector ;
// VCS export artifact (commit timestamp): 2013-07-19 11:57:05 +02:00
import org.gcube.dataanalysis.geo.utils.ThreddsDataExplorer ;
// VCS export artifact (commit timestamp): 2013-07-10 18:36:37 +02:00
import org.jfree.chart.JFreeChart ;
import org.jfree.data.function.NormalDistributionFunction2D ;
import org.jfree.data.general.DatasetUtilities ;
import org.jfree.data.xy.XYSeriesCollection ;
import org.opengis.metadata.Metadata ;
// VCS export artifact (commit timestamp): 2013-07-19 11:57:05 +02:00
import ucar.nc2.dt.GridDatatype ;
import ucar.nc2.dt.grid.GridDataset ;
// VCS export artifact (commit timestamp): 2013-07-10 18:36:37 +02:00
/**
 * Compares two geospatial layers (OGC / NetCDF maps) by rasterizing both at a
 * common resolution, dumping the rasters to database tables and running a
 * {@code DiscrepancyAnalysis} over them. Progress is exposed via
 * {@link #getStatus()} and results via {@link #getOutput()}.
 */
public class MapsComparator extends DataAnalysis {

	// Names of the input parameters declared in getInputParameters().
	static String layer1 = "Layer_1";
	static String layer2 = "Layer_2";
	static String zString = "Z";
	static String t1 = "TimeIndex_1";
	static String t2 = "TimeIndex_2";
	static String valuesThr = "ValuesComparisonThreshold";

	// Completion percentage reported through getStatus(), in [0, 100].
	float status = 0;

	// Declared inputs, filled by getInputParameters().
	public List<StatisticalType> inputs = new ArrayList<StatisticalType>();

	// Named scalar results produced by compute(); consumed by getOutput().
	public LinkedHashMap<String, String> outputParameters = new LinkedHashMap<String, String>();
/**
 * No-op initialization: logging is (re)configured lazily in compute().
 */
@Override
public void init() throws Exception {
	AnalysisLogger.getLogger().debug("Initialization");
}
@Override
public String getDescription ( ) {
2013-09-18 16:44:43 +02:00
return " An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. Supported maps can only be in WFS, Opendap or ASC formats. " ;
2013-07-10 18:36:37 +02:00
}
@Override
public void compute ( ) throws Exception {
status = 0 ;
AnalysisLogger . setLogger ( config . getConfigPath ( ) + AlgorithmConfiguration . defaultLoggerFile ) ;
long t0 = System . currentTimeMillis ( ) ;
String layerT1 = IOHelper . getInputParameter ( config , layer1 ) ;
String layerT2 = IOHelper . getInputParameter ( config , layer2 ) ;
String z$ = IOHelper . getInputParameter ( config , zString ) ;
String valuesthr$ = IOHelper . getInputParameter ( config , valuesThr ) ;
String time1$ = IOHelper . getInputParameter ( config , t1 ) ;
String time2$ = IOHelper . getInputParameter ( config , t2 ) ;
int time1 = ( ( time1$ ! = null ) & & ( time1$ . trim ( ) . length ( ) > 0 ) ) ? Integer . parseInt ( time1$ ) : 0 ;
int time2 = ( ( time2$ ! = null ) & & ( time2$ . trim ( ) . length ( ) > 0 ) ) ? Integer . parseInt ( time2$ ) : 0 ;
if ( time1 < 0 )
time1 = 0 ;
if ( time2 < 0 )
time2 = 0 ;
double valuesthreshold = 0 . 1 ;
if ( ( valuesthr$ ! = null ) & & ( valuesthr$ . trim ( ) . length ( ) > 0 ) )
try {
valuesthreshold = Double . parseDouble ( valuesthr$ ) ;
} catch ( Exception ee ) {
}
double z = 0 ;
if ( ( z$ ! = null ) & & ( z$ . trim ( ) . length ( ) > 0 ) )
try {
z = Double . parseDouble ( z$ ) ;
} catch ( Exception ee ) {
}
try {
// delete this force
2013-07-19 11:57:05 +02:00
String scope = config . getGcubeScope ( ) ;
if ( scope = = null )
scope = ScopeProvider . instance . get ( ) ;
// scope = "/gcube";
2013-07-10 18:36:37 +02:00
// String scope = null;
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Using Scope: " + scope + " Z: " + z + " Values Threshold: " + valuesthreshold + " Layer1: " + layerT1 + " vs " + layerT2 ) ;
GeoIntersector intersector = new GeoIntersector ( scope , config . getConfigPath ( ) ) ;
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: GeoIntersector initialized " ) ;
2013-07-19 11:57:05 +02:00
2013-07-10 18:36:37 +02:00
double x1 = - 180 ;
double x2 = 180 ;
double y1 = - 90 ;
double y2 = 90 ;
2013-07-19 11:57:05 +02:00
2013-07-10 18:36:37 +02:00
status = 10 ;
FeaturesManager fm = intersector . getFeaturer ( ) ;
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Taking info for the layer: " + layerT1 ) ;
Metadata meta1 = fm . getGNInfobyUUIDorName ( layerT1 ) ;
if ( meta1 = = null ) throw new Exception ( " No Correspondence with Layer 1 " ) ;
double resolution1 = 0 ;
try {
resolution1 = FeaturesManager . getResolution ( meta1 ) ;
} catch ( Exception e ) {
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Undefined resolution " ) ;
}
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Resolution: " + resolution1 ) ;
2013-07-19 11:57:05 +02:00
if ( fm . isThreddsFile ( meta1 ) ) {
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: recalculating the spatial extent of the comparison " ) ;
String fileurl = fm . getOpenDapLink ( meta1 ) ;
GridDataset gds = ucar . nc2 . dt . grid . GridDataset . open ( fileurl ) ;
List < GridDatatype > gridTypes = gds . getGrids ( ) ;
GridDatatype gdt = gridTypes . get ( 0 ) ;
x1 = ThreddsDataExplorer . getMinX ( gdt . getCoordinateSystem ( ) ) ;
x2 = ThreddsDataExplorer . getMaxX ( gdt . getCoordinateSystem ( ) ) ;
y1 = ThreddsDataExplorer . getMinY ( gdt . getCoordinateSystem ( ) ) ;
y2 = ThreddsDataExplorer . getMaxY ( gdt . getCoordinateSystem ( ) ) ;
}
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Spatial extent of the comparison: x1: " + x1 + " x2: " + x2 + " y1: " + y1 + " y2: " + y2 ) ;
2013-07-10 18:36:37 +02:00
status = 15 ;
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Taking info for the layer: " + layerT2 ) ;
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Trying with UUID... " + layerT2 ) ;
Metadata meta2 = fm . getGNInfobyUUIDorName ( layerT2 ) ;
if ( meta2 = = null ) throw new Exception ( " No Correspondence with Layer 2 " ) ;
double resolution2 = 0 ;
try {
resolution2 = FeaturesManager . getResolution ( meta2 ) ;
} catch ( Exception e ) {
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Undefined resolution " ) ;
}
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Resolution: " + resolution2 ) ;
status = 20 ;
// take the lowest resolution to perform the comparison
double resolution = Math . max ( resolution1 , resolution2 ) ;
2013-07-19 11:57:05 +02:00
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Theoretical Resolution: " + resolution ) ;
2013-07-10 18:36:37 +02:00
if ( resolution = = 0 )
resolution = 0 . 5d ;
2013-07-19 11:57:05 +02:00
// I added the following control to limit the amount of calculations
2013-09-18 16:44:43 +02:00
if ( resolution < 0 . 5 & & resolution > 0 . 01 )
resolution = 0 . 5d ;
else if ( resolution < 0 . 01 )
2013-07-19 11:57:05 +02:00
resolution = 0 . 01d ;
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Evaluation Indeed at Resolution: " + resolution ) ;
2013-07-10 18:36:37 +02:00
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: ****Rasterizing map 1**** " ) ;
double [ ] [ ] slice1 = intersector . takeTimeSlice ( layerT1 , time1 , x1 , x2 , y1 , y2 , z , resolution , resolution ) ;
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Dumping map 1 " ) ;
status = 30 ;
RasterTable raster1 = new RasterTable ( x1 , x2 , y1 , y2 , z , resolution , resolution , slice1 , config ) ;
raster1 . dumpGeoTable ( ) ;
String rastertable1 = raster1 . getTablename ( ) ;
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Map 1 was dumped in table: " + rastertable1 ) ;
status = 40 ;
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: ****Rasterizing map 2**** " ) ;
double [ ] [ ] slice2 = intersector . takeTimeSlice ( layerT2 , time2 , x1 , x2 , y1 , y2 , z , resolution , resolution ) ;
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Dumping map 2 " ) ;
status = 50 ;
RasterTable raster2 = new RasterTable ( x1 , x2 , y1 , y2 , z , resolution , resolution , slice2 , config ) ;
raster2 . dumpGeoTable ( ) ;
String rastertable2 = raster2 . getTablename ( ) ;
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Map 2 was dumped in table: " + rastertable2 ) ;
status = 60 ;
/ *
* String rastertable1 = " rstr909f60c1d3f1472e9de998e844990724 " ; String rastertable2 = " rstre52e744c99224de3a1c5354263c6c8d8 " ; String resolution = " 0.5 " ;
* /
config . setNumberOfResources ( 1 ) ;
config . setParam ( " FirstTable " , rastertable1 ) ;
config . setParam ( " SecondTable " , rastertable2 ) ;
config . setParam ( " FirstTableCsquareColumn " , RasterTable . csquareColumn ) ;
config . setParam ( " SecondTableCsquareColumn " , RasterTable . csquareColumn ) ;
config . setParam ( " FirstTableProbabilityColumn " , RasterTable . probabilityColumn ) ;
config . setParam ( " SecondTableProbabilityColumn " , RasterTable . probabilityColumn ) ;
config . setParam ( " ComparisonThreshold " , " " + valuesthreshold ) ;
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Analyzing discrepancy between maps: " + rastertable1 + " and " + rastertable2 ) ;
DiscrepancyAnalysis da = new DiscrepancyAnalysis ( ) ;
da . setConfiguration ( config ) ;
da . init ( false ) ;
outputParameters = da . analyze ( ) ;
outputParameters . put ( " RESOLUTION " , " " + MathFunctions . roundDecimal ( resolution , 4 ) ) ;
status = 80 ;
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Output: " + outputParameters ) ;
// delete the tables
connection = DatabaseUtils . initDBSession ( config ) ;
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Deleting table " + rastertable1 ) ;
DatabaseFactory . executeSQLUpdate ( DatabaseUtils . dropTableStatement ( rastertable1 ) , connection ) ;
status = 90 ;
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Deleting table " + rastertable2 ) ;
DatabaseFactory . executeSQLUpdate ( DatabaseUtils . dropTableStatement ( rastertable2 ) , connection ) ;
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Elapsed: Whole operation completed in " + ( ( double ) ( System . currentTimeMillis ( ) - t0 ) / 1000d ) + " s " ) ;
} catch ( Exception e ) {
e . printStackTrace ( ) ;
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: ERROR!: " + e . getLocalizedMessage ( ) ) ;
} finally {
DatabaseUtils . closeDBConnection ( connection ) ;
status = 100 ;
}
}
/**
 * Declares the algorithm inputs: the two layer identifiers, the Z level, the
 * value-comparison threshold, the two time indices, the K-statistic threshold
 * and the default database parameters.
 *
 * @return the (shared) list of declared input parameters
 */
@Override
public List<StatisticalType> getInputParameters() {
	IOHelper.addStringInput(inputs, layer1, "First Layer Title or UUID: The title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer", "");
	IOHelper.addStringInput(inputs, layer2, "Second Layer Title or UUID: The title or the UUID (preferred) of a second layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer", "");
	IOHelper.addIntegerInput(inputs, zString, "value of Z. Default is 0, that means comparison will be at surface level", "0");
	IOHelper.addDoubleInput(inputs, valuesThr, "A comparison threshold for the values in the map. Null equals to 0.1", "0.1");
	IOHelper.addIntegerInput(inputs, t1, "First Layer Time Index. The default is the first", "0");
	IOHelper.addIntegerInput(inputs, t2, "Second Layer Time Index. The default is the first", "0");
	IOHelper.addDoubleInput(inputs, "KThreshold", "Threshold for K-Statistic: over this threshold values will be considered 1 for agreement calculation. Default is 0.5", "0.5");
	DatabaseType.addDefaultDBPars(inputs);
	return inputs;
}
/**
 * No resources to release beyond what compute() already closes.
 */
@Override
public void shutdown() {
	AnalysisLogger.getLogger().debug("Shutdown");
}
protected Image generateGaussian ( double mean , double variance ) {
// gaussian
XYSeriesCollection xyseriescollection = new XYSeriesCollection ( ) ;
if ( variance = = 0 )
variance = 0 . 01 ;
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Adopting mean: " + mean + " and variance: " + variance ) ;
NormalDistributionFunction2D normaldistributionfunction2d = new NormalDistributionFunction2D ( mean , variance ) ;
org . jfree . data . xy . XYSeries xyseries = DatasetUtilities . sampleFunction2DToSeries ( normaldistributionfunction2d , ( mean - ( 2 * variance ) ) , ( mean + ( 2 * variance ) ) , 121 , " Distribution of the Error " ) ;
xyseriescollection . addSeries ( xyseries ) ;
// end gaussian
JFreeChart chart = GaussianDistributionGraph . createStaticChart ( xyseriescollection , mean , variance ) ;
Image image = ImageTools . toImage ( chart . createBufferedImage ( 680 , 420 ) ) ;
/ *
* GaussianDistributionGraph graph = new GaussianDistributionGraph ( " Error Distribution " ) ; graph . mean = mean ; graph . variance = variance ; graph . render ( xyseriescollection ) ;
* /
// end build image
AnalysisLogger . getLogger ( ) . debug ( " MapsComparator: Gaussian Distribution Produced " ) ;
return image ;
}
/**
 * Packs the results for the caller: every scalar in {@link #outputParameters}
 * becomes a string-typed entry, plus an image of the error distribution built
 * from the MEAN and VARIANCE outputs of the discrepancy analysis.
 *
 * NOTE(review): assumes compute() succeeded and filled MEAN/VARIANCE;
 * Double.parseDouble will throw otherwise — confirm callers expect that.
 *
 * @return a map-typed PrimitiveType named "ResultsMap"
 */
@Override
public StatisticalType getOutput() {
	AnalysisLogger.getLogger().debug("MapsComparator: Producing Gaussian Distribution for the errors");
	// Build the error-distribution image.
	HashMap<String, Image> producedImages = new HashMap<String, Image>();
	double mean = Double.parseDouble(outputParameters.get("MEAN"));
	double variance = Double.parseDouble(outputParameters.get("VARIANCE"));
	producedImages.put("Error Distribution", generateGaussian(mean, variance));
	PrimitiveType images = new PrimitiveType("Images", producedImages, PrimitiveTypes.IMAGES, "Distribution of the Error", "The distribution of the error along with variance");
	// Expose each scalar output parameter as a string-typed entry.
	LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
	for (String key : outputParameters.keySet()) {
		String value = outputParameters.get(key);
		PrimitiveType val = new PrimitiveType(String.class.getName(), "" + value, PrimitiveTypes.STRING, key, key);
		map.put(key, val);
	}
	// Collect all the outputs, images included, under a single map result.
	map.put("Images", images);
	PrimitiveType output = new PrimitiveType(HashMap.class.getName(), map, PrimitiveTypes.MAP, "ResultsMap", "Results Map");
	return output;
}
/**
 * @return the completion percentage of compute(), in [0, 100]
 */
@Override
public float getStatus() {
	return status;
}
/**
 * Unused DataAnalysis hook: the actual analysis runs inline in compute()
 * through DiscrepancyAnalysis, so this intentionally returns null.
 */
@Override
public LinkedHashMap<String, String> analyze() throws Exception {
	return null;
}
}