From 9b76e5e8a62eb268e90759eeedcde1aa4d9589c2 Mon Sep 17 00:00:00 2001
From: Gianpaolo Coro
Date: Thu, 6 Mar 2014 11:56:54 +0000
Subject: [PATCH] git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineGeoSpatialExtension@92747 82a268e6-3cf1-43bd-a215-b396298e98cf

---
 .../geo/algorithms/OccurrenceEnrichment.java  | 304 ++++++++++++------
 .../geo/algorithms/TimeExtraction.java        |  31 +-
 .../geo/algorithms/TimeExtractionTable.java   |   2 +-
 .../geo/algorithms/XYExtraction.java          |  34 +-
 .../geo/algorithms/XYExtractionTable.java     |   2 +
 .../geo/algorithms/ZExtraction.java           |   5 +-
 .../geo/algorithms/ZExtractionTable.java      |   2 +-
 .../geo/connectors/asc/AscRasterReader.java   |  11 +-
 .../connectors/netcdf/NetCDFDataExplorer.java |   5 +-
 .../geo/connectors/table/Table.java           |   1 +
 .../table/TableMatrixRepresentation.java      |  12 +-
 .../geo/matrixmodel/MatrixExtractor.java      | 146 +++++----
 .../geo/test/infra/TestExtraction.java        |  67 +++-
 .../test/infra/TestOccurrenceEnrichment.java  |  71 ++++
 .../geo/test/infra/TestTimeExtraction.java    |  12 +-
 .../RegressionOccurrenceEnrichment.java       |  70 ++++
 .../geo/utils/VectorOperations.java           |  37 ++-
 17 files changed, 608 insertions(+), 204 deletions(-)
 create mode 100644 src/main/java/org/gcube/dataanalysis/geo/test/infra/TestOccurrenceEnrichment.java
 create mode 100644 src/main/java/org/gcube/dataanalysis/geo/test/regression/RegressionOccurrenceEnrichment.java

diff --git a/src/main/java/org/gcube/dataanalysis/geo/algorithms/OccurrenceEnrichment.java b/src/main/java/org/gcube/dataanalysis/geo/algorithms/OccurrenceEnrichment.java
index a7e9034..3288533 100644
--- a/src/main/java/org/gcube/dataanalysis/geo/algorithms/OccurrenceEnrichment.java
+++ b/src/main/java/org/gcube/dataanalysis/geo/algorithms/OccurrenceEnrichment.java
@@ -1,6 +1,8 @@
 package org.gcube.dataanalysis.geo.algorithms;
 
 import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 
 import org.gcube.common.scope.api.ScopeProvider;
@@ -11,6 +13,7 @@ import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
 import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
 import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
 import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
+import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
 import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
 import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
 import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
@@ -21,6 +24,7 @@ import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
 import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
 import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
 import org.gcube.dataanalysis.ecoengine.utils.Tuple;
+import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
 import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
 import org.gcube.dataanalysis.geo.utils.VectorOperations;
 import org.hibernate.SessionFactory;
@@ -28,89 +32,102 @@ import org.hibernate.SessionFactory;
 public class OccurrenceEnrichment implements Transducerer {
 
    static String OccurrencesTableNameParameter = "OccurrenceTable";
-   static String LongitudeColumn= "LongitudeColumn";
+   static String LongitudeColumn = "LongitudeColumn";
    static String LatitudeColumn = "LatitudeColumn";
    static String ScientificNameColumn = "ScientificNameColumn";
+   static String TimeColumn = "TimeColumn";
    static String OutputTableLabelParameter = "OutputTableName";
    static String OutputTableDBNameParameter = "OutputTableDBName";
    static String FilterParameter = "OptionalFilter";
    static String Resolution = "Resolution";
    static String Layers = "Layers";
+   static String LayersNames = "FeaturesNames";
    static String yLL = "BBox_LowerLeftLat";
    static String xLL = "BBox_LowerLeftLong";
    static String yUR = "BBox_UpperRightLat";
    static String xUR = "BBox_UpperRightLong";
-
-
+
    AlgorithmConfiguration config;
    float status;
-
-   private String [] layers;
+
+   private String[] layers;
+   private String[] layersnames;
    private String occurrencesTableName;
    private String longitudeColumn;
    private String latitudeColumn;
    private String scientificnameColumn;
+   private String timeColumn;
    private String filter;
    private float resolution;
    private String outputTableLabel;
    private String outputTableDBName;
-   private double BBxLL;
-   private double BByLL;
-   private double BBxUR;
-   private double BByUR;
-
+   public LinkedHashMap<String, String> outputParameters = new LinkedHashMap<String, String>();
+
    @Override
    public List<StatisticalType> getInputParameters() {
-
       List<StatisticalType> inputs = new ArrayList<StatisticalType>();
-      List<TableTemplates> template= new ArrayList<TableTemplates>();
+      List<TableTemplates> template = new ArrayList<TableTemplates>();
       template.add(TableTemplates.OCCURRENCE_SPECIES);
-      InputTable table = new InputTable(template,OccurrencesTableNameParameter ,"A geospatial table containing at least x,y information","");
+      InputTable table = new InputTable(template, OccurrencesTableNameParameter, "A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets", "");
      inputs.add(table);
-      ColumnType p1 = new ColumnType(OccurrencesTableNameParameter , LongitudeColumn, "column with longitude values", "decimallongitude", false);
+      ColumnType p1 = new ColumnType(OccurrencesTableNameParameter, LongitudeColumn, "The column containing longitude values", "decimallongitude", false);
      inputs.add(p1);
-      ColumnType p2 = new ColumnType(OccurrencesTableNameParameter , LatitudeColumn, "column with latitude values", "decimallatitude", false);
+      ColumnType p2 = new ColumnType(OccurrencesTableNameParameter, LatitudeColumn, "The column containing latitude values", "decimallatitude", false);
      inputs.add(p2);
-      ColumnType p3 = new ColumnType(OccurrencesTableNameParameter , ScientificNameColumn, "column with Scientific Names", "scientificname", false);
+      ColumnType p3 = new ColumnType(OccurrencesTableNameParameter, ScientificNameColumn, "The column containing Scientific Names", "scientificname", false);
      inputs.add(p3);
-
-      IOHelper.addStringInput(inputs, FilterParameter, "A filter on one of the columns (e.g. basisofrecord='HumanObservation')", " ");
-      IOHelper.addDoubleInput(inputs, Resolution, "The spatial resolution of the association between observations and environmental features.", "0.5");
-      IOHelper.addRandomStringInput(inputs, OutputTableDBNameParameter, "The db name of the table to produce", "enrich_");
+      ColumnType p4 = new ColumnType(OccurrencesTableNameParameter, TimeColumn, "The column containing time information", "eventdate", false);
+      inputs.add(p4);
+
Optional", " "); + IOHelper.addDoubleInput(inputs, Resolution, "The spatial resolution in degrees of the association between observations and environmental features", "0.5"); + IOHelper.addRandomStringInput(inputs, OutputTableDBNameParameter, "The database name of the table to produce", "enrich_"); IOHelper.addStringInput(inputs, OutputTableLabelParameter, "The name of the output table", "enrich_"); - PrimitiveTypesList listEnvLayers = new PrimitiveTypesList(String.class.getName(), PrimitiveTypes.STRING, Layers, "The list of environmental layers to use for enriching the points. Each entry is a Layer Title or UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", false); + PrimitiveTypesList listEnvLayers = new PrimitiveTypesList(String.class.getName(), PrimitiveTypes.STRING, Layers, "The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff )", false); inputs.add(listEnvLayers); - - IOHelper.addDoubleInput(inputs, xLL, "Lower Left Longitude of the Bounding Box", "-180"); - IOHelper.addDoubleInput(inputs, yLL, "Lower Left Latitute of the Bounding Box", "-90"); - IOHelper.addDoubleInput(inputs, xUR, "Upper Right Longitude of the Bounding Box", "180"); - IOHelper.addDoubleInput(inputs, yUR, "Upper Right Latitute of the Bounding Box", "90"); - + + PrimitiveTypesList listEnvLayersNames = new PrimitiveTypesList(String.class.getName(), PrimitiveTypes.STRING, LayersNames, "The list of names for the columns corresponding to the environmental layers. 
+      PrimitiveTypesList listEnvLayersNames = new PrimitiveTypesList(String.class.getName(), PrimitiveTypes.STRING, LayersNames, "The list of names for the columns corresponding to the environmental layers. These will be the column names of the resulting table", false);
+      inputs.add(listEnvLayersNames);
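+      // Note: both lists arrive as one string joined by AlgorithmConfiguration.getListSeparator()
+      // and are split in getParameters(); e.g. (hypothetical separator '|') Layers="uuid1|uuid2"
+      // paired with FeaturesNames="temperature|chlorophyll".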
+
      DatabaseType.addDefaultDBPars(inputs);
      return inputs;
    }

    protected void getParameters() {
-
-      layers = IOHelper.getInputParameter(config, Layers).split(AlgorithmConfiguration.getListSeparator());
-      AnalysisLogger.getLogger().debug("Layers to take " + layers.length);
-      occurrencesTableName=IOHelper.getInputParameter(config, OccurrencesTableNameParameter);
-      longitudeColumn=IOHelper.getInputParameter(config, LongitudeColumn);
-      latitudeColumn=IOHelper.getInputParameter(config, LatitudeColumn);
-      scientificnameColumn=IOHelper.getInputParameter(config, ScientificNameColumn);
-      filter=IOHelper.getInputParameter(config, FilterParameter);
-      if (filter==null)
-         filter="";
-      filter=filter.trim();
-
-      resolution=IOHelper.getInputParameter(config, Resolution)==null?0.5f:Float.parseFloat(IOHelper.getInputParameter(config, Resolution));
-      outputTableLabel=IOHelper.getInputParameter(config, OutputTableLabelParameter);
-      outputTableDBName=IOHelper.getInputParameter(config, OutputTableDBNameParameter);
-      BBxLL=Double.parseDouble(IOHelper.getInputParameter(config, xLL));
-      BByLL=Double.parseDouble(IOHelper.getInputParameter(config, yLL));
-      BBxUR=Double.parseDouble(IOHelper.getInputParameter(config, xUR));
-      BByUR=Double.parseDouble(IOHelper.getInputParameter(config, yUR));
-
+
+      layers = IOHelper.getInputParameter(config, Layers).trim().split(AlgorithmConfiguration.getListSeparator());
+      String layernamesS = IOHelper.getInputParameter(config, LayersNames);
+      if (layernamesS == null)
+         layernamesS = "";
+      layersnames = layernamesS.split(AlgorithmConfiguration.getListSeparator());
+      AnalysisLogger.getLogger().debug("N. of Layers to take " + layers.length);
+      occurrencesTableName = IOHelper.getInputParameter(config, OccurrencesTableNameParameter);
+      longitudeColumn = IOHelper.getInputParameter(config, LongitudeColumn);
+      latitudeColumn = IOHelper.getInputParameter(config, LatitudeColumn);
+      scientificnameColumn = IOHelper.getInputParameter(config, ScientificNameColumn);
+      timeColumn = IOHelper.getInputParameter(config, TimeColumn);
+      filter = IOHelper.getInputParameter(config, FilterParameter);
+      if (filter == null)
+         filter = "";
+      filter = filter.trim();
+
+      resolution = IOHelper.getInputParameter(config, Resolution) == null ? 0.5f : Float.parseFloat(IOHelper.getInputParameter(config, Resolution));
+      outputTableLabel = IOHelper.getInputParameter(config, OutputTableLabelParameter);
+      outputTableDBName = IOHelper.getInputParameter(config, OutputTableDBNameParameter);
+
+      AnalysisLogger.getLogger().debug("OccurrenceEnrichment->layers: " + layers);
+      AnalysisLogger.getLogger().debug("OccurrenceEnrichment->layers names: " + layersnames);
+      AnalysisLogger.getLogger().debug("OccurrenceEnrichment->occurrencesTableName: " + occurrencesTableName);
+      AnalysisLogger.getLogger().debug("OccurrenceEnrichment->longitudeColumn: " + longitudeColumn);
+      AnalysisLogger.getLogger().debug("OccurrenceEnrichment->latitudeColumn: " + latitudeColumn);
+      AnalysisLogger.getLogger().debug("OccurrenceEnrichment->scientificnameColumn: " + scientificnameColumn);
+      AnalysisLogger.getLogger().debug("OccurrenceEnrichment->timeColumn: " + timeColumn);
+      AnalysisLogger.getLogger().debug("OccurrenceEnrichment->filter: " + filter);
+      AnalysisLogger.getLogger().debug("OccurrenceEnrichment->resolution: " + resolution);
+      AnalysisLogger.getLogger().debug("OccurrenceEnrichment->outputTableLabel: " + outputTableLabel);
+      AnalysisLogger.getLogger().debug("OccurrenceEnrichment->outputTableDBName: " + outputTableDBName);
+
      String scope = config.getGcubeScope();
      AnalysisLogger.getLogger().debug("Extraction: Externally set scope " + scope);
      if (scope == null) {
@@ -120,6 +137,7 @@ public class OccurrenceEnrichment implements Transducerer {
      }
    }

+
    @Override
    public void init() throws Exception {
      AnalysisLogger.getLogger().debug("Occurrence Enrichment Initialization");
@@ -132,83 +150,157 @@ public class OccurrenceEnrichment implements Transducerer {

    @Override
    public String getDescription() {
-      return "An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layers from the e-infrastructure GeoNetwork (through the GeoExplorer application). Produces one table reporting the set of environmental values associated to the occurrence points.";
+      return "An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layers, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points.";
    }
-
    @Override
    public void compute() throws Exception {
-      //TODO: report times
-      SessionFactory dbconnection=null;
-      try{
-         dbconnection=DatabaseUtils.initDBSession(config);
-         String columns = longitudeColumn+","+latitudeColumn+","+scientificnameColumn;
-         String columnsToProduce = longitudeColumn+","+latitudeColumn+","+scientificnameColumn;
-         //take the occurrence points
-         List<Object> rows = (List<Object>)DatabaseFactory.executeSQLQuery(DatabaseUtils.getDinstictElements(occurrencesTableName, columns, filter),dbconnection);
-         if (rows==null || rows.size()==0)
+
+      SessionFactory dbconnection = null;
+      try {
+         long t0 = System.currentTimeMillis();
+         status = 10;
+         getParameters();
+         dbconnection = DatabaseUtils.initDBSession(config);
+         String columns = longitudeColumn + "," + latitudeColumn + "," + scientificnameColumn + "," + timeColumn;
+         // (id serial, csquarecode character varying, x real, y real, z real, t real, fvalue real)
+         String columnsTypes = "id serial, " + longitudeColumn + " real," + latitudeColumn + " real," + scientificnameColumn + " character varying," + timeColumn + " timestamp without time zone";
+
+         // take the min and max of longitude and latitude
+         String query = "select min(" + longitudeColumn + ") as minlong, max(" + longitudeColumn + ") as maxlong,min(" + latitudeColumn + ") as minlat,max(" + latitudeColumn + ") as maxlat from " + occurrencesTableName;
+         AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Retrieving bounding box: " + query);
+         List<Object> minmaxlonglat = (List<Object>) DatabaseFactory.executeSQLQuery(query, dbconnection);
+         if (minmaxlonglat == null || minmaxlonglat.size() == 0)
+            throw new Exception("Could not find min and max for occurrence data");
+         status = 20;
+         Object[] minmaxLongLat = (Object[]) minmaxlonglat.get(0);
+         double BBxLL = Double.parseDouble("" + minmaxLongLat[0]);
+         double BBxUR = Double.parseDouble("" + minmaxLongLat[1]);
+         double BByLL = Double.parseDouble("" + minmaxLongLat[2]);
+         double BByUR = Double.parseDouble("" + minmaxLongLat[3]);
+
+         AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Calculated Bounding Box: [" + BBxLL + "," + BByLL + ";" + BBxUR + "," + BByUR + "]");
+
+         // take the occurrence points
+         List<Object> rows = (List<Object>) DatabaseFactory.executeSQLQuery(DatabaseUtils.getDinstictElements(occurrencesTableName, columns, filter), dbconnection);
+         if (rows == null || rows.size() == 0)
            throw new Exception("Could not find occurrence data");
-
-         int rowsize = rows.size();
+         status = 30;
+         AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Rows Retrieved");
         List<Tuple<Double>> coordinates4d = new ArrayList<Tuple<Double>>();
-         List<String[]> enrichment = new ArrayList<String[]>();
-         int elementstoreport = 4;
-         int elementsfromoccurrences = 3;
-         for (Object row:rows){
+         List<String[]> enrichment = new ArrayList<String[]>();
+
+         int elementsFromOccurrences = 4;
+         int elementstoreport = elementsFromOccurrences + layers.length;
+
+         for (Object row : rows) {
            Object[] elements = (Object[]) row;
-            double x =elements[0]==null?0:Double.parseDouble(""+elements[0]);
-            double y =elements[1]==null?0:Double.parseDouble(""+elements[1]);
-            String species=elements[2]==null?"":""+elements[2];
-            Tuple<Double> el = new Tuple<Double>(x,y);
+            double x = elements[0] == null ? 0 : Double.parseDouble("" + elements[0]);
+            double y = elements[1] == null ? 0 : Double.parseDouble("" + elements[1]);
+            String species = elements[2] == null ? "" : "" + elements[2];
+            String time = elements[3] == null ? "NULL" : "" + elements[3];
+            Tuple<Double> el = new Tuple<Double>(x, y, 0d, 0d);
            coordinates4d.add(el);
-
-            String[] singlerow = new String[elementstoreport];
-            singlerow [0]=""+x;
-            singlerow[1]=""+y;
-            singlerow[2]=""+species;
+
+            String[] singlerow = new String[elementstoreport];
+            singlerow[0] = "" + x;
+            singlerow[1] = "" + y;
+            singlerow[2] = species;
+            singlerow[3] = time;
+
+            enrichment.add(singlerow);
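+            // Each distinct occurrence row yields one 4D tuple (x, y, z=0, t=0) and one output row;
+            // e.g. a record at lon=10.5, lat=42.1 (hypothetical values) becomes new Tuple<Double>(10.5, 42.1, 0d, 0d).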
"NULL" : "" + elements[3]; + Tuple el = new Tuple(x, y, 0d, 0d); coordinates4d.add(el); - - String[] singlerow = new String[elementstoreport]; - singlerow [0]=""+x; - singlerow[1]=""+y; - singlerow[2]=""+species; + + String[] singlerow = new String[elementstoreport]; + singlerow[0] = "" + x; + singlerow[1] = "" + y; + singlerow[2] = species; + singlerow[3] = time; + + enrichment.add(singlerow); } - - //take the layers matrices - int layeridx = 0; - for (String layerID:layers){ - //for each layer - XYExtractor extractor = new XYExtractor(config); - extractor.extractXYGrid(layerID, 0, BBxLL, BBxUR, BByLL, BByUR, 0, resolution,resolution); - List gridValues = extractor.currentTimeValues; - List> grid3d =extractor.currentTuples; - int time = 0; - //take the association - List enriched = VectorOperations.assignGridValuesToPoints(grid3d, time , gridValues, coordinates4d, resolution); - int k=0; - for (Double value:enriched){ - String[] singlerow =enrichment.get(k); - singlerow[layeridx+elementsfromoccurrences] = ""+value; - k++; + AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Tuples Created. Assigning grid values to the tuples"); + status = 40; + // take the layers matrices + int layeridx = 0; + float statusSteps = 50f / (float) layers.length; + for (String layerID : layers) { + if (layerID.length() == 0) + continue; + AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Assigning layer " + layerID + " Layer enumerator: " + layeridx); + // for each layer + XYExtractor extractor = new XYExtractor(config); + extractor.correctZ(0, layerID,resolution); + double zmin = extractor.zmin; + double zmax = extractor.zmax; + double bestZ = Math.min(Math.abs(zmin), Math.abs(zmax)); + + outputParameters.put("Matching Z value in layer " + (layeridx + 1), "" + bestZ); + outputParameters.put("Min Z value in layer "+ (layeridx + 1), "" + zmin); + outputParameters.put("Max Z value in layer "+ (layeridx + 1), "" + zmax); + + AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Best Z for this reference layer: " + bestZ); + // perform the comparison closest to the surface + extractor.extractXYGrid(layerID, 0, BBxLL, BBxUR, BByLL, BByUR, bestZ, resolution, resolution); + // retrieve the grid time values and tuples + List gridValues = extractor.currentTimeValues; + List> grid3d = extractor.currentTuples; + + String layername = (layersnames.length > (layeridx) && layersnames[layeridx].trim().length() > 0) ? 
+            // perform the comparison closest to the surface
+            extractor.extractXYGrid(layerID, 0, BBxLL, BBxUR, BByLL, BByUR, bestZ, resolution, resolution);
+            // retrieve the grid time values and tuples
+            List<Double> gridValues = extractor.currentTimeValues;
+            List<Tuple<Double>> grid3d = extractor.currentTuples;
+
+            String layername = (layersnames.length > (layeridx) && layersnames[layeridx].trim().length() > 0) ? layersnames[layeridx].trim() : "feature" + (layeridx + 1);
+            AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Retrieved Layer Name: " + layername);
+            columns += ",\"" + layername + "\"";
+            columnsTypes += ",\"" + layername + "\" real";
+
+            AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Assigning grid points to the occurrences");
+            // take the association
+            List<Double> enriched = VectorOperations.assignGridValuesToPoints2D(grid3d, gridValues, coordinates4d, resolution);
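+            // Assumption (reading of the code): assignGridValuesToPoints2D returns one value per
+            // occurrence tuple, matching each point to the nearest grid node within the given resolution.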
+            int k = 0;
+            AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Assigning values to the column " + (elementsFromOccurrences + layeridx));
+            for (Double value : enriched) {
+               String[] singlerow = enrichment.get(k);
+               if (value == null || Double.isNaN(value) || Double.isInfinite(value))
+                  singlerow[elementsFromOccurrences + layeridx] = "-9999";
+               else
+                  singlerow[elementsFromOccurrences + layeridx] = "" + value;
+               k++;
+            }
+            AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Added values to the row");
+            layeridx++;
+            status = status + statusSteps;
         }
-
-         //write the complete association into the db
-         DatabaseFactory.executeSQLQuery(DatabaseUtils.dropTableStatement(outputTableDBName),dbconnection);
-         //TODO: create table
-         DatabaseUtils.insertChunksIntoTable(outputTableDBName, columnsToProduce, enrichment, 5000, dbconnection);
-
-      }catch(Exception e){
-         if (dbconnection!=null)
+
+         // write the complete association into the db
+         AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Dropping table " + outputTableDBName);
+         try {
+            DatabaseFactory.executeSQLUpdate(DatabaseUtils.dropTableStatement(outputTableDBName), dbconnection);
+         } catch (Exception e) {
+            AnalysisLogger.getLogger().debug("OccurrenceEnrichment->cannot drop table, does not exist: " + outputTableDBName);
+         }
+         String createquery = "create table " + outputTableDBName + " (" + columnsTypes + ")";
+         AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Creating table " + outputTableDBName + " query:" + createquery);
+         DatabaseFactory.executeSQLUpdate(createquery, dbconnection);
+         AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Inserting chunks");
+         DatabaseUtils.insertChunksIntoTable(outputTableDBName, columns, enrichment, 5000, dbconnection);
+         AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Whole process complete in " + ((double) (System.currentTimeMillis() - t0) / 1000f) + " s");
+      } catch (Exception e) {
+         e.printStackTrace();
+         throw e;
+      } finally {
+         if (dbconnection != null)
            dbconnection.close();
+         status = 100;
      }
-
-
    }

-
    @Override
    public StatisticalType getOutput() {
      List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
      templateHspec.add(TableTemplates.GENERIC);
-      OutputTable p = new OutputTable(templateHspec, OutputTableLabelParameter, OutputTableDBNameParameter, "Output table");
-      return p;
+      OutputTable p = new OutputTable(templateHspec, outputTableLabel, outputTableDBName, "Output table");
+      LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
+
+      for (String key : outputParameters.keySet()) {
+         String value = outputParameters.get(key);
+         PrimitiveType val = new PrimitiveType(String.class.getName(), "" + value, PrimitiveTypes.STRING, key, key);
+         map.put(key, val);
+      }
+
+      map.put("OutputTable", p);
+      PrimitiveType outputm = new PrimitiveType(HashMap.class.getName(), map, PrimitiveTypes.MAP, "ResultsMap", "Results Map");
+      return outputm;
    }

@@ -237,5 +329,5 @@ public class OccurrenceEnrichment implements Transducerer {
    public String getResources() {
      return ResourceFactory.getResources(100f);
    }
-
+
 }
diff --git a/src/main/java/org/gcube/dataanalysis/geo/algorithms/TimeExtraction.java b/src/main/java/org/gcube/dataanalysis/geo/algorithms/TimeExtraction.java
index 92e3060..35808cc 100644
--- a/src/main/java/org/gcube/dataanalysis/geo/algorithms/TimeExtraction.java
+++ b/src/main/java/org/gcube/dataanalysis/geo/algorithms/TimeExtraction.java
@@ -21,6 +21,7 @@ import org.gcube.dataanalysis.ecoengine.signals.SignalConverter;
 import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
 import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
 import org.gcube.dataanalysis.ecoengine.utils.Tuple;
+import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
 import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
 import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
 
@@ -63,9 +64,9 @@ public class TimeExtraction extends XYExtraction{
 
    IOHelper.addDoubleInput(inputs, resolution, "Extraction point resolution", "0.5");
    IOHelper.addIntegerInput(inputs, samplingFrequency, "Sampling frequency in Hz. Leave it to -1 if unknown or under 1", "-1");
-   IOHelper.addDoubleInput(inputs, minFrequency, "Minimum expected frequency in Hz. Can be decimal.", "-1");
-   IOHelper.addDoubleInput(inputs, maxFrequency, "Maximum expected frequency in Hz. Can be decimal.", "-1");
-   IOHelper.addDoubleInput(inputs, expectedFrequencyError, "Expected precision on periodicity detection in Hz. Can be decimal and depends on the signal length.", "0.1");
+   IOHelper.addDoubleInput(inputs, minFrequency, "Minimum expected frequency in Hz. Can be decimal", "-1");
+   IOHelper.addDoubleInput(inputs, maxFrequency, "Maximum expected frequency in Hz. Can be decimal", "-1");
Default is 0.1", "0.1"); DatabaseType.addDefaultDBPars(inputs); @@ -120,6 +121,14 @@ public class TimeExtraction extends XYExtraction{ AnalysisLogger.getLogger().debug("Extracting Time Series from layer"); TimeSeriesExtractor intersector = new TimeSeriesExtractor(config); long t0 = System.currentTimeMillis(); + //take best z + zValue = intersector.correctZ(zValue, layerNameValue, resolutionValue); + AnalysisLogger.getLogger().debug("TimeExtraction->Best Z for this reference layer: " + zValue); + outputParameters.put("Matching Z value in the layer", ""+zValue); + outputParameters.put("Min Z value in the Layer", ""+intersector.zmin); + outputParameters.put("Max Z value in the Layer", ""+intersector.zmax); + + AnalysisLogger.getLogger().debug("Z allowed to be: "+zValue); signal = intersector.extractT(layerNameValue, xValue,yValue, zValue, resolutionValue); AnalysisLogger.getLogger().debug("ELAPSED TIME: "+(System.currentTimeMillis()-t0)); @@ -188,10 +197,12 @@ public class TimeExtraction extends XYExtraction{ raster.deleteTable(); raster.dumpGeoTable(); - signalimage = SignalProcessing.renderSignalWithGenericTime(signal, timeline, "Signal"); -// spectrogramImage = SignalProcessing.renderSignalSpectrogram(signal, timeline, pd.currentSamplingRate, pd.currentWindowAnalysisSamples, pd.currentWindowShiftSamples); - spectrogramImage = SignalProcessing.renderSignalSpectrogram2(pd.currentspectrum); +// spectrogramImage = SignalProcessing.renderSignalSpectrogram(signal, timeline, pd.currentSamplingRate, pd.currentWindowAnalysisSamples, pd.currentWindowShiftSamples); + if (pd!=null && pd.currentspectrum!=null && pd.currentspectrum.length>0){ + signalimage = SignalProcessing.renderSignalWithGenericTime(signal, timeline, "Signal"); + spectrogramImage = SignalProcessing.renderSignalSpectrogram2(pd.currentspectrum); + } AnalysisLogger.getLogger().debug("Extractor: Map was dumped in table: " + tableNameValue); status = 80; AnalysisLogger.getLogger().debug("Extractor: Elapsed: Whole operation completed in " + ((double) (System.currentTimeMillis() - t0) / 1000d) + "s"); @@ -217,8 +228,10 @@ public class TimeExtraction extends XYExtraction{ map.put("OutputTable", p); if (pd!=null && signal!=null && signal.length>0){ HashMap producedImages = new HashMap(); + if (signalimage!=null) producedImages.put("Time Series Visualization", signalimage); - producedImages.put("Spectrogram", spectrogramImage); + if (spectrogramImage!=null) + producedImages.put("Spectrogram", spectrogramImage); /* try { ImageIO.write(ImageTools.toBufferedImage(signalimage), "png", new File("signal.png")); @@ -239,7 +252,9 @@ public class TimeExtraction extends XYExtraction{ map.put("Images", images); } - + else + map.put("Note", new PrimitiveType(String.class.getName(), "The signal contains only one point. 
      AnalysisLogger.getLogger().debug("Extractor: Map was dumped in table: " + tableNameValue);
      status = 80;
      AnalysisLogger.getLogger().debug("Extractor: Elapsed: Whole operation completed in " + ((double) (System.currentTimeMillis() - t0) / 1000d) + "s");
@@ -217,8 +228,10 @@ public class TimeExtraction extends XYExtraction{
    map.put("OutputTable", p);
    if (pd!=null && signal!=null && signal.length>0){
      HashMap<String, Image> producedImages = new HashMap<String, Image>();
+      if (signalimage!=null)
         producedImages.put("Time Series Visualization", signalimage);
-      producedImages.put("Spectrogram", spectrogramImage);
+      if (spectrogramImage!=null)
+         producedImages.put("Spectrogram", spectrogramImage);
      /*
      try {
         ImageIO.write(ImageTools.toBufferedImage(signalimage), "png", new File("signal.png"));
@@ -239,7 +252,9 @@ public class TimeExtraction extends XYExtraction{
 
      map.put("Images", images);
    }
-
+   else
+      map.put("Note", new PrimitiveType(String.class.getName(), "The signal contains only one point. The charts will not be displayed.", PrimitiveTypes.STRING,"Note","Note about the signal"));
+
    // generate a primitive type for the collection
    PrimitiveType outputm = new PrimitiveType(HashMap.class.getName(), map, PrimitiveTypes.MAP, "ResultsMap", "Results Map");
 
diff --git a/src/main/java/org/gcube/dataanalysis/geo/algorithms/TimeExtractionTable.java b/src/main/java/org/gcube/dataanalysis/geo/algorithms/TimeExtractionTable.java
index 2a8d55f..4a6b4c3 100644
--- a/src/main/java/org/gcube/dataanalysis/geo/algorithms/TimeExtractionTable.java
+++ b/src/main/java/org/gcube/dataanalysis/geo/algorithms/TimeExtractionTable.java
@@ -36,7 +36,7 @@ public class TimeExtractionTable extends TimeExtraction{
    inputs.add(columnx);
    ColumnType columny = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.yDimensionColumnParameter, "The column containing y (latitude) information", "y", false);
    inputs.add(columny);
-   ColumnType columnt = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.timeDimensionColumnParameter, "The column containing y (latitude) information", "y", false);
+   ColumnType columnt = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.timeDimensionColumnParameter, "The column containing time information", "datetime", false);
    inputs.add(columnt);
    ColumnType columnvalue = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.valueDimensionColumnParameter, "A column containing real valued features", "value", false);
    inputs.add(columnvalue);
diff --git a/src/main/java/org/gcube/dataanalysis/geo/algorithms/XYExtraction.java b/src/main/java/org/gcube/dataanalysis/geo/algorithms/XYExtraction.java
index f9ca28a..4aedbc1 100644
--- a/src/main/java/org/gcube/dataanalysis/geo/algorithms/XYExtraction.java
+++ b/src/main/java/org/gcube/dataanalysis/geo/algorithms/XYExtraction.java
@@ -1,6 +1,7 @@
 package org.gcube.dataanalysis.geo.algorithms;
 
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 
@@ -10,11 +11,14 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
 import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
 import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
+import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
 import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
 import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
 import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
 import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
 import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
+import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
 import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
 import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
 
@@ -59,13 +63,13 @@ public class XYExtraction implements Transducerer {
 
    @Override
    public String getDescription() {
" + "It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) " + "and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box."; + return "An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. " + "It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file " + "and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box."; } @Override public List getInputParameters() { - IOHelper.addStringInput(inputs, layerName, "Layer Title or UUID: The title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", ""); + IOHelper.addStringInput(inputs, layerName, "Layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", ""); IOHelper.addDoubleInput(inputs, yLL, "Lower Left Latitute of the Bounding Box", "-60"); IOHelper.addDoubleInput(inputs, xLL, "Lower Left Longitude of the Bounding Box", "-50"); IOHelper.addDoubleInput(inputs, yUR, "Upper Right Latitute of the Bounding Box", "60"); @@ -73,8 +77,8 @@ public class XYExtraction implements Transducerer { IOHelper.addRandomStringInput(inputs, tableName, "The db name of the table to produce", "extr_"); IOHelper.addStringInput(inputs, tableLabel, "The name of the table to produce", "extr_"); - IOHelper.addDoubleInput(inputs, z, "Value of Z. Default is 0, that means processing will be at surface level", "0"); - IOHelper.addIntegerInput(inputs, t, "Time Index. The default is the first", "0"); + IOHelper.addDoubleInput(inputs, z, "Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", "0"); + IOHelper.addIntegerInput(inputs, t, "Time Index. 
+      IOHelper.addIntegerInput(inputs, t, "Time Index. The default is the first time indexed dataset", "0");

      IOHelper.addDoubleInput(inputs, xRes, "Projection resolution on the X axis", "0.5");
      IOHelper.addDoubleInput(inputs, yRes, "Projection resolution on the Y axis", "0.5");

@@ -144,12 +148,20 @@ public class XYExtraction implements Transducerer {
      AnalysisLogger.getLogger().debug("Extractor: MatrixExtractor initialized");
      long t0 = System.currentTimeMillis();
      XYExtractor extractor = new XYExtractor(config);
+      zValue = extractor.correctZ(zValue, layerNameValue, xResValue);
+
+      AnalysisLogger.getLogger().debug("XYExtraction->Best Z for this reference layer: " + zValue);
+
+      outputParameters.put("Matching Z value in the layer", ""+zValue);
+      outputParameters.put("Min Z value in the Layer", ""+extractor.zmin);
+      outputParameters.put("Max Z value in the Layer", ""+extractor.zmax);
+
      double[][] matrix = extractor.extractXYGrid(layerNameValue, time, BBxLL, BBxUR, BByLL, BByUR, zValue, xResValue, yResValue);
      System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
      AnalysisLogger.getLogger().debug("Extractor: Matrix Extracted");
      AnalysisLogger.getLogger().debug("Extractor: ****Rasterizing grid into table****");
      status = 30;
-      RasterTable raster = new RasterTable(BBxLL, BBxUR, BByLL, BByUR, zValue, xResValue, yResValue, matrix, config);
+      RasterTable raster = new RasterTable(BBxLL, BBxUR, BByLL, BByUR, zValue, time, xResValue, yResValue, matrix, config);
      raster.setTablename(tableNameValue);
      raster.deleteTable();
      raster.dumpGeoTable();
@@ -177,7 +189,17 @@ public class XYExtraction implements Transducerer {
    List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
    templateHspec.add(TableTemplates.GENERIC);
    OutputTable p = new OutputTable(templateHspec, tableLabelValue, tableNameValue, "Output table");
-   return p;
+   LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
+
+   for (String key:outputParameters.keySet()){
+      String value = outputParameters.get(key);
+      PrimitiveType val = new PrimitiveType(String.class.getName(), "" + value, PrimitiveTypes.STRING, key, key);
+      map.put(key, val);
+   }
+
+   map.put("OutputTable", p);
+   PrimitiveType outputm = new PrimitiveType(HashMap.class.getName(), map, PrimitiveTypes.MAP, "ResultsMap", "Results Map");
+   return outputm;
    }

    @Override
diff --git a/src/main/java/org/gcube/dataanalysis/geo/algorithms/XYExtractionTable.java b/src/main/java/org/gcube/dataanalysis/geo/algorithms/XYExtractionTable.java
index 308412a..b1aa6ad 100644
--- a/src/main/java/org/gcube/dataanalysis/geo/algorithms/XYExtractionTable.java
+++ b/src/main/java/org/gcube/dataanalysis/geo/algorithms/XYExtractionTable.java
@@ -48,6 +48,8 @@ public class XYExtractionTable extends XYExtraction{
    inputs.add(previnputs.get(4));
    inputs.add(previnputs.get(5));
    inputs.add(previnputs.get(6));
+   inputs.add(previnputs.get(7));
+   inputs.add(previnputs.get(8));
    inputs.add(previnputs.get(9));
    inputs.add(previnputs.get(10));

diff --git a/src/main/java/org/gcube/dataanalysis/geo/algorithms/ZExtraction.java b/src/main/java/org/gcube/dataanalysis/geo/algorithms/ZExtraction.java
index 9ccccd7..dab00c0 100644
--- a/src/main/java/org/gcube/dataanalysis/geo/algorithms/ZExtraction.java
+++ b/src/main/java/org/gcube/dataanalysis/geo/algorithms/ZExtraction.java
@@ -100,6 +100,8 @@ public class ZExtraction extends XYExtraction{
      AnalysisLogger.getLogger().debug("Extracting Time Series from layer");
      ZExtractor extractor = new ZExtractor(config);

+      extractor.correctZ(0, layerNameValue,resolutionValue);
+
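+      // correctZ appears to be invoked here only for its side effect of opening the connector
+      // and computing zmin/zmax for the layer; its clamped return value is discarded.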
      long t0 = System.currentTimeMillis();
      signal = extractor.extractZ(layerNameValue, xValue,yValue, time, resolutionValue);
 
@@ -117,7 +119,7 @@ public class ZExtraction extends XYExtraction{
      double matrix[][] = new double[1][];
      matrix[0] = signal;
 
-      RasterTable raster = new RasterTable(xValue, xValue, yValue, yValue, zValue, resolutionValue, resolutionValue, matrix, config);
+      RasterTable raster = new RasterTable(xValue, xValue, yValue, yValue, zValue, time, resolutionValue, resolutionValue, matrix, config);
 
      int signalRate = 1;
 
@@ -161,6 +163,7 @@ public class ZExtraction extends XYExtraction{
    List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
    templateHspec.add(TableTemplates.GENERIC);
 
+
    OutputTable p = new OutputTable(templateHspec, tableLabelValue, tableNameValue, "Output table");
    map.put("OutputTable", p);
    if (signalimage!=null){
diff --git a/src/main/java/org/gcube/dataanalysis/geo/algorithms/ZExtractionTable.java b/src/main/java/org/gcube/dataanalysis/geo/algorithms/ZExtractionTable.java
index 81d1486..755e4a0 100644
--- a/src/main/java/org/gcube/dataanalysis/geo/algorithms/ZExtractionTable.java
+++ b/src/main/java/org/gcube/dataanalysis/geo/algorithms/ZExtractionTable.java
@@ -36,7 +36,7 @@ public class ZExtractionTable extends ZExtraction{
    inputs.add(columnx);
    ColumnType columny = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.yDimensionColumnParameter, "The column containing y (latitude) information", "y", false);
    inputs.add(columny);
-   ColumnType columnt = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.zDimensionColumnParameter, "The column containing z information", "datetime", false);
+   ColumnType columnt = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.zDimensionColumnParameter, "The column containing z information", "z", false);
    inputs.add(columnt);
    ColumnType columnvalue = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.valueDimensionColumnParameter, "A column containing real valued features", "value", false);
    inputs.add(columnvalue);
diff --git a/src/main/java/org/gcube/dataanalysis/geo/connectors/asc/AscRasterReader.java b/src/main/java/org/gcube/dataanalysis/geo/connectors/asc/AscRasterReader.java
index abb7b5f..54a57db 100644
--- a/src/main/java/org/gcube/dataanalysis/geo/connectors/asc/AscRasterReader.java
+++ b/src/main/java/org/gcube/dataanalysis/geo/connectors/asc/AscRasterReader.java
@@ -11,6 +11,7 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.tools.ant.types.CommandlineJava.SysProperties;
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 
 
 /**
@@ -44,17 +45,19 @@ public class AscRasterReader
      BufferedReader input = null;
      URLConnection urlConn =null;
      if (filename.startsWith("http")){
+         AnalysisLogger.getLogger().debug("Getting file from http");
         URL fileurl = new URL(filename);
         urlConn = fileurl.openConnection();
-         urlConn.setConnectTimeout(60000);
-         urlConn.setReadTimeout(60000);
+         urlConn.setConnectTimeout(120000);
+         urlConn.setReadTimeout(1200000);
         urlConn.setAllowUserInteraction(false);
         urlConn.setDoOutput(true);
         input = new BufferedReader(new InputStreamReader(urlConn.getInputStream()));
      }
-      else
+      else {
+         AnalysisLogger.getLogger().debug("Getting file from local file");
         input = new BufferedReader( new FileReader( filename ) );
-
+      }
      while( input.ready() )
      {
         String line = input.readLine();
diff --git a/src/main/java/org/gcube/dataanalysis/geo/connectors/netcdf/NetCDFDataExplorer.java b/src/main/java/org/gcube/dataanalysis/geo/connectors/netcdf/NetCDFDataExplorer.java
index 8586916..3029238 100644
--- a/src/main/java/org/gcube/dataanalysis/geo/connectors/netcdf/NetCDFDataExplorer.java
+++ b/src/main/java/org/gcube/dataanalysis/geo/connectors/netcdf/NetCDFDataExplorer.java
@@ -98,8 +98,9 @@ public class NetCDFDataExplorer {
      List<GridDatatype> gridTypes = gds.getGrids();
      for (GridDatatype gdt : gridTypes) {
-         AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getName());
-         if (layer.equalsIgnoreCase(gdt.getName())) {
+         AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getName()+" layer to find "+layer);
+         //if the layer is an HTTP link then take the first inner layer
+         if (layer.equalsIgnoreCase(gdt.getName()) || layer.toLowerCase().startsWith("http:")) {
            AnalysisLogger.getLogger().debug("Found layer " + layer + " inside file");
            GridDatatype grid = gds.findGridDatatype(gdt.getName());
            CoordinateAxis zAxis = gdt.getCoordinateSystem().getVerticalAxis();
diff --git a/src/main/java/org/gcube/dataanalysis/geo/connectors/table/Table.java b/src/main/java/org/gcube/dataanalysis/geo/connectors/table/Table.java
index fbb1f9b..13d4a7e 100644
--- a/src/main/java/org/gcube/dataanalysis/geo/connectors/table/Table.java
+++ b/src/main/java/org/gcube/dataanalysis/geo/connectors/table/Table.java
@@ -41,6 +41,7 @@ public class Table implements GISDataConnector {
      throw new Exception("Error in getting elements for time " + time);
    }
 
+   AnalysisLogger.getLogger().debug("Getting elements for time " + time);
    // check z: if there is at least one point inside the z boundary then it is ok
    boolean outsideZ = true;
 
diff --git a/src/main/java/org/gcube/dataanalysis/geo/connectors/table/TableMatrixRepresentation.java b/src/main/java/org/gcube/dataanalysis/geo/connectors/table/TableMatrixRepresentation.java
index 9d37bf7..610a9d1 100644
--- a/src/main/java/org/gcube/dataanalysis/geo/connectors/table/TableMatrixRepresentation.java
+++ b/src/main/java/org/gcube/dataanalysis/geo/connectors/table/TableMatrixRepresentation.java
@@ -46,23 +46,23 @@ public class TableMatrixRepresentation {
    String dbtuple = "";
 
    if (xField != null && xField.trim().length()>0)
-      dbtuple += xField + ",";
+      dbtuple += "\""+xField + "\" as x,";
    else
      dbtuple += "0 as x,";
    if (yField != null && yField.trim().length()>0)
-      dbtuple += yField + ",";
+      dbtuple += "\""+yField + "\" as y,";
    else
      dbtuple += "0 as y,";
    if (zField != null && zField.trim().length()>0)
-      dbtuple += zField + ",";
+      dbtuple += "\""+zField + "\" as z,";
    else
      dbtuple += "0 as z,";
    if (tField != null && tField.trim().length()>0)
-      dbtuple += tField + " as time,";
+      dbtuple += "\""+tField + "\" as time,";
    else
      dbtuple += "0 as time,";
    if (valueField != null && valueField.trim().length()>0)
-      dbtuple += valueField;
+      dbtuple += "\""+valueField+"\" as v";
    else
      dbtuple += "0 as v";
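+   // Double-quoting the identifiers preserves case-sensitive or reserved column names in
+   // PostgreSQL, e.g. a mixed-case column like "decimalLongitude" (hypothetical example).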
 
@@ -73,7 +73,7 @@ public class TableMatrixRepresentation {
 
    //find maxZ
    if (zField!=null && zField.trim().length()>0){
-      String maxzq = "select max("+zField+"),min("+zField+") from "+tableName;
+      String maxzq = "select max("+zField+") as max,min("+zField+") as min from "+tableName;
      Object [] maxzr = (Object [] )DatabaseFactory.executeSQLQuery(maxzq, dbconnection).get(0);
      maxZ = Double.parseDouble(""+maxzr[0]);
      minZ = Double.parseDouble(""+maxzr[1]);
diff --git a/src/main/java/org/gcube/dataanalysis/geo/matrixmodel/MatrixExtractor.java b/src/main/java/org/gcube/dataanalysis/geo/matrixmodel/MatrixExtractor.java
index 14ef69b..a71bf12 100644
--- a/src/main/java/org/gcube/dataanalysis/geo/matrixmodel/MatrixExtractor.java
+++ b/src/main/java/org/gcube/dataanalysis/geo/matrixmodel/MatrixExtractor.java
@@ -1,6 +1,7 @@
 package org.gcube.dataanalysis.geo.matrixmodel;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
@@ -16,6 +17,7 @@ import org.gcube.dataanalysis.geo.connectors.wcs.WCS;
 import org.gcube.dataanalysis.geo.connectors.wfs.WFS;
 import org.gcube.dataanalysis.geo.infrastructure.GeoNetworkInspector;
 import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
+import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
 import org.opengis.metadata.Metadata;
 import org.opengis.metadata.identification.Identification;
 
@@ -26,13 +28,14 @@ public class MatrixExtractor {
    public static int maxSignalLength = 100000;
    public static int maxzLength = 100000;
    boolean log = true;
-   protected double currentResolution=0.5;
+   protected double currentResolution = 0.5;
    public List<Double> currentTimeValues;
    public List<Tuple<Double>> currentTuples;
+
    public MatrixExtractor(AlgorithmConfiguration configuration) {
      gnInspector = new GeoNetworkInspector();
      gnInspector.setScope(configuration.getGcubeScope());
-      this.configuration=configuration;
+      this.configuration = configuration;
    }
 
    public GeoNetworkInspector getFeaturer() {
@@ -49,12 +52,11 @@ public class MatrixExtractor {
    protected List<Double> getRawValuesInTimeInstantAndBoundingBox(String layerTitle, int time, List<Tuple<Double>> coordinates3d, double xL, double xR, double yL, double yR, double resolution) throws Exception {
      return getRawValuesInTimeInstantAndBoundingBox(layerTitle, time, coordinates3d, xL, xR, yL, yR, resolution, false);
    }
-
-
+
    public GISDataConnector currentconnector;
    public String layerName;
    public String layerURL;
-
+
    public GISDataConnector getConnector(String layerTitle, double resolution) throws Exception {
      // get the layer
      Metadata meta = null;
@@ -64,62 +66,87 @@ public class MatrixExtractor {
      else {
         if (isTable()) {
           AnalysisLogger.getLogger().debug("Extracting grid from table " + configuration.getParam(TableMatrixRepresentation.tableNameParameter));
-           connector = new Table(configuration,resolution);
+           connector = new Table(configuration, resolution);
           currentconnector = connector;
-        } else
+        } else {
           meta = gnInspector.getGNInfobyUUIDorName(layerTitle);
-      }
-      // if the layer is good
-      if (meta != null) {
-        layerName = gnInspector.getLayerName(meta);
-        if (layerName == null)
-           layerName = layerTitle;
-        layerURL = "";
-        if (gnInspector.isNetCDFFile(meta)) {
-           Identification id = meta.getIdentificationInfo().iterator().next();
-           String title = id.getCitation().getTitle().toString();
-           AnalysisLogger.getLogger().debug("found a netCDF file with title " + title + " and layer name " + layerName);
-           layerURL = gnInspector.getOpenDapLink(meta);
-           connector = new NetCDF(layerURL, layerName);
-        } else if (gnInspector.isAscFile(meta)) {
-           AnalysisLogger.getLogger().debug("managing ASC File");
-           layerURL = gnInspector.getHttpLink(meta);
-           connector = new ASC();
-        } else if (gnInspector.isWFS(meta)) {
-           AnalysisLogger.getLogger().debug("found a Geo Layer with reference " + layerTitle + " and layer name " + layerName);
-           layerURL = gnInspector.getGeoserverLink(meta);
-           connector = new WFS();
-        } else if (gnInspector.isWCS(meta)) {
-           AnalysisLogger.getLogger().debug("found a WCS Layer with reference " + layerTitle + " and layer name " + layerName);
-           layerURL = gnInspector.getWCSLink(meta);
-           connector = new WCS(configuration,layerURL);
-        } else if (gnInspector.isGeoTiff(meta)) {
-           layerURL = gnInspector.getGeoTiffLink(meta);
-           AnalysisLogger.getLogger().debug("found a GeoTiff with reference " + layerTitle + " and layer name " + layerName);
-           connector = new GeoTiff(configuration,layerURL);
+
+           // if the layer is not on GeoNetwork
+           if (meta == null) {
+              String[] urls = { layerTitle };
+              String[] protocols = { "HTTP" };
+              meta = new GenericLayerMetadata().createBasicMeta(urls, protocols);
+           }
+           layerName = gnInspector.getLayerName(meta);
+           if (layerName == null)
+              layerName = layerTitle;
+           layerURL = "";
+           if (gnInspector.isNetCDFFile(meta)) {
+              Identification id = meta.getIdentificationInfo().iterator().next();
+              String title = id.getCitation().getTitle().toString();
+              AnalysisLogger.getLogger().debug("found a netCDF file with title " + title + " and layer name " + layerName);
+              layerURL = gnInspector.getOpenDapLink(meta);
+              connector = new NetCDF(layerURL, layerName);
+           } else if (gnInspector.isAscFile(meta)) {
+              AnalysisLogger.getLogger().debug("managing ASC File");
+              layerURL = gnInspector.getHttpLink(meta);
+              connector = new ASC();
+           } else if (gnInspector.isWFS(meta)) {
+              AnalysisLogger.getLogger().debug("found a Geo Layer with reference " + layerURL + " and layer name " + layerName);
+              layerURL = gnInspector.getGeoserverLink(meta);
+              connector = new WFS();
+           } else if (gnInspector.isWCS(meta)) {
+              AnalysisLogger.getLogger().debug("found a WCS Layer with reference " + layerURL + " and layer name " + layerName);
+              layerURL = gnInspector.getWCSLink(meta);
+              connector = new WCS(configuration, layerURL);
+           } else if (gnInspector.isGeoTiff(meta)) {
+              layerURL = gnInspector.getGeoTiffLink(meta);
+              AnalysisLogger.getLogger().debug("found a GeoTiff with reference " + layerURL + " and layer name " + layerName);
+              connector = new GeoTiff(configuration, layerURL);
+           } else {
+              // treat as geotiff
+              layerURL = layerTitle;
+              AnalysisLogger.getLogger().debug("guessing a GeoTiff with reference " + layerURL + " and layer name " + layerName);
+              connector = new GeoTiff(configuration, layerURL);
+           }
        }
-        currentconnector = connector;
      }
-
+      currentconnector = connector;
      return currentconnector;
    }
-
-   //4D Extraction
-/**
- * Extract raw values in a time instant according to a set of grid points and a bounding box
- */
+
+   // 4D Extraction
+   /**
+    * Extract raw values in a time instant according to a set of grid points and a bounding box
+    */
    public List<Double> getRawValuesInTimeInstantAndBoundingBox(String layerTitle, int time, List<Tuple<Double>> coordinates3d, double xL, double xR, double yL, double yR, double resolution, boolean saveLayer) throws Exception {
-      GISDataConnector connector = getConnector(layerTitle,resolution);
-      //execute connector
+      GISDataConnector connector = getConnector(layerTitle, resolution);
+      // execute connector
      if (connector != null)
        return connector.getFeaturesInTimeInstantAndArea(layerURL, layerName, time, coordinates3d, xL, xR, yL, yR);
      else
        throw new Exception("ERROR: Connector not found for layer " + layerTitle + " - Cannot Rasterize!");
    }
+
+   public double zmin;
+   public double zmax;
+
+   public double correctZ(double zValue, String layerURL, double resolution) throws Exception{
+      GISDataConnector connector = getConnector(layerURL, resolution);
+      zmin = connector.getMinZ(layerURL, layerName);
+      zmax = connector.getMaxZ(layerURL, layerName);
+      if (zValue < zmin)
+         zValue = zmin;
+      if (zValue > zmax)
+         zValue = zmax;
+
+      return zValue;
+   }
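+   // Usage sketch (hypothetical bounds): for a layer with zmin=-5000 and zmax=0,
+   // correctZ(-10000, url, 0.5) returns -5000, while correctZ(10, url, 0.5) returns 0.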
 
    /**
    * Extract a grid of XY points with fixed time and z
+   *
    * @param layerTitle
    * @param timeInstant
    * @param x1
    * @param x2
    * @param y1
    * @param y2
    * @param z
    * @param xResolution
    * @param yResolution
    * @param cachelayer
    * @return
    * @throws Exception
    */
    public double[][] extractXYGridWithFixedTZ(String layerTitle, int timeInstant, double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution, boolean cachelayer) throws Exception {
-      currentResolution = (double)(xResolution+yResolution)/2d;
-
+      currentResolution = (double) (xResolution + yResolution) / 2d;
+
      boolean faolayer = false;
-      if (layerTitle==null)
-         layerTitle="";
+      if (layerTitle == null)
+         layerTitle = "";
      if (layerTitle.toLowerCase().contains("MatrixExtractor->FAO aquatic species distribution map")) {
        AnalysisLogger.getLogger().debug("MatrixExtractor->FAO DISTRIBUTION LAYER ... TO APPY PATCH!");
        faolayer = true;
      }
@@ -153,12 +180,12 @@ public class MatrixExtractor {
    double[][] slice = new double[ysteps + 1][xsteps + 1];
    List<Tuple<Double>> tuples = new ArrayList<Tuple<Double>>();
-
-   if (log){
+
+   if (log) {
      AnalysisLogger.getLogger().debug("MatrixExtractor->Building the points grid according to YRes:" + yResolution + " and XRes:" + xResolution);
      AnalysisLogger.getLogger().debug("MatrixExtractor->Points to reassign:" + (ysteps * xsteps));
    }
-
+
    // build the tuples according to the desired resolution
    for (int i = 0; i < ysteps + 1; i++) {
      double y = (i * yResolution) + y1;
@@ -187,16 +214,15 @@ public class MatrixExtractor {
    }
    if (log)
      AnalysisLogger.getLogger().debug("Bounding box: (" + x1 + "," + x2 + ";" + y1 + "," + y2 + ")");
-
-// long t0=System.currentTimeMillis();
+
+   // long t0=System.currentTimeMillis();
    currentTimeValues = getRawValuesInTimeInstantAndBoundingBox(layerTitle, timeInstant, tuples, x1, x2, y1, y2, currentResolution, cachelayer);
-   currentTuples=tuples;
-// AnalysisLogger.getLogger().debug("Elapsed:"+(System.currentTimeMillis()-t0));
+   currentTuples = tuples;
+   // AnalysisLogger.getLogger().debug("Elapsed:"+(System.currentTimeMillis()-t0));
+
    if (log)
      AnalysisLogger.getLogger().debug("Taken " + currentTimeValues.size() + " values");
-
    // build back the values matrix
    int k = 0;
    int g = 0;
@@ -224,10 +250,10 @@ public class MatrixExtractor {
    }
 
    // applyNearestNeighbor();
-
+
    if (log)
      AnalysisLogger.getLogger().debug("Features map: rows " + slice.length + ", cols " + slice[0].length);
    return slice;
    }
-
+
 }
diff --git a/src/main/java/org/gcube/dataanalysis/geo/test/infra/TestExtraction.java b/src/main/java/org/gcube/dataanalysis/geo/test/infra/TestExtraction.java
index b315edb..647672d 100644
--- a/src/main/java/org/gcube/dataanalysis/geo/test/infra/TestExtraction.java
+++ b/src/main/java/org/gcube/dataanalysis/geo/test/infra/TestExtraction.java
@@ -12,8 +12,9 @@ import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
 
 public class TestExtraction {
 
-// static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF(),testXYExtractionAquaMaps(),testXYExtractionTable()};
-   static AlgorithmConfiguration[] configs = { testXYExtractionTable()};
+   static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF(),testXYExtractionAquaMaps(),testXYExtractionTable(),testXYExtractionTable2()};
+// static AlgorithmConfiguration[] configs = { testXYExtractionTable2()};
+// static AlgorithmConfiguration[] configs = { testDirectExtraction()};
 
    public static void main(String[] args) throws Exception {
      System.out.println("TEST 1");
@@ -35,7 +36,7 @@ public class TestExtraction {
 
      AlgorithmConfiguration config = new AlgorithmConfiguration();
 
-      config.setAgent("XYEXTRACTION");
+      config.setAgent("XYEXTRACTOR");
      config.setConfigPath("./cfg/");
      config.setPersistencePath("./");
      config.setParam("DatabaseUserName","gcube");
@@ -59,6 +60,34 @@ public class TestExtraction {
      return config;
    }
 
+   private static AlgorithmConfiguration testDirectExtraction() {
+
+      AlgorithmConfiguration config = new AlgorithmConfiguration();
+
+      config.setAgent("XYEXTRACTOR");
+      config.setConfigPath("./cfg/");
+      config.setPersistencePath("./");
+      config.setParam("DatabaseUserName","gcube");
+      config.setParam("DatabasePassword","d4science2");
+      config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
+      config.setParam("DatabaseDriver","org.postgresql.Driver");
+      config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
+
+      config.setParam("Layer","https://dl.dropboxusercontent.com/u/12809149/geoserver-GetCoverage.image.asc");
+      config.setParam("Z","0");
+      config.setParam("TimeIndex","0");
+      config.setParam("BBox_LowerLeftLat","-60");
+      config.setParam("BBox_LowerLeftLong","-50");
+      config.setParam("BBox_UpperRightLat","60");
+      config.setParam("BBox_UpperRightLong","50");
+      config.setParam("XResolution","0.5");
+      config.setParam("YResolution","0.5");
+      config.setParam("OutputTableName","testextraction");
+      config.setParam("OutputTableLabel","testextraction");
+
+      return config;
+   }
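+   // Exercises the new direct-HTTP-link path: "Layer" is a plain URL to an .asc file, so the
+   // connector is guessed from the link instead of being resolved through GeoNetwork.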
diff --git a/src/main/java/org/gcube/dataanalysis/geo/test/infra/TestOccurrenceEnrichment.java b/src/main/java/org/gcube/dataanalysis/geo/test/infra/TestOccurrenceEnrichment.java
new file mode 100644
index 0000000..81dd81d
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/geo/test/infra/TestOccurrenceEnrichment.java
@@ -0,0 +1,71 @@
+package org.gcube.dataanalysis.geo.test.infra;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
+import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
+import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
+import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
+import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
+import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
+import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
+import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
+
+public class TestOccurrenceEnrichment {
+
+	static AlgorithmConfiguration[] configs = { testOccEnrichment()};
+
+	public static void main(String[] args) throws Exception {
+
+		System.out.println("TEST 1");
+
+		for (int i = 0; i < configs.length; i++) {
+			AnalysisLogger.getLogger().debug("Executing: "+configs[i].getAgent());
+			List<ComputationalAgent> trans = null;
+			trans = TransducerersFactory.getTransducerers(configs[i]);
+			trans.get(0).init();
+			Regressor.process(trans.get(0));
+			StatisticalType st = trans.get(0).getOutput();
+			AnalysisLogger.getLogger().debug("ST:" + st);
+			trans = null;
+		}
+	}
+
+	private static AlgorithmConfiguration testOccEnrichment() {
+
+		AlgorithmConfiguration config = new AlgorithmConfiguration();
+
+		config.setAgent("OCCURRENCE_ENRICHMENT");
+		config.setConfigPath("./cfg/");
+		config.setPersistencePath("./");
+
+		config.setParam("DatabaseUserName", "utente");
+		config.setParam("DatabasePassword", "d4science");
+		config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
+		config.setParam("DatabaseDriver", "org.postgresql.Driver");
+		config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
+
+		config.setParam("OccurrenceTable","occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
+		config.setParam("LongitudeColumn","decimallongitude");
+		config.setParam("LatitudeColumn","decimallatitude");
+		config.setParam("ScientificNameColumn","scientificname");
+		config.setParam("TimeColumn","eventdate");
+		config.setParam("OptionalFilter","");
+		config.setParam("Resolution","0.5");
+		config.setParam("OutputTableDBName","testenrichment");
+		config.setParam("OutputTableName","testenrichment");
+		String sep=AlgorithmConfiguration.getListSeparator();
+		config.setParam("Layers","http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridt__ENVIRONMENT_OCEANS_ELEVATION_1366210702774.nc"+sep+"4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");
+//	config.setParam("Layers","8f5d883f-95bf-4b7c-8252-aaf0b2e6fd81"+sep+"4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");
+		config.setParam("FeaturesNames","temperature"+sep+"chlorophyll"+sep+"ph");
+//	config.setParam("Layers","4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");
+//	config.setParam("FeaturesNames","chlorophyll"+sep+"ph");
+		return config;
+	}
+
+}
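Note: multi-valued parameters such as "Layers" and "FeaturesNames" are single strings joined with the configuration's list separator, as the test above does by hand with the "sep" variable. A small helper could make that explicit; the class and method below are a hypothetical sketch, while AlgorithmConfiguration.getListSeparator() is the call used in the patch:

	import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;

	public class ParamLists {
		// joins values with the engine's list separator for multi-valued params
		public static String join(String... items) {
			String sep = AlgorithmConfiguration.getListSeparator();
			StringBuilder sb = new StringBuilder();
			for (int i = 0; i < items.length; i++) {
				if (i > 0)
					sb.append(sep);
				sb.append(items[i]);
			}
			return sb.toString();
		}
	}

For example, config.setParam("FeaturesNames", ParamLists.join("temperature", "chlorophyll", "ph")) is equivalent to the concatenation above.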
diff --git a/src/main/java/org/gcube/dataanalysis/geo/test/infra/TestTimeExtraction.java b/src/main/java/org/gcube/dataanalysis/geo/test/infra/TestTimeExtraction.java
index 1476d48..b764fff 100644
--- a/src/main/java/org/gcube/dataanalysis/geo/test/infra/TestTimeExtraction.java
+++ b/src/main/java/org/gcube/dataanalysis/geo/test/infra/TestTimeExtraction.java
@@ -13,8 +13,8 @@ import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
 
 public class TestTimeExtraction {
 
-	static AlgorithmConfiguration[] configs = { testTimeExtractionTable(), testTimeExtractionNetCDF(),testTimeExtractionTable2()};
-
+//	static AlgorithmConfiguration[] configs = { testTimeExtractionTable(), testTimeExtractionNetCDF(),testTimeExtractionTable2()};
+	static AlgorithmConfiguration[] configs = { testTimeExtractionNetCDF()};
 	public static void main(String[] args) throws Exception {
 		System.out.println("TEST 1");
@@ -43,11 +43,13 @@ public class TestTimeExtraction {
 		config.setParam("DatabasePassword","d4science2");
 		config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
 		config.setParam("DatabaseDriver","org.postgresql.Driver");
-		config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
+//		config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
+		config.setGcubeScope("/gcube/devsec/devVRE");
+//		config.setParam("Layer","c565e32c-c5b3-4964-b44f-06dc620563e9");
 
-		config.setParam("Layer","c565e32c-c5b3-4964-b44f-06dc620563e9");
 //		config.setParam("Layer","dffa504b-dbc8-4553-896e-002549f8f5d3");
-
+		//wind
+		config.setParam("Layer","21715b2e-28de-4646-acce-d4f16b59d6d0");
 		config.setParam("OutputTableName","testtimeextraction");
 		config.setParam("OutputTableLabel","testtimeextraction");
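Note: the change above hard-codes the switch between the production VRE and /gcube/devsec/devVRE by commenting lines in and out. A hypothetical convenience (not in the patch; the property name "test.scope" is an assumption) would let the same test target either scope without edits:

	import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;

	public class Scopes {
		// picks the gCube scope from a system property, defaulting to devVRE
		public static void apply(AlgorithmConfiguration config) {
			String scope = System.getProperty("test.scope", "/gcube/devsec/devVRE");
			config.setGcubeScope(scope);
		}
	}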
diff --git a/src/main/java/org/gcube/dataanalysis/geo/test/regression/RegressionOccurrenceEnrichment.java b/src/main/java/org/gcube/dataanalysis/geo/test/regression/RegressionOccurrenceEnrichment.java
new file mode 100644
index 0000000..3817051
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/geo/test/regression/RegressionOccurrenceEnrichment.java
@@ -0,0 +1,70 @@
+package org.gcube.dataanalysis.geo.test.regression;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
+import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
+import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
+import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
+import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
+import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
+import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
+import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
+
+public class RegressionOccurrenceEnrichment {
+
+	static AlgorithmConfiguration[] configs = { testOccEnrichment()};
+
+	public static void main(String[] args) throws Exception {
+
+		System.out.println("TEST 1");
+
+		for (int i = 0; i < configs.length; i++) {
+			AnalysisLogger.getLogger().debug("Executing: "+configs[i].getAgent());
+			List<ComputationalAgent> trans = null;
+			trans = TransducerersFactory.getTransducerers(configs[i]);
+			trans.get(0).init();
+			Regressor.process(trans.get(0));
+			StatisticalType st = trans.get(0).getOutput();
+			AnalysisLogger.getLogger().debug("ST:" + st);
+			trans = null;
+		}
+	}
+
+	private static AlgorithmConfiguration testOccEnrichment() {
+
+		AlgorithmConfiguration config = new AlgorithmConfiguration();
+
+		config.setAgent("OCCURRENCE_ENRICHMENT");
+		config.setConfigPath("./cfg/");
+		config.setPersistencePath("./");
+
+		config.setParam("DatabaseUserName", "utente");
+		config.setParam("DatabasePassword", "d4science");
+		config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
+		config.setParam("DatabaseDriver", "org.postgresql.Driver");
+		config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
+
+		config.setParam("OccurrenceTable","occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
+		config.setParam("LongitudeColumn","decimallongitude");
+		config.setParam("LatitudeColumn","decimallatitude");
+		config.setParam("ScientificNameColumn","scientificname");
+		config.setParam("TimeColumn","eventdate");
+		config.setParam("OptionalFilter","");
+		config.setParam("Resolution","0.5");
+		config.setParam("OutputTableDBName","testenrichment");
+		config.setParam("OutputTableName","testenrichment");
+		String sep=AlgorithmConfiguration.getListSeparator();
+		config.setParam("Layers","8f5d883f-95bf-4b7c-8252-aaf0b2e6fd81"+sep+"4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");
+		config.setParam("FeaturesNames","temperature"+sep+"chlorophyll"+sep+"ph");
+//	config.setParam("Layers","4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");
+//	config.setParam("FeaturesNames","chlorophyll"+sep+"ph");
+		return config;
+	}
+
+}
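Note: the class above repeats, verbatim, the runner loop of TestOccurrenceEnrichment. A shared harness would remove the duplication; the sketch below (hypothetical class name, assuming the factory returns at least one transducer) uses only calls that appear in both classes:

	import java.util.List;

	import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
	import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
	import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
	import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
	import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
	import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;

	public class TransducerHarness {
		// init -> process -> getOutput cycle shared by both test classes
		public static void run(AlgorithmConfiguration config) throws Exception {
			AnalysisLogger.getLogger().debug("Executing: " + config.getAgent());
			List<ComputationalAgent> trans = TransducerersFactory.getTransducerers(config);
			trans.get(0).init();
			Regressor.process(trans.get(0));
			StatisticalType st = trans.get(0).getOutput();
			AnalysisLogger.getLogger().debug("ST:" + st);
		}
	}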
grid:("+x+","+y+","+z+","+gridTimeInstant+") and point:("+rx+","+ry+","+rz+","+rt+") is "+d); valuesForPoints.set(points, gridValues.get(gridIdx));