Gianpaolo Coro 2014-03-06 11:56:54 +00:00
parent cf54369930
commit 9b76e5e8a6
17 changed files with 608 additions and 204 deletions

View File

@ -1,6 +1,8 @@
package org.gcube.dataanalysis.geo.algorithms;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
@ -11,6 +13,7 @@ import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
@ -21,6 +24,7 @@ import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
import org.gcube.dataanalysis.geo.utils.VectorOperations;
import org.hibernate.SessionFactory;
@ -31,33 +35,33 @@ public class OccurrenceEnrichment implements Transducerer {
static String LongitudeColumn = "LongitudeColumn";
static String LatitudeColumn = "LatitudeColumn";
static String ScientificNameColumn = "ScientificNameColumn";
static String TimeColumn = "TimeColumn";
static String OutputTableLabelParameter = "OutputTableName";
static String OutputTableDBNameParameter = "OutputTableDBName";
static String FilterParameter = "OptionalFilter";
static String Resolution = "Resolution";
static String Layers = "Layers";
static String LayersNames = "FeaturesNames";
static String yLL = "BBox_LowerLeftLat";
static String xLL = "BBox_LowerLeftLong";
static String yUR = "BBox_UpperRightLat";
static String xUR = "BBox_UpperRightLong";
AlgorithmConfiguration config;
float status;
private String[] layers;
private String[] layersnames;
private String occurrencesTableName;
private String longitudeColumn;
private String latitudeColumn;
private String scientificnameColumn;
private String timeColumn;
private String filter;
private float resolution;
private String outputTableLabel;
private String outputTableDBName;
private double BBxLL;
private double BByLL;
private double BBxUR;
private double BByUR;
public LinkedHashMap<String, String> outputParameters = new LinkedHashMap<String, String>();
@Override
public List<StatisticalType> getInputParameters() {
@ -65,26 +69,26 @@ public class OccurrenceEnrichment implements Transducerer {
List<StatisticalType> inputs = new ArrayList<StatisticalType>();
List<TableTemplates> template = new ArrayList<TableTemplates>();
template.add(TableTemplates.OCCURRENCE_SPECIES);
InputTable table = new InputTable(template,OccurrencesTableNameParameter ,"A geospatial table containing at least x,y information","");
InputTable table = new InputTable(template, OccurrencesTableNameParameter, "A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets", "");
inputs.add(table);
ColumnType p1 = new ColumnType(OccurrencesTableNameParameter , LongitudeColumn, "column with longitude values", "decimallongitude", false);
ColumnType p1 = new ColumnType(OccurrencesTableNameParameter, LongitudeColumn, "The column containing longitude values", "decimallongitude", false);
inputs.add(p1);
ColumnType p2 = new ColumnType(OccurrencesTableNameParameter , LatitudeColumn, "column with latitude values", "decimallatitude", false);
ColumnType p2 = new ColumnType(OccurrencesTableNameParameter, LatitudeColumn, "The column containing latitude values", "decimallatitude", false);
inputs.add(p2);
ColumnType p3 = new ColumnType(OccurrencesTableNameParameter , ScientificNameColumn, "column with Scientific Names", "scientificname", false);
ColumnType p3 = new ColumnType(OccurrencesTableNameParameter, ScientificNameColumn, "The column containing Scientific Names", "scientificname", false);
inputs.add(p3);
ColumnType p4 = new ColumnType(OccurrencesTableNameParameter, TimeColumn, "The column containing time information", "eventdate", false);
inputs.add(p4);
IOHelper.addStringInput(inputs, FilterParameter, "A filter on one of the columns (e.g. basisofrecord='HumanObservation')", " ");
IOHelper.addDoubleInput(inputs, Resolution, "The spatial resolution of the association between observations and environmental features.", "0.5");
IOHelper.addRandomStringInput(inputs, OutputTableDBNameParameter, "The db name of the table to produce", "enrich_");
IOHelper.addStringInput(inputs, FilterParameter, "A filter on one of the columns (e.g. basisofrecord='HumanObservation'). Optional", " ");
IOHelper.addDoubleInput(inputs, Resolution, "The spatial resolution in degrees of the association between observations and environmental features", "0.5");
IOHelper.addRandomStringInput(inputs, OutputTableDBNameParameter, "The database name of the table to produce", "enrich_");
IOHelper.addStringInput(inputs, OutputTableLabelParameter, "The name of the output table", "enrich_");
PrimitiveTypesList listEnvLayers = new PrimitiveTypesList(String.class.getName(), PrimitiveTypes.STRING, Layers, "The list of environmental layers to use for enriching the points. Each entry is a Layer Title or UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", false);
PrimitiveTypesList listEnvLayers = new PrimitiveTypesList(String.class.getName(), PrimitiveTypes.STRING, Layers, "The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff )", false);
inputs.add(listEnvLayers);
IOHelper.addDoubleInput(inputs, xLL, "Lower Left Longitude of the Bounding Box", "-180");
IOHelper.addDoubleInput(inputs, yLL, "Lower Left Latitude of the Bounding Box", "-90");
IOHelper.addDoubleInput(inputs, xUR, "Upper Right Longitude of the Bounding Box", "180");
IOHelper.addDoubleInput(inputs, yUR, "Upper Right Latitude of the Bounding Box", "90");
PrimitiveTypesList listEnvLayersNames = new PrimitiveTypesList(String.class.getName(), PrimitiveTypes.STRING, LayersNames, "The list of names for the columns corresponding to the environmental layers. These will be the column names of the resulting table", false);
inputs.add(listEnvLayersNames);
DatabaseType.addDefaultDBPars(inputs);
return inputs;
@ -92,12 +96,17 @@ public class OccurrenceEnrichment implements Transducerer {
protected void getParameters() {
layers = IOHelper.getInputParameter(config, Layers).split(AlgorithmConfiguration.getListSeparator());
AnalysisLogger.getLogger().debug("Layers to take " + layers.length);
layers = IOHelper.getInputParameter(config, Layers).trim().split(AlgorithmConfiguration.getListSeparator());
String layernamesS = IOHelper.getInputParameter(config, LayersNames);
if (layernamesS == null)
layernamesS = "";
layersnames = layernamesS.split(AlgorithmConfiguration.getListSeparator());
AnalysisLogger.getLogger().debug("N. of Layers to take " + layers.length);
occurrencesTableName = IOHelper.getInputParameter(config, OccurrencesTableNameParameter);
longitudeColumn = IOHelper.getInputParameter(config, LongitudeColumn);
latitudeColumn = IOHelper.getInputParameter(config, LatitudeColumn);
scientificnameColumn = IOHelper.getInputParameter(config, ScientificNameColumn);
timeColumn = IOHelper.getInputParameter(config, TimeColumn);
filter = IOHelper.getInputParameter(config, FilterParameter);
if (filter == null)
filter = "";
@ -106,10 +115,18 @@ public class OccurrenceEnrichment implements Transducerer {
resolution = IOHelper.getInputParameter(config, Resolution) == null ? 0.5f : Float.parseFloat(IOHelper.getInputParameter(config, Resolution));
outputTableLabel = IOHelper.getInputParameter(config, OutputTableLabelParameter);
outputTableDBName = IOHelper.getInputParameter(config, OutputTableDBNameParameter);
BBxLL=Double.parseDouble(IOHelper.getInputParameter(config, xLL));
BByLL=Double.parseDouble(IOHelper.getInputParameter(config, yLL));
BBxUR=Double.parseDouble(IOHelper.getInputParameter(config, xUR));
BByUR=Double.parseDouble(IOHelper.getInputParameter(config, yUR));
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->layers: " + layers);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->layers names: " + layersnames);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->occurrencesTableName: " + occurrencesTableName);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->longitudeColumn: " + longitudeColumn);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->latitudeColumn: " + latitudeColumn);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->scientificnameColumn: " + scientificnameColumn);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->timeColumn: " + timeColumn);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->filter: " + filter);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->resolution: " + resolution);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->outputTableLabel: " + outputTableLabel);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->outputTableDBName: " + outputTableDBName);
String scope = config.getGcubeScope();
AnalysisLogger.getLogger().debug("Extraction: Externally set scope " + scope);
@ -120,6 +137,7 @@ public class OccurrenceEnrichment implements Transducerer {
}
}
@Override
public void init() throws Exception {
AnalysisLogger.getLogger().debug("Occurrence Enrichment Initialization");
@ -132,83 +150,157 @@ public class OccurrenceEnrichment implements Transducerer {
@Override
public String getDescription() {
return "An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layers from the e-infrastructure GeoNetwork (through the GeoExplorer application). Produces one table reporting the set of environmental values associated to the occurrence points.";
return "An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layers, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points.";
}
@Override
public void compute() throws Exception {
//TODO: report times
SessionFactory dbconnection = null;
try {
long t0 = System.currentTimeMillis();
status = 10;
getParameters();
dbconnection = DatabaseUtils.initDBSession(config);
String columns = longitudeColumn+","+latitudeColumn+","+scientificnameColumn;
String columnsToProduce = longitudeColumn+","+latitudeColumn+","+scientificnameColumn;
String columns = longitudeColumn + "," + latitudeColumn + "," + scientificnameColumn + "," + timeColumn;
// (id serial, csquarecode character varying, x real, y real, z real, t real, fvalue real)
String columnsTypes = "id serial, " + longitudeColumn + " real," + latitudeColumn + " real," + scientificnameColumn + " character varying," + timeColumn + " timestamp without time zone";
// take min_max lat
String query = "select min(" + longitudeColumn + ") as minlong, max(" + longitudeColumn + ") as maxlong,min(" + latitudeColumn + ") as minlat,max(" + latitudeColumn + ") as maxlat from " + occurrencesTableName;
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Retrieving bounding box: " + query);
List<Object> minmaxlonglat = (List<Object>) DatabaseFactory.executeSQLQuery(query, dbconnection);
if (minmaxlonglat == null || minmaxlonglat.size() == 0)
throw new Exception("Could not find min and max for occurrence data");
status = 20;
Object[] minmaxLongLat = (Object[]) minmaxlonglat.get(0);
double BBxLL = Double.parseDouble("" + minmaxLongLat[0]);
double BBxUR = Double.parseDouble("" + minmaxLongLat[1]);
double BByLL = Double.parseDouble("" + minmaxLongLat[2]);
double BByUR = Double.parseDouble("" + minmaxLongLat[3]);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Calculated Bounding Box: [" + BBxLL + "," + BByLL + ";" + BBxUR + "," + BByUR + "]");
// take the occurrence points
List<Object> rows = (List<Object>) DatabaseFactory.executeSQLQuery(DatabaseUtils.getDinstictElements(occurrencesTableName, columns, filter), dbconnection);
if (rows == null || rows.size() == 0)
throw new Exception("Could not find occurrence data");
int rowsize = rows.size();
status = 30;
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Rows Retrieved");
List<Tuple<Double>> coordinates4d = new ArrayList<Tuple<Double>>();
List<String[]> enrichment = new ArrayList<String[]>();
int elementstoreport = 4;
int elementsfromoccurrences = 3;
int elementsFromOccurrences = 4;
int elementstoreport = elementsFromOccurrences + layers.length;
for (Object row : rows) {
Object[] elements = (Object[]) row;
double x = elements[0] == null ? 0 : Double.parseDouble("" + elements[0]);
double y = elements[1] == null ? 0 : Double.parseDouble("" + elements[1]);
String species = elements[2] == null ? "" : "" + elements[2];
Tuple<Double> el = new Tuple<Double>(x,y);
String time = elements[3] == null ? "NULL" : "" + elements[3];
Tuple<Double> el = new Tuple<Double>(x, y, 0d, 0d);
coordinates4d.add(el);
String[] singlerow = new String[elementstoreport];
singlerow[0] = "" + x;
singlerow[1] = "" + y;
singlerow[2]=""+species;
singlerow[2] = species;
singlerow[3] = time;
enrichment.add(singlerow);
}
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Tuples Created. Assigning grid values to the tuples");
status = 40;
// take the layers matrices
int layeridx = 0;
float statusSteps = 50f / (float) layers.length;
for (String layerID : layers) {
if (layerID.length() == 0)
continue;
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Assigning layer " + layerID + " Layer enumerator: " + layeridx);
// for each layer
XYExtractor extractor = new XYExtractor(config);
extractor.extractXYGrid(layerID, 0, BBxLL, BBxUR, BByLL, BByUR, 0, resolution,resolution);
extractor.correctZ(0, layerID,resolution);
double zmin = extractor.zmin;
double zmax = extractor.zmax;
double bestZ = Math.min(Math.abs(zmin), Math.abs(zmax));
outputParameters.put("Matching Z value in layer " + (layeridx + 1), "" + bestZ);
outputParameters.put("Min Z value in layer "+ (layeridx + 1), "" + zmin);
outputParameters.put("Max Z value in layer "+ (layeridx + 1), "" + zmax);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Best Z for this reference layer: " + bestZ);
// perform the comparison closest to the surface
extractor.extractXYGrid(layerID, 0, BBxLL, BBxUR, BByLL, BByUR, bestZ, resolution, resolution);
// retrieve the grid time values and tuples
List<Double> gridValues = extractor.currentTimeValues;
List<Tuple<Double>> grid3d = extractor.currentTuples;
int time = 0;
String layername = (layersnames.length > (layeridx) && layersnames[layeridx].trim().length() > 0) ? layersnames[layeridx].trim() : "feature" + (layeridx + 1);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Retrieved Layer Name: " + layername);
columns += ",\"" + layername + "\"";
columnsTypes += ",\"" + layername + "\" real";
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Assigning grid points to the occurrences");
// take the association
List<Double> enriched = VectorOperations.assignGridValuesToPoints(grid3d, time , gridValues, coordinates4d, resolution);
List<Double> enriched = VectorOperations.assignGridValuesToPoints2D(grid3d, gridValues, coordinates4d, resolution);
int k = 0;
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Assigning values to the column " + (elementsFromOccurrences + layeridx));
for (Double value : enriched) {
String[] singlerow = enrichment.get(k);
singlerow[layeridx+elementsfromoccurrences] = ""+value;
if (value == null || Double.isNaN(value) || Double.isInfinite(value))
singlerow[elementsFromOccurrences + layeridx] = "-9999";
else
singlerow[elementsFromOccurrences + layeridx] = "" + value;
k++;
}
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Added values to the row");
layeridx++;
status = status + statusSteps;
}
// write the complete association into the db
DatabaseFactory.executeSQLQuery(DatabaseUtils.dropTableStatement(outputTableDBName),dbconnection);
//TODO: create table
DatabaseUtils.insertChunksIntoTable(outputTableDBName, columnsToProduce, enrichment, 5000, dbconnection);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Dropping table " + outputTableDBName);
try {
DatabaseFactory.executeSQLUpdate(DatabaseUtils.dropTableStatement(outputTableDBName), dbconnection);
} catch (Exception e) {
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->cannot drop table, does not exist: " + outputTableDBName);
}
String createquery = "create table " + outputTableDBName + " (" + columnsTypes + ")";
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Creating table " + outputTableDBName + " query:" + createquery);
DatabaseFactory.executeSQLUpdate(createquery, dbconnection);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Inserting chunks");
DatabaseUtils.insertChunksIntoTable(outputTableDBName, columns, enrichment, 5000, dbconnection);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Whole process complete in " + ((double) (System.currentTimeMillis() - t0) / 1000f) + " s");
} catch (Exception e) {
e.printStackTrace();
throw e;
} finally {
if (dbconnection != null)
dbconnection.close();
status = 100;
}
}
@Override
public StatisticalType getOutput() {
List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
templateHspec.add(TableTemplates.GENERIC);
OutputTable p = new OutputTable(templateHspec, OutputTableLabelParameter, OutputTableDBNameParameter, "Output table");
return p;
OutputTable p = new OutputTable(templateHspec, outputTableLabel, outputTableDBName, "Output table");
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
for (String key : outputParameters.keySet()) {
String value = outputParameters.get(key);
PrimitiveType val = new PrimitiveType(String.class.getName(), "" + value, PrimitiveTypes.STRING, key, key);
map.put(key, val);
}
map.put("OutputTable", p);
PrimitiveType outputm = new PrimitiveType(HashMap.class.getName(), map, PrimitiveTypes.MAP, "ResultsMap", "Results Map");
return outputm;
}
@Override

View File

@ -21,6 +21,7 @@ import org.gcube.dataanalysis.ecoengine.signals.SignalConverter;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
@ -63,9 +64,9 @@ public class TimeExtraction extends XYExtraction{
IOHelper.addDoubleInput(inputs, resolution, "Extraction point resolution", "0.5");
IOHelper.addIntegerInput(inputs, samplingFrequency, "Sampling frequency in Hz. Leave it to -1 if unknown or under 1", "-1");
IOHelper.addDoubleInput(inputs, minFrequency, "Minimum expected frequency in Hz. Can be decimal.", "-1");
IOHelper.addDoubleInput(inputs, maxFrequency, "Maximum expected frequency in Hz. Can be decimal.", "-1");
IOHelper.addDoubleInput(inputs, expectedFrequencyError, "Expected precision on periodicity detection in Hz. Can be decimal and depends on the signal length.", "0.1");
IOHelper.addDoubleInput(inputs, minFrequency, "Minimum expected frequency in Hz. Can be decimal", "-1");
IOHelper.addDoubleInput(inputs, maxFrequency, "Maximum expected frequency in Hz. Can be decimal", "-1");
IOHelper.addDoubleInput(inputs, expectedFrequencyError, "Expected precision on periodicity detection in Hz or 1/samples. Can be decimal and depends on the signal length. Default is 0.1", "0.1");
DatabaseType.addDefaultDBPars(inputs);
@ -120,6 +121,14 @@ public class TimeExtraction extends XYExtraction{
AnalysisLogger.getLogger().debug("Extracting Time Series from layer");
TimeSeriesExtractor intersector = new TimeSeriesExtractor(config);
long t0 = System.currentTimeMillis();
//take best z
zValue = intersector.correctZ(zValue, layerNameValue, resolutionValue);
AnalysisLogger.getLogger().debug("TimeExtraction->Best Z for this reference layer: " + zValue);
outputParameters.put("Matching Z value in the layer", ""+zValue);
outputParameters.put("Min Z value in the Layer", ""+intersector.zmin);
outputParameters.put("Max Z value in the Layer", ""+intersector.zmax);
AnalysisLogger.getLogger().debug("Z allowed to be: "+zValue);
signal = intersector.extractT(layerNameValue, xValue,yValue, zValue, resolutionValue);
AnalysisLogger.getLogger().debug("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
@ -188,10 +197,12 @@ public class TimeExtraction extends XYExtraction{
raster.deleteTable();
raster.dumpGeoTable();
signalimage = SignalProcessing.renderSignalWithGenericTime(signal, timeline, "Signal");
// spectrogramImage = SignalProcessing.renderSignalSpectrogram(signal, timeline, pd.currentSamplingRate, pd.currentWindowAnalysisSamples, pd.currentWindowShiftSamples);
spectrogramImage = SignalProcessing.renderSignalSpectrogram2(pd.currentspectrum);
// spectrogramImage = SignalProcessing.renderSignalSpectrogram(signal, timeline, pd.currentSamplingRate, pd.currentWindowAnalysisSamples, pd.currentWindowShiftSamples);
if (pd!=null && pd.currentspectrum!=null && pd.currentspectrum.length>0){
signalimage = SignalProcessing.renderSignalWithGenericTime(signal, timeline, "Signal");
spectrogramImage = SignalProcessing.renderSignalSpectrogram2(pd.currentspectrum);
}
AnalysisLogger.getLogger().debug("Extractor: Map was dumped in table: " + tableNameValue);
status = 80;
AnalysisLogger.getLogger().debug("Extractor: Elapsed: Whole operation completed in " + ((double) (System.currentTimeMillis() - t0) / 1000d) + "s");
@ -217,7 +228,9 @@ public class TimeExtraction extends XYExtraction{
map.put("OutputTable", p);
if (pd!=null && signal!=null && signal.length>0){
HashMap<String, Image> producedImages = new HashMap<String, Image>();
if (signalimage!=null)
producedImages.put("Time Series Visualization", signalimage);
if (spectrogramImage!=null)
producedImages.put("Spectrogram", spectrogramImage);
/*
try {
@ -239,6 +252,8 @@ public class TimeExtraction extends XYExtraction{
map.put("Images", images);
}
else
map.put("Note", new PrimitiveType(String.class.getName(), "The signal contains only one point. The charts will not be displayed.", PrimitiveTypes.STRING,"Note","Note about the signal"));
// generate a primitive type for the collection
PrimitiveType outputm = new PrimitiveType(HashMap.class.getName(), map, PrimitiveTypes.MAP, "ResultsMap", "Results Map");

View File

@ -36,7 +36,7 @@ public class TimeExtractionTable extends TimeExtraction{
inputs.add(columnx);
ColumnType columny = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.yDimensionColumnParameter, "The column containing y (latitude) information", "y", false);
inputs.add(columny);
ColumnType columnt = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.timeDimensionColumnParameter, "The column containing y (latitude) information", "y", false);
ColumnType columnt = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.timeDimensionColumnParameter, "The column containing time information", "datetime", false);
inputs.add(columnt);
ColumnType columnvalue = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.valueDimensionColumnParameter, "A column containing real valued features", "value", false);
inputs.add(columnvalue);

View File

@ -1,6 +1,7 @@
package org.gcube.dataanalysis.geo.algorithms;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
@ -10,11 +11,14 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
@ -59,13 +63,13 @@ public class XYExtraction implements Transducerer {
@Override
public String getDescription() {
return "An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. " + "It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) " + "and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box.";
return "An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. " + "It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file " + "and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box.";
}
@Override
public List<StatisticalType> getInputParameters() {
IOHelper.addStringInput(inputs, layerName, "Layer Title or UUID: The title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", "");
IOHelper.addStringInput(inputs, layerName, "Layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", "");
IOHelper.addDoubleInput(inputs, yLL, "Lower Left Latitude of the Bounding Box", "-60");
IOHelper.addDoubleInput(inputs, xLL, "Lower Left Longitude of the Bounding Box", "-50");
IOHelper.addDoubleInput(inputs, yUR, "Upper Right Latitude of the Bounding Box", "60");
@ -73,8 +77,8 @@ public class XYExtraction implements Transducerer {
IOHelper.addRandomStringInput(inputs, tableName, "The db name of the table to produce", "extr_");
IOHelper.addStringInput(inputs, tableLabel, "The name of the table to produce", "extr_");
IOHelper.addDoubleInput(inputs, z, "Value of Z. Default is 0, that means processing will be at surface level", "0");
IOHelper.addIntegerInput(inputs, t, "Time Index. The default is the first", "0");
IOHelper.addDoubleInput(inputs, z, "Value of Z. Default is 0, that means processing will be at surface level or at the first available Z value in the layer", "0");
IOHelper.addIntegerInput(inputs, t, "Time Index. The default is the first time indexed dataset", "0");
IOHelper.addDoubleInput(inputs, xRes, "Projection resolution on the X axis", "0.5");
IOHelper.addDoubleInput(inputs, yRes, "Projection resolution on the Y axis", "0.5");
@ -144,12 +148,20 @@ public class XYExtraction implements Transducerer {
AnalysisLogger.getLogger().debug("Extractor: MatrixExtractor initialized");
long t0 = System.currentTimeMillis();
XYExtractor extractor = new XYExtractor(config);
zValue = extractor.correctZ(zValue, layerNameValue, xResValue);
AnalysisLogger.getLogger().debug("XYExtraction->Best Z for this reference layer: " + zValue);
outputParameters.put("Matching Z value in the layer", ""+zValue);
outputParameters.put("Min Z value in the Layer", ""+extractor.zmin);
outputParameters.put("Max Z value in the Layer", ""+extractor.zmax);
double[][] matrix = extractor.extractXYGrid(layerNameValue, time, BBxLL, BBxUR, BByLL, BByUR, zValue, xResValue, yResValue);
System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
AnalysisLogger.getLogger().debug("Extractor: Matrix Extracted");
AnalysisLogger.getLogger().debug("Extractor: ****Rasterizing grid into table****");
status = 30;
RasterTable raster = new RasterTable(BBxLL, BBxUR, BByLL, BByUR, zValue, xResValue, yResValue, matrix, config);
RasterTable raster = new RasterTable(BBxLL, BBxUR, BByLL, BByUR, zValue, time, xResValue, yResValue, matrix, config);
raster.setTablename(tableNameValue);
raster.deleteTable();
raster.dumpGeoTable();
@ -177,7 +189,17 @@ public class XYExtraction implements Transducerer {
List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
templateHspec.add(TableTemplates.GENERIC);
OutputTable p = new OutputTable(templateHspec, tableLabelValue, tableNameValue, "Output table");
return p;
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
for (String key:outputParameters.keySet()){
String value = outputParameters.get(key);
PrimitiveType val = new PrimitiveType(String.class.getName(), "" + value, PrimitiveTypes.STRING, key, key);
map.put(key, val);
}
map.put("OutputTable", p);
PrimitiveType outputm = new PrimitiveType(HashMap.class.getName(), map, PrimitiveTypes.MAP, "ResultsMap", "Results Map");
return outputm;
}
@Override

View File

@ -48,6 +48,8 @@ public class XYExtractionTable extends XYExtraction{
inputs.add(previnputs.get(4));
inputs.add(previnputs.get(5));
inputs.add(previnputs.get(6));
inputs.add(previnputs.get(7));
inputs.add(previnputs.get(8));
inputs.add(previnputs.get(9));
inputs.add(previnputs.get(10));

View File

@ -100,6 +100,8 @@ public class ZExtraction extends XYExtraction{
AnalysisLogger.getLogger().debug("Extracting Time Series from layer");
ZExtractor extractor = new ZExtractor(config);
extractor.correctZ(0, layerNameValue,resolutionValue);
long t0 = System.currentTimeMillis();
signal = extractor.extractZ(layerNameValue, xValue,yValue, time, resolutionValue);
@ -117,7 +119,7 @@ public class ZExtraction extends XYExtraction{
double matrix[][] = new double[1][];
matrix[0] = signal;
RasterTable raster = new RasterTable(xValue, xValue, yValue, yValue, zValue, resolutionValue, resolutionValue, matrix, config);
RasterTable raster = new RasterTable(xValue, xValue, yValue, yValue, zValue, time,resolutionValue, resolutionValue, matrix, config);
int signalRate = 1;
@ -161,6 +163,7 @@ public class ZExtraction extends XYExtraction{
List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
templateHspec.add(TableTemplates.GENERIC);
OutputTable p = new OutputTable(templateHspec, tableLabelValue, tableNameValue, "Output table");
map.put("OutputTable", p);
if (signalimage!=null){

View File

@ -36,7 +36,7 @@ public class ZExtractionTable extends ZExtraction{
inputs.add(columnx);
ColumnType columny = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.yDimensionColumnParameter, "The column containing y (latitude) information", "y", false);
inputs.add(columny);
ColumnType columnt = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.zDimensionColumnParameter, "The column containing z information", "datetime", false);
ColumnType columnt = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.zDimensionColumnParameter, "The column containing z information", "z", false);
inputs.add(columnt);
ColumnType columnvalue = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.valueDimensionColumnParameter, "A column containing real valued features", "value", false);
inputs.add(columnvalue);

View File

@ -11,6 +11,7 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.tools.ant.types.CommandlineJava.SysProperties;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
/**
@ -44,17 +45,19 @@ public class AscRasterReader
BufferedReader input = null;
URLConnection urlConn =null;
if (filename.startsWith("http")){
AnalysisLogger.getLogger().debug("Getting file from http");
URL fileurl = new URL(filename);
urlConn = fileurl.openConnection();
urlConn.setConnectTimeout(60000);
urlConn.setReadTimeout(60000);
urlConn.setConnectTimeout(120000);
urlConn.setReadTimeout(1200000);
urlConn.setAllowUserInteraction(false);
urlConn.setDoOutput(true);
input = new BufferedReader(new InputStreamReader(urlConn.getInputStream()));
}
else
else {
AnalysisLogger.getLogger().debug("Getting file from local file");
input = new BufferedReader( new FileReader( filename ) );
}
while( input.ready() )
{
String line = input.readLine();

View File

@ -98,8 +98,9 @@ public class NetCDFDataExplorer {
List<GridDatatype> gridTypes = gds.getGrids();
for (GridDatatype gdt : gridTypes) {
AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getName());
if (layer.equalsIgnoreCase(gdt.getName())) {
AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getName()+" layer to find "+layer);
//if the layer is an HTTP link then take the first inner layer
if (layer.equalsIgnoreCase(gdt.getName()) || layer.toLowerCase().startsWith("http:")) {
AnalysisLogger.getLogger().debug("Found layer " + layer + " inside file");
GridDatatype grid = gds.findGridDatatype(gdt.getName());
CoordinateAxis zAxis = gdt.getCoordinateSystem().getVerticalAxis();

View File

@ -41,6 +41,7 @@ public class Table implements GISDataConnector {
throw new Exception("Error in getting elements for time " + time);
}
AnalysisLogger.getLogger().debug("Getting elements for time " + time);
// check z: if there is at least one point inside the z boundary then it is ok
boolean outsideZ = true;

View File

@ -46,23 +46,23 @@ public class TableMatrixRepresentation {
String dbtuple = "";
if (xField != null && xField.trim().length()>0)
dbtuple += xField + ",";
dbtuple += "\""+xField + "\" as x,";
else
dbtuple += "0 as x,";
if (yField != null && yField.trim().length()>0)
dbtuple += yField + ",";
dbtuple += "\""+yField + "\" as y,";
else
dbtuple += "0 as y,";
if (zField != null && zField.trim().length()>0)
dbtuple += zField + ",";
dbtuple += "\""+zField + "\" as z,";
else
dbtuple += "0 as z,";
if (tField != null && tField.trim().length()>0)
dbtuple += tField + " as time,";
dbtuple += "\""+tField + "\" as time,";
else
dbtuple += "0 as time,";
if (valueField != null && valueField.trim().length()>0)
dbtuple += valueField;
dbtuple += "\""+valueField+"\" as v";
else
dbtuple += "0 as v";
@ -73,7 +73,7 @@ public class TableMatrixRepresentation {
//find maxZ
if (zField!=null && zField.trim().length()>0){
String maxzq = "select max("+zField+"),min("+zField+") from "+tableName;
String maxzq = "select max("+zField+") as max,min("+zField+") as min from "+tableName;
Object [] maxzr = (Object [] )DatabaseFactory.executeSQLQuery(maxzq, dbconnection).get(0);
maxZ = Double.parseDouble(""+maxzr[0]);
minZ = Double.parseDouble(""+maxzr[1]);

View File

@ -1,6 +1,7 @@
package org.gcube.dataanalysis.geo.matrixmodel;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
@ -16,6 +17,7 @@ import org.gcube.dataanalysis.geo.connectors.wcs.WCS;
import org.gcube.dataanalysis.geo.connectors.wfs.WFS;
import org.gcube.dataanalysis.geo.infrastructure.GeoNetworkInspector;
import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
import org.opengis.metadata.Metadata;
import org.opengis.metadata.identification.Identification;
@ -29,6 +31,7 @@ public class MatrixExtractor {
protected double currentResolution = 0.5;
public List<Double> currentTimeValues;
public List<Tuple<Double>> currentTuples;
public MatrixExtractor(AlgorithmConfiguration configuration) {
gnInspector = new GeoNetworkInspector();
gnInspector.setScope(configuration.getGcubeScope());
@ -50,7 +53,6 @@ public class MatrixExtractor {
return getRawValuesInTimeInstantAndBoundingBox(layerTitle, time, coordinates3d, xL, xR, yL, yR, resolution, false);
}
public GISDataConnector currentconnector;
public String layerName;
public String layerURL;
@ -66,11 +68,15 @@ public class MatrixExtractor {
AnalysisLogger.getLogger().debug("Extracting grid from table " + configuration.getParam(TableMatrixRepresentation.tableNameParameter));
connector = new Table(configuration, resolution);
currentconnector = connector;
} else
} else {
meta = gnInspector.getGNInfobyUUIDorName(layerTitle);
// if the layer is not on GeoNetwork
if (meta == null) {
String[] urls = { layerTitle };
String[] protocols = { "HTTP" };
meta = new GenericLayerMetadata().createBasicMeta(urls, protocols);
}
// if the layer is good
if (meta != null) {
layerName = gnInspector.getLayerName(meta);
if (layerName == null)
layerName = layerTitle;
@ -86,21 +92,26 @@ public class MatrixExtractor {
layerURL = gnInspector.getHttpLink(meta);
connector = new ASC();
} else if (gnInspector.isWFS(meta)) {
AnalysisLogger.getLogger().debug("found a Geo Layer with reference " + layerTitle + " and layer name " + layerName);
AnalysisLogger.getLogger().debug("found a Geo Layer with reference " + layerURL + " and layer name " + layerName);
layerURL = gnInspector.getGeoserverLink(meta);
connector = new WFS();
} else if (gnInspector.isWCS(meta)) {
AnalysisLogger.getLogger().debug("found a WCS Layer with reference " + layerTitle + " and layer name " + layerName);
AnalysisLogger.getLogger().debug("found a WCS Layer with reference " + layerURL + " and layer name " + layerName);
layerURL = gnInspector.getWCSLink(meta);
connector = new WCS(configuration, layerURL);
} else if (gnInspector.isGeoTiff(meta)) {
layerURL = gnInspector.getGeoTiffLink(meta);
AnalysisLogger.getLogger().debug("found a GeoTiff with reference " + layerTitle + " and layer name " + layerName);
AnalysisLogger.getLogger().debug("found a GeoTiff with reference " + layerURL + " and layer name " + layerName);
connector = new GeoTiff(configuration, layerURL);
} else {
// treat as geotiff
layerURL = layerTitle;
AnalysisLogger.getLogger().debug("guessing a GeoTiff with reference " + layerURL + " and layer name " + layerName);
connector = new GeoTiff(configuration, layerURL);
}
currentconnector = connector;
}
}
currentconnector = connector;
return currentconnector;
}
@ -118,8 +129,24 @@ public class MatrixExtractor {
}
public double zmin;
public double zmax;
/**
 * Clamps a requested z value into the z range actually supported by the layer.
 * As a side effect, refreshes the cached {@code zmin}/{@code zmax} fields from
 * the layer's connector.
 *
 * @param zValue     the z value requested by the caller
 * @param layerURL   the layer reference used to resolve the connector
 * @param resolution the extraction resolution passed to the connector lookup
 * @return the z value, clamped into [zmin, zmax]
 * @throws Exception if the connector cannot be resolved or queried
 */
public double correctZ(double zValue, String layerURL, double resolution) throws Exception{
    GISDataConnector gisConnector = getConnector(layerURL, resolution);
    zmin = gisConnector.getMinZ(layerURL, layerName);
    zmax = gisConnector.getMaxZ(layerURL, layerName);
    // clamp: values below the layer minimum snap up, values above the maximum snap down
    double corrected = zValue;
    if (corrected < zmin)
        corrected = zmin;
    else if (corrected > zmax)
        corrected = zmax;
    return corrected;
}
/**
* Extract a grid of XY points with fixed time and z
*
* @param layerTitle
* @param timeInstant
* @param x1
@ -196,7 +223,6 @@ public class MatrixExtractor {
if (log)
AnalysisLogger.getLogger().debug("Taken " + currentTimeValues.size() + " values");
// build back the values matrix
int k = 0;
int g = 0;

View File

@ -12,8 +12,9 @@ import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
public class TestExtraction {
// static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF(),testXYExtractionAquaMaps(),testXYExtractionTable()};
static AlgorithmConfiguration[] configs = { testXYExtractionTable()};
static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF(),testXYExtractionAquaMaps(),testXYExtractionTable(),testXYExtractionTable2()};
// static AlgorithmConfiguration[] configs = { testXYExtractionTable2()};
// static AlgorithmConfiguration[] configs = { testDirectExtraction()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
@ -35,7 +36,7 @@ public class TestExtraction {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("XYEXTRACTION");
config.setAgent("XYEXTRACTOR");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
@ -59,6 +60,34 @@ public class TestExtraction {
return config;
}
/**
 * Builds an XYEXTRACTOR configuration that addresses a layer directly by URL
 * (a remote ASC file) instead of a GeoNetwork UUID, extracting a 0.5-degree
 * grid over the [-50,50]x[-60,60] bounding box at z=0 and time index 0.
 *
 * @return the assembled test configuration
 */
private static AlgorithmConfiguration testDirectExtraction() {
    AlgorithmConfiguration cfg = new AlgorithmConfiguration();
    cfg.setAgent("XYEXTRACTOR");
    cfg.setConfigPath("./cfg/");
    cfg.setPersistencePath("./");
    // local test database credentials
    cfg.setParam("DatabaseUserName","gcube");
    cfg.setParam("DatabasePassword","d4science2");
    cfg.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
    cfg.setParam("DatabaseDriver","org.postgresql.Driver");
    cfg.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
    // direct HTTP reference to the layer: no catalogue lookup involved
    cfg.setParam("Layer","https://dl.dropboxusercontent.com/u/12809149/geoserver-GetCoverage.image.asc");
    cfg.setParam("Z","0");
    cfg.setParam("TimeIndex","0");
    // bounding box and resolution of the extracted grid
    cfg.setParam("BBox_LowerLeftLat","-60");
    cfg.setParam("BBox_LowerLeftLong","-50");
    cfg.setParam("BBox_UpperRightLat","60");
    cfg.setParam("BBox_UpperRightLong","50");
    cfg.setParam("XResolution","0.5");
    cfg.setParam("YResolution","0.5");
    cfg.setParam("OutputTableName","testextraction");
    cfg.setParam("OutputTableLabel","testextraction");
    return cfg;
}
private static AlgorithmConfiguration testXYExtractionAquaMaps() {
@ -98,4 +127,36 @@ public class TestExtraction {
return config;
}
/**
 * Variant of the NetCDF extraction test that runs the table-based extractor
 * (XYEXTRACTOR_TABLE) against an occurrence-points table hosted on the remote
 * statistical-manager database.
 *
 * @return the assembled test configuration
 */
private static AlgorithmConfiguration testXYExtractionTable2() {
    // start from the NetCDF config and override agent, output and DB settings
    AlgorithmConfiguration cfg = testXYExtractionNetCDF();
    cfg.setAgent("XYEXTRACTOR_TABLE");
    cfg.setParam("OutputTableName","testextractiontable");
    cfg.setParam("DatabaseUserName", "utente");
    cfg.setParam("DatabasePassword", "d4science");
    cfg.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
    cfg.setParam("DatabaseDriver", "org.postgresql.Driver");
    // alternative dataset kept for reference: vessel tracks
    /*
    cfg.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
    cfg.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
    cfg.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
    cfg.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
    cfg.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
    cfg.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
    */
    // species occurrence points: latitude doubles as the value column
    cfg.setParam(TableMatrixRepresentation.tableNameParameter, "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
    cfg.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "decimallongitude");
    cfg.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "decimallatitude");
    cfg.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "modified");
    cfg.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "decimallatitude");
    cfg.setParam(TableMatrixRepresentation.filterParameter, " ");
    cfg.setParam("Z","0");
    cfg.setParam("TimeIndex","1");
    return cfg;
}
}

View File

@ -0,0 +1,71 @@
package org.gcube.dataanalysis.geo.test.infra;
import java.util.ArrayList;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
/**
 * Manual test driver for the OCCURRENCE_ENRICHMENT transducer: builds a
 * configuration, instantiates the transducer via the factory, runs it through
 * the regression harness and logs the resulting output type.
 */
public class TestOccurrenceEnrichment {

    static AlgorithmConfiguration[] configs = { testOccEnrichment()};

    public static void main(String[] args) throws Exception {
        System.out.println("TEST 1");
        for (AlgorithmConfiguration configuration : configs) {
            AnalysisLogger.getLogger().debug("Executing: "+configuration.getAgent());
            List<ComputationalAgent> trans = TransducerersFactory.getTransducerers(configuration);
            trans.get(0).init();
            // run the transducer through the standard regression harness
            Regressor.process(trans.get(0));
            StatisticalType st = trans.get(0).getOutput();
            AnalysisLogger.getLogger().debug("ST:" + st);
            trans = null;
        }
    }

    /**
     * Assembles the enrichment configuration: occurrence table columns,
     * grid resolution, output table and the environmental layers/features
     * used to enrich each occurrence point.
     */
    private static AlgorithmConfiguration testOccEnrichment() {
        AlgorithmConfiguration config = new AlgorithmConfiguration();
        config.setAgent("OCCURRENCE_ENRICHMENT");
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");
        // remote statistical-manager database credentials
        config.setParam("DatabaseUserName", "utente");
        config.setParam("DatabasePassword", "d4science");
        config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
        config.setParam("DatabaseDriver", "org.postgresql.Driver");
        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
        // occurrence table and the columns the enrichment reads from it
        config.setParam("OccurrenceTable","occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
        config.setParam("LongitudeColumn","decimallongitude");
        config.setParam("LatitudeColumn","decimallatitude");
        config.setParam("ScientificNameColumn","scientificname");
        config.setParam("TimeColumn","eventdate");
        config.setParam("OptionalFilter","");
        config.setParam("Resolution","0.5");
        config.setParam("OutputTableDBName","testenrichment");
        config.setParam("OutputTableName","testenrichment");
        // layers may be THREDDS URLs or GeoNetwork UUIDs, one feature name each
        String sep=AlgorithmConfiguration.getListSeparator();
        config.setParam("Layers","http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridt__ENVIRONMENT_OCEANS_ELEVATION_1366210702774.nc"+sep+"4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");
        // config.setParam("Layers","8f5d883f-95bf-4b7c-8252-aaf0b2e6fd81"+sep+"4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");
        config.setParam("FeaturesNames","temperature"+sep+"chlorophyll"+sep+"ph");
        // config.setParam("Layers","4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");
        // config.setParam("FeaturesNames","chlorophyll"+sep+"ph");
        return config;
    }
}

View File

@ -13,8 +13,8 @@ import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
public class TestTimeExtraction {
static AlgorithmConfiguration[] configs = { testTimeExtractionTable(), testTimeExtractionNetCDF(),testTimeExtractionTable2()};
// static AlgorithmConfiguration[] configs = { testTimeExtractionTable(), testTimeExtractionNetCDF(),testTimeExtractionTable2()};
static AlgorithmConfiguration[] configs = { testTimeExtractionNetCDF()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
@ -43,11 +43,13 @@ public class TestTimeExtraction {
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
// config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setGcubeScope("/gcube/devsec/devVRE");
// config.setParam("Layer","c565e32c-c5b3-4964-b44f-06dc620563e9");
config.setParam("Layer","c565e32c-c5b3-4964-b44f-06dc620563e9");
// config.setParam("Layer","dffa504b-dbc8-4553-896e-002549f8f5d3");
//wind
config.setParam("Layer","21715b2e-28de-4646-acce-d4f16b59d6d0");
config.setParam("OutputTableName","testtimeextraction");
config.setParam("OutputTableLabel","testtimeextraction");

View File

@ -0,0 +1,70 @@
package org.gcube.dataanalysis.geo.test.regression;
import java.util.ArrayList;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
/**
 * Regression driver for the OCCURRENCE_ENRICHMENT transducer: builds each
 * configuration, obtains the transducer from the factory, executes it via the
 * regression harness and logs the produced output type.
 */
public class RegressionOccurrenceEnrichment {

    static AlgorithmConfiguration[] configs = { testOccEnrichment()};

    public static void main(String[] args) throws Exception {
        System.out.println("TEST 1");
        int total = configs.length;
        for (int idx = 0; idx < total; idx++) {
            AlgorithmConfiguration current = configs[idx];
            AnalysisLogger.getLogger().debug("Executing: "+current.getAgent());
            List<ComputationalAgent> trans = TransducerersFactory.getTransducerers(current);
            ComputationalAgent agent = trans.get(0);
            agent.init();
            // run through the shared regression harness
            Regressor.process(agent);
            StatisticalType st = agent.getOutput();
            AnalysisLogger.getLogger().debug("ST:" + st);
            trans = null;
        }
    }

    /**
     * Builds the enrichment configuration used by this regression: occurrence
     * table columns, grid resolution, output table, and the environmental
     * layers (GeoNetwork UUIDs) with one feature name per layer.
     */
    private static AlgorithmConfiguration testOccEnrichment() {
        AlgorithmConfiguration config = new AlgorithmConfiguration();
        config.setAgent("OCCURRENCE_ENRICHMENT");
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");
        // remote statistical-manager database credentials
        config.setParam("DatabaseUserName", "utente");
        config.setParam("DatabasePassword", "d4science");
        config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
        config.setParam("DatabaseDriver", "org.postgresql.Driver");
        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
        // occurrence table and the columns the enrichment reads from it
        config.setParam("OccurrenceTable","occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
        config.setParam("LongitudeColumn","decimallongitude");
        config.setParam("LatitudeColumn","decimallatitude");
        config.setParam("ScientificNameColumn","scientificname");
        config.setParam("TimeColumn","eventdate");
        config.setParam("OptionalFilter","");
        config.setParam("Resolution","0.5");
        config.setParam("OutputTableDBName","testenrichment");
        config.setParam("OutputTableName","testenrichment");
        String sep=AlgorithmConfiguration.getListSeparator();
        config.setParam("Layers","8f5d883f-95bf-4b7c-8252-aaf0b2e6fd81"+sep+"4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");
        config.setParam("FeaturesNames","temperature"+sep+"chlorophyll"+sep+"ph");
        // config.setParam("Layers","4d597da9-dbfa-4a65-9de6-9bbff69eac19"+sep+"2c2304d1-681a-4f3a-8409-e8cdb5ed447f");
        // config.setParam("FeaturesNames","chlorophyll"+sep+"ph");
        return config;
    }
}

View File

@ -92,6 +92,42 @@ public class VectorOperations {
return valuesForGrid;
}
/**
 * Associates each 2D point with the value of the first grid cell lying within
 * {@code tolerance} of it (z and time are ignored — passed as 0 to the
 * distance function). Points with no matching cell keep {@code Double.NaN}.
 *
 * @param grid3d        grid cell coordinates; only elements 0 (x) and 1 (y) are read
 * @param gridValues    one value per grid cell, aligned with {@code grid3d}
 * @param coordinates4d point coordinates; only elements 0 (x) and 1 (y) are read
 * @param tolerance     maximum distance for a point/cell association
 * @return one value per input point, NaN where no cell matched
 */
public static List<Double> assignGridValuesToPoints2D(List<Tuple<Double>> grid3d, List<Double> gridValues, List<Tuple<Double>> coordinates4d, double tolerance) {
    int numberOfPoints = coordinates4d.size();
    // start from an all-NaN result: unmatched points keep NaN
    List<Double> pointValues = new ArrayList<Double>(numberOfPoints);
    for (int p = 0; p < numberOfPoints; p++)
        pointValues.add(Double.NaN);
    int matches = 0;
    for (int p = 0; p < numberOfPoints; p++) {
        Tuple<Double> point = coordinates4d.get(p);
        double px = point.getElements().get(0);
        double py = point.getElements().get(1);
        // linear scan of the grid: take the FIRST cell within tolerance
        int gridCount = grid3d.size();
        for (int g = 0; g < gridCount; g++) {
            Tuple<Double> cell = grid3d.get(g);
            double cx = cell.getElements().get(0);
            double cy = cell.getElements().get(1);
            if (distance(cx, cy, 0, 0, px, py, 0, 0) <= tolerance) {
                pointValues.set(p, gridValues.get(g));
                matches++;
                break;
            }
        }
    }
    AnalysisLogger.getLogger().debug("Association: Found "+matches+" matches between the points and the grid");
    return pointValues;
}
public static List<Double> assignGridValuesToPoints(List<Tuple<Double>> grid3d, int gridTimeInstant, List<Double> gridValues, List<Tuple<Double>> coordinates4d, double tolerance) {
List<Double> valuesForPoints = new ArrayList<Double>();
@ -116,7 +152,6 @@ public class VectorOperations {
double z = gridElement.getElements().get(2);
double d = distance(x, y, z, gridTimeInstant, rx, ry, rz, rt);
if (d <= tolerance) {
// AnalysisLogger.getLogger().debug("Association: distance between grid:("+x+","+y+","+z+","+gridTimeInstant+") and point:("+rx+","+ry+","+rz+","+rt+") is "+d);
valuesForPoints.set(points, gridValues.get(gridIdx));