Managed multi-feature data coming from WFS

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineGeoSpatialExtension@95491 82a268e6-3cf1-43bd-a215-b396298e98cf
Gianpaolo Coro 2014-05-09 14:51:21 +00:00
parent 6acbc54288
commit 1b595a2db9
12 changed files with 390 additions and 95 deletions

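The whole change revolves around one data structure: a map from a synthetic per-polygon identifier to that polygon's WFS attribute map. A minimal sketch of that structure follows (class name and attribute values are illustrative, not part of the commit):

import java.util.LinkedHashMap;
import java.util.Map;

public class PolygonsFeaturesSketch {
    public static void main(String[] args) {
        // key: synthetic numeric id assigned to each WFS polygon (see WFSDataExplorer below)
        // value: that polygon's attributes (attribute name -> attribute value)
        Map<Double, Map<String, String>> polygonsFeatures =
                new LinkedHashMap<Double, Map<String, String>>();

        Map<String, String> features = new LinkedHashMap<String, String>();
        features.put("f_area", "Adriatic Sea"); // hypothetical WFS attributes
        features.put("depth", "200");
        polygonsFeatures.put(1d, features);

        // a point or grid cell matched to polygon 1 can be enriched with all its attributes
        System.out.println(polygonsFeatures.get(1d)); // {f_area=Adriatic Sea, depth=200}
    }
}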
View File: OccurrenceEnrichment.java

@@ -4,6 +4,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
@@ -24,7 +25,9 @@ import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.connectors.wfs.WFS;
import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
import org.gcube.dataanalysis.geo.utils.VectorOperations;
import org.hibernate.SessionFactory;
@@ -192,7 +195,7 @@ public class OccurrenceEnrichment implements Transducerer {
int elementsFromOccurrences = 4;
int elementstoreport = elementsFromOccurrences + layers.length;
// take all the observations
for (Object row : rows) {
Object[] elements = (Object[]) row;
double x = elements[0] == null ? 0 : Double.parseDouble("" + elements[0]);
@@ -216,50 +219,89 @@ public class OccurrenceEnrichment implements Transducerer {
// take the layers matrices
int layeridx = 0;
float statusSteps = 50f / (float) layers.length;
// for each layer, enrich observations with layers info
for (String layerID : layers) {
if (layerID.length() == 0)
continue;
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Assigning layer " + layerID + " Layer enumerator: " + layeridx);
// for each layer
// extract xy information
XYExtractor extractor = new XYExtractor(config);
extractor.correctZ(0, layerID,resolution);
extractor.correctZ(0, layerID, resolution);
double zmin = extractor.zmin;
double zmax = extractor.zmax;
double bestZ = Math.min(Math.abs(zmin), Math.abs(zmax));
outputParameters.put("Matching Z value in layer " + (layeridx + 1), "" + bestZ);
outputParameters.put("Min Z value in layer "+ (layeridx + 1), "" + zmin);
outputParameters.put("Max Z value in layer "+ (layeridx + 1), "" + zmax);
outputParameters.put("Min Z value in layer " + (layeridx + 1), "" + zmin);
outputParameters.put("Max Z value in layer " + (layeridx + 1), "" + zmax);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Best Z for this reference layer: " + bestZ);
// perform the comparison closest to the surface
// perform the closest extraction to the surface
extractor.extractXYGrid(layerID, 0, BBxLL, BBxUR, BByLL, BByUR, bestZ, resolution, resolution);
// retrieve the grid time values and tuples
List<Double> gridValues = extractor.currentTimeValues;
List<Tuple<Double>> grid3d = extractor.currentTuples;
// use the layername as column name otherwise use a generic feature indication
String layername = (layersnames.length > (layeridx) && layersnames[layeridx].trim().length() > 0) ? layersnames[layeridx].trim() : "feature" + (layeridx + 1);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Retrieved Layer Name: " + layername);
columns += ",\"" + layername + "\"";
columnsTypes += ",\"" + layername + "\" real";
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Assigning grid points to the occurrences");
// take the association
// make the association
List<Double> enriched = VectorOperations.assignGridValuesToPoints2D(grid3d, gridValues, coordinates4d, resolution);
int k = 0;
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Assigning values to the column " + (elementsFromOccurrences + layeridx));
HashMap<Double, Map<String, String>> polygonsFeatures = null;
if (extractor.currentconnector instanceof WFS)
polygonsFeatures = ((WFS) extractor.currentconnector).getPolygonsFeatures();
boolean enrichWithEnvironmentalFeatures = true;
Map<String, String> features=null;
String emptyRow = "";
if (polygonsFeatures != null && polygonsFeatures.size() > 0){
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Managing Web Features");
enrichWithEnvironmentalFeatures = false;
int ncolumns = polygonsFeatures.keySet().size();
emptyRow = RasterTable.generateEmptyValues(ncolumns);
features = polygonsFeatures.values().iterator().next();
columnsTypes += ","+RasterTable.propertiesMapToColumnString(features, true);
columns += ","+RasterTable.propertiesMapToColumnString(features, false);
}
else{
columns += ",\"" + layername + "\"";
columnsTypes += ",\"" + layername + "\" real";
}
for (Double value : enriched) {
String[] singlerow = enrichment.get(k);
if (value == null || Double.isNaN(value) || Double.isInfinite(value))
singlerow[elementsFromOccurrences + layeridx] = "-9999";
else
singlerow[elementsFromOccurrences + layeridx] = "" + value;
if (enrichWithEnvironmentalFeatures) {
if (value == null || Double.isNaN(value) || Double.isInfinite(value))
singlerow[elementsFromOccurrences + layeridx] = "-9999";
else {
singlerow[elementsFromOccurrences + layeridx] = "" + value;
}
}
else{
if (value == null || Double.isNaN(value) || Double.isInfinite(value))
singlerow[elementsFromOccurrences + layeridx] = emptyRow;
else {
singlerow[elementsFromOccurrences + layeridx] = RasterTable.propertiesMapToDatabaseString(features);
}
}
k++;
}
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Added values to the row");
layeridx++;
status = status + statusSteps;
}
} // end of the loop on layers: switch to the next layer
// write the complete association into the db
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Dropping table " + outputTableDBName);
@@ -272,7 +314,7 @@ public class OccurrenceEnrichment implements Transducerer {
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Creating table " + outputTableDBName + " query:" + createquery);
DatabaseFactory.executeSQLUpdate(createquery, dbconnection);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Inserting chunks");
DatabaseUtils.insertChunksIntoTable(outputTableDBName, columns, enrichment, 5000, dbconnection);
DatabaseUtils.insertChunksIntoTable(outputTableDBName, columns, enrichment, 5000, dbconnection,false);
AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Whole process complete in " + ((double) (System.currentTimeMillis() - t0) / 1000f) + " s");
} catch (Exception e) {
e.printStackTrace();

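The branching added to OccurrenceEnrichment above boils down to one rule per cell. A compact restatement of that rule, as a sketch with illustrative names (assuming -9999 remains the numeric no-data marker, as in the diff):

public class EnrichmentRuleSketch {
    // Environmental (numeric) layers: invalid values become -9999.
    // WFS feature layers: matched cells get the polygon's attribute string,
    // unmatched cells get a row of NULL placeholders.
    static String cellValue(Double value, boolean environmental, String featureString, String emptyRow) {
        boolean invalid = value == null || Double.isNaN(value) || Double.isInfinite(value);
        if (environmental)
            return invalid ? "-9999" : String.valueOf(value);
        return invalid ? emptyRow : featureString;
    }

    public static void main(String[] args) {
        System.out.println(cellValue(Double.NaN, true, null, "NULL,NULL")); // -9999
        System.out.println(cellValue(3.0, false, "'Adriatic','200'", "NULL,NULL")); // 'Adriatic','200'
    }
}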
View File: TimeExtraction.java

@@ -5,6 +5,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
@@ -21,6 +22,7 @@ import org.gcube.dataanalysis.ecoengine.signals.SignalConverter;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.connectors.wfs.WFS;
import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
@@ -180,7 +182,11 @@ public class TimeExtraction extends XYExtraction{
AnalysisLogger.getLogger().debug("Extractor: ****Rasterizing grid into table****");
double matrix[][] = new double[1][];
matrix[0] = signal;
RasterTable raster = new RasterTable(xValue, xValue, yValue, yValue, zValue, resolutionValue, resolutionValue, matrix, config);
HashMap<Double,Map<String, String>> polygonsFeatures = null;
if (intersector.currentconnector instanceof WFS)
polygonsFeatures = ((WFS) intersector.currentconnector).getPolygonsFeatures();
RasterTable raster = new RasterTable(xValue, xValue, yValue, yValue, zValue, resolutionValue, resolutionValue, matrix, polygonsFeatures, config);
int signalRate = 1;
if (samplingFrequencyValue>0)

View File: XYExtraction.java

@@ -4,6 +4,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
@@ -18,6 +19,7 @@ import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
import org.gcube.dataanalysis.geo.connectors.wfs.WFS;
import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
@@ -157,11 +159,15 @@ public class XYExtraction implements Transducerer {
outputParameters.put("Max Z value in the Layer", ""+extractor.zmax);
double[][] matrix = extractor.extractXYGrid(layerNameValue, time, BBxLL, BBxUR, BByLL, BByUR, zValue, xResValue, yResValue);
System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
HashMap<Double,Map<String, String>> polygonsFeatures = null;
if (extractor.currentconnector instanceof WFS)
polygonsFeatures = ((WFS) extractor.currentconnector).getPolygonsFeatures();
AnalysisLogger.getLogger().debug("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
AnalysisLogger.getLogger().debug("Extractor: Matrix Extracted");
AnalysisLogger.getLogger().debug("Extractor: ****Rasterizing grid into table****");
status = 30;
RasterTable raster = new RasterTable(BBxLL, BBxUR, BByLL, BByUR, zValue, time, xResValue, yResValue, matrix, config);
RasterTable raster = new RasterTable(BBxLL, BBxUR, BByLL, BByUR, zValue, time, xResValue, yResValue, matrix, polygonsFeatures,config);
raster.setTablename(tableNameValue);
raster.deleteTable();
raster.dumpGeoTable();

View File: ZExtraction.java

@@ -5,6 +5,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
@@ -18,6 +19,7 @@ import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.connectors.wfs.WFS;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
import org.gcube.dataanalysis.geo.matrixmodel.ZExtractor;
@@ -119,7 +121,12 @@ public class ZExtraction extends XYExtraction{
double matrix[][] = new double[1][];
matrix[0] = signal;
RasterTable raster = new RasterTable(xValue, xValue, yValue, yValue, zValue, time,resolutionValue, resolutionValue, matrix, config);
HashMap<Double,Map<String, String>> polygonsFeatures = null;
if (extractor.currentconnector instanceof WFS)
polygonsFeatures = ((WFS) extractor.currentconnector).getPolygonsFeatures();
RasterTable raster = new RasterTable(xValue, xValue, yValue, yValue, zValue, time,resolutionValue, resolutionValue, matrix, polygonsFeatures,config);
int signalRate = 1;

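TimeExtraction, XYExtraction and ZExtraction now repeat the same guard before building the RasterTable. Factored into a helper it would read as below; GISDataConnector and WFS are the commit's own types, while the helper itself is illustrative and assumes the gcube jars on the classpath:

import java.util.HashMap;
import java.util.Map;
import org.gcube.dataanalysis.geo.connectors.wfs.WFS;
import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;

public class ConnectorFeaturesSketch {
    // Only WFS sources carry per-polygon attributes; any other connector yields null,
    // which makes RasterTable fall back to its standard single-value schema.
    static HashMap<Double, Map<String, String>> polygonsFeaturesOf(GISDataConnector connector) {
        if (connector instanceof WFS)
            return ((WFS) connector).getPolygonsFeatures();
        return null;
    }
}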
View File: WFS.java

@@ -2,11 +2,14 @@ package org.gcube.dataanalysis.geo.connectors.wfs;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
import org.gcube.dataanalysis.geo.utils.CSquareCodesConverter;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.GeometryFactory;
@@ -16,6 +19,9 @@ import com.vividsolutions.jts.geom.impl.CoordinateArraySequence;
public class WFS implements GISDataConnector {
List<FeaturedPolygon> featuresInTime;
HashMap<Double,Map<String, String>> polygonsFeatures;
@Override
public List<Double> getFeaturesInTimeInstantAndArea(String layerURL, String layerName, int time, List<Tuple<Double>> coordinates3d, double BBxL, double BBxR, double BByL, double BByR) throws Exception {
@@ -24,9 +30,10 @@ public class WFS implements GISDataConnector {
if (layerURL == null)
return null;
List<FeaturedPolygon> featuresInTime = new ArrayList<FeaturedPolygon>();
featuresInTime = new ArrayList<FeaturedPolygon>();
AnalysisLogger.getLogger().debug("taking WFS features");
featuresInTime = WFSDataExplorer.getFeatures(layerURL, layerName, BBxL, BByL, BBxR, BByR);
polygonsFeatures=new HashMap<Double, Map<String,String>>();
int tsize = coordinates3d.size();
AnalysisLogger.getLogger().debug("Intersecting " + tsize + " vs " + featuresInTime.size() + " elements");
int ttc = 0;
@@ -40,8 +47,34 @@ public class WFS implements GISDataConnector {
boolean found = false;
for (FeaturedPolygon poly : featuresInTime) {
if (poly != null && poly.p != null && poly.p.covers(po)) {
/*
AnalysisLogger.getLogger().debug(poly.p.getCentroid()+
"["+CSquareCodesConverter.convertAtResolution(poly.p.getCentroid().getY(), poly.p.getCentroid().getX(), 0.5)+"] "+
"("+(poly.p.getCentroid().getX()-0.25)+" -- "+(poly.p.getCentroid().getX()+0.25) +" ; "+
(poly.p.getCentroid().getY()-0.25)+" -- "+(poly.p.getCentroid().getY()+0.25) +") "+
" ["+poly.features+"]"+
" covers "+po+ "["+CSquareCodesConverter.convertAtResolution(po.getY(), po.getX(), 0.5)+"]");
AnalysisLogger.getLogger().debug("{"+poly.p.contains(po)+
";"+po.isWithinDistance(poly.p.getCentroid(), 0.24)+
";"+po.isWithinDistance(poly.p.getCentroid(), 0.25)+
";"+poly.p.getCentroid().distance(po)+
";"+poly.p.distance(po)+
";"+poly.p.convexHull().contains(po)+
";"+poly.p.convexHull().touches(po)+
";"+poly.p.touches(po)+
";"+poly.p.crosses(po)+
";"+po.crosses(poly.p)+
";"+po.coveredBy(poly.p)+
";"+po.intersection(poly.p)+
";"+poly.p.contains(po)+
";"+poly.p.covers(po)+
"}");
*/
featuresarray[k] = poly.value;
polygonsFeatures.put(poly.value, poly.features);
found = true;
break;
}
@@ -64,6 +97,7 @@ public class WFS implements GISDataConnector {
return features;
}
@Override
public double getMinZ(String layerURL, String layerName) {
return 0;
@@ -74,4 +108,7 @@ public class WFS implements GISDataConnector {
return 0;
}
public HashMap<Double, Map<String, String>> getPolygonsFeatures() {
return polygonsFeatures;
}
}

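The intersection loop above relies on JTS covers(), which, unlike contains(), is also true for points lying exactly on a polygon boundary (the commented-out debug block compares the two predicates, among others). A self-contained illustration of the difference, with an invented square polygon:

import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.Polygon;

public class CoversSketch {
    public static void main(String[] args) {
        GeometryFactory gf = new GeometryFactory();
        Coordinate[] shell = new Coordinate[] { // a 1x1 degree square
            new Coordinate(0, 0), new Coordinate(1, 0),
            new Coordinate(1, 1), new Coordinate(0, 1), new Coordinate(0, 0) };
        Polygon p = gf.createPolygon(gf.createLinearRing(shell), null);
        Point inside = gf.createPoint(new Coordinate(0.5, 0.5));
        Point onEdge = gf.createPoint(new Coordinate(0, 0.5));
        System.out.println(p.covers(inside));   // true
        System.out.println(p.covers(onEdge));   // true: boundary points are covered
        System.out.println(p.contains(onEdge)); // false: boundary points are not contained
    }
}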
View File: WFSDataExplorer.java

@@ -87,18 +87,21 @@ public class WFSDataExplorer {
String jsonString = callWFS(geoserver, layer, xL, yL, xR, yR);
// System.out.println("JSON:"+jsonString);
LinkedHashMap<String, Object> map = JsonMapper.parse(jsonString);
// System.out.println(map);
List<FeaturedPolygon> fpolygons = new ArrayList<FeaturedPolygon>();
FeaturedPolygon poly = null;
int polygonId = 0;
for (String key : map.keySet()) {
if (key.contains("features")) {
HashMap<String, Object> propertiesMap = (HashMap<String, Object>) map.get(key);
// cycle on all the properties
for (String properties : propertiesMap.keySet()) {
if (properties.contains("properties")) {
polygonId++;
if (poly == null)
poly = new FeaturedPolygon();
LinkedHashMap<String, String> props = (LinkedHashMap<String, String>) propertiesMap.get(properties);
// fill the properties of the fpolygon
for (String keyprop : props.keySet()) {
@@ -107,8 +110,12 @@ public class WFSDataExplorer {
String value = props.get(keyprop);
try {
String lowcaseprop = keyprop.toLowerCase();
if ((poly.value == null) && !lowcaseprop.startsWith("id") && !lowcaseprop.endsWith("id"))
poly.setValue(Double.parseDouble(value));
// System.out.println(poly.p.getCentroid()+" -> "+value);
//add the first double value to the polygon
if ((poly.value == null) && !lowcaseprop.startsWith("id") && !lowcaseprop.endsWith("id")){
poly.setValue((double)polygonId);
poly.addFeature(keyprop, value);
}
else
poly.addFeature(keyprop, value);
} catch (Exception e2) {

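The parser change above stops interpreting the first numeric attribute as the polygon value and instead assigns a synthetic sequential id, so every polygon gets a distinct, collision-free key into the feature map. A sketch of the resulting convention (area names are invented):

import java.util.LinkedHashMap;
import java.util.Map;

public class PolygonIdSketch {
    public static void main(String[] args) {
        Map<Double, Map<String, String>> polygonsFeatures =
                new LinkedHashMap<Double, Map<String, String>>();
        int polygonId = 0;
        for (String name : new String[] { "FAO_27", "FAO_34", "FAO_37" }) {
            polygonId++;
            Map<String, String> props = new LinkedHashMap<String, String>();
            props.put("f_area", name);
            // the id doubles as the raster value written for cells inside this polygon
            polygonsFeatures.put((double) polygonId, props);
        }
        System.out.println(polygonsFeatures.keySet()); // [1.0, 2.0, 3.0]
    }
}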
View File: MatrixExtractor.java

@@ -119,8 +119,9 @@ public class MatrixExtractor {
/**
* Extract raw values in a time instant according to a set of grid points and a bounding box
*/
public GISDataConnector connector;
public List<Double> getRawValuesInTimeInstantAndBoundingBox(String layerTitle, int time, List<Tuple<Double>> coordinates3d, double xL, double xR, double yL, double yR, double resolution, boolean saveLayer) throws Exception {
GISDataConnector connector = getConnector(layerTitle, resolution);
connector = getConnector(layerTitle, resolution);
// execute connector
if (connector != null)
return connector.getFeaturesInTimeInstantAndArea(layerURL, layerName, time, coordinates3d, xL, xR, yL, yR);
@@ -128,7 +129,10 @@ public class MatrixExtractor {
throw new Exception("ERROR: Connector not found for layer " + layerTitle + " - Cannot Rasterize!");
}
public GISDataConnector getCurrentConnector(){
return connector;
}
public double zmin;
public double zmax;

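MatrixExtractor now stores the last resolved connector in a field so that callers can inspect it once an extraction has run. A typical (illustrative) use, assuming MatrixExtractor lives in org.gcube.dataanalysis.geo.matrixmodel like the other classes in this commit:

import org.gcube.dataanalysis.geo.connectors.wfs.WFS;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;

public class CurrentConnectorSketch {
    // true when the last getRawValuesInTimeInstantAndBoundingBox(...) call
    // resolved the layer to a WFS source (sketch, not part of the commit)
    static boolean isWfsBacked(MatrixExtractor extractor) {
        return extractor.getCurrentConnector() instanceof WFS;
    }
}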
View File: RasterTable.java

@@ -1,7 +1,9 @@
package org.gcube.dataanalysis.geo.matrixmodel;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
@@ -22,6 +24,7 @@ import org.hibernate.SessionFactory;
public class RasterTable {
private double valuesMatrix[][];
private HashMap<Double, Map<String, String>> valuesPropertiesMap;
double x1;
double x2;
double y1;
@@ -31,15 +34,20 @@ public class RasterTable {
double xResolution;
double yResolution;
List<Tuple<Double>> coordinates;
private AlgorithmConfiguration configuration;
private String tablename = "rstr" + ("" + UUID.randomUUID()).replace("-", "");
static String createTableStatement = "CREATE TABLE %1$s (id serial, csquarecode character varying, x real, y real, z real, t real, fvalue real)";
static String columnsnames = "csquarecode, x , y , z , t, fvalue";
// static String createTableStatement = "CREATE TABLE %1$s (id serial, csquarecode character varying, x real, y real, z real, t real, fvalue real)";
static String createTableStatementStandard = "CREATE TABLE %1$s (id serial, csquarecode character varying, x real, y real, z real, t real, fvalue character varying)";
static String createTableStatementWithFields = "CREATE TABLE %1$s (id serial, approx_x real, approx_y real, z real, t real, %2$s)";
static String columnsnamesStandard = "csquarecode, x , y , z , t, fvalue";
static String columnsnamesWithFields = "approx_x , approx_y , z , t , %1$s";
public static String csquareColumn = "csquarecode";
public static String valuesColumn = "fvalue";
public static String idColumn = "id";
public String getTablename() {
return tablename;
}
@@ -47,25 +55,35 @@ public class RasterTable {
public void setTablename(String tablename) {
this.tablename = tablename;
}
public List<Tuple<Double>> getCoordinates(){
public List<Tuple<Double>> getCoordinates() {
return coordinates;
}
public void setCoordinates(List<Tuple<Double>> coordinates) {
this.coordinates=coordinates;
}
public RasterTable(double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution, double[][] values, AlgorithmConfiguration configuration) {
init(x1, x2, y1, y2, z, 0, xResolution, yResolution, values, configuration);
}
public RasterTable(double x1, double x2, double y1, double y2, double z, double time, double xResolution, double yResolution, double[][] values, AlgorithmConfiguration configuration) {
init(x1, x2, y1, y2, z, time, xResolution, yResolution, values, configuration);
this.coordinates = coordinates;
}
public void init(double x1, double x2, double y1, double y2, double z, double time, double xResolution, double yResolution, double[][] values, AlgorithmConfiguration configuration){
public RasterTable(double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution, double[][] values, AlgorithmConfiguration configuration) {
init(x1, x2, y1, y2, z, 0, xResolution, yResolution, values, null, configuration);
}
public RasterTable(double x1, double x2, double y1, double y2, double z, double time, double xResolution, double yResolution, double[][] values, AlgorithmConfiguration configuration) {
init(x1, x2, y1, y2, z, time, xResolution, yResolution, values, null, configuration);
}
public RasterTable(double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution, double[][] values, HashMap<Double, Map<String, String>> valuesPropertiesMap, AlgorithmConfiguration configuration) {
init(x1, x2, y1, y2, z, 0, xResolution, yResolution, values, valuesPropertiesMap, configuration);
}
public RasterTable(double x1, double x2, double y1, double y2, double z, double time, double xResolution, double yResolution, double[][] values, HashMap<Double, Map<String, String>> valuesPropertiesMap, AlgorithmConfiguration configuration) {
init(x1, x2, y1, y2, z, time, xResolution, yResolution, values, valuesPropertiesMap, configuration);
}
public void init(double x1, double x2, double y1, double y2, double z, double time, double xResolution, double yResolution, double[][] values, HashMap<Double, Map<String, String>> valuesPropertiesMap, AlgorithmConfiguration configuration) {
this.valuesMatrix = values;
if (valuesPropertiesMap!=null && valuesPropertiesMap.size()>0)
this.valuesPropertiesMap = valuesPropertiesMap;
this.configuration = configuration;
this.x1 = x1;
this.x2 = x2;
@@ -76,6 +94,7 @@ public class RasterTable {
this.xResolution = xResolution;
this.yResolution = yResolution;
}
public void dumpGeoTable() {
// open the connection to the db
@@ -83,57 +102,92 @@ public class RasterTable {
try {
AnalysisLogger.getLogger().debug("Database Initialized");
// create a table
DatabaseFactory.executeSQLUpdate(String.format(createTableStatement, tablename), dbconnection);
String columnNames = columnsnamesStandard;
String emptycolumns = "";
if (valuesPropertiesMap == null)
DatabaseFactory.executeSQLUpdate(String.format(createTableStatementStandard, tablename), dbconnection);
else {
AnalysisLogger.getLogger().debug("Managing Table with Custom Fields");
Map<String, String> valuesMap = valuesPropertiesMap.values().iterator().next();
emptycolumns = generateEmptyValues(valuesMap.size());
DatabaseFactory.executeSQLUpdate(String.format(createTableStatementWithFields, tablename, propertiesMapToColumnString(valuesMap, true)), dbconnection);
columnNames = String.format(columnsnamesWithFields, propertiesMapToColumnString(valuesMap, false));
AnalysisLogger.getLogger().debug("Column names: "+columnNames);
}
AnalysisLogger.getLogger().debug("Table " + tablename + " created");
if (coordinates==null)
if (coordinates == null)
coordinates = VectorOperations.generateCoordinateTripletsInBoundingBox(x1, x2, y1, y2, z, xResolution, yResolution);
int triplets = coordinates.size();
AnalysisLogger.getLogger().debug("Generated " + triplets + " coordinates triples");
List<Double> values = associateValueToCoordinates(coordinates, valuesMatrix);
AnalysisLogger.getLogger().debug("Association to values completed - fulfilling buffer");
// for each element in the matrix, build the corresponding csquare code
StringBuffer sb = new StringBuffer();
int rowcounter = 1;
for (int i = 0; i < triplets; i++) {
// save the string in a buffer
Tuple<Double> cset = coordinates.get(i);
double x = cset.getElements().get(0);
double y = cset.getElements().get(1);
String csquare = CSquareCodesConverter.convertAtResolution(y,x, xResolution);
String csquare = CSquareCodesConverter.convertAtResolution(y, x, xResolution);
String valueForTable = "";
// if we have fields insert fields, otherwise insert double numbers
Double value = values.get(i);
//we do not use NaNs in this case every value will be filled
if (value.isNaN())
value = 0d;
double zVal = z;
if (cset.getElements().size()>2)
zVal = cset.getElements().get(2);
String tVal = ""+time;
if (cset.getElements().size()>3){
tVal = ""+cset.getElements().get(3);
if (Double.isNaN(cset.getElements().get(3)) || (Double.isInfinite(cset.getElements().get(3))))
tVal="NULL";
if (valuesPropertiesMap == null) {
// we do not use NaNs in this case every value will be filled
if (value.isNaN())
value = 0d;
valueForTable = "" + value;
} else {
// we do not use NaNs in this case every value will be filled
if (value.isNaN())
valueForTable = null;
else
valueForTable = propertiesMapToDatabaseString(valuesPropertiesMap.get(values.get(i)));
}
sb.append("('" + csquare + "'," + x + "," + y + "," + zVal + "," + tVal +",'" + value + "')");
if (i % 5000 == 0) {
// AnalysisLogger.getLogger().debug("Partial Inserting Buffer of " + sb.length() + " Values");
String insertStatement = DatabaseUtils.insertFromBuffer(tablename, columnsnames, sb);
DatabaseFactory.executeSQLUpdate(insertStatement, dbconnection);
// AnalysisLogger.getLogger().debug("Partial Insertion completed with Success!");
double zVal = z;
if (cset.getElements().size() > 2)
zVal = cset.getElements().get(2);
String tVal = "" + time;
if (cset.getElements().size() > 3) {
tVal = "" + cset.getElements().get(3);
if (Double.isNaN(cset.getElements().get(3)) || (Double.isInfinite(cset.getElements().get(3))))
tVal = "NULL";
}
if (valueForTable != null) {
rowcounter++;
if (valuesPropertiesMap == null)
sb.append("('" + csquare + "'," + x + "," + y + "," + zVal + "," + tVal + ",'" + valueForTable + "')");
else
sb.append("(" + x + "," + y + "," + zVal + "," + tVal + "," + valueForTable + ")");
}
if (rowcounter % 5000 == 0) {
// AnalysisLogger.getLogger().debug("Partial Inserting Buffer of " + sb.length() + " Values");
String insertStatement = DatabaseUtils.insertFromBuffer(tablename, columnNames, sb);
// AnalysisLogger.getLogger().debug("Inserting Buffer " + insertStatement);
DatabaseFactory.executeSQLUpdate(insertStatement, dbconnection);
// AnalysisLogger.getLogger().debug("Partial Insertion completed with Success!");
sb = new StringBuffer();
} else if (i < triplets - 1)
}
else if (valueForTable != null)
sb.append(",");
}
AnalysisLogger.getLogger().debug("Inserting Final Buffer of " + sb.length() + " Values");
// AnalysisLogger.getLogger().debug("Inserting Final Buffer " + sb);
// AnalysisLogger.getLogger().debug("Inserting Final Buffer " + sb);
// save all the strings on the table
if (sb.length() > 0) {
String insertStatement = DatabaseUtils.insertFromBuffer(tablename, columnsnames, sb);
String insertStatement = DatabaseUtils.insertFromString(tablename, columnNames, sb.substring(0, sb.length()-1));
// AnalysisLogger.getLogger().debug("Inserting Buffer " + insertStatement);
DatabaseFactory.executeSQLUpdate(insertStatement, dbconnection);
AnalysisLogger.getLogger().debug("Insertion completed with Success!");
}
@@ -147,38 +201,78 @@ public class RasterTable {
}
}
public static String propertiesMapToDatabaseString(Map<String, String> valuesMap) {
StringBuffer sb = new StringBuffer();
int m = valuesMap.size();
int i = 0;
for (String value : valuesMap.values()) {
sb.append("'" + value .replace("'", ""+(char)96)+ "'");
if (i < m - 1)
sb.append(",");
i++;
}
return sb.toString();
}
public static String propertiesMapToColumnString(Map<String, String> valuesMap, boolean withtype) {
StringBuffer sb = new StringBuffer();
int m = valuesMap.size();
int i = 0;
for (String keys : valuesMap.keySet()) {
sb.append(keys);
if (withtype)
sb.append(" character varying");
if (i < m - 1)
sb.append(",");
i++;
}
return sb.toString();
}
public static String generateEmptyValues(int nValues) {
StringBuffer sb = new StringBuffer();
for (int j = 0; j < nValues; j++) {
sb.append("NULL");
if (j < nValues - 1)
sb.append(",");
}
return sb.toString();
}
public void deleteTable() {
SessionFactory dbconnection = null;
try {
dbconnection = DatabaseUtils.initDBSession(configuration);
DatabaseFactory.executeSQLUpdate(DatabaseUtils.dropTableStatement(tablename), dbconnection);
} catch (Exception e) {
// e.printStackTrace();
AnalysisLogger.getLogger().debug("Impossible to delete table "+tablename+" : "+e.getLocalizedMessage());
// e.printStackTrace();
AnalysisLogger.getLogger().debug("Impossible to delete table " + tablename + " : " + e.getLocalizedMessage());
} finally {
DatabaseUtils.closeDBConnection(dbconnection);
}
}
public static List<Double> associateValueToCoordinates(List<Tuple<Double>> coordinates, double[][] data){
public static List<Double> associateValueToCoordinates(List<Tuple<Double>> coordinates, double[][] data) {
List<Double> values = new ArrayList<Double>();
int k = 0;
int g = 0;
int ntriplets = coordinates.size();
int xsteps = data[0].length-1;
int xsteps = data[0].length - 1;
for (int t = 0; t < ntriplets; t++) {
values.add(data[k][g]);
if (g == xsteps) {
g = 0;
k++;
}
else
} else
g++;
}
return values;
}
}
}

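The three new static helpers in RasterTable are pure string builders, so their behaviour can be checked in isolation. The expected outputs below follow directly from the code above; attribute names and values are invented, and the snippet assumes RasterTable is on the classpath:

import java.util.LinkedHashMap;
import java.util.Map;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;

public class RasterTableHelpersDemo {
    public static void main(String[] args) {
        Map<String, String> features = new LinkedHashMap<String, String>();
        features.put("f_area", "Adriatic");
        features.put("depth", "200");

        // CREATE TABLE column list: f_area character varying,depth character varying
        System.out.println(RasterTable.propertiesMapToColumnString(features, true));
        // INSERT column list: f_area,depth
        System.out.println(RasterTable.propertiesMapToColumnString(features, false));
        // quoted row values: 'Adriatic','200'
        System.out.println(RasterTable.propertiesMapToDatabaseString(features));
        // placeholders for unmatched cells: NULL,NULL
        System.out.println(RasterTable.generateEmptyValues(2));
    }
}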
View File: TestExtraction.java

@@ -12,9 +12,10 @@ import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
public class TestExtraction {
static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF(),testXYExtractionAquaMaps(),testXYExtractionTable(),testXYExtractionTable2()};
static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF(),testXYExtractionAquaMaps(),testXYExtractionTable(),testXYExtractionTable2(),testDirectExtraction()};
// static AlgorithmConfiguration[] configs = { testXYExtractionTable2()};
// static AlgorithmConfiguration[] configs = { testDirectExtraction()};
// static AlgorithmConfiguration[] configs = { testXYExtractionAquaMaps()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
@@ -82,8 +83,8 @@ public class TestExtraction {
config.setParam("BBox_UpperRightLong","50");
config.setParam("XResolution","0.5");
config.setParam("YResolution","0.5");
config.setParam("OutputTableName","testextraction");
config.setParam("OutputTableLabel","testextraction");
config.setParam("OutputTableName","testextractiondirect");
config.setParam("OutputTableLabel","testextractiondirect");
return config;
}
@@ -133,7 +134,7 @@ public class TestExtraction {
AlgorithmConfiguration config = testXYExtractionNetCDF();
config.setAgent("XYEXTRACTOR_TABLE");
config.setParam("OutputTableName","testextractiontable");
config.setParam("OutputTableName","testextractiontable2");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");

View File: TestOccurrenceEnrichment.java

@@ -19,6 +19,7 @@ import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
public class TestOccurrenceEnrichment {
static AlgorithmConfiguration[] configs = { testOccEnrichment()};
// static AlgorithmConfiguration[] configs = { testOccEnrichmentWPS()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
@@ -81,4 +82,37 @@ public class TestOccurrenceEnrichment {
return config;
}
private static AlgorithmConfiguration testOccEnrichmentWPS() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("OCCURRENCE_ENRICHMENT");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setParam("OccurrenceTable","occurrence_carch");
config.setParam("LongitudeColumn","decimallongitude");
config.setParam("LatitudeColumn","decimallatitude");
config.setParam("ScientificNameColumn","scientificname");
config.setParam("TimeColumn","eventdate");
config.setParam("OptionalFilter","");
config.setParam("Resolution","0.5");
config.setParam("OutputTableDBName","testenrichmentwps");
config.setParam("OutputTableName","testenrichmentwps");
String sep=AlgorithmConfiguration.getListSeparator();
//WFS: carcharodon
config.setParam("Layers","b8a17d86-c62f-4e73-b5c9-bdb3366015c9");
return config;
}
}

View File: TestTimeExtraction.java

@@ -13,8 +13,10 @@ import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
public class TestTimeExtraction {
static AlgorithmConfiguration[] configs = { testTimeExtractionTable(), testTimeExtractionNetCDF(),testTimeExtractionTable2()};
// static AlgorithmConfiguration[] configs = { testTimeExtractionTable(), testTimeExtractionNetCDF(),testTimeExtractionTable2()};
// static AlgorithmConfiguration[] configs = { testTimeExtractionNetCDF()};
static AlgorithmConfiguration[] configs = { testTExtractionAquamaps()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
@@ -32,6 +34,36 @@ public class TestTimeExtraction {
}
private static AlgorithmConfiguration testTExtractionAquamaps() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIMEEXTRACTION");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setParam("Layer","40198411-9ceb-420f-8f39-a7e1b8128d6b");
config.setParam("OutputTableName","testtextractionaquamaps");
config.setParam("OutputTableLabel","testtextractionaquamaps");
config.setParam("X","121");
config.setParam("Y","-4");
config.setParam("Resolution","0.5");
config.setParam("Z","0");
config.setParam("SamplingFreq","-1");
config.setParam("MinFrequency","-1");
config.setParam("MaxFrequency","-1");
config.setParam("FrequencyError","-1");
return config;
}
private static AlgorithmConfiguration testTimeExtractionNetCDF() {
AlgorithmConfiguration config = new AlgorithmConfiguration();

View File: TestZExtraction.java

@@ -13,8 +13,8 @@ import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
public class TestZExtraction {
// static AlgorithmConfiguration[] configs = { testTimeExtractionTable(), testTimeExtractionNetCDF(),testTimeExtractionTable2()};
static AlgorithmConfiguration[] configs = { testZExtractionNetCDF()};
// static AlgorithmConfiguration[] configs = { testTimeExtractionTable(), testZExtractionLongNetCDF(),testZExtractionNetCDF(),testTimeExtractionTable2()};
static AlgorithmConfiguration[] configs = { testZExtractionAquamaps()};
public static void main(String[] args) throws Exception {
@@ -32,6 +32,31 @@ public class TestZExtraction {
}
}
private static AlgorithmConfiguration testZExtractionAquamaps() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("ZEXTRACTION");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setParam("Layer","40198411-9ceb-420f-8f39-a7e1b8128d6b");
config.setParam("OutputTableName","testzextractionaquamaps");
config.setParam("OutputTableLabel","testzextractionaquamaps");
config.setParam("TimeIndex","0");
config.setParam("X","121");
config.setParam("Y","-4");
config.setParam("Resolution","0.5");
return config;
}
private static AlgorithmConfiguration testZExtractionLongNetCDF() {
@@ -49,8 +74,8 @@ public class TestZExtraction {
config.setParam("Layer","6411b110-7572-457a-a662-a16e4ff09e4e");
// config.setParam("Layer","dffa504b-dbc8-4553-896e-002549f8f5d3");
config.setParam("OutputTableName","testtimeextraction");
config.setParam("OutputTableLabel","testtimeextraction");
config.setParam("OutputTableName","testzextractionlong");
config.setParam("OutputTableLabel","testzextractionlong");
config.setParam("TimeIndex","0");
config.setParam("X","0");
@@ -75,8 +100,8 @@ public class TestZExtraction {
config.setParam("Layer","7f90e153-0c5c-4d45-a498-a6374593e68d");
config.setParam("OutputTableName","testtimeextraction");
config.setParam("OutputTableLabel","testtimeextraction");
config.setParam("OutputTableName","testzextractionstandard");
config.setParam("OutputTableLabel","testzextractionstandard");
config.setParam("TimeIndex","0");
config.setParam("X","0");
@@ -101,8 +126,8 @@ public class TestZExtraction {
config.setParam("Layer","c565e32c-c5b3-4964-b44f-06dc620563e9");
config.setParam("OutputTableName","testtimeextraction2");
config.setParam("OutputTableLabel","testtimeextraction2");
config.setParam("OutputTableName","testtimeextraction");
config.setParam("OutputTableLabel","testtimeextraction");
config.setParam("Z","0");
config.setParam("X","-47.97");