accounting for WFS features
git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineGeoSpatialExtension@96015 82a268e6-3cf1-43bd-a215-b396298e98cf
parent 1b595a2db9
commit 30af187dde
@@ -156,6 +156,15 @@ public class OccurrenceEnrichment implements Transducerer {
        return "An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points.";
    }

    public static String generateEmptyValues(int nValues) {
        StringBuffer sb = new StringBuffer();
        for (int j = 0; j < nValues; j++) {
            sb.append("''");
            if (j < nValues - 1)
                sb.append(",");
        }
        return sb.toString();
    }

    @Override
    public void compute() throws Exception {
@@ -267,9 +276,10 @@ public class OccurrenceEnrichment implements Transducerer {
        if (polygonsFeatures != null && polygonsFeatures.size() > 0){
            AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Managing Web Features");
            enrichWithEnvironmentalFeatures = false;
            int ncolumns = polygonsFeatures.keySet().size();
            emptyRow = RasterTable.generateEmptyValues(ncolumns);
            features = polygonsFeatures.values().iterator().next();
            int ncolumns = features.keySet().size();
            emptyRow = generateEmptyValues(ncolumns);
            AnalysisLogger.getLogger().debug("OccurrenceEnrichment->Number of additional columns "+ncolumns);
            columnsTypes += ","+RasterTable.propertiesMapToColumnString(features, true);
            columns += ","+RasterTable.propertiesMapToColumnString(features, false);
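The replacement lines above size the extra output columns from the property map of the first WFS feature rather than from the map of polygons. A small sketch of that sizing step with a hypothetical property map (the names f_area and f_status are illustrative only, not taken from the data):

    import java.util.LinkedHashMap;

    public class FeatureColumnsDemo {
        public static void main(String[] args) {
            LinkedHashMap<String, String> features = new LinkedHashMap<String, String>();
            features.put("f_area", "FAO 27");       // illustrative property
            features.put("f_status", "assessed");   // illustrative property
            int ncolumns = features.keySet().size();                              // 2 extra columns
            String emptyRow = OccurrenceEnrichment.generateEmptyValues(ncolumns); // "'',''"
            System.out.println(ncolumns + " -> " + emptyRow);
        }
    }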
@@ -141,7 +141,7 @@ public class TimeExtraction extends XYExtraction{
        this.pd = new PeriodicityDetector();
        double F = -1;
        if (samplingFrequencyValue>0&&minFrequencyValue>0&&maxFrequencyValue>0&&expectedFrequencyErrorValue>0)
            F = pd.detectFrequency(signal,samplingFrequencyValue,(float)minFrequencyValue,(float)maxFrequencyValue,(float)expectedFrequencyErrorValue,false);
            F = pd.detectFrequency(signal,samplingFrequencyValue,(float)minFrequencyValue,(float)maxFrequencyValue,(float)expectedFrequencyErrorValue,-1,false);
        else
            F = pd.detectFrequency(signal,false);
@@ -39,6 +39,7 @@ public class WFS implements GISDataConnector {
        int ttc = 0;
        Double[] featuresarray = new Double[tsize];
        int k = 0;
        int intersections = 0;
        GeometryFactory factory = new GeometryFactory(new PrecisionModel(), 4326);
        for (Tuple<Double> triplet : coordinates3d) {
            ArrayList<Double> elements = triplet.getElements();
@@ -47,7 +48,10 @@ public class WFS implements GISDataConnector {
            boolean found = false;
            for (FeaturedPolygon poly : featuresInTime) {
                /*check the polygons
                 * if (k==0)
                    System.out.println(poly.p);
                */
                if (poly != null && poly.p != null && poly.p.covers(po)) {
                    /*
                    AnalysisLogger.getLogger().debug(poly.p.getCentroid()+
@@ -76,6 +80,7 @@ public class WFS implements GISDataConnector {
                    featuresarray[k] = poly.value;
                    polygonsFeatures.put(poly.value, poly.features);
                    found = true;
                    intersections++;
                    break;
                }
            }
@@ -92,7 +97,9 @@ public class WFS implements GISDataConnector {
            ttc++;
            k++;
        }

        AnalysisLogger.getLogger().debug("WFS-> Found " + intersections + " intersections!");

        List<Double> features = Arrays.asList(featuresarray);
        return features;
    }
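The loop above decides whether an occurrence point falls inside a fetched polygon with JTS's covers() predicate, using a WGS84 (EPSG:4326) geometry factory. A self-contained sketch of the same test, assuming the com.vividsolutions JTS packages used by gCube at the time (package names differ in newer JTS releases):

    import com.vividsolutions.jts.geom.Coordinate;
    import com.vividsolutions.jts.geom.GeometryFactory;
    import com.vividsolutions.jts.geom.Point;
    import com.vividsolutions.jts.geom.Polygon;
    import com.vividsolutions.jts.geom.PrecisionModel;

    public class CoversDemo {
        public static void main(String[] args) {
            // Same factory setup as in the connector: floating precision, SRID 4326.
            GeometryFactory factory = new GeometryFactory(new PrecisionModel(), 4326);
            // A half-degree square taken from the sample geometry dump (x = longitude, y = latitude).
            Coordinate[] ring = { new Coordinate(124.5, 1.5), new Coordinate(125.0, 1.5),
                    new Coordinate(125.0, 2.0), new Coordinate(124.5, 2.0), new Coordinate(124.5, 1.5) };
            Polygon square = factory.createPolygon(factory.createLinearRing(ring), null);
            Point po = factory.createPoint(new Coordinate(124.7, 1.7));
            // covers() is true for interior and boundary points, so edge hits count as intersections too.
            System.out.println(square.covers(po)); // true
        }
    }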
@@ -305,7 +305,7 @@ public class WFSDataExplorer {
        // geometry935133b1-ba3c-493d-8e18-6fb496ced995={type=MultiPolygon, coordinates={966a275c-23aa-4a43-a943-7e1c7eaf5d65=[[[1.5,125.00000000000011],[1.5,124.5],[2.000000000000057,124.5],[2.000000000000057,125.00000000000011],[1.5,125.00000000000011]]]}},
        String[] coordinatePairs = null;
        List<double[]> dpairs = new ArrayList<double[]>();
        if (wfsgeometry.toLowerCase().contains("multipolygon")) {
        if (wfsgeometry.toLowerCase().contains("multipolygon")||wfsgeometry.toLowerCase().contains("polygon")) {
            String coordString = "coordinates=";
            String coordinates = wfsgeometry.substring(wfsgeometry.indexOf(coordString) + coordString.length());
            coordinates = coordinates.substring(coordinates.indexOf("=") + 1);
@@ -319,8 +319,14 @@ public class WFSDataExplorer {
                String[] coordpair = coord.split(",");
                double[] dd = new double[2];
                // invert the coordinates as the final must be are long,lat
                dd[1] = Double.parseDouble(coordpair[0]);
                dd[0] = Double.parseDouble(coordpair[1]);
                /*
                dd[1] = Double.parseDouble(coordpair[1]);
                dd[0] = Double.parseDouble(coordpair[0]);
                */
                dpairs.add(dd);
            }
        }
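This hunk introduces the swap that turns the lat,long pairs served by the WFS into the long,lat order expected downstream (the previous, non-inverted assignment is left commented out). A minimal sketch of the parsing, using a pair from the geometry dump quoted earlier in this file:

    public class CoordOrderDemo {
        public static void main(String[] args) {
            String coord = "1.5,125.00000000000011"; // lat,long as returned by the WFS
            String[] coordpair = coord.split(",");
            double[] dd = new double[2];
            dd[1] = Double.parseDouble(coordpair[0]); // latitude  ends up in dd[1]
            dd[0] = Double.parseDouble(coordpair[1]); // longitude ends up in dd[0]
            System.out.println(dd[0] + "," + dd[1]); // 125.00000000000011,1.5
        }
    }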
@@ -223,10 +223,16 @@ public class GeoNetworkInspector {
        AnalysisLogger.getLogger().debug("Retrieving Layer Name");
        String wmslink = getWMSLink(meta);
        String layer = null;
        String finder = "layers=";
        if (wmslink != null) {
            String [] finders = {"layers=", "LAYERS=","Layers="};
            AnalysisLogger.getLogger().debug("WMS layer found!");
            int idxfinder = wmslink.indexOf(finder);
            int idxfinder = -1;
            for (String finder:finders){
                idxfinder = wmslink.indexOf(finder);
                if (idxfinder>-1)
                    break;
            }
            if (idxfinder > 0) {
                AnalysisLogger.getLogger().debug("Searching for Layer Name inside the WMS Link");
                wmslink = wmslink.substring(idxfinder);
@@ -234,7 +240,7 @@ public class GeoNetworkInspector {
                if (andIdx < 0)
                    andIdx = wmslink.length();

                layer = wmslink.substring(finder.length(), andIdx).trim();
                layer = wmslink.substring(finders[0].length(), andIdx).trim();
            }
            // if the layer is not inside the wmslink
            else {
@@ -242,6 +248,34 @@ public class GeoNetworkInspector {
                layer = searchLayerNameInMeta(meta);
            }
        }
        else
        {
            String wfslink = getWFSLink(meta);
            String [] finders = {"typename=", "TYPENAME=","typeName=","TypeName"};
            if (wfslink != null) {
                AnalysisLogger.getLogger().debug("WFS layer found!");
                int idxfinder = -1;
                for (String finder:finders){
                    idxfinder = wfslink.indexOf(finder);
                    if (idxfinder>-1)
                        break;
                }
                if (idxfinder > 0) {
                    AnalysisLogger.getLogger().debug("Searching for Layer Name inside the WMS Link");
                    wfslink = wfslink.substring(idxfinder);
                    int andIdx = wfslink.indexOf("&");
                    if (andIdx < 0)
                        andIdx = wfslink.length();

                    layer = wfslink.substring(finders[0].length(), andIdx).trim();
                }
                // if the layer is not inside the wfslink
                else {
                    AnalysisLogger.getLogger().debug("Searching for Layer Name inside the file");
                    layer = searchLayerNameInMeta(meta);
                }
            }
        }
        return layer;
    }
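Both the existing WMS branch and the new WFS branch above probe a list of candidate parameter keys and then cut the layer name out of the link. A compact sketch of that lookup, written as an assumption rather than a drop-in replacement, using the matched key's own length for the cut (the example URL follows the Geotermia WFS link used in the tests below):

    public class LayerNameFromLink {
        // Returns the value of the first key found in the link, or null if none matches.
        static String extractParam(String link, String[] finders) {
            if (link == null)
                return null;
            for (String finder : finders) {
                int idx = link.indexOf(finder);
                if (idx > -1) {
                    String rest = link.substring(idx + finder.length());
                    int andIdx = rest.indexOf("&");
                    if (andIdx < 0)
                        andIdx = rest.length();
                    return rest.substring(0, andIdx).trim();
                }
            }
            return null;
        }

        public static void main(String[] args) {
            String[] finders = { "typename=", "TYPENAME=", "typeName=", "TypeName=" };
            String wfslink = "http://repoigg.services.iit.cnr.it:8080/geoserver/IGG/ows?service=WFS&typeName=IGG:area_temp_1000&maxFeatures=50";
            System.out.println(extractParam(wfslink, finders)); // IGG:area_temp_1000
        }
    }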
@@ -93,7 +93,8 @@ public class MatrixExtractor {
            connector = new ASC();
        } else if (gnInspector.isWFS(meta)) {
            AnalysisLogger.getLogger().debug("found a Geo Layer with reference " + layerURL + " and layer name " + layerName);
            layerURL = gnInspector.getGeoserverLink(meta);
            // layerURL = gnInspector.getGeoserverLink(meta);
            layerURL = gnInspector.getWFSLink(meta);
            connector = new WFS();
        } else if (gnInspector.isWCS(meta)) {
            AnalysisLogger.getLogger().debug("found a WCS Layer with reference " + layerURL + " and layer name " + layerName);
@@ -104,11 +104,14 @@ public class RasterTable {
            // create a table
            String columnNames = columnsnamesStandard;
            String emptycolumns = "";
            if (valuesPropertiesMap == null)
            if (valuesPropertiesMap == null){
                AnalysisLogger.getLogger().debug("Rasterization->No properties to associate");
                DatabaseFactory.executeSQLUpdate(String.format(createTableStatementStandard, tablename), dbconnection);
            }
            else {
                AnalysisLogger.getLogger().debug("Managing Table with Custom Fields");
                Map<String, String> valuesMap = valuesPropertiesMap.values().iterator().next();
                AnalysisLogger.getLogger().debug("Rasterization->Sample of properties: "+valuesMap);
                emptycolumns = generateEmptyValues(valuesMap.size());
                DatabaseFactory.executeSQLUpdate(String.format(createTableStatementWithFields, tablename, propertiesMapToColumnString(valuesMap, true)), dbconnection);
                columnNames = String.format(columnsnamesWithFields, propertiesMapToColumnString(valuesMap, false));
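In the custom-fields branch above, propertiesMapToColumnString is called twice: with true to build the column definitions for the CREATE TABLE statement and with false to build the bare column-name list (the OccurrenceEnrichment hunk uses it the same way). The method body is not shown in this diff, so the following is a hypothetical stand-in; in particular the character varying typing is an assumption:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class ColumnStringDemo {
        // Hypothetical stand-in for RasterTable.propertiesMapToColumnString.
        static String propertiesToColumns(Map<String, String> map, boolean withTypes) {
            StringBuffer sb = new StringBuffer();
            int i = 0;
            for (String name : map.keySet()) {
                sb.append(withTypes ? name + " character varying" : name);
                if (i < map.size() - 1)
                    sb.append(",");
                i++;
            }
            return sb.toString();
        }

        public static void main(String[] args) {
            Map<String, String> valuesMap = new LinkedHashMap<String, String>();
            valuesMap.put("f_area", "FAO 27");      // illustrative properties
            valuesMap.put("f_status", "assessed");
            System.out.println(propertiesToColumns(valuesMap, true));  // f_area character varying,f_status character varying
            System.out.println(propertiesToColumns(valuesMap, false)); // f_area,f_status
        }
    }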
@@ -4,7 +4,14 @@ import java.util.HashMap;

public class OGCFormatter {
    public static String getWfsUrl(String geoServerUrl, String layerName, String bbox, int limit, String format) {
        return geoServerUrl + "/wfs?service=wfs&version=1.1.0&REQUEST=GetFeature" + "&TYPENAME=" + layerName + (bbox==null? "":"&BBOX=" + bbox) + (limit == 0 ? "" : "&MAXFEATURES=" + limit) + (format == null ? "" : "&OUTPUTFORMAT=" + format);
        int idx = -1;
        if ((idx = geoServerUrl.indexOf("?"))>0){
            geoServerUrl = geoServerUrl.substring(0,idx);
        }
        else
            geoServerUrl = geoServerUrl + "/wfs";
        //the srsName keeps lat,long output constant
        return geoServerUrl+"?service=wfs&version=1.1.0&REQUEST=GetFeature" + "&srsName=urn:x-ogc:def:crs:EPSG:4326&TYPENAME=" + layerName + (bbox==null? "":"&BBOX=" + bbox) + (limit == 0 ? "" : "&MAXFEATURES=" + limit) + (format == null ? "" : "&OUTPUTFORMAT=" + format);
    }

    public static String getWmsUrl(String geoServerUrl, String layerName, String style, String bbox) {
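With the rewritten getWfsUrl, a bare GeoServer root gets "/wfs" appended while an endpoint that already carries a query string is truncated at the "?", so both produce a single well-formed GetFeature request with the fixed srsName. An illustrative call (the endpoint is hypothetical; the expected strings are composed from the return statement above):

    public class WfsUrlDemo {
        public static void main(String[] args) {
            String a = OGCFormatter.getWfsUrl("http://example.org/geoserver", "IGG:area_temp_1000", null, 50, "json");
            String b = OGCFormatter.getWfsUrl("http://example.org/geoserver/IGG/ows?service=WFS&version=1.0.0", "IGG:area_temp_1000", null, 50, "json");
            // a -> http://example.org/geoserver/wfs?service=wfs&version=1.1.0&REQUEST=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&TYPENAME=IGG:area_temp_1000&MAXFEATURES=50&OUTPUTFORMAT=json
            // b -> http://example.org/geoserver/IGG/ows?service=wfs&version=1.1.0&REQUEST=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&TYPENAME=IGG:area_temp_1000&MAXFEATURES=50&OUTPUTFORMAT=json
            System.out.println(a);
            System.out.println(b);
        }
    }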
@@ -12,10 +12,12 @@ import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;

public class TestExtraction {

    static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF(),testXYExtractionAquaMaps(),testXYExtractionTable(),testXYExtractionTable2(),testDirectExtraction()};
    // static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF(),testXYExtractionAquaMaps(),testXYExtractionTable(),testXYExtractionTable2(),testDirectExtraction()};
    // static AlgorithmConfiguration[] configs = { testXYExtractionTable2()};
    // static AlgorithmConfiguration[] configs = { testDirectExtraction()};
    // static AlgorithmConfiguration[] configs = { testXYExtractionAquaMaps()};
    static AlgorithmConfiguration[] configs = { testXYExtractionGeotermia()};
    // static AlgorithmConfiguration[] configs = { testXYExtractionFAO()};
    public static void main(String[] args) throws Exception {

        System.out.println("TEST 1");
@@ -32,6 +34,89 @@ public class TestExtraction {
        }
    }

    private static AlgorithmConfiguration testXYExtractionProd() {

        AlgorithmConfiguration config = new AlgorithmConfiguration();

        config.setAgent("XYEXTRACTOR");
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");
        config.setParam("DatabaseUserName","gcube");
        config.setParam("DatabasePassword","d4science2");
        config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
        config.setParam("DatabaseDriver","org.postgresql.Driver");
        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");

        config.setParam("Layer","0aac424b-5f5b-4fa6-97d6-4b4deee62b97");
        config.setParam("Z","0");
        config.setParam("TimeIndex","0");
        config.setParam("BBox_LowerLeftLat","-60");
        config.setParam("BBox_LowerLeftLong","-50");
        config.setParam("BBox_UpperRightLat","60");
        config.setParam("BBox_UpperRightLong","50");
        config.setParam("XResolution","0.5");
        config.setParam("YResolution","0.5");
        config.setParam("OutputTableName","testextractionprod");
        config.setParam("OutputTableLabel","testextractionprod");

        return config;
    }

    private static AlgorithmConfiguration testXYExtractionGeotermia() {

        AlgorithmConfiguration config = new AlgorithmConfiguration();

        config.setAgent("XYEXTRACTOR");
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");
        config.setParam("DatabaseUserName","gcube");
        config.setParam("DatabasePassword","d4science2");
        config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
        config.setParam("DatabaseDriver","org.postgresql.Driver");
        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");

        config.setParam("Layer","http://repoigg.services.iit.cnr.it:8080/geoserver/IGG/ows?service=WFS&version=1.0.0&request=GetFeature&typeName=IGG:area_temp_1000&maxFeatures=50");
        config.setParam("Z","-1000");
        config.setParam("TimeIndex","0");
        config.setParam("BBox_LowerLeftLat","34.46");
        config.setParam("BBox_LowerLeftLong","5.85");
        config.setParam("BBox_UpperRightLat","49");
        config.setParam("BBox_UpperRightLong","21.41");
        config.setParam("XResolution","0.01");
        config.setParam("YResolution","0.01");
        config.setParam("OutputTableName","testextractiongeotermia");
        config.setParam("OutputTableLabel","testextractiongeotermia");

        return config;
    }

    private static AlgorithmConfiguration testXYExtractionFAO() {

        AlgorithmConfiguration config = new AlgorithmConfiguration();

        config.setAgent("XYEXTRACTOR");
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");
        config.setParam("DatabaseUserName","gcube");
        config.setParam("DatabasePassword","d4science2");
        config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
        config.setParam("DatabaseDriver","org.postgresql.Driver");
        config.setGcubeScope("/gcube/devsec/devVRE");

        config.setParam("Layer","20c06241-f00f-4cb0-82a3-4e5ec97a0d0a");
        config.setParam("Z","0");
        config.setParam("TimeIndex","0");
        config.setParam("BBox_LowerLeftLat","-90");
        config.setParam("BBox_LowerLeftLong","-180");
        config.setParam("BBox_UpperRightLat","90");
        config.setParam("BBox_UpperRightLong","180");
        config.setParam("XResolution","0.2");
        config.setParam("YResolution","0.2");
        config.setParam("OutputTableName","testextractionfao");
        config.setParam("OutputTableLabel","testextractionfao");

        return config;
    }

    private static AlgorithmConfiguration testXYExtractionNetCDF() {
@@ -18,8 +18,8 @@ import org.gcube.dataanalysis.ecoengine.utils.IOHelper;

public class TestOccurrenceEnrichment {

    static AlgorithmConfiguration[] configs = { testOccEnrichment()};
    // static AlgorithmConfiguration[] configs = { testOccEnrichmentWPS()};
    // static AlgorithmConfiguration[] configs = { testOccEnrichment(), testOccEnrichmentWFS()};
    static AlgorithmConfiguration[] configs = { testOccEnrichmentWFSFAO()};
    public static void main(String[] args) throws Exception {

        System.out.println("TEST 1");
@@ -36,7 +36,7 @@ public class TestOccurrenceEnrichment {
        }
    }

    private static AlgorithmConfiguration testOccEnrichment() {

        AlgorithmConfiguration config = new AlgorithmConfiguration();
@@ -81,9 +81,40 @@ public class TestOccurrenceEnrichment {
        // config.setParam("FeaturesNames","chlorophyll"+sep+"ph");
        return config;
    }

    private static AlgorithmConfiguration testOccEnrichmentWPS() {
    private static AlgorithmConfiguration testOccEnrichmentWFSFAO() {

        AlgorithmConfiguration config = new AlgorithmConfiguration();

        config.setAgent("OCCURRENCE_ENRICHMENT");
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");

        config.setParam("DatabaseUserName", "utente");
        config.setParam("DatabasePassword", "d4science");
        config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
        config.setParam("DatabaseDriver", "org.postgresql.Driver");
        config.setGcubeScope("/gcube/devsec/devVRE");

        config.setParam("OccurrenceTable","occurrence_carch");
        config.setParam("LongitudeColumn","decimallongitude");
        config.setParam("LatitudeColumn","decimallatitude");
        config.setParam("ScientificNameColumn","scientificname");
        config.setParam("TimeColumn","eventdate");
        config.setParam("OptionalFilter","");
        config.setParam("Resolution","0.5");
        config.setParam("OutputTableDBName","testenrichmentwpsfao");
        config.setParam("OutputTableName","testenrichmentwpsfao");
        String sep=AlgorithmConfiguration.getListSeparator();

        //WFS: carcharodon
        config.setParam("Layers"," fao-species-map-wsh");

        return config;
    }

    private static AlgorithmConfiguration testOccEnrichmentWFS() {

        AlgorithmConfiguration config = new AlgorithmConfiguration();