git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineGeoSpatialExtension@92163 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
parent 0d9bb0366f
commit f4ea6d905b
@@ -1,337 +0,0 @@
package org.gcube.dataanalysis.geo.retrieval;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.meta.features.FeaturesManager;
import org.gcube.dataanalysis.geo.utils.AscDataExplorer;
import org.gcube.dataanalysis.geo.utils.EnvDataExplorer;
import org.gcube.dataanalysis.geo.utils.FeaturedPolygon;
import org.gcube.dataanalysis.geo.utils.ThreddsDataExplorer;
import org.opengis.metadata.Metadata;
import org.opengis.metadata.identification.Identification;

import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.PrecisionModel;
import com.vividsolutions.jts.geom.impl.CoordinateArraySequence;

public class GeoIntersector {

	private FeaturesManager featurer;
	private String configDir;

	public GeoIntersector(String scope, String cfgDir) {
		featurer = new FeaturesManager();
		featurer.setScope(scope);
		this.configDir = cfgDir;
	}

	public FeaturesManager getFeaturer() {
		return featurer;
	}

	public LinkedHashMap<String, Double> getFeaturesInTime(String layerTitle, double x, double y) throws Exception {
		return getFeaturesInAllTimes(layerTitle, x, y, 0);
	}

	public LinkedHashMap<String, Double> getFeaturesInAllTimes(String layerTitle, double x, double y, double z) throws Exception {
		LinkedHashMap<String, Double> features = new LinkedHashMap<String, Double>();
		// get the layer metadata from the GeoNetwork catalogue
		// Metadata meta = featurer.getGNInfobyTitle(layerTitle);
		Metadata meta = featurer.getGNInfobyUUIDorName(layerTitle);
		// if the layer was found
		if (meta != null) {
			String layer = featurer.getLayerName(meta);
			if (layer == null)
				layer = layerTitle;
			// check whether it is a NetCDF file served by THREDDS
			if (featurer.isThreddsFile(meta)) {
				Identification id = meta.getIdentificationInfo().iterator().next();
				String title = id.getCitation().getTitle().toString();
				AnalysisLogger.getLogger().debug("found a NetCDF file with title " + title + " and layer name " + layer);
				features = getFeaturesFromNetCDF(featurer.getOpenDapLink(meta), layer, x, y, z);
			} else {
				AnalysisLogger.getLogger().debug("found a Geo Layer with title " + layerTitle + " and layer name " + layer);
				features = getFeaturesFromWFS(featurer.getWFSLink(meta), layer, x, y);
			}
		}
		return features;
	}

	public List<Double> getFeaturesInTimeInstant(String layerTitle, int time, List<Tuple<Double>> triplets, double xL, double xR, double yL, double yR) throws Exception {
		List<Double> features = new ArrayList<Double>();
		// get the layer metadata from the GeoNetwork catalogue
		Metadata meta = featurer.getGNInfobyUUIDorName(layerTitle);
		// if the layer was found
		if (meta != null) {
			String layer = featurer.getLayerName(meta);
			if (layer == null)
				layer = layerTitle;
			// check whether it is a NetCDF file served by THREDDS
			if (featurer.isThreddsFile(meta)) {
				Identification id = meta.getIdentificationInfo().iterator().next();
				String title = id.getCitation().getTitle().toString();
				AnalysisLogger.getLogger().debug("found a NetCDF file with title " + title + " and layer name " + layer);
				features = getFeaturesFromNetCDF(featurer.getOpenDapLink(meta), layer, time, triplets, xL, xR, yL, yR);
				/*
				for (Tuple<Double> triplet : triplets) {
					double x = triplet.getElements().get(0);
					double y = triplet.getElements().get(1);
					double z = 0;
					if (triplet.getElements().size() > 2)
						z = triplet.getElements().get(2);
					AnalysisLogger.getLogger().debug("Taking point: (" + x + "," + y + "," + z + ")");
					LinkedHashMap<String, Double> features = new LinkedHashMap<String, Double>();
					features = getFeaturesFromNetCDF(featurer.getOpenDapLink(meta), layer, x, y, z);
					AnalysisLogger.getLogger().debug("Got: (" + features + ")");
					featuresSets.add(features);
				}
				*/
			} else if (featurer.isAscFile(meta)) {
				AnalysisLogger.getLogger().debug("managing ASC file");
				String remoteFile = featurer.getHttpLink(meta);
				AscDataExplorer asc = new AscDataExplorer(remoteFile);
				features = asc.retrieveDataFromAsc(triplets);
				AnalysisLogger.getLogger().debug("ASC file managed");
			} else {
				AnalysisLogger.getLogger().debug("found a Geo Layer with reference " + layerTitle + " and layer name " + layer);
				List<FeaturedPolygon> featuresInTime = new ArrayList<FeaturedPolygon>();
				AnalysisLogger.getLogger().debug("taking WFS features");
				featuresInTime = getFeaturesFromWFS(featurer.getGeoserverLink(meta), layer, xL, yL, xR, yR);
				int tsize = triplets.size();
				AnalysisLogger.getLogger().debug("Intersecting " + tsize + " vs " + featuresInTime.size() + " elements");
				int ttc = 0;
				Double[] featuresarray = new Double[tsize];
				int k = 0;
				GeometryFactory factory = new GeometryFactory(new PrecisionModel(), 4326);
				// intersect every point with the polygons returned by the WFS;
				// the first polygon covering the point supplies its value
				for (Tuple<Double> triplet : triplets) {
					ArrayList<Double> elements = triplet.getElements();
					CoordinateArraySequence pcoords = new CoordinateArraySequence(new Coordinate[] { new Coordinate(elements.get(0), elements.get(1)), });
					Point po = new Point(pcoords, factory);
					boolean found = false;
					for (FeaturedPolygon poly : featuresInTime) {
						if (poly != null && poly.p != null && poly.p.covers(po)) {
							featuresarray[k] = poly.value;
							found = true;
							break;
						}
					}
					po = null;
					if (!found) {
						featuresarray[k] = Double.NaN;
					}
					if (ttc % 10000 == 0) {
						AnalysisLogger.getLogger().debug("Status: " + ((double) ttc * 100d / (double) tsize));
					}
					ttc++;
					k++;
				}
				features = Arrays.asList(featuresarray);
			}
		}
		return features;
	}

	private List<Double> getFeaturesFromNetCDF(String opendapURL, String layer, int time, List<Tuple<Double>> triplets, double xL, double xR, double yL, double yR) {
		if (opendapURL == null)
			return null;
		return ThreddsDataExplorer.retrieveDataFromNetCDF(opendapURL, layer, time, triplets, xL, xR, yL, yR);
	}

	private LinkedHashMap<String, Double> getFeaturesFromNetCDF(String opendapURL, String layer, double x, double y, double z) {
		if (opendapURL == null)
			return null;
		return ThreddsDataExplorer.retrieveDataFromNetCDF(opendapURL, layer, x, y, z);
	}

	private LinkedHashMap<String, Double> getFeaturesFromWFS(String geoserverUrl, String layer, double x, double y) {
		if (geoserverUrl == null)
			return null;
		return EnvDataExplorer.getFeatures(geoserverUrl, layer, x, y);
	}

	private List<FeaturedPolygon> getFeaturesFromWFS(String geoserverUrl, String layer, double xL, double yL, double xR, double yR) {
		if (geoserverUrl == null)
			return null;
		return EnvDataExplorer.getFeatures(geoserverUrl, layer, xL, yL, xR, yR);
	}

	public static List<Tuple<Double>> generateCoordinateTriplets(double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution) {
		int ysteps = (int) ((y2 - y1) / yResolution);
		int xsteps = (int) ((x2 - x1) / xResolution);
		List<Tuple<Double>> tuples = new ArrayList<Tuple<Double>>();
		AnalysisLogger.getLogger().debug("Building the points grid according to YRes:" + yResolution + " and XRes:" + xResolution);
		// build the tuples according to the desired resolution,
		// snapping the last row and column to the bounding box edges
		for (int i = 0; i < ysteps + 1; i++) {
			double y = (i * yResolution) + y1;
			if (i == ysteps)
				y = y2;
			for (int j = 0; j < xsteps + 1; j++) {
				double x = (j * xResolution) + x1;
				if (j == xsteps)
					x = x2;
				tuples.add(new Tuple<Double>(x, y, z));
			}
		}
		return tuples;
	}

	public static List<Double> associateValueToCoordinates(List<Tuple<Double>> coordinates, double[][] data) {
		List<Double> values = new ArrayList<Double>();
		int k = 0;
		int g = 0;
		int ntriplets = coordinates.size();
		int xsteps = data[0].length - 1;
		// walk the flat coordinate list and read the matrix row by row
		for (int t = 0; t < ntriplets; t++) {
			values.add(data[k][g]);
			if (g == xsteps) {
				g = 0;
				k++;
			} else
				g++;
		}
		return values;
	}
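	// Note (added for clarity): generateCoordinateTriplets emits points row by
	// row, so associateValueToCoordinates reads the matrix back in the same
	// row-major order. A small worked example with xsteps = 2 (3 columns per
	// row): flat index t = 0, 1, 2 maps to data[0][0], data[0][1], data[0][2],
	// then t = 3 wraps to data[1][0]; in general g = t % (xsteps + 1) and
	// k = t / (xsteps + 1).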

	public double[][] takeTimeSlice(String layerTitle, int timeInstant, double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution) throws Exception {
		AnalysisLogger.getLogger().debug("Bounding box: (" + x1 + "," + x2 + ";" + y1 + "," + y2 + ")");
		boolean faolayer = false;
		if (layerTitle.toLowerCase().contains("fao aquatic species distribution map")) {
			AnalysisLogger.getLogger().debug("FAO DISTRIBUTION LAYER ... TO APPLY PATCH!");
			faolayer = true;
		}
		if ((x2 < x1) || (y2 < y1)) {
			AnalysisLogger.getLogger().debug("ERROR: BAD BOUNDING BOX!");
			return new double[0][0];
		}

		int ysteps = (int) ((y2 - y1) / yResolution);
		int xsteps = (int) ((x2 - x1) / xResolution);
		double[][] slice = new double[ysteps + 1][xsteps + 1];
		List<Tuple<Double>> tuples = new ArrayList<Tuple<Double>>();
		AnalysisLogger.getLogger().debug("Building the points grid according to YRes:" + yResolution + " and XRes:" + xResolution);
		AnalysisLogger.getLogger().debug("Points to reassign:" + (ysteps * xsteps));
		// build the tuples according to the desired resolution,
		// snapping the last row and column to the bounding box edges
		for (int i = 0; i < ysteps + 1; i++) {
			double y = (i * yResolution) + y1;
			if (i == ysteps)
				y = y2;
			for (int j = 0; j < xsteps + 1; j++) {
				double x = (j * xResolution) + x1;
				if (j == xsteps)
					x = x2;
				tuples.add(new Tuple<Double>(x, y, z));
			}
		}

		AnalysisLogger.getLogger().debug("Taking " + ysteps + " x " + xsteps + " = " + (ysteps * xsteps) + " values ...");
		List<Double> timeValues = getFeaturesInTimeInstant(layerTitle, timeInstant, tuples, x1, x2, y1, y2);
		AnalysisLogger.getLogger().debug("Taken " + timeValues.size() + " values");

		// rebuild the values matrix
		int k = 0;
		int g = 0;
		int ntriplets = timeValues.size();
		// cycle over all the triplets to reconstruct the matrix
		for (int t = 0; t < ntriplets; t++) {
			// take the value corresponding to the t-th point; NaN means the
			// layer is undefined at that point
			Double value = timeValues.get(t);
			// WARNING: PATCH FOR FAO LAYERS: probability can be equal to 2 to express uncertainty (Kolmogorov, forgive them for they know not what they do)
			if (faolayer && (value > 1)) {
				AnalysisLogger.getLogger().debug("APPLYING FAO PATCH!");
				slice[k][g] = 0.5;
			} else
				slice[k][g] = value;
			// increase the x index according to the matrix layout
			if (g == xsteps) {
				g = 0;
				k++;
			} else
				g++;
		}

		/*
		AnalysisLogger.getLogger().debug("Applying nearest neighbor to all the rows");
		// apply nearest neighbor to each row
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setConfigPath(configDir);
		boolean rapidinit = false;
		for (int i = 0; i < slice.length; i++) {
			boolean tofill = false;
			for (int j = 0; j < slice[i].length; j++) {
				if (new Double(slice[i][j]).equals(Double.NaN))
					tofill = true;
			}
			if (tofill) {
				if (!rapidinit) {
					config.initRapidMiner();
					rapidinit = true;
				}
				AnalysisLogger.getLogger().debug("Filling signal");
				double[] ssliced = SignalProcessing.fillSignal(slice[i]);
				slice[i] = ssliced;
			}
		}
		*/

		AnalysisLogger.getLogger().debug("Features map: " + slice.length + "," + slice[0].length);
		return slice;
	}
}
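For reference, a minimal usage sketch of this class (to be placed in a method declaring throws Exception; the scope, layer reference, resolutions, and config path below are hypothetical illustrations, not values taken from this repository):

// minimal usage sketch; scope, layer reference, and config path are hypothetical
GeoIntersector intersector = new GeoIntersector("/gcube/devsec", "./cfg/");
// point-wise lookup at (x, y, z) = (-10.5, 42.0, 0): one value per feature of the layer
LinkedHashMap<String, Double> features = intersector.getFeaturesInAllTimes("some-layer-uuid-or-title", -10.5, 42.0, 0);
// extract a 1-degree-resolution matrix of the layer at time instant 0 over the whole globe
double[][] slice = intersector.takeTimeSlice("some-layer-uuid-or-title", 0, -180, 180, -90, 90, 0, 1.0, 1.0);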