git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineGeoSpatialExtension@74469 82a268e6-3cf1-43bd-a215-b396298e98cf
parent 88ab16c6bd
commit 70931d9960
@@ -58,8 +58,8 @@ public class GeoIntersector {
         return features;
     }
 
-    public List<LinkedHashMap<String, Double>> getFeaturesInTime(String layerTitle, List<Tuple<Double>> triplets) throws Exception {
-        List<LinkedHashMap<String, Double>> featuresSets = new ArrayList<LinkedHashMap<String, Double>>();
+    public List<Double> getFeaturesInTimeInstant(String layerTitle, int time, List<Tuple<Double>> triplets, double xL,double xR, double yL, double yR) throws Exception {
+        List<Double> features = new ArrayList<Double>();
         // get the layer
         Metadata meta = featurer.checkForMetadatabyTitle(FeaturesManager.treatTitleForGN(layerTitle), layerTitle);
         // if the layer is good
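Note on the hunk above (annotation, not part of the commit): the per-point method getFeaturesInTime, which returned one LinkedHashMap of feature values per input triplet, is replaced by getFeaturesInTimeInstant, which takes a time index plus a bounding box (xL, xR, yL, yR) and returns a flat List<Double>. A minimal, hypothetical caller sketch, assuming only the signature visible in the diff; the GeoIntersector constructor argument and the way the triplets list is populated are assumptions, not code from this repository:

    import java.util.ArrayList;
    import java.util.List;

    public class GeoIntersectorCallSketch {
        public static void main(String[] args) throws Exception {
            List<Tuple<Double>> triplets = new ArrayList<Tuple<Double>>();
            // ... fill triplets with (x, y, z) points covering the area of interest ...

            GeoIntersector intersector = new GeoIntersector("./cfg/"); // assumed constructor and config path
            List<Double> values = intersector.getFeaturesInTimeInstant(
                    "some layer title", // layerTitle
                    0,                  // time: index of the time instant to read
                    triplets,
                    -10.0, -9.0,        // xL, xR: x (longitude) bounds of the box
                    35.0, 36.0);        // yL, yR: y (latitude) bounds of the box
            System.out.println(values); // expected: one value per triplet, NaN where the layer is undefined
        }
    }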
@@ -72,6 +72,8 @@ public class GeoIntersector {
                 Identification id = meta.getIdentificationInfo().iterator().next();
                 String title = id.getCitation().getTitle().toString();
                 AnalysisLogger.getLogger().debug("found a netCDF file with title " + title + " and layer name " + layer);
+                features = getFeaturesFromNetCDF(featurer.getOpenDapLink(meta), layer, time, triplets, xL,xR, yL, yR);
+                /*
                 for (Tuple<Double> triplet : triplets) {
                     double x = triplet.getElements().get(0);
                     double y = triplet.getElements().get(1);
@@ -84,9 +86,11 @@ public class GeoIntersector {
                     AnalysisLogger.getLogger().debug("Got: (" + features + ")");
                     featuresSets.add(features);
                 }
-
+                */
             } else {
+                //TODO: adjust WFS take
                 AnalysisLogger.getLogger().debug("found a Geo Layer with title " + layerTitle + " and layer name " + layer);
+                /*
                 for (Tuple<Double> triplet : triplets) {
                     double x = triplet.getElements().get(0);
                     double y = triplet.getElements().get(1);
@@ -95,12 +99,19 @@ public class GeoIntersector {
                     features = getFeaturesFromWFS(featurer.getWFSLink(meta), layer, x, y);
                     featuresSets.add(features);
                 }
+                */
             }
         }
-
-        return featuresSets;
+        return features;
     }
 
+    private List<Double> getFeaturesFromNetCDF(String opendapURL, String layer, int time, List<Tuple<Double>> triplets, double xL,double xR, double yL, double yR) {
+        if (opendapURL == null)
+            return null;
+
+        return ThreddsDataExplorer.retrieveDataFromNetCDF(opendapURL, layer, time, triplets, xL,xR, yL, yR);
+    }
+
     private LinkedHashMap<String, Double> getFeaturesFromNetCDF(String opendapURL, String layer, double x, double y, double z) {
         if (opendapURL == null)
             return null;
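The hunk below rebuilds the two-dimensional slice from the flat list returned by getFeaturesInTimeInstant: a column counter g steps along x and wraps into a new row k every xsteps values. A rough, self-contained sketch of the same row-major fold using plain index arithmetic; the x-fastest ordering and the [ysteps][xsteps] dimensioning of slice are assumptions read off the diff, not guaranteed by this commit:

    import java.util.Arrays;
    import java.util.List;

    public class SliceRebuildSketch {
        // Fold a flat, x-fastest list of samples back into a ysteps x xsteps grid.
        static double[][] rebuild(List<Double> values, int xsteps, int ysteps) {
            double[][] slice = new double[ysteps][xsteps];
            for (int t = 0; t < values.size(); t++) {
                int row = t / xsteps;            // plays the role of k in the diff
                int col = t % xsteps;            // plays the role of g in the diff
                slice[row][col] = values.get(t); // NaN where the layer had no value
            }
            return slice;
        }

        public static void main(String[] args) {
            List<Double> values = Arrays.asList(1.0, 2.0, Double.NaN, 4.0, 5.0, 6.0);
            System.out.println(Arrays.deepToString(rebuild(values, 3, 2)));
            // prints [[1.0, 2.0, NaN], [4.0, 5.0, 6.0]]
        }
    }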
@@ -140,52 +151,56 @@ public class GeoIntersector {
             }
         }
 
-        AnalysisLogger.getLogger().debug("Taking " + ysteps + " values...");
-        List<LinkedHashMap<String, Double>> time = getFeaturesInTime(layerTitle, tuples);
+        AnalysisLogger.getLogger().debug("Taking " + ysteps + " values per "+xsteps+"="+(ysteps*xsteps)+ "...");
+        List<Double> time = getFeaturesInTimeInstant(layerTitle, 0, tuples, x1, x2, y1,y2);
         AnalysisLogger.getLogger().debug("Taken " + time.size() + " values");
 
         // build back the values matrix
         int k = 0;
         int g = 0;
-        int ntriplets = ysteps * xsteps;
+        int ntriplets = time.size();
         //cycle on all the triplets to recontruct the matrix
         for (int t = 0; t < ntriplets; t++) {
             //take the corresponding (time,value) pair
-            LinkedHashMap<String, Double> tvalues = time.get(t);
+            Double value = time.get(t);
             //if there is value, then set it, otherwise set NaN
             //the layer is undefined in that point and a value must be generated
-            double value = Double.NaN;
-            for (Double val : tvalues.values()) {
-                value = val;
-                break;
-            }
             //assign a value to the matrix
             slice[k][g] = value;
             //increase the x step according to the matrix
-            g++;
             if (g == xsteps) {
                 g = 0;
                 k++;
             }
+            else
+                g++;
         }
         AnalysisLogger.getLogger().debug("Applying nearest Neighbor to all the rows");
         //apply nearest neighbor to each row
         AlgorithmConfiguration config = new AlgorithmConfiguration();
         config.setConfigPath(configDir);
         boolean rapidinit = false;
         for (int i=0;i<slice.length;i++){
-            AnalysisLogger.getLogger().debug("Checking for unfilled values");
+            // AnalysisLogger.getLogger().debug("Checking for unfilled values");
             boolean tofill = false;
             for (int j=0;j<slice[i].length;j++) {
-                if (slice[i][j]==Double.NaN)
+                if (new Double(slice[i][j]).equals(Double.NaN))
                     tofill = true;
             }
             if (tofill){
                 if (!rapidinit){
                     config.initRapidMiner();
                     rapidinit=true;
                 }
                 AnalysisLogger.getLogger().debug("Filling signal");
-                slice[i] = SignalProcessing.fillSignal(slice[i]);
+                double[] ssliced = SignalProcessing.fillSignal(slice[i]);
+                slice[i] = ssliced;
             }
-            else
-                AnalysisLogger.getLogger().debug("Signal yet complete");
+            // else
+            //     AnalysisLogger.getLogger().debug("Signal yet complete");
         }
 
         AnalysisLogger.getLogger().debug("Features map: "+slice.length+","+slice[0].length);
         return slice;
     }
 }
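A side note on the NaN test changed in the last hunk (annotation, not part of the commit): in Java, slice[i][j] == Double.NaN is always false, because IEEE 754 NaN compares unequal to every value including itself, so the old check could never flag an unfilled cell. The replacement new Double(slice[i][j]).equals(Double.NaN) does work, since Double.equals compares doubleToLongBits representations, under which NaN equals NaN. Double.isNaN is the more common idiom and is shown below only as an equivalent alternative, not as what the commit uses:

    public class NaNCheckSketch {
        public static void main(String[] args) {
            double v = Double.NaN;
            System.out.println(v == Double.NaN);                  // false: NaN is never == to anything
            System.out.println(new Double(v).equals(Double.NaN)); // true: equals() compares doubleToLongBits
            System.out.println(Double.isNaN(v));                  // true: equivalent, more idiomatic check
        }
    }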