Gianpaolo Coro 2014-02-26 15:55:31 +00:00
parent 25f2a88395
commit 0cb1b7941b
29 changed files with 846 additions and 177 deletions

View File

@ -17,4 +17,17 @@ public class ASC implements GISDataConnector{
return features;
}
@Override
public double getMinZ(String layerURL, String layerName) {
return 0;
}
@Override
public double getMaxZ(String layerURL, String layerName) {
return 0;
}
}

View File

@ -6,19 +6,37 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
public class NetCDF implements GISDataConnector{
public class NetCDF implements GISDataConnector {
NetCDFDataExplorer netcdffile;
public NetCDF(String layerURL, String layerName) {
if (netcdffile == null)
netcdffile = new NetCDFDataExplorer(layerURL, layerName);
}
@Override
public List<Double> getFeaturesInTimeInstantAndArea(String layerURL, String layerName, int time, List<Tuple<Double>> coordinates3d, double BBxL, double BBxR, double BByL, double BByR) throws Exception {
public List<Double> getFeaturesInTimeInstantAndArea(String layerURL, String layerName, int time, List<Tuple<Double>> coordinates3d, double BBxL, double BBxR, double BByL, double BByR) throws Exception {
AnalysisLogger.getLogger().debug("Managing netCDF file");
if (layerURL == null)
return null;
return NetCDFDataExplorer.retrieveDataFromNetCDF(layerURL, layerName, time, coordinates3d, BBxL, BBxR, BByL, BByR);
return netcdffile.retrieveDataFromNetCDF(layerURL, layerName, time, coordinates3d, BBxL, BBxR, BByL, BByR);
}
@Override
public double getMinZ(String layerURL, String layerName) {
return netcdffile.minZ;
}
@Override
public double getMaxZ(String layerURL, String layerName) {
return netcdffile.maxZ;
}
}

View File

@ -7,7 +7,7 @@ import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.utils.VectorTransformations;
import org.gcube.dataanalysis.geo.utils.VectorOperations;
import ucar.ma2.Array;
import ucar.ma2.ArrayByte;
@ -39,7 +39,11 @@ public class NetCDFDataExplorer {
// http://thredds.research-infrastructures.eu:8080/thredds/catalog/public/netcdf/catalog.xml
public static String timePrefix = "time:";
public static List<Double> retrieveDataFromNetCDF(String openDapLink, String layer, int time, List<Tuple<Double>> triplets, double xL, double xR, double yL, double yR) {
public NetCDFDataExplorer(String openDapLink, String layer){
calcZRange(openDapLink, layer);
}
public List<Double> retrieveDataFromNetCDF(String openDapLink, String layer, int time, List<Tuple<Double>> triplets, double xL, double xR, double yL, double yR) {
try {
List<Double> values = new ArrayList<Double>();
if (isGridDataset(openDapLink)) {
@ -60,10 +64,36 @@ public class NetCDFDataExplorer {
}
}
public double minZ=0;
public double maxZ=0;
private void calcZRange(String openDapLink, String layer) {
try{
if (isGridDataset(openDapLink)){
gds = ucar.nc2.dt.grid.GridDataset.open(openDapLink);
List<GridDatatype> gridTypes = gds.getGrids();
for (GridDatatype gdt : gridTypes) {
AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getFullName());
if (layer.equalsIgnoreCase(gdt.getFullName())) {
CoordinateAxis zAxis = gdt.getCoordinateSystem().getVerticalAxis();
minZ=zAxis.getMinValue();
maxZ=zAxis.getMaxValue();
break;
}
}
}
}catch(Exception e){
AnalysisLogger.getLogger().debug("NetCDF Explorer Error:"+e.getLocalizedMessage());
}
}
// A GridDatatype is like a specialized Variable that explicitly handles X,Y,Z,T dimensions
public static List<Double> manageGridDataset(String layer, String filename, int time, List<Tuple<Double>> triplets, double xL, double xR, double yL, double yR) throws Exception {
GridDataset gds;
public List<Double> manageGridDataset(String layer, String filename, int time, List<Tuple<Double>> triplets, double xL, double xR, double yL, double yR) throws Exception {
List<Double> values = new ArrayList<Double>();
GridDataset gds = ucar.nc2.dt.grid.GridDataset.open(filename);
if (gds==null)
gds = ucar.nc2.dt.grid.GridDataset.open(filename);
List<GridDatatype> gridTypes = gds.getGrids();
for (GridDatatype gdt : gridTypes) {
AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getFullName());
@ -131,7 +161,7 @@ public class NetCDFDataExplorer {
else if (data instanceof ArrayLong.D3)
data3Long = (ArrayLong.D3) data;
else if (data instanceof ArrayByte.D3)
data3Double = (ArrayDouble.D3)VectorTransformations.arrayByte3DArrayDouble((ArrayByte)data);
data3Double = (ArrayDouble.D3)VectorOperations.arrayByte3DArrayDouble((ArrayByte)data);
else
throw new Exception("Layer data format not supported");
}
@ -145,7 +175,7 @@ public class NetCDFDataExplorer {
else if (data instanceof ArrayLong.D2)
data2Long = (ArrayLong.D2) data;
else if (data instanceof ArrayByte.D2)
data2Double = (ArrayDouble.D2)VectorTransformations.arrayByte2DArrayDouble((ArrayByte)data);
data2Double = (ArrayDouble.D2)VectorOperations.arrayByte2DArrayDouble((ArrayByte)data);
else
throw new Exception("Layer data format not supported");
}

View File

@ -7,40 +7,73 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
import org.gcube.dataanalysis.geo.utils.VectorTransformations;
import org.gcube.dataanalysis.geo.utils.VectorOperations;
public class Table implements GISDataConnector {
public class Table implements GISDataConnector{
AlgorithmConfiguration config;
TableMatrixRepresentation tmr;
public Table(AlgorithmConfiguration config){
public Table(AlgorithmConfiguration config) throws Exception {
this.config = config;
tmr = new TableMatrixRepresentation();
tmr.build5DTuples(config, true);
}
@Override
public List<Double> getFeaturesInTimeInstantAndArea(String layerURL, String layerName, int time, List<Tuple<Double>> coordinates3d, double BBxL, double BBxR, double BByL, double BByR) throws Exception {
List<Double> values = new ArrayList<Double>();
values.add(Math.random());
if (tmr==null){
tmr = new TableMatrixRepresentation();
tmr.build5DTuples(config,time,true);
}
List<Tuple<Double>> tuples = tmr.currentcoordinates5d.get((double)time);
double tolerance = Math.sqrt(2d)*0.5/2d;
if (tuples.size()==0){
AnalysisLogger.getLogger().debug("Error in getting elements for time " +time);
throw new Exception("Error in getting elements for time " +time);
List<Tuple<Double>> tuples = tmr.currentcoordinates5d.get((double) time);
// AnalysisLogger.getLogger().debug("TUPLES "+tuples);
double resolution = 0;
if (coordinates3d.size() > 1)
resolution = Math.abs(coordinates3d.get(0).getElements().get(0) - coordinates3d.get(1).getElements().get(0));
double tolerance = Math.sqrt(2d) * resolution / 2d;
if (tuples.size() == 0) {
AnalysisLogger.getLogger().debug("Error in getting elements for time " + time);
throw new Exception("Error in getting elements for time " + time);
}
List<Double> v = VectorTransformations.assignPointsValuesToGrid(coordinates3d, time, tuples, tolerance);
// check z: if there is at least one point inside the z boundary then it is ok
boolean outsideZ = true;
for (Tuple<Double> coordinates : coordinates3d) {
double Zcoord = 0;
if (coordinates.getElements().size() > 2)
Zcoord = coordinates.getElements().get(2);
if ((Zcoord <= tmr.maxZ) && (Zcoord >= tmr.minZ)) {
outsideZ = false;
break;
}
}
if (outsideZ) {
AnalysisLogger.getLogger().debug("Error in getting elements for Z ");
throw new Exception("Outside the z boundaries [" + tmr.minZ + ";" + tmr.maxZ + "]");
}
List<Double> v = VectorOperations.assignPointsValuesToGrid(coordinates3d, time, tuples, tolerance);
// AnalysisLogger.getLogger().debug("VALUES "+v);
return v;
}
@Override
public double getMinZ(String layerURL, String layerName) {
return tmr.minZ;
}
@Override
public double getMaxZ(String layerURL, String layerName) {
return tmr.maxZ;
}
}

View File

@ -9,8 +9,7 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.utils.VectorTransformations;
import org.gcube.dataanalysis.geo.utils.VectorOperations;
import org.hibernate.SessionFactory;
public class TableMatrixRepresentation {
@ -21,12 +20,14 @@ public class TableMatrixRepresentation {
public static String timeDimensionColumnParameter = "timeColumn";
public static String valueDimensionColumnParameter = "valueColumn";
public static String tableNameParameter = "geoReferencedTableName";
public static String filterParameter = "filter";
public HashMap<String, Integer> currentTimes = null;
public HashMap<Double,List<Tuple<Double>>> currentcoordinates5d=null;
public double maxZ = 0;
public double minZ = 0;
//TODO Manage the case of only time according to a property in the table
public List<Tuple<Double>> build5DTuples(AlgorithmConfiguration configuration, int timeIndex, boolean cacheElements) throws Exception {
public void build5DTuples(AlgorithmConfiguration configuration, boolean cacheElements) throws Exception {
currentTimes = new HashMap<String, Integer>();
@ -69,31 +70,43 @@ public class TableMatrixRepresentation {
if ((currentcoordinates5d == null) || !cacheElements){
currentcoordinates5d = new HashMap<Double, List<Tuple<Double>>>();
String query = "select " + dbtuple + " from " + tableName + " order by time";
//find maxZ
if (zField!=null && zField.trim().length()>0){
String maxzq = "select max("+zField+"),min("+zField+") from "+tableName;
Object [] maxzr = (Object [] )DatabaseFactory.executeSQLQuery(maxzq, dbconnection).get(0);
maxZ = Double.parseDouble(""+maxzr[0]);
minZ = Double.parseDouble(""+maxzr[1]);
}
String query = "select " + dbtuple + " from " + tableName;
String filter=configuration.getParam(filterParameter);
if (filter!=null && filter.trim().length()>0)
query+=" where "+filter;
query += " order by time";
AnalysisLogger.getLogger().debug("TableMatrixRepresentation-> Query to execute: " + query);
rows = DatabaseFactory.executeSQLQuery(query, dbconnection);
AnalysisLogger.getLogger().debug("TableMatrixRepresentation-> Returned " + rows.size() + " rows");
for (Object row : rows) {
Object[] orow = (Object[]) row;
Tuple<Double> t = build5DTuple(orow);
Double time = t.getElements().get(3);
double time = t.getElements().get(3);
List<Tuple<Double>> coordinates5d = currentcoordinates5d.get(time);
if (coordinates5d==null){
coordinates5d=new ArrayList<Tuple<Double>>();
currentcoordinates5d.put(time, coordinates5d);
}
//else
//AnalysisLogger.getLogger().debug("TableMatrixRepresentation-> yet found time "+time+"->"+orow[3]);
coordinates5d.add(t);
}
AnalysisLogger.getLogger().debug("TableMatrixRepresentation-> Association complete");
AnalysisLogger.getLogger().debug("TableMatrixRepresentation-> coordinates set complete: "+currentcoordinates5d.size());
}
List<Tuple<Double>> coordinates5dtoreturn = currentcoordinates5d.get((double)timeIndex);
if (coordinates5dtoreturn.size()==0){
AnalysisLogger.getLogger().debug("Error in getting elements for time " +timeIndex);
throw new Exception("Error in getting elements for time " +timeIndex);
}
return coordinates5dtoreturn;
} catch (Exception e) {
e.printStackTrace();
AnalysisLogger.getLogger().debug("Error in getting elements from DB: " + e.getLocalizedMessage());
@ -136,20 +149,25 @@ public class TableMatrixRepresentation {
// vessels
config.setParam(tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
config.setParam(xDimensionColumnParameter, "x");
config.setParam(yDimensionColumnParameter, "y");
// config.setParam(xDimensionColumnParameter, "x");
// config.setParam(yDimensionColumnParameter, "y");
// config.setParam(zDimensionColumnParameter,"");
config.setParam(timeDimensionColumnParameter, "datetime");
config.setParam(valueDimensionColumnParameter, "speed");
config.setParam(filterParameter, "speed<2");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
TableMatrixRepresentation tmr = new TableMatrixRepresentation();
List<Tuple<Double>> tuples = tmr.build5DTuples(config, 0,false);
tmr.build5DTuples(config,false);
List<Tuple<Double>> tuples = tmr.currentcoordinates5d.get(0d);
AnalysisLogger.getLogger().debug("TUPLES:" + tuples);
List<Tuple<Double>> grid = MatrixExtractor.generateCoordinateTripletsInBoundingBox(-47.14, -46.00, 44.52, 45.55, 0, 0.5, 0.5);
List<Double> values = VectorTransformations.assignPointsValuesToGrid(grid, 0, tuples, 0.5);
List<Tuple<Double>> grid = VectorOperations.generateCoordinateTripletsInBoundingBox(-47.14, -46.00, 44.52, 45.55, 0, 0.5, 0.5);
List<Double> values = VectorOperations.assignPointsValuesToGrid(grid, 0, tuples, 0.5);
AnalysisLogger.getLogger().debug("VALUES:" + values);

View File

@ -64,4 +64,14 @@ public class WFS implements GISDataConnector {
return features;
}
@Override
public double getMinZ(String layerURL, String layerName) {
return 0;
}
@Override
public double getMaxZ(String layerURL, String layerName) {
return 0;
}
}

View File

@ -6,7 +6,9 @@ import org.gcube.dataanalysis.ecoengine.utils.Tuple;
public interface GISDataConnector {
List<Double> getFeaturesInTimeInstantAndArea(String layerURL, String layerName, int time, List<Tuple<Double>> coordinates3d, double BBxL,double BBxR, double BByL, double BByR) throws Exception;
public List<Double> getFeaturesInTimeInstantAndArea(String layerURL, String layerName, int time, List<Tuple<Double>> coordinates3d, double BBxL,double BBxR, double BByL, double BByR) throws Exception;
public double getMinZ(String layerURL, String layerName);
public double getMaxZ(String layerURL, String layerName);
}

View File

@ -1,7 +1,6 @@
package org.gcube.dataanalysis.geo.matrixmodel;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
@ -22,6 +21,8 @@ public class MatrixExtractor {
private GeoNetworkInspector gnInspector;
private AlgorithmConfiguration configuration;
public static int maxSignalLength = 100000;
public static int maxzLength = 100000;
boolean log = true;
public MatrixExtractor(AlgorithmConfiguration configuration) {
gnInspector = new GeoNetworkInspector();
@ -40,41 +41,20 @@ public class MatrixExtractor {
return false;
}
public static List<Tuple<Double>> generateCoordinateTripletsInBoundingBox(double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution) {
int ysteps = (int) ((y2 - y1) / yResolution);
int xsteps = (int) ((x2 - x1) / xResolution);
List<Tuple<Double>> tuples = new ArrayList<Tuple<Double>>();
AnalysisLogger.getLogger().debug("Building the points grid according to YRes:" + yResolution + " and XRes:" + xResolution);
// build the tuples according to the desired resolution
for (int i = 0; i < ysteps + 1; i++) {
double y = (i * yResolution) + y1;
if (i == ysteps)
y = y2;
for (int j = 0; j < xsteps + 1; j++) {
double x = (j * xResolution) + x1;
if (j == xsteps)
x = x2;
tuples.add(new Tuple<Double>(x, y, z));
}
}
return tuples;
}
protected List<Double> getRawValuesInTimeInstantAndBoundingBox(String layerTitle, int time, List<Tuple<Double>> coordinates3d, double xL, double xR, double yL, double yR) throws Exception {
return getRawValuesInTimeInstantAndBoundingBox(layerTitle, time, coordinates3d, xL, xR, yL, yR, false);
}
private GISDataConnector currentconnector;
private String layer;
private String layerURL;
protected List<Double> getRawValuesInTimeInstantAndBoundingBox(String layerTitle, int time, List<Tuple<Double>> coordinates3d, double xL, double xR, double yL, double yR, boolean saveLayer) throws Exception {
public GISDataConnector currentconnector;
public String layerName;
public String layerURL;
public GISDataConnector getConnector(String layerTitle) throws Exception {
// get the layer
Metadata meta = null;
GISDataConnector connector = null;
if (currentconnector != null && saveLayer)
if (currentconnector != null)
connector = currentconnector;
else {
if (isTable()) {
@ -86,76 +66,61 @@ public class MatrixExtractor {
}
// if the layer is good
if (meta != null) {
layer = gnInspector.getLayerName(meta);
if (layer == null)
layer = layerTitle;
layerName = gnInspector.getLayerName(meta);
if (layerName == null)
layerName = layerTitle;
layerURL = "";
if (gnInspector.isNetCDFFile(meta)) {
Identification id = meta.getIdentificationInfo().iterator().next();
String title = id.getCitation().getTitle().toString();
AnalysisLogger.getLogger().debug("found a netCDF file with title " + title + " and layer name " + layer);
AnalysisLogger.getLogger().debug("found a netCDF file with title " + title + " and layer name " + layerName);
layerURL = gnInspector.getOpenDapLink(meta);
connector = new NetCDF();
connector = new NetCDF(layerURL, layerName);
} else if (gnInspector.isAscFile(meta)) {
AnalysisLogger.getLogger().debug("managing ASC File");
layerURL = gnInspector.getHttpLink(meta);
connector = new ASC();
} else if (gnInspector.isWFS(meta)) {
AnalysisLogger.getLogger().debug("found a Geo Layer with reference " + layerTitle + " and layer name " + layer);
AnalysisLogger.getLogger().debug("found a Geo Layer with reference " + layerTitle + " and layer name " + layerName);
layerURL = gnInspector.getGeoserverLink(meta);
connector = new WFS();
}
currentconnector = connector;
}
return currentconnector;
}
//4D Extraction
/**
* Extract raw values in a time instant according to a set of grid points and a bounding box
*/
protected List<Double> getRawValuesInTimeInstantAndBoundingBox(String layerTitle, int time, List<Tuple<Double>> coordinates3d, double xL, double xR, double yL, double yR, boolean saveLayer) throws Exception {
GISDataConnector connector = getConnector(layerTitle);
//execute connector
if (connector != null)
return connector.getFeaturesInTimeInstantAndArea(layerURL, layer, time, coordinates3d, xL, xR, yL, yR);
return connector.getFeaturesInTimeInstantAndArea(layerURL, layerName, time, coordinates3d, xL, xR, yL, yR);
else
throw new Exception("ERROR: Connector not found for layer " + layerTitle + " - Cannot Rasterize!");
}
boolean log = true;
public double[] takeSignalInTime(String layerTitle, double x, double y, double z, double resolution) throws Exception {
// HashMap<Integer, Double> signal = new HashMap<Integer, Double>();
double[] signal = new double[maxSignalLength];
int t = 0;
log=false;
while (true) {
try {
if (t%100==0)
AnalysisLogger.getLogger().debug("Matrix Extractor-> Extracting Time Instant " + t);
double[][] values = takeTimeInstantMatrix(layerTitle, t, x, x, y, y, z, resolution, resolution, true);
// signal.put(t,values[0][0]);
signal[t]=values[0][0];
t++;
if (t==maxSignalLength)
break;
} catch (Exception e) {
AnalysisLogger.getLogger().debug("Matrix Extractor-> No More Time Intervals!");
break;
}
}
// double[] dsignal = new double[signal.size()];
double[] dsignal = new double[t];
// int i = 0;
// for (Double signald : signal.values()) {
for (int i=0;i<t;i++){
dsignal[i] = signal[i];
i++;
}
log = true;
return dsignal;
}
public double[][] takeTimeInstantMatrix(String layerTitle, int timeInstant, double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution) throws Exception {
return takeTimeInstantMatrix(layerTitle, timeInstant, x1, x2, y1, y2, z, xResolution, yResolution, false);
}
public double[][] takeTimeInstantMatrix(String layerTitle, int timeInstant, double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution, boolean savelayer) throws Exception {
/**
* Extract a grid of XY points with fixed time and z
* @param layerTitle
* @param timeInstant
* @param x1
* @param x2
* @param y1
* @param y2
* @param z
* @param xResolution
* @param yResolution
* @param cachelayer
* @return
* @throws Exception
*/
public double[][] extractXYGridWithFixedTZ(String layerTitle, int timeInstant, double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution, boolean cachelayer) throws Exception {
boolean faolayer = false;
if (layerTitle.toLowerCase().contains("MatrixExtractor->FAO aquatic species distribution map")) {
@ -207,7 +172,7 @@ public class MatrixExtractor {
AnalysisLogger.getLogger().debug("Bounding box: (" + x1 + "," + x2 + ";" + y1 + "," + y2 + ")");
// long t0=System.currentTimeMillis();
List<Double> timeValues = getRawValuesInTimeInstantAndBoundingBox(layerTitle, timeInstant, tuples, x1, x2, y1, y2, savelayer);
List<Double> timeValues = getRawValuesInTimeInstantAndBoundingBox(layerTitle, timeInstant, tuples, x1, x2, y1, y2, cachelayer);
// AnalysisLogger.getLogger().debug("Elapsed:"+(System.currentTimeMillis()-t0));
if (log)
@ -243,18 +208,8 @@ public class MatrixExtractor {
// applyNearestNeighbor();
if (log)
AnalysisLogger.getLogger().debug("Features map: " + slice.length + "," + slice[0].length);
AnalysisLogger.getLogger().debug("Features map: rows " + slice.length + ", cols " + slice[0].length);
return slice;
}
public void applyNearestNeighbor() {
/*
* AnalysisLogger.getLogger().debug("Applying nearest Neighbor to all the rows"); //apply nearest neighbor to each row AlgorithmConfiguration config = new AlgorithmConfiguration(); config.setConfigPath(configDir); boolean rapidinit = false;
*
*
* for (int i=0;i<slice.length;i++){ // AnalysisLogger.getLogger().debug("Checking for unfilled values"); boolean tofill = false; for (int j=0;j<slice[i].length;j++) { if (new Double(slice[i][j]).equals(Double.NaN)) tofill = true; } if (tofill){ if (!rapidinit){ config.initRapidMiner(); rapidinit=true; } AnalysisLogger.getLogger().debug("Filling signal"); double[] ssliced = SignalProcessing.fillSignal(slice[i]); slice[i] = ssliced; } // else // AnalysisLogger.getLogger().debug("Signal yet complete"); }
*/
}
}

View File

@ -0,0 +1,24 @@
package org.gcube.dataanalysis.geo.matrixmodel;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
public class PointsExtractor extends MatrixExtractor{
public PointsExtractor(AlgorithmConfiguration configuration) {
super(configuration);
}
//XYZT Analysis
public double extractXYZT(String layerTitle, double x, double y, double z, int timeIndex, double resolution) throws Exception {
AnalysisLogger.getLogger().debug("Matrix Extractor-> Extracting Time Instant " + timeIndex);
double[][] values = extractXYGridWithFixedTZ(layerTitle, timeIndex, x, x, y, y, z, resolution, resolution, true);
int ver = values.length;
int hor = values.length;
//take central value
double value=values[ver/2][hor/2];
return value;
}
}

View File

@ -10,6 +10,7 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.utils.CSquareCodesConverter;
import org.gcube.dataanalysis.geo.utils.VectorOperations;
import org.hibernate.SessionFactory;
/**
@ -66,7 +67,7 @@ public class RasterTable {
// create a table
DatabaseFactory.executeSQLUpdate(String.format(createTableStatement, tablename), dbconnection);
AnalysisLogger.getLogger().debug("Table " + tablename + " created");
List<Tuple<Double>> coordinates = MatrixExtractor.generateCoordinateTripletsInBoundingBox(x1, x2, y1, y2, z, xResolution, yResolution);
List<Tuple<Double>> coordinates = VectorOperations.generateCoordinateTripletsInBoundingBox(x1, x2, y1, y2, z, xResolution, yResolution);
int triplets = coordinates.size();
AnalysisLogger.getLogger().debug("Generated " + triplets + " coordinates triples");
List<Double> values = associateValueToCoordinates(coordinates, valuesMatrix);

View File

@ -0,0 +1,62 @@
package org.gcube.dataanalysis.geo.matrixmodel;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
public class TimeSeriesExtractor extends MatrixExtractor{
public TimeSeriesExtractor(AlgorithmConfiguration configuration) {
super(configuration);
}
//Time Analysis
/**
* Extracts an observations signal for a certain point in space
*/
public double[] extractT(String layerTitle, double x, double y, double z, double resolution) throws Exception {
double[] signal = new double[maxSignalLength];
int t = 0;
log=false;
while (true) {
try {
if (t%100==0)
AnalysisLogger.getLogger().debug("Matrix Extractor-> Extracting Time Instant " + t);
double[][] values = extractXYGridWithFixedTZ(layerTitle, t, x, x, y, y, z, resolution, resolution, true);
signal[t]=values[0][0];
t++;
if (t==maxSignalLength)
break;
} catch (Exception e) {
AnalysisLogger.getLogger().debug("Matrix Extractor-> No More Time Intervals! "+e.getMessage());
break;
}
}
AnalysisLogger.getLogger().debug("Matrix Extractor-> Signal Length:"+t);
double[] dsignal = new double[t];
for (int i=0;i<t;i++){
dsignal[i] = signal[i];
i++;
}
log = true;
return dsignal;
}
/**
* Extract observations in time with 0 resolution
* @param layerTitle
* @return
* @throws Exception
*/
public double[] extractT(String layerTitle) throws Exception {
return extractT(layerTitle,0,0,0,0);
}
//END Time Analysis
}

View File

@ -0,0 +1,15 @@
package org.gcube.dataanalysis.geo.matrixmodel;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
public class XYExtractor extends MatrixExtractor{
public XYExtractor(AlgorithmConfiguration configuration) {
super(configuration);
}
public double[][] extractXYGrid(String layerTitle, int timeInstant, double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution) throws Exception {
return extractXYGridWithFixedTZ(layerTitle, timeInstant, x1, x2, y1, y2, z, xResolution, yResolution, false);
}
}

View File

@ -0,0 +1,53 @@
package org.gcube.dataanalysis.geo.matrixmodel;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
public class ZExtractor extends MatrixExtractor{
public ZExtractor(AlgorithmConfiguration configuration) {
super(configuration);
}
public double[] extractZ(String layerTitle, double x, double y, int timeIndex, double resolution) throws Exception {
double[] signal = new double[maxzLength];
GISDataConnector connector = getConnector(layerTitle);
double z0 = connector.getMinZ(layerURL, layerName);
double z1 = connector.getMaxZ(layerURL, layerName);
AnalysisLogger.getLogger().debug("ZExtractor: minimum Z "+z0+" maximum Z:"+z1+" step: "+resolution);
int zcounter=0;
if (resolution==0)
resolution=1;
for (double z=z0;z<=z1;z=z+resolution){
try {
if (z%100==0)
AnalysisLogger.getLogger().debug("Matrix Extractor-> Extracting Z value " + z);
double[][] values = extractXYGridWithFixedTZ(layerTitle, timeIndex, x, x, y, y, z, resolution, resolution, true);
signal[zcounter]=values[0][0];
zcounter++;
} catch (Exception e) {
AnalysisLogger.getLogger().debug("Matrix Extractor-> No More Time Intervals!");
break;
}
}
AnalysisLogger.getLogger().debug("Matrix Extractor-> Signal Length:"+zcounter);
double[] dsignal = new double[zcounter];
for (int i=0;i<zcounter;i++){
dsignal[i] = signal[i];
i++;
}
return dsignal;
}
}

View File

@ -3,6 +3,7 @@ package org.gcube.dataanalysis.geo.test;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
public class TestChunkization {
@ -15,10 +16,10 @@ public class TestChunkization {
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(cfg);
MatrixExtractor intersector = new MatrixExtractor(config);
XYExtractor intersector = new XYExtractor (config);
// intersector.takeTimeSlice(layertitle, 0, -180, 180, -10, 10, 0, 1, 1);
// intersector.takeTimeSlice(layertitle, 0, -10, 10, -10, 10, 0,1, 1);
intersector.takeTimeInstantMatrix(layertitle, 0, -180, 180, -90, 90, 0, 0.5, 0.5);
intersector.extractXYGrid(layertitle, 0, -180, 180, -90, 90, 0, 0.5, 0.5);
System.out.println("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
}
}

View File

@ -3,6 +3,7 @@ package org.gcube.dataanalysis.geo.test;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
public class TestChunkizationLayer {
@ -16,9 +17,9 @@ public class TestChunkizationLayer {
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(cfg);
MatrixExtractor intersector = new MatrixExtractor(config);
XYExtractor intersector = new XYExtractor (config);
// intersector.takeTimeSlice(layertitle, 0, -10, 10, -10, 10, 0,1, 1);
intersector.takeTimeInstantMatrix(layertitle, 0, -180, 180, -90, 90, 0, 0.5, 0.5);
intersector.extractXYGrid(layertitle, 0, -180, 180, -90, 90, 0, 0.5, 0.5);
System.out.println("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
}
}

View File

@ -3,6 +3,7 @@ package org.gcube.dataanalysis.geo.test;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
public class TestPointslice {
@ -18,11 +19,11 @@ public class TestPointslice {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setGcubeScope("/gcube");
config.setConfigPath(cfg);
MatrixExtractor intersector = new MatrixExtractor(config);
XYExtractor intersector = new XYExtractor (config);
// intersector.takeTimeSlice(layertitle, 0, -180, 180, -10, 10, 0, 1, 1);
// intersector.takeTimeSlice(layertitle, 0, -10, 10, -10, 10, 0,1, 1);
// intersector.takeTimeInstantMatrix(layertitle, 0, -180, 180, -90, 90, 0, 0.5, 0.5);
double output[][] = intersector.takeTimeInstantMatrix(layertitle, 0, 0,0,0,0, 0, 0.5, 0.5);
double output[][] = intersector.extractXYGrid(layertitle, 0, 0,0,0,0, 0, 0.5, 0.5);
System.out.println("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
System.out.println("Output: "+output[0][0]);
}

View File

@ -4,6 +4,7 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
public class TestRasterTable {
@ -31,7 +32,7 @@ public class TestRasterTable {
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
MatrixExtractor intersector = new MatrixExtractor(config);
XYExtractor intersector = new XYExtractor (config);
int t = 0;
double x1 = -180;
double x2 = 180;
@ -41,7 +42,7 @@ public class TestRasterTable {
double xResolution = 0.5;
double yResolution = 0.5;
double[][] slice = intersector.takeTimeInstantMatrix(layertitle, t, x1, x2, y1,y2,z,xResolution,yResolution);
double[][] slice = intersector.extractXYGrid(layertitle, t, x1, x2, y1,y2,z,xResolution,yResolution);
RasterTable raster = new RasterTable(x1, x2, y1, y2, z, xResolution, yResolution, slice, config);
raster.dumpGeoTable();

View File

@ -11,6 +11,7 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
public class TestSignal {
@ -23,8 +24,17 @@ public class TestSignal {
//String layertitle = "be24800d-7583-4efa-b925-e0d8760e0fd3";
//temperature short periodic signal
// String layertitle = "dffa504b-dbc8-4553-896e-002549f8f5d3";
// String layertitle = "afd54b39-30f7-403a-815c-4f91c6c74c26";
// String layertitle = "6411b110-7572-457a-a662-a16e4ff09e4e";
//wind stress
String layertitle = "255b5a95-ad28-4fec-99e0-5d48112dd6ab";
//wind speed
layertitle = "a116c9bc-9380-4d40-8374-aa0e376a6820";
//nitrates
layertitle = "b1cd9549-d9d0-4c77-9532-b161a69fbd44";
//ASC
String layertitle = "2c2304d1-681a-4f3a-8409-e8cdb5ed447f";
// String layertitle = "2c2304d1-681a-4f3a-8409-e8cdb5ed447f";
//WFS
// String layertitle = "0aac424b-5f5b-4fa6-97d6-4b4deee62b97";
//Chlorophyll
@ -35,14 +45,14 @@ public class TestSignal {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setConfigPath(cfg);
MatrixExtractor intersector = new MatrixExtractor(config);
TimeSeriesExtractor intersector = new TimeSeriesExtractor(config);
// intersector.takeTimeSlice(layertitle, 0, -180, 180, -10, 10, 0, 1, 1);
// intersector.takeTimeSlice(layertitle, 0, -10, 10, -10, 10, 0,1, 1);
// intersector.takeTimeInstantMatrix(layertitle, 0, -180, 180, -90, 90, 0, 0.5, 0.5);
double signal[] = intersector.takeSignalInTime(layertitle, 7.5d, 42.5d,0d,0.5);
double signal[] = intersector.extractT(layertitle, 0d, 0d,0d,0.5);
SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
// SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
System.out.println("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
@ -53,7 +63,12 @@ public class TestSignal {
}
bw.close();
PeriodicityDetector pd = new PeriodicityDetector();
double F = pd.detectFrequency(signal,true);
System.out.println("Detected Frequency:"+F+" indecision ["+pd.lowermeanF+" , "+pd.uppermeanF+"]");
System.out.println("Detected Period:"+pd.meanPeriod+" indecision ["+pd.lowermeanPeriod+" , "+pd.uppermeanPeriod+"]");
System.out.println("Detected Periodicity Strength:"+pd.periodicityStrength+" "+pd.getPeriodicityStregthInterpretation());
}
public static void main1(String[] args) throws Exception{
@ -89,6 +104,7 @@ public class TestSignal {
System.out.println("Detected Frequency:"+F+" indecision ["+pd.lowermeanF+" , "+pd.uppermeanF+"]");
System.out.println("Detected Period:"+pd.meanPeriod+" indecision ["+pd.lowermeanPeriod+" , "+pd.uppermeanPeriod+"]");
System.out.println("Detected Periodicity Strength:"+pd.periodicityStrength);
}

View File

@ -9,9 +9,8 @@ import java.io.FileWriter;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
public class TestSignalTable {
@ -27,40 +26,34 @@ public class TestSignalTable {
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
// config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
// config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
// config.setParam(zDimensionColumnParameter,"");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setConfigPath(cfg);
MatrixExtractor intersector = new MatrixExtractor(config);
TimeSeriesExtractor intersector = new TimeSeriesExtractor(config);
long t0 = System.currentTimeMillis();
double signal[] = intersector.takeSignalInTime("table", -47.14, 44.52, 0d, 0.5);
// double signal[] = intersector.takeSignalInTime("table", -47.14, 44.52, 0d, 0.5);
double signal[] = intersector.extractT("table");
// double signal[] = intersector.takeMatrixesInTime("table", -47.14, -47.14, 44.52, 44.52, 0d, 0.5, 0.5);
SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
// SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
System.out.println("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
System.out.println("Signal: "+signal.length);
BufferedWriter bw = new BufferedWriter(new FileWriter(new File("signal.txt")));
for (double si: signal){
bw.write(si+",");
}
bw.close();
PeriodicityDetector pd = new PeriodicityDetector();
double F = pd.detectFrequency(signal,true);
System.out.println("Detected Frequency:"+F+" indecision ["+pd.lowermeanF+" , "+pd.uppermeanF+"]");
System.out.println("Detected Period:"+pd.meanPeriod+" indecision ["+pd.lowermeanPeriod+" , "+pd.uppermeanPeriod+"]");
System.out.println("Detected Periodicity Strength:"+pd.periodicityStrength);
}
public static void main1(String[] args) throws Exception{
@ -96,7 +89,7 @@ public class TestSignalTable {
System.out.println("Detected Frequency:"+F+" indecision ["+pd.lowermeanF+" , "+pd.uppermeanF+"]");
System.out.println("Detected Period:"+pd.meanPeriod+" indecision ["+pd.lowermeanPeriod+" , "+pd.uppermeanPeriod+"]");
System.out.println("Detected Period Strength:"+pd.periodicityStrength);
}
}

View File

@ -0,0 +1,44 @@
package org.gcube.dataanalysis.geo.test;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.ZExtractor;
/**
 * Manual test harness: extracts a water-column (Z) profile from a tabular
 * resource through ZExtractor and reports the elapsed time and signal length.
 */
public class TestWaterColumn {

	static String cfg = "./cfg/";

	public static void main(String[] args) throws Exception {
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setConfigPath("./cfg/");
		config.setPersistencePath("./");
		// Statistical Manager test database connection.
		config.setParam("DatabaseUserName", "utente");
		config.setParam("DatabasePassword", "d4science");
		config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
		config.setParam("DatabaseDriver", "org.postgresql.Driver");
		// Vessels table: map table columns onto the matrix dimensions.
		config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
		config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
		config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
		// NOTE(review): the Z dimension is mapped onto column "x" as well — confirm this is intended.
		config.setParam(TableMatrixRepresentation.zDimensionColumnParameter, "x");
		config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
		config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
		config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
		AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
		config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
		config.setConfigPath(cfg);
		ZExtractor extractor = new ZExtractor(config);
		long startMillis = System.currentTimeMillis();
		double[] waterColumn = extractor.extractZ("table", -47.97, 43.42, 0, 0.5);
		System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - startMillis));
		System.out.println("Signal: " + waterColumn.length);
	}
}

View File

@ -0,0 +1,35 @@
package org.gcube.dataanalysis.geo.test;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.ZExtractor;
/**
 * Manual test harness: extracts the water-column (Z) profile of a geospatial
 * layer through ZExtractor and displays the resulting signal.
 */
public class TestWaterColumnTemperature {

	public static void main(String[] args) throws Exception {
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setConfigPath("./cfg/");
		config.setPersistencePath("./");
		// Candidate layer UUIDs tried during development; only the last assignment is effective.
		String layertitle = "6411b110-7572-457a-a662-a16e4ff09e4e";
		layertitle = "be24800d-7583-4efa-b925-e0d8760e0fd3";
		layertitle = "320652c8-e986-4428-9306-619d9014822a";
		layertitle = "0aac424b-5f5b-4fa6-97d6-4b4deee62b97";
		AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
		config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
		ZExtractor extractor = new ZExtractor(config);
		long startMillis = System.currentTimeMillis();
		// Probe at (0,0); the trailing arguments' semantics come from extractZ — TODO confirm.
		double[] waterColumn = extractor.extractZ(layertitle, 0, 0, 0, 100);
		System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - startMillis));
		System.out.println("Signal: " + waterColumn.length);
		SignalProcessing.displaySignalWithGenericTime(waterColumn, 0, 1, "signal");
	}
}

View File

@ -0,0 +1,33 @@
package org.gcube.dataanalysis.geo.test.regression;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
/**
 * Regression harness: extracts a time series from a geospatial layer and runs
 * the periodicity detector on it, printing frequency, period and strength.
 */
public class RegressionLowPeriodicity {

	static String cfg = "./cfg/";

	public static void main(String[] args) throws Exception {
		// UUID of the layer to analyze.
		String layertitle = "c565e32c-c5b3-4964-b44f-06dc620563e9";
		long startMillis = System.currentTimeMillis();
		AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
		config.setConfigPath(cfg);
		TimeSeriesExtractor extractor = new TimeSeriesExtractor(config);
		double[] signal = extractor.extractT(layertitle, 0d, 0d, 0d, 0.5);
		System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - startMillis));
		System.out.println("Signal: " + signal.length);
		PeriodicityDetector detector = new PeriodicityDetector();
		double F = detector.detectFrequency(signal, true);
		System.out.println("Detected Frequency:" + F + " indecision [" + detector.lowermeanF + " , " + detector.uppermeanF + "]");
		System.out.println("Detected Period:" + detector.meanPeriod + " indecision [" + detector.lowermeanPeriod + " , " + detector.uppermeanPeriod + "]");
		System.out.println("Detected Periodicity Strength:" + detector.periodicityStrength + ":" + detector.getPeriodicityStregthInterpretation());
	}
}

View File

@ -0,0 +1,47 @@
package org.gcube.dataanalysis.geo.test.regression;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
/**
 * Regression harness: loads a comma-separated signal from "signalPeriodic.txt"
 * and runs the periodicity detector on it, printing the detected frequency,
 * period and periodicity strength.
 */
public class RegressionPeriodicity {

	static String cfg = "./cfg/";

	public static void main(String[] args) throws Exception {
		takeSignal();
	}

	/**
	 * Reads the signal file and prints the periodicity analysis.
	 *
	 * Fixes over the previous version: the reader is closed even if parsing
	 * fails (try-with-resources); an empty input file now produces a clear
	 * error instead of a NullPointerException inside the detector; and the
	 * misleading while-loop — which set {@code line = null} at its end and so
	 * only ever ran once — is replaced by an explicit single-line read, which
	 * is the behavior the old loop actually had.
	 *
	 * @throws Exception if the file cannot be read or a value cannot be parsed
	 */
	public static void takeSignal() throws Exception {
		double[] signal = null;
		try (BufferedReader br = new BufferedReader(new FileReader(new File("signalPeriodic.txt")))) {
			String line = br.readLine();
			if (line != null) {
				String[] elements = line.split(",");
				signal = new double[elements.length];
				for (int i = 0; i < elements.length; i++) {
					signal[i] = Double.parseDouble(elements[i]);
				}
			}
		}
		if (signal == null)
			throw new IllegalStateException("signalPeriodic.txt is empty: no signal to analyze");
		PeriodicityDetector pd = new PeriodicityDetector();
		double F = pd.detectFrequency(signal, true);
		System.out.println("Detected Frequency:" + F + " indecision [" + pd.lowermeanF + " , " + pd.uppermeanF + "]");
		System.out.println("Detected Period:" + pd.meanPeriod + " indecision [" + pd.lowermeanPeriod + " , " + pd.uppermeanPeriod + "]");
		System.out.println("Detected Periodicity Strength:" + pd.periodicityStrength + ":" + pd.getPeriodicityStregthInterpretation());
	}
}

View File

@ -0,0 +1,25 @@
package org.gcube.dataanalysis.geo.test.regression;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
/**
 * Regression harness: generates a synthetic noisy periodic signal and checks
 * that the periodicity detector recovers its frequency.
 */
public class RegressionPeriodicityAutomatic {

	static String cfg = "./cfg/";

	public static void main(String[] args) throws Exception {
		takeSignal();
	}

	/** Builds a noisy test signal and prints the detected periodicity figures. */
	public static void takeSignal() throws Exception {
		PeriodicityDetector detector = new PeriodicityDetector();
		// Synthetic signal parameters come from produceNoisySignal — TODO confirm their meaning.
		double[] signal = detector.produceNoisySignal(2000, 1, 0.1f, 0f);
		double F = detector.detectFrequency(signal, true);
		System.out.println("Detected Frequency:" + F + " indecision [" + detector.lowermeanF + " , " + detector.uppermeanF + "]");
		System.out.println("Detected Period:" + detector.meanPeriod + " indecision [" + detector.lowermeanPeriod + " , " + detector.uppermeanPeriod + "]");
		System.out.println("Detected Periodicity Strength:" + detector.periodicityStrength + ":" + detector.getPeriodicityStregthInterpretation());
	}
}

View File

@ -0,0 +1,39 @@
package org.gcube.dataanalysis.geo.test.regression;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.geo.matrixmodel.PointsExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
import com.vividsolutions.jts.geom.util.PointExtracter;
/**
 * Regression harness: extracts the value of a single (x, y, z, t) point from a
 * geospatial layer through PointsExtractor and prints it.
 */
public class RegressionPointExtraction {

	static String cfg = "./cfg/";

	public static void main(String[] args) throws Exception {
		// UUID of the layer to probe.
		String layertitle = "afd54b39-30f7-403a-815c-4f91c6c74c26";
		long startMillis = System.currentTimeMillis();
		AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
		config.setConfigPath(cfg);
		PointsExtractor pointsExtractor = new PointsExtractor(config);
		double value = pointsExtractor.extractXYZT(layertitle, 0, 0, 0, 0, 0);
		System.out.println("Point value: " + value);
		System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - startMillis));
	}
}

View File

@ -0,0 +1,51 @@
package org.gcube.dataanalysis.geo.test.regression;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
/**
 * Regression harness: builds a time series from a database table (vessel
 * speeds) and runs the periodicity detector on it.
 */
public class RegressionSignalFromTable {

	static String cfg = "./cfg/";

	public static void main(String[] args) throws Exception {
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setConfigPath("./cfg/");
		config.setPersistencePath("./");
		// Statistical Manager test database connection.
		config.setParam("DatabaseUserName", "utente");
		config.setParam("DatabasePassword", "d4science");
		config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
		config.setParam("DatabaseDriver", "org.postgresql.Driver");
		// Vessels table: column-to-dimension mapping.
		config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
		config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
		config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
		config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
		config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
		config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
		AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
		config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
		config.setConfigPath(cfg);
		TimeSeriesExtractor extractor = new TimeSeriesExtractor(config);
		long startMillis = System.currentTimeMillis();
		double[] signal = extractor.extractT("table", -47.97, 43.42, 0d, 0.5);
		System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - startMillis));
		System.out.println("Signal: " + signal.length);
		PeriodicityDetector detector = new PeriodicityDetector();
		double F = detector.detectFrequency(signal, true);
		System.out.println("Detected Frequency:" + F + " indecision [" + detector.lowermeanF + " , " + detector.uppermeanF + "]");
		System.out.println("Detected Period:" + detector.meanPeriod + " indecision [" + detector.lowermeanPeriod + " , " + detector.uppermeanPeriod + "]");
		System.out.println("Detected Periodicity Strength:" + detector.periodicityStrength);
	}
}

View File

@ -0,0 +1,45 @@
package org.gcube.dataanalysis.geo.test.regression;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.ZExtractor;
/**
 * Regression harness: extracts a water-column (Z) profile from a database
 * table through ZExtractor and displays the resulting signal.
 */
public class RegressionWaterColumnTable {

	static String cfg = "./cfg/";

	public static void main(String[] args) throws Exception {
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setConfigPath("./cfg/");
		config.setPersistencePath("./");
		// Statistical Manager test database connection.
		config.setParam("DatabaseUserName", "utente");
		config.setParam("DatabasePassword", "d4science");
		config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
		config.setParam("DatabaseDriver", "org.postgresql.Driver");
		// Vessels table: column-to-dimension mapping.
		config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
		config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
		config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
		// NOTE(review): the Z dimension is mapped onto column "x" as well — confirm this is intended.
		config.setParam(TableMatrixRepresentation.zDimensionColumnParameter, "x");
		config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
		config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
		config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
		AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
		config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
		config.setConfigPath(cfg);
		ZExtractor extractor = new ZExtractor(config);
		long startMillis = System.currentTimeMillis();
		double[] waterColumn = extractor.extractZ("table", -47.97, 43.42, 0, 0.5);
		System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - startMillis));
		System.out.println("Signal: " + waterColumn.length);
		SignalProcessing.displaySignalWithGenericTime(waterColumn, 0, 1, "signal");
	}
}

View File

@ -0,0 +1,32 @@
package org.gcube.dataanalysis.geo.test.regression;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.ZExtractor;
/**
 * Regression harness: extracts the water-column (Z) profile of a temperature
 * layer and displays it as a signal.
 */
public class RegressionWaterColumnTemperature {

	public static void main(String[] args) throws Exception {
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setConfigPath("./cfg/");
		config.setPersistencePath("./");
		// UUID of the temperature layer to probe.
		String layertitle = "6411b110-7572-457a-a662-a16e4ff09e4e";
		AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
		config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
		ZExtractor extractor = new ZExtractor(config);
		long startMillis = System.currentTimeMillis();
		// Probe at (0,0); the trailing arguments' semantics come from extractZ — TODO confirm.
		double[] waterColumn = extractor.extractZ(layertitle, 0, 0, 0, 100);
		System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - startMillis));
		System.out.println("Signal: " + waterColumn.length);
		SignalProcessing.displaySignalWithGenericTime(waterColumn, 0, 1, "signal");
	}
}

View File

@ -0,0 +1,71 @@
package org.gcube.dataanalysis.geo.test.regression;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
public class RegressionXYSlice {
static String cfg = "./cfg/";
static String layertitle = "120313e1-c0cb-4b3c-9779-ed651c490cdb";
static AlgorithmConfiguration config = new AlgorithmConfiguration();
public static void main(String[] args) throws Exception {
config.setConfigPath(cfg);
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);
config.setPersistencePath("./");
sliceWFS();
sliceNetCDF();
sliceASC();
sliceTable();
}
public static void sliceWFS() throws Exception{
AnalysisLogger.getLogger().debug("WFS");
// latimeria chalumnae
layertitle = "120313e1-c0cb-4b3c-9779-ed651c490cdb";
execute();
}
public static void sliceNetCDF() throws Exception{
AnalysisLogger.getLogger().debug("NetCDF");
// Chlorophyll
layertitle = "c565e32c-c5b3-4964-b44f-06dc620563e9";
execute();
}
public static void sliceASC() throws Exception{
AnalysisLogger.getLogger().debug("ASC");
//
layertitle = "2c2304d1-681a-4f3a-8409-e8cdb5ed447f";
execute();
}
public static void sliceTable() throws Exception{
AnalysisLogger.getLogger().debug("Table");
// latimeria chalumnae
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
// config.setParam(TableMatrixRepresentation.zDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
execute();
}
public static void execute() throws Exception{
long t0 = System.currentTimeMillis();
XYExtractor intersector = new XYExtractor(config);
intersector.extractXYGrid(layertitle, 0, -180, 180, -90, 90, 0, 0.5, 0.5);
System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
}
}