git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineGeoSpatialExtension@92535 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
parent 4910092ee2
commit c49d678970
GeoTiffMetadataInsertDev.java (new file):
@@ -0,0 +1,51 @@
+package org.gcube.dataanalysis.geo.batch;
+
+import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
+import org.gcube.dataanalysis.geo.meta.NetCDFMetadata;
+import org.opengis.metadata.identification.TopicCategory;
+
+public class GeoTiffMetadataInsertDev {
+
+    // static String geonetworkurl = "http://geoserver-dev2.d4science-ii.research-infrastructures.eu/geonetwork/";
+    static String geonetworkurl = "http://geonetwork.d4science.org/geonetwork/";
+    /*
+    static String geonetworkurl = "http://geonetwork.d4science.org/geonetwork/";
+    static String geonetworkurl = "http://geonetwork.d4science.org/geonetwork/";
+    static String geoserverurl = "http://geoserver.d4science-ii.research-infrastructures.eu/geoserver";
+    */
+    // static String geoserverurl = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver";
+
+    static String user = "admin";
+    static String password = "admin";
+
+    public static void main(String[] args) throws Exception {
+
+        for (int i = 2; i < 3; i++) {
+            worldclim(i);
+            // break;
+        }
+    }
+
+
+    // gebco
+    private static void worldclim(int index) throws Exception {
+        GenericLayerMetadata metadataInserter = new GenericLayerMetadata();
+        metadataInserter.setGeonetworkUrl(geonetworkurl);
+        metadataInserter.setGeonetworkUser(user);
+        metadataInserter.setGeonetworkPwd(password);
+
+        metadataInserter.setTitle("WorldClimBioGeoTiffTest" + index);
+
+        metadataInserter.setCategoryTypes("_" + TopicCategory.ENVIRONMENT.name() + "_");
+        metadataInserter.setResolution(0.0083);
+        metadataInserter.setAbstractField("WorldClim is a set of global climate layers (climate grids) with a spatial resolution of about 1 square kilometer. The data can be used for mapping and spatial modeling in a GIS or with other computer programs. Hijmans, R.J., S.E. Cameron, J.L. Parra, P.G. Jones and A. Jarvis, 2005. Very high resolution interpolated climate surfaces for global land areas. International Journal of Climatology 25: 1965-1978. Hosted on the D4Science Thredds Catalog: http://thredds.research-infrastructures.eu/thredds/catalog/public/netcdf/catalog.xml");
+        metadataInserter.setCustomTopics("D4Science", "EUBrazilOpenBio", "WorldClim", "WorldClimBio" + index + ".tiff", "Thredds");
+        metadataInserter.setAuthor("D4Science");
+        String[] urls = {"https://dl.dropboxusercontent.com/u/12809149/geoserver-GetCoverage.image.geotiff"};
+        String[] protocols = {"HTTP"};
+        metadataInserter.customMetaDataInsert(urls, protocols);
+    }
+
+}
WCSMetadataInsertDev.java (new file):
@@ -0,0 +1,51 @@
+package org.gcube.dataanalysis.geo.batch;
+
+import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
+import org.gcube.dataanalysis.geo.meta.NetCDFMetadata;
+import org.opengis.metadata.identification.TopicCategory;
+
+public class WCSMetadataInsertDev {
+
+    // static String geonetworkurl = "http://geoserver-dev2.d4science-ii.research-infrastructures.eu/geonetwork/";
+    static String geonetworkurl = "http://geonetwork.d4science.org/geonetwork/";
+    /*
+    static String geonetworkurl = "http://geonetwork.d4science.org/geonetwork/";
+    static String geonetworkurl = "http://geonetwork.d4science.org/geonetwork/";
+    static String geoserverurl = "http://geoserver.d4science-ii.research-infrastructures.eu/geoserver";
+    */
+    // static String geoserverurl = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver";
+
+    static String user = "admin";
+    static String password = "admin";
+
+    public static void main(String[] args) throws Exception {
+
+        for (int i = 2; i < 3; i++) {
+            worldclim(i);
+            // break;
+        }
+    }
+
+
+    // gebco
+    private static void worldclim(int index) throws Exception {
+        GenericLayerMetadata metadataInserter = new GenericLayerMetadata();
+        metadataInserter.setGeonetworkUrl(geonetworkurl);
+        metadataInserter.setGeonetworkUser(user);
+        metadataInserter.setGeonetworkPwd(password);
+
+        metadataInserter.setTitle("WorldClimBioWCS" + index);
+
+        metadataInserter.setCategoryTypes("_" + TopicCategory.ENVIRONMENT.name() + "_");
+        metadataInserter.setResolution(0.0083);
+        metadataInserter.setAbstractField("WorldClim is a set of global climate layers (climate grids) with a spatial resolution of about 1 square kilometer. The data can be used for mapping and spatial modeling in a GIS or with other computer programs. Hijmans, R.J., S.E. Cameron, J.L. Parra, P.G. Jones and A. Jarvis, 2005. Very high resolution interpolated climate surfaces for global land areas. International Journal of Climatology 25: 1965-1978. Hosted on the D4Science Thredds Catalog: http://thredds.research-infrastructures.eu/thredds/catalog/public/netcdf/catalog.xml");
+        metadataInserter.setCustomTopics("D4Science", "EUBrazilOpenBio", "WorldClim", "WorldClimBio" + index + ".tiff", "Thredds");
+        metadataInserter.setAuthor("D4Science");
+        String[] urls = {"http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/wcs/wcs?service=wcs&version=1.0.0&request=GetCoverage&coverage=aquamaps:WorldClimBio" + index, "http://thredds.research-infrastructures.eu/thredds/fileServer/public/netcdf/WorldClimBio" + index + ".tiff"};
+        String[] protocols = {"HTTP"};
+        metadataInserter.customMetaDataInsert(urls, protocols);
+    }
+
+}
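Review note: the two inserter classes above are identical except for the layer title and the published access URLs. A minimal sketch of the shared flow, with the duplication factored into a helper (the MetadataInsertSketch class and its publish method are hypothetical, not part of this commit; GeoNetwork endpoint and credentials as in the classes above):

    package org.gcube.dataanalysis.geo.batch;

    import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
    import org.opengis.metadata.identification.TopicCategory;

    public class MetadataInsertSketch {
        // Hypothetical helper: publishes one layer reference to GeoNetwork,
        // parameterized on title and access URLs instead of duplicating the class.
        static void publish(String title, String[] urls, String[] protocols) throws Exception {
            GenericLayerMetadata m = new GenericLayerMetadata();
            m.setGeonetworkUrl("http://geonetwork.d4science.org/geonetwork/");
            m.setGeonetworkUser("admin");
            m.setGeonetworkPwd("admin");
            m.setTitle(title);
            m.setCategoryTypes("_" + TopicCategory.ENVIRONMENT.name() + "_");
            m.setResolution(0.0083);
            m.customMetaDataInsert(urls, protocols);
        }
    }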
AscDataExplorer.java:
@@ -15,6 +15,8 @@ public class AscDataExplorer {
     public int ncolumns;
     public int nrows;
     public double cellsize;
+    public double dx;
+    public double dy;

     AscRaster ascFile;

@@ -27,6 +29,7 @@ public class AscDataExplorer {
             ascFile = reader.readRaster(file);
             break;
         } catch (Exception e) {
+            e.printStackTrace();
             AnalysisLogger.getLogger().debug("Error in reading remote file: " + file);
         }
         i++;
@@ -40,19 +43,29 @@ public class AscDataExplorer {
     ncolumns = ascFile.getCols();
     nrows = ascFile.getRows();
     cellsize = ascFile.getCellsize();
+    dx = ascFile.getdx();
+    dy = ascFile.getdy();

     AnalysisLogger.getLogger().debug("Origin: " + xOrigin + "," + yOrigin);
     AnalysisLogger.getLogger().debug("Cellsize: " + cellsize);
+    AnalysisLogger.getLogger().debug("dx: " + dx + " dy:" + dy);
     AnalysisLogger.getLogger().debug("Rows: " + nrows + " Cols:" + ncolumns);

 }

 public int longitude2Index(double longitude) {
-    return (int) Math.round((longitude - xOrigin) / cellsize);
+    if (dx > 0)
+        return (int) Math.round((longitude - xOrigin) / dx);
+    else
+        return (int) Math.round((longitude - xOrigin) / cellsize);
 }

 public int latitude2Index(double latitude) {
-    return (int) Math.round((latitude - yOrigin) / cellsize);
+    if (dy > 0)
+        return (int) Math.round((latitude - yOrigin) / dy);
+    else
+        return (int) Math.round((latitude - yOrigin) / cellsize);
 }

 public List<Double> retrieveDataFromAsc(List<Tuple<Double>> triplets, int time) throws Exception {

@@ -65,6 +78,18 @@ public class AscDataExplorer {
     double y = triplet.getElements().get(1);
     int j = longitude2Index(x);
     int i = latitude2Index(y);

+    if ((j > ncolumns) || (j < 0) || (i > nrows) || (i < 0)) {
+        values.add(Double.NaN);
+    }
+    else {
+        double value = ascFile.getValue(i, j);
+        if (value == Double.parseDouble(ascFile.NDATA))
+            values.add(Double.NaN);
+        else
+            values.add(value);
+    }
+    /*
     if (j > ncolumns) {
         AnalysisLogger.getLogger().debug("Warning: Column Overflow: adjusting!");
         AnalysisLogger.getLogger().debug("Overflow: y:" + y + "," + "x:" + x);
@@ -77,10 +102,12 @@ public class AscDataExplorer {
         AnalysisLogger.getLogger().debug("Overflow: iy:" + i + "," + "jx:" + j);
         i = nrows;
     }

     //AnalysisLogger.getLogger().debug("y:" + y + "," + "x:" + x);
     //AnalysisLogger.getLogger().debug("iy:" + i + "," + "jx:" + j);
     double value = ascFile.getValue(i, j);
     values.add(value);
+    */
 }

 return values;

@@ -149,12 +176,15 @@ public class AscDataExplorer {
     */

     public static void main(String[] args) throws Exception {
-        AscDataExplorer ade = new AscDataExplorer("http://thredds.d4science.org/thredds/fileServer/public/netcdf/ph.asc");
+        // AscDataExplorer ade = new AscDataExplorer("http://thredds.d4science.org/thredds/fileServer/public/netcdf/ph.asc");
+        AscDataExplorer ade = new AscDataExplorer("https://dl.dropboxusercontent.com/u/12809149/geoserver-GetCoverage.image.asc");

         List<Tuple<Double>> triplets = new ArrayList<Tuple<Double>>();
         triplets.add(new Tuple<Double>(-180d, -90d));
         triplets.add(new Tuple<Double>(0d, 0d));
         triplets.add(new Tuple<Double>(180d, 90d));
+        triplets.add(new Tuple<Double>(18.620429d, 20.836419d));

         List<Double> values = ade.retrieveDataFromAsc(triplets, 0);
         for (Double value : values) {
             AnalysisLogger.getLogger().debug("val:" + value);
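Review note: longitude2Index and latitude2Index now prefer the per-axis steps dx/dy when the ASC header declares them, and fall back to the square cellsize otherwise. A minimal sketch of the arithmetic, with assumed header values (not taken from a real layer):

    // Assumed values for illustration: xOrigin = -180, dx = 0.5.
    double xOrigin = -180, dx = 0.5, cellsize = 0.5;
    double longitude = 18.620429;                             // one of the test points in this commit
    double step = (dx > 0) ? dx : cellsize;                   // prefer the declared per-axis step
    int j = (int) Math.round((longitude - xOrigin) / step);   // round(397.24) = 397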
AscRaster.java:
@@ -20,14 +20,16 @@ public class AscRaster
     protected double xll;
     protected double yll;
     protected double cellsize;
+    protected double dx = -1;
+    protected double dy = -1;
     protected int cols;
     protected int rows;
-    protected String NDATA;

     public static final String DEFAULT_NODATA = "-9999";

+    protected String NDATA = DEFAULT_NODATA;
     public void print()
     {
-        System.out.println( "Rows: " + rows + " cols: " + cols + " cellsize " + cellsize );
+        System.out.println( "Rows: " + rows + " cols: " + cols + " cellsize " + cellsize + " dx " + dx + " dy " + dy );
         for( double[] row : data )
         {
             for( double val : row )

@@ -51,12 +53,14 @@ public class AscRaster
     * @param xll
     * @param yll
     */
-    public AscRaster( double cellsize, double xll, double yll )
+    public AscRaster( double cellsize, double dx, double dy, double xll, double yll )
    {
        this();
        setCellsize( cellsize );
        setXll( xll );
        setYll( yll );
+       setdx(dx);
+       setdy(dy);
    }

    /**

@@ -66,9 +70,9 @@ public class AscRaster
     * @param xll
     * @param yll
     */
-    public AscRaster( double[][] data, double cellsize, double xll, double yll )
+    public AscRaster( double[][] data, double cellsize, double dx, double dy, double xll, double yll )
    {
-       this(cellsize, xll, yll);
+       this(cellsize, dx, dy, xll, yll);
        setData( data );
    }

@@ -79,9 +83,9 @@ public class AscRaster
     * @param xll
     * @param yll
     */
-    public AscRaster( int[][] data, double cellsize, double xll, double yll )
+    public AscRaster( int[][] data, double cellsize, double dx, double dy, double xll, double yll )
    {
-       this(cellsize, xll, yll);
+       this(cellsize, dx, dy, xll, yll);
        setData( data );
    }

@@ -225,6 +229,26 @@ public class AscRaster
        this.cellsize = cellsize;
    }

+   public double getdx()
+   {
+       return dx;
+   }
+
+   public void setdx( double dx )
+   {
+       this.dx = dx;
+   }
+
+   public double getdy()
+   {
+       return dy;
+   }
+
+   public void setdy( double dy )
+   {
+       this.dy = dy;
+   }
+
    public int getCols()
    {
        return cols;
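Review note: dx and dy default to -1, which acts as a "not declared in the header" sentinel; callers (see AscDataExplorer above) test for a positive value before preferring it over cellsize. A hypothetical convenience accessor (not in this commit) that captures the convention in one place:

    // Hypothetical helpers, assuming the sentinel convention above:
    // a non-positive dx/dy means "header declared only cellsize".
    public double effectiveDx() {
        return dx > 0 ? dx : cellsize;
    }

    public double effectiveDy() {
        return dy > 0 ? dy : cellsize;
    }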
AscRasterReader.java:
@@ -10,6 +10,8 @@ import java.net.URLConnection;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;

+import org.apache.tools.ant.types.CommandlineJava.SysProperties;
+

 /**
  * A class which reads an ESRI ASCII raster file into a Raster

@@ -56,6 +58,8 @@ public class AscRasterReader
     while( input.ready() )
     {
         String line = input.readLine();
+        if (line != null && line.length() > 0)
+            line = line.trim();
         Matcher headMatch = header.matcher( line );
         //Match all the heads
         if( headMatch.matches() )

@@ -74,6 +78,10 @@ public class AscRasterReader
             raster.NDATA = value;
         else if ( head.equals( "cellsize" ) )
             raster.cellsize = Double.parseDouble( value );
+        else if ( head.equals( "dx" ) )
+            raster.dx = Double.parseDouble( value );
+        else if ( head.equals( "dy" ) )
+            raster.dy = Double.parseDouble( value );
         else
             System.out.println( "Unknown setting: " + line );
     }

@@ -86,11 +94,15 @@ public class AscRasterReader
     double[][] data = new double[raster.rows][];
     while( true )
     {
+        line = line.trim();
         //System.out.println( "Got data row: " + line );
         String[] inData = line.split( "\\s+" );
         double[] numData = new double[raster.cols];
-        if( inData.length != numData.length ) throw new RuntimeException( "Wrong number of columns: Expected " +
+        if( inData.length != numData.length ){
+            System.out.println(inData);
+            throw new RuntimeException( "Wrong number of columns: Expected " +
                 raster.cols + " got " + inData.length + " for line \n" + line );
+        }
         for( int col = 0; col < raster.cols; col ++ )
         {
             if( inData[col].equals( noData )) numData[col] = Double.NaN;
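Review note: with the dx/dy branches above, the reader accepts ESRI ASCII grids whose horizontal and vertical steps differ, such as the files produced by the GDAL conversion used in the GeoTiff and WCS connectors below. A hypothetical header it can now parse (all values invented for illustration; dx/dy appear in place of cellsize):

    ncols        720
    nrows        360
    xllcorner    -180
    yllcorner    -90
    dx           0.5
    dy           0.25
    NODATA_value -9999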
GeoTiff.java (new file):
@@ -0,0 +1,74 @@
+package org.gcube.dataanalysis.geo.connectors.geotiff;
+
+import java.io.File;
+import java.util.List;
+import java.util.UUID;
+
+import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.utils.Tuple;
+import org.gcube.dataanalysis.geo.connectors.asc.ASC;
+import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
+import org.gcube.dataanalysis.geo.utils.GdalConverter;
+
+public class GeoTiff implements GISDataConnector {
+
+    public double zScale = 0;
+    public String persistenceDir;
+    public String geoTiffUrl;
+
+    public GeoTiff(AlgorithmConfiguration config, String geoTiffURL) throws Exception {
+        persistenceDir = config.getPersistencePath();
+    }
+
+    @Override
+    public List<Double> getFeaturesInTimeInstantAndArea(String layerURL, String layerName, int time, List<Tuple<Double>> coordinates3d, double BBxL, double BBxR, double BByL, double BByR) throws Exception {
+
+        if (time > 0)
+            throw new Exception("Time is currently not supported in WCS");
+
+        String uuid = null;
+        String ascFile = null;
+        try {
+            int urlCheck = HttpRequest.checkUrl(layerURL, null, null);
+            AnalysisLogger.getLogger().debug("Checking url: " + urlCheck);
+            String randomFile = new File(persistenceDir, "geotiff" + UUID.randomUUID().toString().replace("-", "")).getAbsolutePath();
+            uuid = randomFile + ".tiff";
+            HttpRequest.downloadFile(layerURL, uuid);
+            AnalysisLogger.getLogger().debug("Converting to ASCII file: " + uuid);
+            ascFile = GdalConverter.convertToASC(uuid);
+            AnalysisLogger.getLogger().debug("Conversion to ASCII complete: " + ascFile);
+            ASC asc = new ASC();
+            List<Double> points = asc.getFeaturesInTimeInstantAndArea(ascFile, layerName, time, coordinates3d, BBxL, BBxR, BByL, BByR);
+            AnalysisLogger.getLogger().debug("Retrieved: " + points.size() + " points");
+            return points;
+        } catch (Exception e) {
+            throw e;
+        } finally {
+            if (uuid != null) {
+                AnalysisLogger.getLogger().debug("Deleting tiff:" + new File(uuid).delete());
+                AnalysisLogger.getLogger().debug("Deleting tiff aux file:" + new File(uuid.replace(".tiff", ".prj")).delete());
+            }
+            if (ascFile != null) {
+                AnalysisLogger.getLogger().debug("Deleting asc:" + new File(ascFile).delete());
+                AnalysisLogger.getLogger().debug("Deleting asc aux file:" + new File(ascFile + ".aux.xml").delete());
+            }
+        }
+    }
+
+    @Override
+    public double getMinZ(String layerURL, String layerName) {
+        // TODO: understand z management on the basis of at least one example
+        return 0;
+    }
+
+    @Override
+    public double getMaxZ(String layerURL, String layerName) {
+        // TODO: understand z management on the basis of at least one example
+        return 0;
+    }
+}
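Review note: the connector downloads the GeoTiff, converts it to ESRI ASCII via GdalConverter, delegates sampling to the ASC connector, and cleans up its temporary files. A minimal usage sketch, mirroring the main() that the WCS connector below ships with (the GeoTiffUsageSketch class is hypothetical; scope, paths and the tiff URL are taken from elsewhere in this commit):

    package org.gcube.dataanalysis.geo.test;

    import java.util.List;

    import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
    import org.gcube.dataanalysis.ecoengine.utils.Tuple;
    import org.gcube.dataanalysis.geo.connectors.geotiff.GeoTiff;
    import org.gcube.dataanalysis.geo.utils.VectorOperations;

    public class GeoTiffUsageSketch {
        public static void main(String[] args) throws Exception {
            // Assumed scope/paths; the tiff URL is the one published by the batch inserter above.
            AlgorithmConfiguration config = new AlgorithmConfiguration();
            config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
            config.setConfigPath("./cfg/");
            config.setPersistencePath("./");

            String tiffUrl = "https://dl.dropboxusercontent.com/u/12809149/geoserver-GetCoverage.image.geotiff";
            GeoTiff geotiff = new GeoTiff(config, tiffUrl);

            // Sample the layer on a 0.5-degree grid at time instant 0.
            List<Tuple<Double>> triplets = VectorOperations.generateCoordinateTripletsInBoundingBox(-30d, 30d, -30d, 30d, 0, 0.5, 0.5);
            List<Double> points = geotiff.getFeaturesInTimeInstantAndArea(tiffUrl, "", 0, triplets, -180d, 180d, -90d, 90d);
            System.out.println("points: " + points.size());
        }
    }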
NetCDFDataExplorer.java:
@@ -67,14 +67,16 @@ public class NetCDFDataExplorer {
     public double minZ = 0;
     public double maxZ = 0;

-    private void calcZRange(String openDapLink, String layer) {
+    public void calcZRange(String openDapLink, String layer) {
        try {
            if (isGridDataset(openDapLink)) {
                gds = ucar.nc2.dt.grid.GridDataset.open(openDapLink);
                List<GridDatatype> gridTypes = gds.getGrids();
                for (GridDatatype gdt : gridTypes) {
-                   AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getFullName());
-                   if (layer.equalsIgnoreCase(gdt.getFullName())) {
+                   // AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getFullName());
+                   AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getName());
+                   if (layer.equalsIgnoreCase(gdt.getName())) {
                        CoordinateAxis zAxis = gdt.getCoordinateSystem().getVerticalAxis();
                        minZ = zAxis.getMinValue();
                        maxZ = zAxis.getMaxValue();

@@ -96,8 +98,8 @@ public class NetCDFDataExplorer {

        List<GridDatatype> gridTypes = gds.getGrids();
        for (GridDatatype gdt : gridTypes) {
-           AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getFullName());
-           if (layer.equalsIgnoreCase(gdt.getFullName())) {
+           AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getName());
+           if (layer.equalsIgnoreCase(gdt.getName())) {
                AnalysisLogger.getLogger().debug("Found layer " + layer + " inside file");
                GridDatatype grid = gds.findGridDatatype(gdt.getName());
                CoordinateAxis zAxis = gdt.getCoordinateSystem().getVerticalAxis();

@@ -257,8 +259,8 @@ public class NetCDFDataExplorer {
        GridDataset gds = ucar.nc2.dt.grid.GridDataset.open(filename);
        List<GridDatatype> gridTypes = gds.getGrids();
        for (GridDatatype gdt : gridTypes) {
-           AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getFullName());
-           if (layer.equalsIgnoreCase(gdt.getFullName())) {
+           AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getName());
+           if (layer.equalsIgnoreCase(gdt.getName())) {
                AnalysisLogger.getLogger().debug("Found layer " + layer + " inside file");
                GridDatatype grid = gds.findGridDatatype(gdt.getName());
                GridCoordSystem gcs = grid.getCoordinateSystem();

@@ -393,6 +395,7 @@ public class NetCDFDataExplorer {
        } else
            return true;
        } catch (Throwable e) {
+           e.printStackTrace();
            return false;
        }
    }
WCS.java (new file):
@@ -0,0 +1,209 @@
+package org.gcube.dataanalysis.geo.connectors.wcs;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.List;
+import java.util.UUID;
+
+import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.utils.Tuple;
+import org.gcube.dataanalysis.geo.connectors.asc.ASC;
+import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
+import org.gcube.dataanalysis.geo.meta.OGCFormatter;
+import org.gcube.dataanalysis.geo.utils.GdalConverter;
+import org.gcube.dataanalysis.geo.utils.GeoTiffMetadata;
+import org.gcube.dataanalysis.geo.utils.VectorOperations;
+
+public class WCS implements GISDataConnector {
+    // WCS examples
+    // String wcs = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/wcs?service=WCS&version=1.0.0&request=GetCoverage&COVERAGE=aquamaps:WorldClimBio2&CRS=EPSG:4326&BBOX=-60,-30,60,30,0,0&WIDTH=640&HEIGHT=480&DEPTH=1&FORMAT=geotiff";
+    // String wcs = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/wcs?service=WCS&version=1.0.0&request=GetCoverage&COVERAGE=aquamaps:WorldClimBio2&CRS=EPSG:4326&BBOX=-60,-30,60,30&WIDTH=640&HEIGHT=480&FORMAT=geotiff&CIAO=1&DEPTH=2&TIME=3&RESX=2&RESY=3";
+    // String wcs = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/wcs?service=WCS&version=1.0.0&request=GetCoverage&COVERAGE=aquamaps:WorldClimBio2&CRS=EPSG:4326&BBOX=-60,-30,60,30&WIDTH=640&HEIGHT=480&FORMAT=geotiff";
+    // String wcs = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/wcs/wcs?service=wcs&version=1.0.0&request=GetCoverage&coverage=aquamaps:WorldClimBio2&CRS=EPSG:4326&bbox=-180,0,180,90&width=1&height=1&format=geotiff&RESPONSE_CRS=EPSG:4326";
+
+    public String baseURL;
+    public String coverage;
+    public String crs = "EPSG:4326";
+    public String boundingBox = "-180,-90,180,90";
+    public String width = "640";
+    public String height = "480";
+    public String depth = null;
+    public String format = "geotiff";
+    public String time = null;
+    public String responseCRS = "EPSG:4326";
+    public String resx;
+    public String resy;
+    public String resz;
+
+    public double zScale = 0;
+    public String persistenceDir;
+
+    public HashMap<String, String> parameters = new HashMap<String, String>();
+
+    public WCS(AlgorithmConfiguration config, String wcsURL) throws Exception {
+        parseWCSURL(wcsURL);
+        persistenceDir = config.getPersistencePath();
+        retrieveZScale();
+    }
+
+    public void retrieveZScale() throws Exception {
+        String uuid = null;
+        try {
+            String url = OGCFormatter.getWcsUrl(baseURL, coverage, crs, responseCRS, "-180,0,180,90", "1", "1", depth, format, resx, resy, resz, time, parameters);
+            AnalysisLogger.getLogger().debug("Retrieving Z parameters: " + url);
+            int urlCheck = HttpRequest.checkUrl(url, null, null);
+            AnalysisLogger.getLogger().debug("Checking url: " + urlCheck);
+            uuid = new File(persistenceDir, "geotiff" + UUID.randomUUID().toString().replace("-", "") + ".tiff").getAbsolutePath();
+            HttpRequest.downloadFile(url, uuid);
+            GeoTiffMetadata meta = new GeoTiffMetadata();
+            meta.readAndDisplayMetadata(uuid);
+            zScale = meta.zScale;
+            AnalysisLogger.getLogger().debug("Retrieved Z Scale: " + zScale);
+        } catch (Exception e) {
+            throw e;
+        }
+        finally {
+            if (uuid != null)
+                AnalysisLogger.getLogger().debug("Deleting point-tiff file :" + uuid + " " + new File(uuid).delete());
+        }
+    }
+
+    public void parseWCSURL(String wcsURL) {
+        int questionIDX = wcsURL.indexOf("?");
+        if (questionIDX < 0) {
+            baseURL = wcsURL;
+            return;
+        }
+
+        baseURL = wcsURL.substring(0, questionIDX);
+
+        String toParse = wcsURL.substring(questionIDX + 1);
+        String[] elements = toParse.split("&");
+        for (String element : elements) {
+            int eqIdx = element.indexOf("=");
+            if (eqIdx < 0)
+                continue;
+            String paramName = element.substring(0, eqIdx);
+            String paramValue = element.substring(eqIdx + 1);
+            associateParameter(paramName, paramValue);
+        }
+
+        AnalysisLogger.getLogger().debug("WCS Parsing finished");
+    }
+
+    private void associateParameter(String parameter, String value) {
+
+        if (parameter.equalsIgnoreCase("COVERAGE"))
+            coverage = value;
+        else if (parameter.equalsIgnoreCase("CRS"))
+            crs = value;
+        else if (parameter.equalsIgnoreCase("BBOX"))
+            boundingBox = value;
+        else if (parameter.equalsIgnoreCase("WIDTH"))
+            width = value;
+        else if (parameter.equalsIgnoreCase("HEIGHT"))
+            height = value;
+        else if (parameter.equalsIgnoreCase("DEPTH"))
+            depth = value;
+        else if (parameter.equalsIgnoreCase("FORMAT"))
+            format = value;
+        else if (parameter.equalsIgnoreCase("RESX"))
+            resx = value;
+        else if (parameter.equalsIgnoreCase("RESY"))
+            resy = value;
+        else if (parameter.equalsIgnoreCase("RESZ"))
+            resz = value;
+        else if (parameter.equalsIgnoreCase("TIME"))
+            time = value;
+        else if (parameter.equalsIgnoreCase("RESPONSE_CRS"))
+            responseCRS = value;
+        else {
+            if (!parameter.equalsIgnoreCase("service") && !parameter.equalsIgnoreCase("version") && !parameter.equalsIgnoreCase("request"))
+                parameters.put(parameter, value);
+        }
+    }
+
+    @Override
+    public List<Double> getFeaturesInTimeInstantAndArea(String layerURL, String layerName, int time, List<Tuple<Double>> coordinates3d, double BBxL, double BBxR, double BByL, double BByR) throws Exception {
+
+        if (time > 0)
+            throw new Exception("Time is currently not supported in WCS");
+
+        String uuid = null;
+        String ascFile = null;
+        try {
+            String resolutionx = resx;
+            String resolutiony = resy;
+
+            if (coordinates3d.size() > 1) {
+                double x1 = coordinates3d.get(0).getElements().get(0);
+                double x2 = coordinates3d.get(1).getElements().get(0);
+                resolutionx = "" + Math.abs(x2 - x1);
+                double y1 = coordinates3d.get(0).getElements().get(0);
+                double y2 = coordinates3d.get(1).getElements().get(0);
+                resolutiony = "" + Math.abs(y2 - y1);
+            }
+
+            AnalysisLogger.getLogger().debug("Resolution parameters: resx: " + resolutionx + " resy: " + resolutiony);
+
+            String url = OGCFormatter.getWcsUrl(baseURL, coverage, crs, responseCRS, "" + BBxL + "," + BByL + "," + BBxR + "," + BByR, width, height, depth, format, resolutionx, resolutiony, resz, "" + time, parameters);
+            AnalysisLogger.getLogger().debug("Retrieving Z parameters: " + url);
+            int urlCheck = HttpRequest.checkUrl(url, null, null);
+            AnalysisLogger.getLogger().debug("Checking url: " + urlCheck);
+            String randomFile = new File(persistenceDir, "geotiff" + UUID.randomUUID().toString().replace("-", "")).getAbsolutePath();
+            uuid = randomFile + ".tiff";
+            HttpRequest.downloadFile(url, uuid);
+            AnalysisLogger.getLogger().debug("Converting to ASCII file: " + uuid);
+            ascFile = GdalConverter.convertToASC(uuid);
+            AnalysisLogger.getLogger().debug("Conversion to ASCII complete: " + ascFile);
+            ASC asc = new ASC();
+            List<Double> points = asc.getFeaturesInTimeInstantAndArea(ascFile, layerName, time, coordinates3d, BBxL, BBxR, BByL, BByR);
+            AnalysisLogger.getLogger().debug("Retrieved: " + points.size() + " points");
+            return points;
+        } catch (Exception e) {
+            throw e;
+        } finally {
+            if (uuid != null) {
+                AnalysisLogger.getLogger().debug("Deleting tiff:" + new File(uuid).delete());
+                AnalysisLogger.getLogger().debug("Deleting tiff aux file:" + new File(uuid.replace(".tiff", ".prj")).delete());
+            }
+            if (ascFile != null) {
+                AnalysisLogger.getLogger().debug("Deleting asc:" + new File(ascFile).delete());
+                AnalysisLogger.getLogger().debug("Deleting asc aux file:" + new File(ascFile + ".aux.xml").delete());
+            }
+        }
+    }
+
+    @Override
+    public double getMinZ(String layerURL, String layerName) {
+        // TODO: understand z management on the basis of at least one example
+        return 0;
+    }
+
+    @Override
+    public double getMaxZ(String layerURL, String layerName) {
+        // TODO: understand z management on the basis of at least one example
+        return 0;
+    }
+
+    public static void main(String[] args) throws Exception {
+
+        String wcsUrl = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/wcs/wcs?service=wcs&version=1.0.0&request=GetCoverage&coverage=aquamaps:WorldClimBio2&CRS=EPSG:4326&bbox=-180,0,180,90&width=1&height=1&format=geotiff&RESPONSE_CRS=EPSG:4326";
+
+        AlgorithmConfiguration config = new AlgorithmConfiguration();
+        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
+        config.setConfigPath("./cfg/");
+        config.setPersistencePath("./");
+
+        WCS wcs = new WCS(config, wcsUrl);
+
+        List<Tuple<Double>> triplets = VectorOperations.generateCoordinateTripletsInBoundingBox(-30d, 30d, -30d, 30d, 0, 0.5, 0.5);
+
+        List<Double> points = wcs.getFeaturesInTimeInstantAndArea(wcsUrl, "", 0, triplets, -180d, 180d, -90d, 90d);
+        AnalysisLogger.getLogger().debug("points:" + points);
+    }
+
+}
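Review note: when more than one coordinate triplet is requested, the connector derives RESX/RESY from the spacing of the first two points; note that, as committed, both resolutions are read from element 0 of the tuples, so resolutiony duplicates resolutionx. A sketch of the per-axis computation under the assumption that tuple element 0 is longitude and element 1 is latitude:

    // Sketch: derive per-axis resolutions from the first two requested points.
    // Assumes tuple element 0 = longitude (x) and element 1 = latitude (y).
    double x1 = coordinates3d.get(0).getElements().get(0);
    double x2 = coordinates3d.get(1).getElements().get(0);
    String resolutionx = "" + Math.abs(x2 - x1);

    double y1 = coordinates3d.get(0).getElements().get(1);
    double y2 = coordinates3d.get(1).getElements().get(1);
    String resolutiony = "" + Math.abs(y2 - y1);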
GeoNetworkInspector.java:
@@ -164,6 +164,17 @@ public class GeoNetworkInspector {
        return searchInUrl(meta, "service=wcs");
    }

+   public String getGeoTiffLink(Metadata meta) {
+       String url = searchInUrl(meta, ".tiff");
+       if (url == null)
+           url = searchInUrl(meta, ".geotiff");
+       if (url == null)
+           url = searchInUrl(meta, ".tif");
+       if (url == null)
+           url = searchInUrl(meta, ".geotif");
+       return url;
+   }
+
    public String getOpenDapLink(Metadata meta) {
        return searchInUrl(meta, "/dodsC");
    }

@@ -212,6 +223,16 @@ public class GeoNetworkInspector {
        return (httplink != null);
    }

+   public boolean isWCS(Metadata meta) {
+       String httplink = getWCSLink(meta);
+       return (httplink != null);
+   }
+
+   public boolean isGeoTiff(Metadata meta) {
+       String httplink = getGeoTiffLink(meta);
+       return (httplink != null);
+   }
+
    Configuration gnconfiguration;
    public GeoNetworkReader initGeoNetworkReader() throws Exception {
        AnalysisLogger.getLogger().debug("Features Manager: configuring GeoNetwork");
ThreddsFetcher.java:
@@ -47,7 +47,7 @@ public class ThreddsFetcher {
        for (GridDatatype gdt : gridTypes) {
            String description = gdt.getDescription();
            if ((description == null) || (description.length() == 0))
-               description = gdt.getFullName();
+               description = gdt.getName();
            // get date range
            DateRange dr = gdt.getCoordinateSystem().getDateRange();
            // SimpleDateFormat netcdfDF = new SimpleDateFormat(NetCDFDateFormat, Locale.ENGLISH);

@@ -104,11 +104,11 @@ public class ThreddsFetcher {
        // Build standard info:
        metadataInserter.setThreddsCatalogUrl(threddsCatalogURL);
        metadataInserter.setLayerUrl(url);
-       metadataInserter.setLayerName(gdt.getFullName());
+       metadataInserter.setLayerName(gdt.getName());
        metadataInserter.setSourceFileName(filename);
        // insert ranges and sampling
        metadataInserter.setTitle(generatedTitle);
-       metadataInserter.setAbstractField(generateAbstractField(gdt.getFullName(), filename, description, gdt.getUnitsString().trim(), hStartDate, hEndDate, duration, resolution, numberOfDimensions, gds.getTitle(), gds.getDescription()));
+       metadataInserter.setAbstractField(generateAbstractField(gdt.getName(), filename, description, gdt.getUnitsString().trim(), hStartDate, hEndDate, duration, resolution, numberOfDimensions, gds.getTitle(), gds.getDescription()));
        metadataInserter.setResolution(Math.max(resolutionX, resolutionY));
        // set Bounding box
        double minX = NetCDFDataExplorer.getMinX(gdt.getCoordinateSystem());
MatrixExtractor.java:
@@ -7,9 +7,11 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.gcube.dataanalysis.ecoengine.utils.Tuple;
 import org.gcube.dataanalysis.geo.connectors.asc.ASC;
+import org.gcube.dataanalysis.geo.connectors.geotiff.GeoTiff;
 import org.gcube.dataanalysis.geo.connectors.netcdf.NetCDF;
 import org.gcube.dataanalysis.geo.connectors.table.Table;
 import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
+import org.gcube.dataanalysis.geo.connectors.wcs.WCS;
 import org.gcube.dataanalysis.geo.connectors.wfs.WFS;
 import org.gcube.dataanalysis.geo.infrastructure.GeoNetworkInspector;
 import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;

@@ -84,6 +86,14 @@ public class MatrixExtractor {
            AnalysisLogger.getLogger().debug("found a Geo Layer with reference " + layerTitle + " and layer name " + layerName);
            layerURL = gnInspector.getGeoserverLink(meta);
            connector = new WFS();
+       } else if (gnInspector.isWCS(meta)) {
+           AnalysisLogger.getLogger().debug("found a WCS Layer with reference " + layerTitle + " and layer name " + layerName);
+           layerURL = gnInspector.getWCSLink(meta);
+           connector = new WCS(configuration, layerURL);
+       } else if (gnInspector.isGeoTiff(meta)) {
+           layerURL = gnInspector.getGeoTiffLink(meta);
+           AnalysisLogger.getLogger().debug("found a GeoTiff with reference " + layerTitle + " and layer name " + layerName);
+           connector = new GeoTiff(configuration, layerURL);
        }
        currentconnector = connector;
    }
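Review note: with these branches, WCS and GeoTiff layers resolve to connectors behind the same GISDataConnector interface, so the downstream extractors stay agnostic of the source. A sketch of the resulting polymorphic call (connector selection as above; the sampling grid below is illustrative):

    // Sketch: any connector chosen by MatrixExtractor is driven the same way.
    GISDataConnector connector = new WCS(configuration, layerURL);  // or new GeoTiff(configuration, layerURL)
    List<Tuple<Double>> grid = VectorOperations.generateCoordinateTripletsInBoundingBox(-30d, 30d, -30d, 30d, 0, 0.5, 0.5);
    List<Double> values = connector.getFeaturesInTimeInstantAndArea(layerURL, layerName, 0, grid, -180d, 180d, -90d, 90d);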
TestGeoTiff.java (new file):
@@ -0,0 +1,91 @@
+package org.gcube.dataanalysis.geo.test;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
+import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
+import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
+import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
+
+public class TestGeoTiff {
+
+    static String cfg = "./cfg/";
+
+    public static void main(String[] args) throws Exception {
+        String layertitle = "WorldClimBioWCS2";
+
+        long t0 = System.currentTimeMillis();
+        AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);
+
+        AlgorithmConfiguration config = new AlgorithmConfiguration();
+        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
+        config.setConfigPath(cfg);
+        TimeSeriesExtractor intersector = new TimeSeriesExtractor(config);
+
+        // intersector.takeTimeSlice(layertitle, 0, -180, 180, -10, 10, 0, 1, 1);
+        // intersector.takeTimeSlice(layertitle, 0, -10, 10, -10, 10, 0, 1, 1);
+        // intersector.takeTimeInstantMatrix(layertitle, 0, -180, 180, -90, 90, 0, 0.5, 0.5);
+        double signal[] = intersector.extractT(layertitle, 0d, 0d, 0d, 0.5);
+
+        // SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
+
+        System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
+
+        System.out.println("Signal: " + signal.length);
+        BufferedWriter bw = new BufferedWriter(new FileWriter(new File("signal.txt")));
+        for (double si : signal) {
+            bw.write(si + ",");
+        }
+        bw.close();
+
+        PeriodicityDetector pd = new PeriodicityDetector();
+        double F = pd.detectFrequency(signal, true);
+
+        System.out.println("Detected Frequency:" + F + " indecision [" + pd.lowermeanF + " , " + pd.uppermeanF + "]");
+        System.out.println("Detected Period:" + pd.meanPeriod + " indecision [" + pd.lowermeanPeriod + " , " + pd.uppermeanPeriod + "]");
+        System.out.println("Detected Periodicity Strength:" + pd.periodicityStrength + " " + pd.getPeriodicityStregthInterpretation());
+    }
+
+    public static void main1(String[] args) throws Exception {
+        takeSignal();
+    }
+
+    public static void takeSignal() throws Exception {
+        BufferedReader br = new BufferedReader(new FileReader(new File("signal.txt")));
+        String line = br.readLine();
+        double[] signal = null;
+
+        while (line != null) {
+            String[] el = line.split(",");
+            signal = new double[el.length];
+            int i = 0;
+            for (String e : el) {
+                signal[i] = Double.parseDouble(e);
+                i++;
+            }
+            line = null;
+        }
+        br.close();
+
+        // SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
+        PeriodicityDetector pd = new PeriodicityDetector();
+
+        // signal = pd.produceNoisySignal(2000, 1, 0.1f, 0f);
+
+        // float freq = 1; // signal.length;
+
+        // double F = pd.detectFrequency(signal, (int) freq, 0, freq, 1f, true);
+        double F = pd.detectFrequency(signal, true);
+
+        System.out.println("Detected Frequency:" + F + " indecision [" + pd.lowermeanF + " , " + pd.uppermeanF + "]");
+        System.out.println("Detected Period:" + pd.meanPeriod + " indecision [" + pd.lowermeanPeriod + " , " + pd.uppermeanPeriod + "]");
+        System.out.println("Detected Periodicity Strength:" + pd.periodicityStrength);
+    }
+
+}
TestSignal.java:
@@ -29,9 +29,9 @@ public class TestSignal {
        //wind stress
        String layertitle = "255b5a95-ad28-4fec-99e0-5d48112dd6ab";
        //wind speed
-       layertitle = "a116c9bc-9380-4d40-8374-aa0e376a6820";
+       // layertitle = "a116c9bc-9380-4d40-8374-aa0e376a6820";
        //nitrates
-       layertitle = "b1cd9549-d9d0-4c77-9532-b161a69fbd44";
+       // layertitle = "b1cd9549-d9d0-4c77-9532-b161a69fbd44";

        //ASC
        // String layertitle = "2c2304d1-681a-4f3a-8409-e8cdb5ed447f";
TestWaterColumnGeoTiff.java (new file):
@@ -0,0 +1,36 @@
+package org.gcube.dataanalysis.geo.test;
+
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
+import org.gcube.dataanalysis.ecoengine.utils.Tuple;
+import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
+import org.gcube.dataanalysis.geo.matrixmodel.ZExtractor;
+
+public class TestWaterColumnGeoTiff {
+
+    public static void main(String[] args) throws Exception {
+        AlgorithmConfiguration config = new AlgorithmConfiguration();
+
+        config.setConfigPath("./cfg/");
+        config.setPersistencePath("./");
+
+        String layertitle = "WorldClimBioGeoTiffTest2";
+        // layertitle = "WorldClimBioWCS2";
+
+        AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
+        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
+
+        ZExtractor extractor = new ZExtractor(config);
+        long t0 = System.currentTimeMillis();
+
+        double watercolumn[] = extractor.extractZ(layertitle, 18.620429d, 20.836419d, 0, 0);
+
+        System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
+        System.out.println("Signal: " + watercolumn.length);
+        System.out.println("Signal first element: " + watercolumn[0]);
+        SignalProcessing.displaySignalWithGenericTime(watercolumn, 0, 1, "signal");
+
+    }
+
+}
TestWaterColumnTemperature.java:
@@ -15,9 +15,11 @@ public class TestWaterColumnTemperature {
        config.setPersistencePath("./");

        String layertitle = "6411b110-7572-457a-a662-a16e4ff09e4e";
+       /*
        layertitle = "be24800d-7583-4efa-b925-e0d8760e0fd3";
        layertitle = "320652c8-e986-4428-9306-619d9014822a";
        layertitle = "0aac424b-5f5b-4fa6-97d6-4b4deee62b97";
+       */
        AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
RegressionGeoTiff.java (new file):
@@ -0,0 +1,75 @@
+package org.gcube.dataanalysis.geo.test.regression;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
+import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
+import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
+import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
+
+public class RegressionGeoTiff {
+
+    static String cfg = "./cfg/";
+
+    public static void main(String[] args) throws Exception {
+        String layertitle = "WorldClimBioGeoTiffTest2";
+
+        long t0 = System.currentTimeMillis();
+        AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);
+
+        AlgorithmConfiguration config = new AlgorithmConfiguration();
+        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
+        config.setConfigPath(cfg);
+        TimeSeriesExtractor intersector = new TimeSeriesExtractor(config);
+
+        double signal[] = intersector.extractT(layertitle, 0d, 0d, 0d, 0.5);
+
+        SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
+
+        System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
+
+    }
+
+    public static void main1(String[] args) throws Exception {
+        takeSignal();
+    }
+
+    public static void takeSignal() throws Exception {
+        BufferedReader br = new BufferedReader(new FileReader(new File("signal.txt")));
+        String line = br.readLine();
+        double[] signal = null;
+
+        while (line != null) {
+            String[] el = line.split(",");
+            signal = new double[el.length];
+            int i = 0;
+            for (String e : el) {
+                signal[i] = Double.parseDouble(e);
+                i++;
+            }
+            line = null;
+        }
+        br.close();
+
+        // SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
+        PeriodicityDetector pd = new PeriodicityDetector();
+
+        // signal = pd.produceNoisySignal(2000, 1, 0.1f, 0f);
+
+        // float freq = 1; // signal.length;
+
+        // double F = pd.detectFrequency(signal, (int) freq, 0, freq, 1f, true);
+        double F = pd.detectFrequency(signal, true);
+
+        System.out.println("Detected Frequency:" + F + " indecision [" + pd.lowermeanF + " , " + pd.uppermeanF + "]");
+        System.out.println("Detected Period:" + pd.meanPeriod + " indecision [" + pd.lowermeanPeriod + " , " + pd.uppermeanPeriod + "]");
+        System.out.println("Detected Periodicity Strength:" + pd.periodicityStrength);
+    }
+
+}
(package move of an existing test class from test.maps to test.regression):
@@ -1,4 +1,4 @@
-package org.gcube.dataanalysis.geo.test.maps;
+package org.gcube.dataanalysis.geo.test.regression;

 import java.util.List;
RegressionWCS.java (new file):
@@ -0,0 +1,75 @@
+package org.gcube.dataanalysis.geo.test.regression;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
+import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
+import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
+import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
+
+public class RegressionWCS {
+
+    static String cfg = "./cfg/";
+
+    public static void main(String[] args) throws Exception {
+        String layertitle = "WorldClimBioWCS2";
+
+        long t0 = System.currentTimeMillis();
+        AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);
+
+        AlgorithmConfiguration config = new AlgorithmConfiguration();
+        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
+        config.setConfigPath(cfg);
+        TimeSeriesExtractor intersector = new TimeSeriesExtractor(config);
+
+        double signal[] = intersector.extractT(layertitle, 0d, 0d, 0d, 0.5);
+
+        SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
+
+        System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
+
+    }
+
+    public static void main1(String[] args) throws Exception {
+        takeSignal();
+    }
+
+    public static void takeSignal() throws Exception {
+        BufferedReader br = new BufferedReader(new FileReader(new File("signal.txt")));
+        String line = br.readLine();
+        double[] signal = null;
+
+        while (line != null) {
+            String[] el = line.split(",");
+            signal = new double[el.length];
+            int i = 0;
+            for (String e : el) {
+                signal[i] = Double.parseDouble(e);
+                i++;
+            }
+            line = null;
+        }
+        br.close();
+
+        // SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
+        PeriodicityDetector pd = new PeriodicityDetector();
+
+        // signal = pd.produceNoisySignal(2000, 1, 0.1f, 0f);
+
+        // float freq = 1; // signal.length;
+
+        // double F = pd.detectFrequency(signal, (int) freq, 0, freq, 1f, true);
+        double F = pd.detectFrequency(signal, true);
+
+        System.out.println("Detected Frequency:" + F + " indecision [" + pd.lowermeanF + " , " + pd.uppermeanF + "]");
+        System.out.println("Detected Period:" + pd.meanPeriod + " indecision [" + pd.lowermeanPeriod + " , " + pd.uppermeanPeriod + "]");
+        System.out.println("Detected Periodicity Strength:" + pd.periodicityStrength);
+    }
+
+}
RegressionWaterColumnGeoTiff.java (new file):
@@ -0,0 +1,52 @@
+package org.gcube.dataanalysis.geo.test.regression;
+
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
+import org.gcube.dataanalysis.ecoengine.utils.Tuple;
+import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
+import org.gcube.dataanalysis.geo.matrixmodel.ZExtractor;
+
+public class RegressionWaterColumnGeoTiff {
+
+    public static void main(String[] args) throws Exception {
+        AlgorithmConfiguration config = new AlgorithmConfiguration();
+
+        config.setConfigPath("./cfg/");
+        config.setPersistencePath("./");
+
+        String layertitle = "WorldClimBioGeoTiffTest2";
+        // layertitle = "WorldClimBioWCS2";
+
+        AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
+        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
+
+        ZExtractor extractor = new ZExtractor(config);
+        long t0 = System.currentTimeMillis();
+
+        double watercolumn[] = extractor.extractZ(layertitle, 18.620429d, 20.836419d, 0, 0);
+
+        System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
+        System.out.println("Signal: " + watercolumn.length);
+        System.out.println("Signal first element: " + watercolumn[0]);
+        SignalProcessing.displaySignalWithGenericTime(watercolumn, 0, 1, "signal");
+
+        layertitle = "WorldClimBioWCS2";
+
+        AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
+        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
+
+        extractor = new ZExtractor(config);
+        t0 = System.currentTimeMillis();
+
+        watercolumn = extractor.extractZ(layertitle, 18.620429d, 20.836419d, 0, 0);
+
+        System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
+        System.out.println("Signal: " + watercolumn.length);
+        System.out.println("Signal first element: " + watercolumn[0]);
+        SignalProcessing.displaySignalWithGenericTime(watercolumn, 0, 1, "signal");
+
+    }
+
+}