git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineGeoSpatialExtension@92165 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
parent 6cc3f351f2
commit 586ab65adf
@@ -1,162 +0,0 @@
package org.gcube.dataanalysis.geo.utils;

import java.util.ArrayList;
import java.util.List;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;

public class AscDataExplorer {

	public double xOrigin;
	public double yOrigin;
	public int ncolumns;
	public int nrows;
	public double cellsize;

	AscRaster ascFile;

	public AscDataExplorer(String file) throws Exception {
		AnalysisLogger.getLogger().debug("Managing Asc File: " + file);
		AscRasterReader reader = new AscRasterReader();
		int i = 0;
		while (i < 10) {
			try {
				ascFile = reader.readRaster(file);
				break;
			} catch (Exception e) {
				AnalysisLogger.getLogger().debug("Error in reading remote file: " + file);
			}
			i++;
		}

		if (i == 10)
			throw new Exception("Reading Timeout for the file " + file);

		xOrigin = ascFile.getXll();
		yOrigin = ascFile.getYll();
		ncolumns = ascFile.getCols();
		nrows = ascFile.getRows();
		cellsize = ascFile.getCellsize();

		AnalysisLogger.getLogger().debug("Origin: " + xOrigin + "," + yOrigin);
		AnalysisLogger.getLogger().debug("Cellsize: " + cellsize);
		AnalysisLogger.getLogger().debug("Rows: " + nrows + " Cols:" + ncolumns);
	}

	public int longitude2Index(double longitude) {
		return (int) Math.round((longitude - xOrigin) / cellsize);
	}

	public int latitude2Index(double latitude) {
		return (int) Math.round((latitude - yOrigin) / cellsize);
	}

	public List<Double> retrieveDataFromAsc(List<Tuple<Double>> triplets) {
		List<Double> values = new ArrayList<Double>();
		for (Tuple<Double> triplet : triplets) {
			double x = triplet.getElements().get(0);
			double y = triplet.getElements().get(1);
			int j = longitude2Index(x);
			int i = latitude2Index(y);
			if (j > ncolumns) {
				AnalysisLogger.getLogger().debug("Warning: Column Overflow: adjusting!");
				AnalysisLogger.getLogger().debug("Overflow: y:" + y + "," + "x:" + x);
				AnalysisLogger.getLogger().debug("Overflow: iy:" + i + "," + "jx:" + j);
				j = ncolumns;
			}
			if (i > nrows) {
				AnalysisLogger.getLogger().debug("Warning: Row Overflow: adjusting!");
				AnalysisLogger.getLogger().debug("Overflow: y:" + y + "," + "x:" + x);
				AnalysisLogger.getLogger().debug("Overflow: iy:" + i + "," + "jx:" + j);
				i = nrows;
			}
			// AnalysisLogger.getLogger().debug("y:"+y+","+"x:"+x);
			// AnalysisLogger.getLogger().debug("iy:"+i+","+"jx:"+j);
			double value = ascFile.getValue(i, j);
			values.add(value);
		}

		return values;
	}

	public static void testReaders() throws Exception {
		/*
		// File file = new File("http://thredds.d4science.org/thredds/fileServer/public/netcdf/sstrange.tiff");
		File file = new File("sstrange.tiff");
		AbstractGridFormat format = new GeoTiffFormat();
		StringBuilder buffer = new StringBuilder();

		buffer.append(file.getAbsolutePath()).append("\n");
		// Object o = file.toURI().toURL();
		Object o = file;
		if (format.accepts(o)) {
			buffer.append("ACCEPTED").append("\n");

			// getting a reader
			GeoTiffReader reader = new GeoTiffReader(o, new Hints(Hints.FORCE_LONGITUDE_FIRST_AXIS_ORDER, Boolean.TRUE));

			if (reader != null) {
				// reading the coverage
				GridCoverage2D coverage = (GridCoverage2D) reader.read(null);
				buffer.append("CRS: ").append(coverage.getCoordinateReferenceSystem2D().toWKT()).append("\n");
				buffer.append("GG: ").append(coverage.getGridGeometry().toString()).append("\n");
				// display metadata
				IIOMetadataDumper iIOMetadataDumper = new IIOMetadataDumper(((GeoTiffReader) reader).getMetadata().getRootNode());
				buffer.append("TIFF metadata: ").append(iIOMetadataDumper.getMetadata()).append("\n");
				coverage.show();

				// PlanarImage.wrapRenderedImage(coverage.getRenderedImage()).getTiles();
			}
			else
				buffer.append("NOT ACCEPTED").append("\n");
		}
		*/
		/*
		GeoTiff gt = new GeoTiff("sstrange.tiff");
		gt.read();
		System.out.println(gt.showInfo());
		gt.close();
		*/
		// GridDataset gds = ucar.nc2.dt.grid.GridDataset.open("sstrange.tiff");
		// List<GridDatatype> gridTypes = gds.getGrids();
		// GridDatatype grid = gds.findGridDatatype(gdt.getName());
		// GridCoordSystem gcs = grid.getCoordinateSystem();
		AscRasterReader reader = new AscRasterReader();
		AscRaster r1 = reader.readRaster("ph.asc");
		System.out.println("center:" + r1.getXll() + "," + r1.getYll());
		System.out.println("cols:" + r1.getCols());
		System.out.println("data:" + r1.getValue(1, 1));
	}

	/*
	public void testBandNames() throws Exception {
		final File file = TestData.file(GeoTiffReaderTest.class, "wind.tiff");
		assertNotNull(file);
		final AbstractGridFormat format = new GeoTiffFormat();
		GridCoverage2D coverage = format.getReader(file).read(null);
		String band1Name = coverage.getSampleDimension(0).getDescription().toString();
		String band2Name = coverage.getSampleDimension(1).getDescription().toString();
		assertEquals("Band1", band1Name);
		assertEquals("Band2", band2Name);
	}
	*/

	public static void main(String[] args) throws Exception {
		AscDataExplorer ade = new AscDataExplorer("http://thredds.d4science.org/thredds/fileServer/public/netcdf/ph.asc");

		List<Tuple<Double>> triplets = new ArrayList<Tuple<Double>>();
		triplets.add(new Tuple<Double>(-180d, -90d));
		triplets.add(new Tuple<Double>(0d, 0d));
		triplets.add(new Tuple<Double>(180d, 90d));
		List<Double> values = ade.retrieveDataFromAsc(triplets);
		for (Double value : values) {
			AnalysisLogger.getLogger().debug("val:" + value);
		}
	}

}
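A minimal standalone sketch of the longitude/latitude-to-cell-index arithmetic used by the removed AscDataExplorer above; the origin, cell size and sample point are invented values, not part of this commit.

// Hedged sketch: same index arithmetic as longitude2Index/latitude2Index, shown standalone.
public class AscIndexSketch {
	public static void main(String[] args) {
		double xOrigin = -180.0, yOrigin = -90.0, cellsize = 0.5; // assumed grid geometry
		double lon = 12.0, lat = 45.0;                            // assumed sample point
		int col = (int) Math.round((lon - xOrigin) / cellsize);   // 384
		int row = (int) Math.round((lat - yOrigin) / cellsize);   // 270
		System.out.println("col=" + col + " row=" + row);
	}
}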
@@ -1,260 +0,0 @@
package org.gcube.dataanalysis.geo.utils;

import java.util.Arrays;

/**
 * Represents my best guess at the ESRI ASCII raster format. I couldn't find
 * any sensible documentation, so it supports the following features:
 * <ul>
 * <li>cellsize, xll and yll are stored as doubles, and largely ignored
 * <li>NDATA has a string representation (as it is typically read in from an
 * ascii string) and is internally represented as Double.NaN, as this is safer and
 * easier to deal with than the -9999 found in most rasters.
 * </ul>
 * @author dmrust
 *
 */
public class AscRaster
{
	protected double[][] data;
	protected double xll;
	protected double yll;
	protected double cellsize;
	protected int cols;
	protected int rows;
	protected String NDATA;
	public static final String DEFAULT_NODATA = "-9999";

	public void print()
	{
		System.out.println( "Rows: " + rows + " cols: " + cols + " cellsize " + cellsize );
		for( double[] row : data )
		{
			for( double val : row )
				System.out.print( val + " " );
			System.out.println( "" );
		}
	}

	/**
	 * Creates an empty raster
	 */
	public AscRaster()
	{
	}

	/**
	 * Creates a raster from the given data
	 * @param cellsize
	 * @param xll
	 * @param yll
	 */
	public AscRaster( double cellsize, double xll, double yll )
	{
		this();
		setCellsize( cellsize );
		setXll( xll );
		setYll( yll );
	}

	/**
	 * Creates a raster from the given data
	 * @param data
	 * @param cellsize
	 * @param xll
	 * @param yll
	 */
	public AscRaster( double[][] data, double cellsize, double xll, double yll )
	{
		this(cellsize, xll, yll);
		setData( data );
	}

	/**
	 * Creates a raster from the given data
	 * @param data
	 * @param cellsize
	 * @param xll
	 * @param yll
	 */
	public AscRaster( int[][] data, double cellsize, double xll, double yll )
	{
		this(cellsize, xll, yll);
		setData( data );
	}

	public static AscRaster getTempRaster( double[][] data, double xll, double yll, double size )
	{
		return getTempRaster( data, xll, yll, size, DEFAULT_NODATA );
	}

	public static AscRaster getTempRaster( double[][] data, double xll, double yll, double size, String ndata )
	{
		AscRaster a = new AscRaster();
		a.data = data;
		a.xll = xll;
		a.yll = yll;
		a.cellsize = size;
		a.NDATA = ndata;
		a.rows = data.length;
		a.cols = data[0].length;
		return a;
	}

	/**
	 * Sets the parameters of this raster (rows, columns, corner, cellsize, NDATA etc)
	 * to be the same as the other raster. This includes initialising the data array
	 * with NDATAs
	 * @param other
	 */
	public void init( AscRaster other )
	{
		xll = other.xll;
		yll = other.yll;
		cellsize = other.cellsize;
		NDATA = other.NDATA;
		setSize( other.getRows(), other.getCols() );
	}

	/**
	 * Initialises the Raster to Double.NaN (i.e. NDATA)
	 */
	public void initData()
	{
		initData( Double.NaN );
	}

	/**
	 * Initialises the raster so the entire data array contains 'value'
	 * @param value
	 */
	public void initData( double value )
	{
		data = new double[rows][];
		for( int i = 0; i < rows; i++ )
		{
			data[i] = new double[cols];
			Arrays.fill( data[i], value );
		}
	}

	/**
	 * Returns the underlying data array - NOTE: this is *NOT* a copy, if you
	 * change it, you change the data
	 * @return the data array
	 */
	public double[][] getData()
	{
		return data;
	}

	public void setValue( int row, int column, double value )
	{
		if( row < rows && column < cols )
			data[row][column] = value;
	}

	public double getValue( int row, int column )
	{
		if( row < rows && column < cols )
			return data[row][column];
		return Double.NaN;
	}

	/**
	 * Copies the given data into the underlying data array. Also updates the number of rows and columns.
	 * @param data
	 */
	public void setData( double[][] data )
	{
		rows = data.length;
		cols = data[0].length;
		initData();
		for( int i = 0; i < rows; i++ )
			for( int j = 0; j < cols; j++ )
				this.data[i][j] = data[i][j];
	}

	/**
	 * Copies the given data into the underlying data array. Also updates the number of rows and columns.
	 * @param data
	 */
	public void setData( int[][] data )
	{
		rows = data.length;
		cols = data[0].length;
		initData();
		for( int i = 0; i < rows; i++ )
			for( int j = 0; j < cols; j++ )
				this.data[i][j] = data[i][j];
	}

	public double getXll()
	{
		return xll;
	}

	public void setXll( double xll )
	{
		this.xll = xll;
	}

	public double getYll()
	{
		return yll;
	}

	public void setYll( double yll )
	{
		this.yll = yll;
	}

	public double getCellsize()
	{
		return cellsize;
	}

	public void setCellsize( double cellsize )
	{
		this.cellsize = cellsize;
	}

	public int getCols()
	{
		return cols;
	}

	public int getRows()
	{
		return rows;
	}

	/**
	 * Sets the size of the raster, and also initialises the array
	 * with NDATA
	 * @param nrows
	 * @param columns
	 */
	public void setSize( int nrows, int columns )
	{
		this.rows = nrows;
		this.cols = columns;
		initData();
	}

	public String getNDATA()
	{
		return NDATA;
	}

	public void setNDATA( String nDATA )
	{
		NDATA = nDATA;
	}
}
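A minimal sketch exercising the removed AscRaster holder, assuming it is still on the classpath in the same package; the 2x2 grid, corner and cell size are invented values.

package org.gcube.dataanalysis.geo.utils;

// Hedged sketch: NODATA is represented internally as Double.NaN, as the javadoc above states.
public class AscRasterSketch {
	public static void main(String[] args) {
		double[][] grid = { { 1.0, 2.0 }, { Double.NaN, 4.0 } };      // NaN stands for NODATA
		AscRaster r = AscRaster.getTempRaster(grid, -180.0, -90.0, 0.5);
		System.out.println(r.getValue(1, 0)); // NaN: the internal NODATA representation
		System.out.println(r.getValue(5, 5)); // NaN again: out-of-range lookups are absorbed
	}
}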
@@ -1,122 +0,0 @@
package org.gcube.dataanalysis.geo.utils;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * A class which reads an ESRI ASCII raster file into a Raster
 * @author dmrust
 *
 */
public class AscRasterReader
{
	String noData = AscRaster.DEFAULT_NODATA;
	Pattern header = Pattern.compile( "^(\\w+)\\s+(-?\\d+(.\\d+)?)");

	public static void main( String[] args ) throws IOException
	{
		AscRasterReader rt = new AscRasterReader();
		rt.readRaster( "data/test.asc" );
	}

	/**
	 * The most useful method - reads a raster file, and returns a Raster object.
	 *
	 * Throws standard IOExceptions associated with opening and reading files, and
	 * RuntimeExceptions if there are problems with the file format
	 * @param filename
	 * @return the Raster object read in from the file
	 * @throws IOException
	 */
	public AscRaster readRaster( String filename ) throws IOException, RuntimeException
	{
		AscRaster raster = new AscRaster();
		BufferedReader input = null;
		URLConnection urlConn = null;
		if (filename.startsWith("http")){
			URL fileurl = new URL(filename);
			urlConn = fileurl.openConnection();
			urlConn.setConnectTimeout(60000);
			urlConn.setReadTimeout(60000);
			urlConn.setAllowUserInteraction(false);
			urlConn.setDoOutput(true);
			input = new BufferedReader(new InputStreamReader(urlConn.getInputStream()));
		}
		else
			input = new BufferedReader( new FileReader( filename ) );

		while( input.ready() )
		{
			String line = input.readLine();
			Matcher headMatch = header.matcher( line );
			//Match all the heads
			if( headMatch.matches() )
			{
				String head = headMatch.group( 1 );
				String value = headMatch.group( 2 );
				if( head.equalsIgnoreCase( "nrows" ) )
					raster.rows = Integer.parseInt( value );
				else if ( head.equalsIgnoreCase( "ncols" ) )
					raster.cols = Integer.parseInt( value );
				else if ( head.equalsIgnoreCase( "xllcorner" ) )
					raster.xll = Double.parseDouble( value );
				else if ( head.equalsIgnoreCase( "yllcorner" ) )
					raster.yll = Double.parseDouble( value );
				else if ( head.equalsIgnoreCase( "NODATA_value" ) )
					raster.NDATA = value;
				else if ( head.equals( "cellsize" ) )
					raster.cellsize = Double.parseDouble( value );
				else
					System.out.println( "Unknown setting: " + line );
			}
			else if( line.matches( "^-?\\d+.*" ))
			{
				//System.out.println( "Processing data section");
				//Check that data is set up!
				//Start processing numbers!
				int row = 0;
				double[][] data = new double[raster.rows][];
				while( true )
				{
					//System.out.println( "Got data row: " + line );
					String[] inData = line.split( "\\s+" );
					double[] numData = new double[raster.cols];
					if( inData.length != numData.length ) throw new RuntimeException( "Wrong number of columns: Expected " +
							raster.cols + " got " + inData.length + " for line \n" + line );
					for( int col = 0; col < raster.cols; col ++ )
					{
						if( inData[col].equals( noData )) numData[col] = Double.NaN;
						else numData[col] = Double.parseDouble( inData[col] );
					}
					data[row] = numData;
					//Ugly backward input structure...
					if( input.ready() ) line = input.readLine();
					else break;
					row++;
				}
				if( row != raster.rows - 1)
					throw new RuntimeException( "Wrong number of rows: expected " + raster.rows + " got " + (row+1) );
				raster.data = data;
			}
			else
			{
				if( line.length() >= 0 && ! line.matches( "^\\s*$" ))
					System.out.println( "Unknown line: " + line);
			}
		}

		if (input!=null){
			input.close();
			if (urlConn!=null && urlConn.getInputStream()!=null)
				urlConn.getInputStream().close();
		}
		return raster;
	}
}
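A small standalone sketch of the header regex used by the removed AscRasterReader above, applied to a sample header line (the sample line is invented):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Hedged sketch: the same header pattern as AscRasterReader, matched against one line.
public class AscHeaderSketch {
	public static void main(String[] args) {
		Pattern header = Pattern.compile("^(\\w+)\\s+(-?\\d+(.\\d+)?)");
		Matcher m = header.matcher("cellsize 0.5");
		if (m.matches())
			System.out.println(m.group(1) + " = " + m.group(2)); // cellsize = 0.5
	}
}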
@@ -1,31 +1,3 @@
/*
 * Copyright (c) 2009 The University of Reading
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the University of Reading, nor the names of the
 *    authors or contributors may be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.gcube.dataanalysis.geo.utils;

import java.util.ArrayList;

@@ -45,9 +17,6 @@ import ucar.unidata.geoloc.ProjectionPointImpl;

/**
 * This class wraps the GeoTools/GeoAPI coordinate reference system methods, providing a set of convenience methods such as transformations and validity checks.
 *
 * @todo this object is immutable and could be re-used.
 * @author Jon
 */
public final class CrsHelper {

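A hedged sketch of the kind of CRS transformation CrsHelper wraps, using plain GeoTools/JTS calls rather than the CrsHelper API itself; the EPSG codes, the sample point and the lenient-transform flag are illustrative assumptions, and the GeoTools axis-order behaviour for EPSG:4326 may differ by version.

import org.geotools.geometry.jts.JTS;
import org.geotools.referencing.CRS;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.referencing.operation.MathTransform;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.Point;

// Hedged sketch: reprojecting one point between two assumed CRSs with GeoTools.
public class CrsTransformSketch {
	public static void main(String[] args) throws Exception {
		CoordinateReferenceSystem wgs84 = CRS.decode("EPSG:4326");
		CoordinateReferenceSystem webMercator = CRS.decode("EPSG:3857");
		MathTransform tr = CRS.findMathTransform(wgs84, webMercator, true); // lenient
		Point p = new GeometryFactory().createPoint(new Coordinate(44.0, 12.0)); // axis order assumed lat,lon
		System.out.println(JTS.transform(p, tr));
	}
}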
@@ -1,117 +0,0 @@
package org.gcube.dataanalysis.geo.utils;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.Writer;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.List;

public class Downloader {

	public static void main(String[] args) throws Exception {
		List<String> hfiles = getfiles("netcdf_data.html", "fileServer", "http", ".nc");
		System.out.println(hfiles);
		System.out.println("Number of links:" + hfiles.size());
		List<String> files = enrichfiles(hfiles);
		System.out.println(files);
		buildwgetFile("wgetfiles.sh", hfiles, files);
	}

	public static void buildwgetFile(String filename, List<String> hfiles, List<String> files) throws Exception {
		int size = hfiles.size();
		BufferedWriter bw = new BufferedWriter(new FileWriter(new File(filename)));
		for (int i = 0; i < size; i++) {
			bw.write(buildGetterString(hfiles.get(i), files.get(i)) + System.getProperty("line.separator"));
		}

		bw.close();
	}

	public static List<String> enrichfiles(List<String> files) throws Exception {
		List<String> arrayfile = new ArrayList<String>();
		for (String fileh : files) {
			String file = fileh.substring(fileh.lastIndexOf("/") + 1);
			if (file.contains("temperature"))
				file = buildTopicString(file, "ENVIRONMENT", "OCEANS");
			else if (file.contains("salinity"))
				file = buildTopicString(file, "ENVIRONMENT", "OCEANS");
			else if (file.contains("oxygen"))
				file = buildTopicString(file, "ENVIRONMENT", "BIOTA");
			else if (file.contains("phosphate"))
				file = buildTopicString(file, "ENVIRONMENT", "BIOTA");
			else if (file.contains("nitrate"))
				file = buildTopicString(file, "ENVIRONMENT", "BIOTA");
			else if (file.contains("silicate"))
				file = buildTopicString(file, "ENVIRONMENT", "BIOTA");
			else
				file = buildTopicString(file, "ENVIRONMENT", "OCEANS");
			arrayfile.add(file);
		}
		return arrayfile;
	}

	public static List<String> getfiles(String filename, String criterion, String initselection, String endselection) throws Exception {
		List<String> files = new ArrayList<String>();
		BufferedReader br = new BufferedReader(new FileReader(new File(filename)));
		String line = br.readLine();
		while (line != null) {
			if (line.contains(criterion)) {
				String cut = line.substring(line.indexOf(initselection), line.indexOf(endselection) + endselection.length());
				files.add(cut);
			}
			line = br.readLine();
		}
		br.close();
		return files;
	}

	public static String buildGetterString(String httpstring, String filename) {
		return String.format("wget --output-document=%1$s %2$s", filename, httpstring);
	}

	public static String buildTopicString(String filename, String... topics) {
		int idx = filename.indexOf(".n");
		String file = filename.substring(0, idx);
		String ext = filename.substring(idx);
		for (String topic : topics) {
			file = file + "_" + topic;
		}
		return file + "_" + ext;
	}

	public static void downloadData(String endpoint, String file) throws Exception {
		// Send data
		String urlStr = endpoint;
		URL url = new URL(urlStr);
		URLConnection conn = url.openConnection();
		InputStreamReader isr = new InputStreamReader(conn.getInputStream());
		FileWriter fw = new FileWriter(new File(file));
		pipe(isr, fw);
		fw.close();
		isr.close();
	}

	private static void pipe(Reader reader, Writer writer) throws IOException {
		char[] buf = new char[1024];
		int read = 0;
		double bytes = 0;
		long i = 0;
		while ((read = reader.read(buf)) >= 0) {
			writer.write(buf, 0, read);
			bytes = (bytes + read);
			if (i % 1000 == 0)
				System.out.println("B:" + bytes);
			i++;
		}

		writer.flush();
	}

}
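A minimal sketch of the wget-line construction used by the removed Downloader above, applied to a single hypothetical URL (the URL and file name are invented, not taken from the original commit):

// Hedged sketch: same String.format pattern as Downloader.buildGetterString.
public class WgetLineSketch {
	public static void main(String[] args) {
		String url = "http://example.org/thredds/fileServer/netcdf/temperature_annual.nc"; // hypothetical
		String file = url.substring(url.lastIndexOf("/") + 1);
		System.out.println(String.format("wget --output-document=%1$s %2$s", file, url));
	}
}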
@@ -1,407 +0,0 @@
package org.gcube.dataanalysis.geo.utils;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;

import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.geo.meta.OGCFormatter;

import ucar.ma2.ArrayByte;
import ucar.ma2.ArrayDouble;
import ucar.ma2.Index;
import ucar.ma2.IndexIterator;

import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LinearRing;
import com.vividsolutions.jts.geom.MultiPolygon;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.Polygon;
import com.vividsolutions.jts.geom.PrecisionModel;
import com.vividsolutions.jts.geom.impl.CoordinateArraySequence;
import com.vividsolutions.jts.geom.util.GeometryTransformer;
import com.vividsolutions.jts.operation.overlay.snap.GeometrySnapper;

public class EnvDataExplorer {

	private static String callWFS(String geoServer, String layer, double x, double y) {

		float tolerance = 0.25f;
		String wfsURL = OGCFormatter.getWfsUrl(geoServer, layer, OGCFormatter.pointToBoundingBox(x, y, tolerance), 1, "json");
		AnalysisLogger.getLogger().debug("EnvDataExplorer-> Requesting URL: " + wfsURL);
		String returned = null;
		try {
			returned = HttpRequest.sendGetRequest(wfsURL, null);
		} catch (Exception e) {
			AnalysisLogger.getLogger().debug("EnvDataExplorer-> ERROR " + e.getLocalizedMessage());
		}
		if (returned != null)
			// AnalysisLogger.getLogger().debug("EnvDataExplorer-> Found Intersection: " + returned);
			AnalysisLogger.getLogger().debug("EnvDataExplorer-> Found Intersection");
		else
			AnalysisLogger.getLogger().debug("EnvDataExplorer-> Found Nothing!");

		return returned;
	}

	private static String callWFS(String geoServer, String layer, double xL, double yL, double xR, double yR) {

		// String wfsURL = OGCFormatter.getWfsUrl(geoServer, layer, OGCFormatter.buildBoundingBox(xL, yL, xR, yR), 0, "json");
		// there is a bug in the WFS retrieval by bounding box: y must be in the range -180;180, so I preferred to take all the features
		String wfsURL = OGCFormatter.getWfsUrl(geoServer, layer, null, 0, "json");
		AnalysisLogger.getLogger().debug("EnvDataExplorer-> Requesting URL: " + wfsURL);
		String returned = null;
		try {
			returned = HttpRequest.sendGetRequest(wfsURL, null);
		} catch (Exception e) {
			AnalysisLogger.getLogger().debug("EnvDataExplorer-> ERROR " + e.getLocalizedMessage());
		}
		if (returned != null)
			// AnalysisLogger.getLogger().debug("EnvDataExplorer-> Found Intersection: " + returned);
			AnalysisLogger.getLogger().debug("EnvDataExplorer-> Found Intersection");
		else
			AnalysisLogger.getLogger().debug("EnvDataExplorer-> Found Nothing!");

		return returned;
	}

	public static LinkedHashMap<String, Double> getFeatures(String geoserver, String layer, double x, double y) {
		try {
			AnalysisLogger.getLogger().debug("Calling WFS towards Geoserver:" + geoserver + " and layer:" + layer);
			String jsonString = callWFS(geoserver, layer, x, y);
			LinkedHashMap<String, Object> map = JsonMapper.parse(jsonString);
			LinkedHashMap<String, String> mapout = (LinkedHashMap<String, String>) ((HashMap<String, Object>) map.get("features")).get("properties");
			LinkedHashMap<String, Double> values = new LinkedHashMap<String, Double>();
			for (String key : mapout.keySet()) {
				values.put(key, Double.parseDouble(mapout.get(key)));
			}
			return values;
		} catch (Exception e) {
			AnalysisLogger.getLogger().debug("EnvDataExplorer-> Error in getting properties");
			return null;
		}
	}

	public static List<FeaturedPolygon> getFeatures(String geoserver, String layer, double xL, double yL, double xR, double yR) {
		try {
			AnalysisLogger.getLogger().debug("Calling WFS towards Geoserver:" + geoserver + " and layer:" + layer);
			String jsonString = callWFS(geoserver, layer, xL, yL, xR, yR);
			// System.out.println("JSON:"+jsonString);
			LinkedHashMap<String, Object> map = JsonMapper.parse(jsonString);
			List<FeaturedPolygon> fpolygons = new ArrayList<FeaturedPolygon>();
			FeaturedPolygon poly = null;
			for (String key : map.keySet()) {
				if (key.contains("features")) {
					HashMap<String, Object> propertiesMap = (HashMap<String, Object>) map.get(key);

					// cycle on all the properties
					for (String properties : propertiesMap.keySet()) {
						if (properties.contains("properties")) {
							if (poly == null)
								poly = new FeaturedPolygon();

							LinkedHashMap<String, String> props = (LinkedHashMap<String, String>) propertiesMap.get(properties);
							// fill the properties of the fpolygon
							for (String keyprop : props.keySet()) {
								try {
									// fulfill the FeaturedPolygon
									String value = props.get(keyprop);
									try {
										String lowcaseprop = keyprop.toLowerCase();
										if ((poly.value == null) && !lowcaseprop.startsWith("id") && !lowcaseprop.endsWith("id"))
											poly.setValue(Double.parseDouble(value));
										else
											poly.addFeature(keyprop, value);
									} catch (Exception e2) {
										poly.addFeature(keyprop, value);
									}
								} catch (Exception e) {
								}
							}
						} else if (properties.contains("geometry") && !properties.contains("geometry_")) {

							if (poly == null)
								poly = new FeaturedPolygon();
							else if (poly.p != null) {
								if (poly.value == null)
									poly.value = Double.NaN;
								fpolygons.add(poly);
								poly = new FeaturedPolygon();
							}

							LinkedHashMap<String, String> props = (LinkedHashMap<String, String>) propertiesMap.get(properties);
							List<double[]> coords = WFS2Coordinates(props.toString());
							Geometry p = buildGeometryFromCoordinates(coords);

							if (p != null) {
								poly.setPolygon(p);
								// AnalysisLogger.getLogger().trace("Setting polygon p");
								// AnalysisLogger.getLogger().trace(p);
							}
							/*
							 * GeometryFactory factory = new GeometryFactory(new PrecisionModel(), 4326); Polygon p = null; if (coords != null) {
							 *
							 * Coordinate[] coordarray = new Coordinate[coords.size()]; int i = 0; for (double[] pair : coords) { coordarray[i] = new Coordinate(pair[0], pair[1]);
							 *
							 * i++; } // TODO: build a multipoly if the ring is not closed! CoordinateArraySequence coordseq = new CoordinateArraySequence(coordarray); LinearRing ring = new LinearRing(coordseq, factory); p = new Polygon(ring, new LinearRing[] {}, factory); } poly.setPolygon(p);
							 */
						}

					}
				}
			}// end for on all the wfs

			if (poly != null) {
				if (poly.value == null)
					poly.value = Double.NaN;
				fpolygons.add(poly);
			}
			return fpolygons;
		} catch (Exception e) {
			AnalysisLogger.getLogger().debug("EnvDataExplorer-> Error in getting properties");
			e.printStackTrace();
			return null;
		}
	}

	public static Geometry buildGeometryFromCoordinates(List<double[]> coords) {
		Geometry p = null;
		GeometryFactory factory = new GeometryFactory(new PrecisionModel(), 4326);

		int idx = -1;
		List<Coordinate> coordinatesArray = new ArrayList<Coordinate>();
		List<Polygon> polys = new ArrayList<Polygon>();
		List<LinearRing> holespolys = new ArrayList<LinearRing>();
		int j = 1;
		int i = 1;

		for (double[] pair : coords) {
			Coordinate coordPair = new Coordinate(pair[0], pair[1]);
			if ((idx = coordinatesArray.indexOf(coordPair)) >= 0) {
				// System.out.println("List Contains: "+coordinatesArray.get(idx));
				coordinatesArray.add(coordPair);
				if (idx == 0) {
					// System.out.println("Switching polygon: "+j+" "+coordinatesArray.get(idx));
					j++;
					i = 1;
					Polygon pp = sequence2Polygon(factory, coordinatesArray.subList(idx, coordinatesArray.size()), holespolys);

					// System.out.println(pp);
					polys.add(pp);
					coordinatesArray = new ArrayList<Coordinate>();
				} else {
					// System.out.println("Linear Ring "+i + " "+coordinatesArray.get(idx));
					i++;
					LinearRing ring = sequence2Ring(factory, coordinatesArray.subList(idx, coordinatesArray.size()));
					holespolys.add(ring);
					coordinatesArray = coordinatesArray.subList(0, idx);
				}
			} else
				coordinatesArray.add(coordPair);

		}

		// build a multipoly if the ring is not closed!

		if (polys.size() > 0) {
			// cut the holes
			List<Polygon> polysnoholes = new ArrayList<Polygon>();
			for (Polygon pp : polys) {

				boolean found = false;
				int h = 0;
				for (Polygon polnh : polysnoholes) {
					boolean covers = false;

					try {
						covers = polnh.covers(pp);
					} catch (Exception e) {
						AnalysisLogger.getLogger().debug("Error in calculating superpositions: Snapping the geometries");
						double snapTol = GeometrySnapper.computeOverlaySnapTolerance(polnh, pp);
						pp = (Polygon) selfSnap(pp, snapTol);
						polnh = (Polygon) selfSnap(polnh, snapTol);
						AnalysisLogger.getLogger().debug("Geometries have been snapped");
						covers = polnh.covers(pp);
					}

					if (covers) {
						// System.out.println("found hole! "+pp+" vs "+polnh);
						addDifference(h, polysnoholes, polnh, pp);
						found = true;
					} else if (pp.covers(polnh)) {
						// polysnoholes.set(h, (Polygon) pp.difference(polnh));
						addDifference(h, polysnoholes, pp, polnh);
						found = true;
					}
					h++;
				}
				if (!found)
					polysnoholes.add(pp);
			}
			Polygon[] polyrawarray = polysnoholes.toArray(new Polygon[polysnoholes.size()]);
			p = new MultiPolygon(polyrawarray, factory);
		}

		return p;
	}

	private static Geometry selfSnap(Geometry g, double snapTolerance)
	{
		GeometrySnapper snapper = new GeometrySnapper(g);
		Geometry snapped = snapper.snapTo(g, snapTolerance);
		// need to "clean" snapped geometry - use buffer(0) as a simple way to do this
		Geometry fix = snapped.buffer(0);
		return fix;
	}

	private static void addDifference(int h, List<Polygon> polysnoholes, Polygon polnh, Polygon pp) {

		Geometry mp = polnh.difference(pp);
		if (mp instanceof com.vividsolutions.jts.geom.Polygon)
			polysnoholes.set(h, (Polygon) mp);
		else {
			MultiPolygon mup = (MultiPolygon) mp;
			int innerpolygons = mup.getNumGeometries();
			for (int k = 0; k < innerpolygons; k++) {
				Polygon ip = (Polygon) mup.getGeometryN(k);
				polysnoholes.set(h, ip);
			}
		}

	}

	private static LinearRing sequence2Ring(GeometryFactory factory, List<Coordinate> coordinatesArray) {
		// System.out.println(coordinatesArray);
		Coordinate[] coordrawarray = coordinatesArray.toArray(new Coordinate[coordinatesArray.size()]);
		CoordinateArraySequence coordseq = new CoordinateArraySequence(coordrawarray);
		LinearRing ring = new LinearRing(coordseq, factory);

		return ring;
	}

	private static Polygon sequence2Polygon(GeometryFactory factory, List<Coordinate> coordinatesArray, List<LinearRing> holespolys) {
		// System.out.println(coordinatesArray);
		Coordinate[] coordrawarray = coordinatesArray.toArray(new Coordinate[coordinatesArray.size()]);
		LinearRing[] holes = holespolys.toArray(new LinearRing[holespolys.size()]);

		CoordinateArraySequence coordseq = new CoordinateArraySequence(coordrawarray);
		LinearRing ring = new LinearRing(coordseq, factory);

		Polygon p = new Polygon(ring, holes, factory);
		return p;
	}

	public static List<double[]> WFS2Coordinates(String wfsgeometry) {

		// geometry935133b1-ba3c-493d-8e18-6fb496ced995={type=MultiPolygon, coordinates={966a275c-23aa-4a43-a943-7e1c7eaf5d65=[[[1.5,125.00000000000011],[1.5,124.5],[2.000000000000057,124.5],[2.000000000000057,125.00000000000011],[1.5,125.00000000000011]]]}},
		String[] coordinatePairs = null;
		List<double[]> dpairs = new ArrayList<double[]>();
		if (wfsgeometry.toLowerCase().contains("multipolygon")) {
			String coordString = "coordinates=";
			String coordinates = wfsgeometry.substring(wfsgeometry.indexOf(coordString) + coordString.length());
			coordinates = coordinates.substring(coordinates.indexOf("=") + 1);
			if (coordinates.contains("=")) {
				coordinates = coordinates.replaceAll("([A-Za-z0-9]|-|_)+=", "");
				coordinates = coordinates.replaceAll("\\],( )+\\[", "],[");
			}
			coordinatePairs = coordinates.split("\\],\\[");
			for (String coord : coordinatePairs) {
				coord = coord.replaceAll("(\\[|\\]|\\}|\\{|)", "");
				String[] coordpair = coord.split(",");
				double[] dd = new double[2];
				// invert the coordinates, as the final order must be long,lat
				dd[1] = Double.parseDouble(coordpair[0]);
				dd[0] = Double.parseDouble(coordpair[1]);
				dpairs.add(dd);
			}
		}
		return dpairs;
	}

	public static void main1(String[] args) {

		String geom = "{type=MultiPolygon, coordinates={cce4daf3-966e-4b5f-adea-f88ea2b93d03=[[[-16,-146.49999999999997],[-16,-146.99999999999994],[-15.5,-146.99999999999994],[-15.5,-146.49999999999997],[-16,-146.49999999999997]]]}}";
		List<double[]> coords = WFS2Coordinates(geom);

		GeometryFactory factory = new GeometryFactory(new PrecisionModel(), 4326);
		// GeometryFactory factory = new GeometryFactory(new PrecisionModel(), 0);
		/*
		 * CoordinateArraySequence coords = new CoordinateArraySequence(new Coordinate[] { new Coordinate(12.0, 34.23), new Coordinate(12.000, 54.555), new Coordinate(7, 8), new Coordinate(12.0, 34.23) }); LinearRing ring = new LinearRing(coords, factory); Polygon p = new Polygon(ring, null, factory); CoordinateArraySequence pcoords = new CoordinateArraySequence(new Coordinate[] { new Coordinate(12.0, 34.23),});
		 */
		// CoordinateArraySequence coords = new CoordinateArraySequence(new Coordinate[] { new Coordinate(1.5, 125.00000000000011), new Coordinate(1.5, 124.5), new Coordinate(2.000000000000057, 124.5), new Coordinate(2.000000000000057, 125.00000000000011), new Coordinate(1.5, 125.00000000000011) });

		if (coords != null) {
			Coordinate[] coordarray = new Coordinate[coords.size()];
			int i = 0;
			for (double[] pair : coords) {
				coordarray[i] = new Coordinate(pair[0], pair[1]);
				i++;
			}
			CoordinateArraySequence coordseq = new CoordinateArraySequence(coordarray);
			LinearRing ring = new LinearRing(coordseq, factory);
			Polygon p = new Polygon(ring, new LinearRing[] {}, factory);
			// CoordinateArraySequence pcoords = new CoordinateArraySequence(new Coordinate[] { new Coordinate(-16,-146.49999999999997), });
			CoordinateArraySequence pcoords = new CoordinateArraySequence(new Coordinate[] { new Coordinate(-150, -16), });
			Point po = new Point(pcoords, factory);
			// po = p.getCentroid();
			System.out.println("contains: " + p.contains(po) + " boundary: " + p.covers(po));
		}
	}

	public static ArrayDouble.D3 arrayByte3DArrayDouble(ArrayByte bytes) {
		int[] shapeD = bytes.getShape();
		int zD = shapeD[0];
		int yD = shapeD[1];
		int xD = shapeD[2];

		AnalysisLogger.getLogger().debug(xD + "X" + yD + "X" + zD + "=" + (xD * yD * zD));
		ArrayDouble.D3 doublea = new ArrayDouble.D3(zD, yD, xD);

		IndexIterator iterator = bytes.getIndexIterator();
		for (int x = 0; x < xD; x++) {
			for (int y = 0; y < yD; y++) {
				for (int z = 0; z < zD; z++) {
					Byte bytex = (Byte) iterator.next();
					doublea.set(z, y, x, bytex.doubleValue());
				}
			}
		}

		return doublea;
	}

	public static ArrayDouble.D2 arrayByte2DArrayDouble(ArrayByte bytes) {
		int[] shapeD = bytes.getShape();
		int yD = shapeD[0];
		int xD = shapeD[1];
		AnalysisLogger.getLogger().debug(xD + "X" + yD + "=" + (xD * yD));
		ArrayDouble.D2 doublea = new ArrayDouble.D2(yD, xD);

		IndexIterator iterator = bytes.getIndexIterator();
		for (int x = 0; x < xD; x++) {
			for (int y = 0; y < yD; y++) {
				Byte bytex = (Byte) iterator.next();
				doublea.set(y, x, bytex.doubleValue());
			}
		}
		return doublea;
	}

	public static void main(String[] args) {
String q = "[[[-10.0011869534696,151.288335840039],[-10.0353384533966,151.27859643813],[-10.0228061679999,151.308700562],[-10.0011869534696,151.288335840039]]], e3c47901-3de5-45d2-a272-c6f7d5df1dec=[[[-8.54674625399991,150.53036499],[-8.83403205899992,150.287445068],[-9.20889866086486,150.195933942647],[-9.20555999999993,150.21039],[-9.20777999999995,150.23218],[-9.27360999999991,150.33095],[-9.38638999999995,150.37717],[-9.39873372345699,150.375441317138],[-9.37888717699991,150.41633606],[-9.64140796699991,150.411376953],[-9.68103313399996,150.684051514],[-9.79481071047286,150.758883440934],[-9.74832999999995,150.75027],[-9.73082999999991,150.74884],[-9.70784999999995,150.76262],[-9.7194399999999,150.78802],[-9.73138999999991,150.80304],[-9.74693999999994,150.82163],[-9.81916999999993,150.90026],[-9.85235999999992,150.93539],[-9.89360999999991,150.96274],[-9.98527999999993,151.03055],[-9.99693999999994,151.03943],[-10.0169399999999,151.05996],[-10.0244399999999,151.07303],[-10.0466,151.11809],[-10.0413899999999,151.13666],[-10.03014,151.14818],[-10.0194499999999,151.14875],[-10.0033999999999,151.13893],[-9.98916999999994,151.13637],[-9.94207999999991,151.18817],[-9.93666999999993,151.20053],[-9.93091343037411,151.222140060489],[-9.68598556499995,150.991424561],[-9.45813846599992,150.936889648],[-9.30954170199993,151.03604126],[-9.13122558599991,150.961669922],[-8.80926608999994,151.055862427],[-8.66848054747773,151.099704833311],[-8.63888999999995,151.10107],[-8.56673125859819,151.063276911059],[-8.52198028599992,150.922012329],[-8.54674625399991,150.53036499]],[[-9.43832999999995,150.66666],[-9.44124999999991,150.67997],[-9.42805999999996,150.73191],[-9.42055999999991,150.7462],[-9.40541999999993,150.7615],[-9.41471999999993,150.77777],[-9.43277999999992,150.80442],[-9.45638999999994,150.8283],[-9.52319999999992,150.88692],[-9.64471999999995,150.93219],[-9.65916999999996,150.93055],[-9.67082999999991,150.92163],[-9.68207999999993,150.90387],[-9.67221999999992,150.89177],[-9.67916999999994,150.87523],[-9.71805999999992,150.84692],[-9.68555999999995,150.84412],[-9.65860999999995,150.80163],[-9.66249999999991,150.76331],[-9.66332999999991,150.69135],[-9.66291999999993,150.65804],[-9.65388999999993,150.62274],[-9.62332999999995,150.51443],[-9.5836099999999,150.4905],[-9.44082999999995,150.42746],[-9.4313899999999,150.42331],[-9.41471999999993,150.41999],[-9.40110999999996,150.41999],[-9.38943999999992,150.4219],[-9.37666999999993,150.42609],[-9.35707999999994,150.43913],[-9.33770999999996,150.48044],[-9.34124999999994,150.5022],[-9.35166999999995,150.53028],[-9.37054999999992,150.57135],[-9.38499999999993,150.59802],[-9.40110999999996,150.62149],[-9.4233299999999,150.63734],[-9.43832999999995,150.66666]]], c905ab63-23c2-4587-bdd6-d6d37a56be51=[[[-8.58588343092737,151.123734225448],[-8.59127089890423,151.123748898655],[-8.58637142199996,151.125274658],[-8.58588343092737,151.123734225448]]], 8471299d-4904-4a10-ab00-c6cc5605bf3b=[[[-10.1228941076499,151.06827675758],[-10.1141699999999,151.02582],[-10.1108299999999,150.99831],[-10.1127799999999,150.98331],[-10.1127665622499,150.982996372512],[-10.1466360089999,151.011245728],[-10.1228941076499,151.06827675758]]], d0a0b923-b401-4cec-ac35-c3d8c837bffc=[[[-10.0506772730004,150.931209804608],[-10.0041699999999,150.91553],[-9.92666999999994,150.87774],[-9.83888999999993,150.8269],[-9.80718113528387,150.767019514441],[-10.0277585979999,150.912094116],[-10.0506772730004,150.931209804608]]]}}";
		q = q.replaceAll("([A-Za-z0-9]|-|_)+=", "");
		// q = q.replaceAll("\\], .*=\\[", "],[");
		System.out.println(q);
		q = q.replaceAll("\\],( )+\\[", "],[");
		System.out.println(q);
	}
}
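A simplified standalone sketch of the ring-splitting idea behind buildGeometryFromCoordinates above: when a coordinate repeats the first one of the current sequence, a ring is closed. This version only detects closure on the first coordinate and skips the hole handling and the JTS objects; the coordinate list is invented.

import java.util.ArrayList;
import java.util.List;

// Hedged sketch: counting closed rings in a flat coordinate stream.
public class RingSplitSketch {
	public static void main(String[] args) {
		double[][] coords = { { 0, 0 }, { 0, 1 }, { 1, 1 }, { 0, 0 }, { 5, 5 }, { 5, 6 }, { 6, 6 }, { 5, 5 } };
		List<double[]> current = new ArrayList<double[]>();
		int rings = 0;
		for (double[] c : coords) {
			current.add(c);
			if (current.size() > 1 && c[0] == current.get(0)[0] && c[1] == current.get(0)[1]) {
				rings++;                          // first coordinate repeated: ring closed
				current = new ArrayList<double[]>();
			}
		}
		System.out.println("closed rings: " + rings); // 2
	}
}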
@@ -1,28 +0,0 @@
package org.gcube.dataanalysis.geo.utils;

import java.util.LinkedHashMap;

import com.vividsolutions.jts.geom.Geometry;

public class FeaturedPolygon {
	public Geometry p;
	public LinkedHashMap<String, String> features;
	public Double value;

	public FeaturedPolygon(){
	}

	public void setPolygon(Geometry p){
		this.p=p;
	}

	public void setValue(Double v){
		this.value=v;
	}

	public void addFeature(String key,String value){
		if (features==null)
			features = new LinkedHashMap<String, String>();
		features.put(key,value);
	}
}
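FeaturedPolygon is a plain value holder; a minimal sketch of how it is populated, assuming it is on the classpath in the same package (the attribute name and value below are invented).

package org.gcube.dataanalysis.geo.utils;

// Hedged sketch: filling the FeaturedPolygon holder defined above.
public class FeaturedPolygonSketch {
	public static void main(String[] args) {
		FeaturedPolygon fp = new FeaturedPolygon();
		fp.setValue(3.5);                  // the numeric value extracted from the WFS properties
		fp.addFeature("layer_name", "ph"); // remaining properties are kept as strings
		System.out.println(fp.value + " " + fp.features);
	}
}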
@@ -10,6 +10,12 @@ import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;

/**
 * Transforms a JSON document into a map of keys and values.
 * Some values can be structured objects (e.g. HashMaps) that will be rendered as strings.
 * @author coro
 *
 */
public class JsonMapper {

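A hedged sketch of the flattening described in the JsonMapper javadoc above, done here with plain Gson calls; this is not the actual JsonMapper implementation, and the sample JSON is invented.

import java.util.LinkedHashMap;
import java.util.Map;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

// Hedged sketch: flattening a JSON object into key/value strings; nested objects become strings.
public class JsonFlattenSketch {
	public static void main(String[] args) {
		String json = "{\"type\":\"Feature\",\"properties\":{\"depth\":\"42.0\"}}";
		JsonObject obj = new JsonParser().parse(json).getAsJsonObject();
		LinkedHashMap<String, String> map = new LinkedHashMap<String, String>();
		for (Map.Entry<String, JsonElement> e : obj.entrySet())
			map.put(e.getKey(), e.getValue().toString());
		System.out.println(map);
	}
}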
@@ -1,32 +0,0 @@
package org.gcube.dataanalysis.geo.utils;

import org.opengis.temporal.TemporalPrimitive;

public class NetCDFTemporalPrimitive implements TemporalPrimitive{

	@Override
	protected Object clone() throws CloneNotSupportedException {
		return super.clone();
	}

	@Override
	public boolean equals(Object obj) {
		return super.equals(obj);
	}

	@Override
	protected void finalize() throws Throwable {
		super.finalize();
	}

	@Override
	public int hashCode() {
		return 1;
	}

	@Override
	public String toString() {
		return "<gmd:extent/>";
	}

}
@@ -1,485 +0,0 @@
package org.gcube.dataanalysis.geo.utils;

import java.io.ByteArrayInputStream;
import java.util.ArrayList;
import java.util.Formatter;
import java.util.LinkedHashMap;
import java.util.List;

import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathFactory;

import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

import ucar.ma2.Array;
import ucar.ma2.ArrayByte;
import ucar.ma2.ArrayDouble;
import ucar.ma2.ArrayFloat;
import ucar.ma2.ArrayInt;
import ucar.ma2.ArrayLong;
import ucar.ma2.Range;
import ucar.ma2.StructureData;
import ucar.ma2.StructureMembers.Member;
import ucar.nc2.constants.FeatureType;
import ucar.nc2.dataset.CoordinateAxis;
import ucar.nc2.dataset.CoordinateAxis1DTime;
import ucar.nc2.dt.GridCoordSystem;
import ucar.nc2.dt.GridDatatype;
import ucar.nc2.dt.grid.GridDataset;
import ucar.nc2.ft.FeatureCollection;
import ucar.nc2.ft.FeatureDataset;
import ucar.nc2.ft.FeatureDatasetFactoryManager;
import ucar.nc2.ft.PointFeatureCollection;
import ucar.nc2.ft.PointFeatureIterator;
import ucar.nc2.ft.point.PointDatasetImpl;
import ucar.nc2.ft.point.standard.StandardPointCollectionImpl;
import ucar.unidata.geoloc.LatLonPointImpl;
import ucar.unidata.geoloc.LatLonRect;

public class ThreddsDataExplorer {

	// http://thredds.research-infrastructures.eu:8080/thredds/catalog/public/netcdf/catalog.xml
	public static String timePrefix = "time:";

	public static List<String> getFiles(String catalogURL) throws Exception {

		String xml = HttpRequest.sendGetRequest(catalogURL, null);
		XPath xpath = XPathFactory.newInstance().newXPath();
		XPathExpression xPathExpression = xpath.compile("//child::*[local-name()='catalog']/child::*[local-name()='dataset']/child::*[local-name()='dataset']");
		InputSource inputSource = new InputSource(new ByteArrayInputStream(xml.getBytes("UTF-8")));
		NodeList nodes = (NodeList) xPathExpression.evaluate(inputSource, XPathConstants.NODESET);
		List<String> fileNames = new ArrayList<String>();
		for (int i = 0; i < nodes.getLength(); i++) {
			Node node = nodes.item(i);
			String name = node.getAttributes().getNamedItem("name").getNodeValue();
			if (name != null)
				fileNames.add(name);
		}

		return fileNames;
	}
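A standalone sketch of the same XPath expression used by getFiles above, run against a tiny inline THREDDS-like catalog; the XML snippet and dataset names are invented for illustration.

import java.io.ByteArrayInputStream;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

// Hedged sketch: selecting the second-level dataset names from a catalog document.
public class CatalogXPathSketch {
	public static void main(String[] args) throws Exception {
		String xml = "<catalog><dataset name='root'><dataset name='ph.asc'/><dataset name='sst.nc'/></dataset></catalog>";
		XPath xpath = XPathFactory.newInstance().newXPath();
		NodeList nodes = (NodeList) xpath.compile(
				"//child::*[local-name()='catalog']/child::*[local-name()='dataset']/child::*[local-name()='dataset']")
				.evaluate(new InputSource(new ByteArrayInputStream(xml.getBytes("UTF-8"))), XPathConstants.NODESET);
		for (int i = 0; i < nodes.getLength(); i++)
			System.out.println(nodes.item(i).getAttributes().getNamedItem("name").getNodeValue()); // ph.asc, sst.nc
	}
}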
|
||||
public static LinkedHashMap<String, Double> retrieveDataFromNetCDF(String openDapLink, String layer, double x, double y, double z) {
|
||||
try {
|
||||
LinkedHashMap<String, Double> map = new LinkedHashMap<String, Double>();
|
||||
if (isGridDataset(openDapLink)) {
|
||||
AnalysisLogger.getLogger().debug("Managing Grid File");
|
||||
return manageGridDataset(layer, openDapLink, x, y, z);
|
||||
}
|
||||
/*
|
||||
* else if (isPointDataset(openDapLink)) { AnalysisLogger.getLogger().debug("Managing Points File"); }
|
||||
*/
|
||||
else
|
||||
AnalysisLogger.getLogger().debug("Warning: the NETCDF file is of an unknown type");
|
||||
return map;
|
||||
} catch (Exception e) {
|
||||
AnalysisLogger.getLogger().debug("ERROR: " + e.getMessage());
|
||||
AnalysisLogger.getLogger().debug(e);
|
||||
e.printStackTrace();
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public static List<Double> retrieveDataFromNetCDF(String openDapLink, String layer, int time, List<Tuple<Double>> triplets, double xL, double xR, double yL, double yR) {
|
||||
try {
|
||||
List<Double> values = new ArrayList<Double>();
|
||||
if (isGridDataset(openDapLink)) {
|
||||
AnalysisLogger.getLogger().debug("Managing Grid File");
|
||||
return manageGridDataset(layer, openDapLink, time, triplets, xL, xR, yL, yR);
|
||||
}
|
||||
/*
|
||||
* else if (isPointDataset(openDapLink)) { AnalysisLogger.getLogger().debug("Managing Points File"); }
|
||||
*/
|
||||
else
|
||||
AnalysisLogger.getLogger().debug("Warning: the NETCDF file is of an unknown type");
|
||||
return values;
|
||||
} catch (Exception e) {
|
||||
AnalysisLogger.getLogger().debug("ERROR: " + e.getMessage());
|
||||
AnalysisLogger.getLogger().debug(e);
|
||||
e.printStackTrace();
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// A GridDatatype is like a specialized Variable that explicitly handles X,Y,Z,T dimensions
|
||||
public static List<Double> manageGridDataset(String layer, String filename, int time, List<Tuple<Double>> triplets, double xL, double xR, double yL, double yR) throws Exception {
|
||||
List<Double> values = new ArrayList<Double>();
|
||||
GridDataset gds = ucar.nc2.dt.grid.GridDataset.open(filename);
|
||||
List<GridDatatype> gridTypes = gds.getGrids();
|
||||
for (GridDatatype gdt : gridTypes) {
|
||||
AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getFullName());
|
||||
if (layer.equalsIgnoreCase(gdt.getFullName())) {
|
||||
AnalysisLogger.getLogger().debug("Found layer " + layer + " inside file");
|
||||
GridDatatype grid = gds.findGridDatatype(gdt.getName());
|
||||
CoordinateAxis zAxis = gdt.getCoordinateSystem().getVerticalAxis();
|
||||
CoordinateAxis xAxis = gdt.getCoordinateSystem().getXHorizAxis();
|
||||
CoordinateAxis yAxis = gdt.getCoordinateSystem().getYHorizAxis();
|
||||
double resolutionZ = 0;
|
||||
try{
|
||||
resolutionZ = Math.abs((double) (zAxis.getMaxValue() - zAxis.getMinValue()) / (double) zAxis.getShape()[0]);
|
||||
AnalysisLogger.getLogger().debug("Zmin:"+ zAxis.getMinValue()+" Zmax:"+zAxis.getMaxValue());
|
||||
}catch(Exception e){};
|
||||
double resolutionX = Math.abs((double) (xAxis.getMaxValue() - xAxis.getMinValue()) / (double) xAxis.getShape()[0]);
|
||||
double resolutionY = Math.abs((double) (yAxis.getMaxValue() - yAxis.getMinValue()) / (double) yAxis.getShape()[0]);
|
||||
|
||||
int tsize = triplets.size();
|
||||
long t01 = System.currentTimeMillis();
|
||||
LatLonRect llr = null;
|
||||
AnalysisLogger.getLogger().debug("Extracting subset...");
|
||||
GridDatatype gdtsub = grid.makeSubset(new Range(time, time), null, llr, 1, 1, 1);
|
||||
Array data = gdtsub.readVolumeData(time); // note order is t, z, y, x
|
||||
int[] shapeD = data.getShape();
|
||||
int zD = 0;
|
||||
int xD = 0;
|
||||
int yD = 0;
|
||||
if (shapeD.length>2)
|
||||
{
|
||||
zD=shapeD[0];
|
||||
yD=shapeD[1];
|
||||
xD=shapeD[2];
|
||||
}
|
||||
|
||||
else if (shapeD.length>1)
|
||||
{
|
||||
yD=shapeD[0];
|
||||
xD=shapeD[1];
|
||||
}
|
||||
|
||||
AnalysisLogger.getLogger().debug("Shape: Z:"+zD+" X:"+ xD+" Y:"+yD);
|
||||
|
||||
AnalysisLogger.getLogger().debug("Layer Information Retrieval ELAPSED Time: " + (System.currentTimeMillis() - t01));
|
||||
int rank = data.getRank();
|
||||
AnalysisLogger.getLogger().debug("Rank of the layer: " + rank);
|
||||
|
||||
ArrayFloat.D3 data3Float = null;
|
||||
ArrayDouble.D3 data3Double = null;
|
||||
ArrayInt.D3 data3Int = null;
|
||||
ArrayLong.D3 data3Long = null;
|
||||
ArrayFloat.D2 data2Float = null;
|
||||
ArrayDouble.D2 data2Double= null;
|
||||
ArrayInt.D2 data2Int = null;
|
||||
ArrayLong.D2 data2Long = null;
|
||||
|
||||
if (data.getRank() == 3){
|
||||
if (data instanceof ArrayFloat.D3)
|
||||
data3Float = (ArrayFloat.D3) data;
|
||||
else if (data instanceof ArrayInt.D3)
|
||||
data3Int = (ArrayInt.D3) data;
|
||||
else if (data instanceof ArrayDouble.D3)
|
||||
data3Double = (ArrayDouble.D3) data;
|
||||
else if (data instanceof ArrayDouble.D3)
|
||||
data3Double = (ArrayDouble.D3) data;
|
||||
else if (data instanceof ArrayLong.D3)
|
||||
data3Long = (ArrayLong.D3) data;
|
||||
else if (data instanceof ArrayByte.D3)
|
||||
data3Double = (ArrayDouble.D3)EnvDataExplorer.arrayByte3DArrayDouble((ArrayByte)data);
|
||||
else
|
||||
throw new Exception("Layer data format not supported");
|
||||
}
|
||||
else{
|
||||
if (data instanceof ArrayFloat.D2)
|
||||
data2Float = (ArrayFloat.D2) data;
|
||||
else if (data instanceof ArrayInt.D2)
|
||||
data2Int = (ArrayInt.D2) data;
|
||||
else if (data instanceof ArrayDouble.D2)
|
||||
data2Double = (ArrayDouble.D2) data;
|
||||
else if (data instanceof ArrayLong.D2)
|
||||
data2Long = (ArrayLong.D2) data;
|
||||
else if (data instanceof ArrayByte.D2)
|
||||
data2Double = (ArrayDouble.D2)EnvDataExplorer.arrayByte2DArrayDouble((ArrayByte)data);
|
||||
else
|
||||
throw new Exception("Layer data format not supported");
|
||||
}
|
||||
|
||||
|
||||
double xmin = xAxis.getMinValue();
|
||||
double xmax = xAxis.getMaxValue();
|
||||
if (((xmax==360) && (xmin==0)) || ((xmax==359.5) && (xmin==0.5))){
|
||||
xmax = 180;
|
||||
xmin=-180;
|
||||
}
|
||||
AnalysisLogger.getLogger().debug("X dimension: "+xD+" Xmin:"+ xmax+" Xmax:"+xmin);
|
||||
|
||||
for (int i = 0; i < tsize; i++) {
int zint = 0;
int xint = 0;
int yint = 0;
Tuple<Double> triplet = triplets.get(i);
double x = triplet.getElements().get(0);
double y = triplet.getElements().get(1);
if (x == 180)
x = -180;
if (y == 90)
y = -90;
double z = 0;
// a depth value is present only when the tuple carries a third element
if (triplet.getElements().size() > 2)
z = triplet.getElements().get(2);
if (resolutionZ > 0) {
if ((zAxis.getMinValue() <= z) && (zAxis.getMaxValue() >= z))
zint = Math.abs((int) Math.round((z - zAxis.getMinValue()) / resolutionZ));
}

// AnalysisLogger.getLogger().debug("Z Index: "+zint);
/*
GridCoordSystem gcs = grid.getCoordinateSystem();
int[] xy = gcs.findXYindexFromLatLon(x, y, null);
Array datas=grid.readDataSlice(time, zint, xy[1], xy[0]);
*/
if ((xmin <= x) && (xmax >= x))
xint = (int) Math.round((x - xmin) / resolutionX);
if ((yAxis.getMinValue() <= y) && (yAxis.getMaxValue() >= y))
yint = (int) Math.round((y - yAxis.getMinValue()) / resolutionY);
Double val = Double.NaN;
if (xint > xD - 1)
xint = xD - 1;
if (yint > yD - 1)
yint = yD - 1;
if (zint > zD - 1)
zint = zD - 1;
if (data3Float != null)
val = Double.valueOf(data3Float.get(zint, yint, xint));
else if (data3Int != null)
val = Double.valueOf(data3Int.get(zint, yint, xint));
else if (data3Double != null)
val = Double.valueOf(data3Double.get(zint, yint, xint));
else if (data3Long != null)
val = Double.valueOf(data3Long.get(zint, yint, xint));

else if (data2Float != null)
val = Double.valueOf(data2Float.get(yint, xint));
else if (data2Int != null)
val = Double.valueOf(data2Int.get(yint, xint));
else if (data2Double != null)
val = Double.valueOf(data2Double.get(yint, xint));
else if (data2Long != null)
val = Double.valueOf(data2Long.get(yint, xint));

values.add(val);
}
break;
}
}
return values;
}

// A GridDatatype is like a specialized Variable that explicitly handles X,Y,Z,T dimensions
public static LinkedHashMap<String, Double> manageGridDataset(String layer, String filename, double x, double y, double z) throws Exception {
LinkedHashMap<String, Double> valuesMap = new LinkedHashMap<String, Double>();
GridDataset gds = ucar.nc2.dt.grid.GridDataset.open(filename);
List<GridDatatype> gridTypes = gds.getGrids();
for (GridDatatype gdt : gridTypes) {
AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getFullName());
if (layer.equalsIgnoreCase(gdt.getFullName())) {
AnalysisLogger.getLogger().debug("Found layer " + layer + " inside file");
GridDatatype grid = gds.findGridDatatype(gdt.getName());
GridCoordSystem gcs = grid.getCoordinateSystem();
long timeSteps = 0;
java.util.Date[] dates = null;
if (gcs.hasTimeAxis1D()) {
CoordinateAxis1DTime tAxis1D = gcs.getTimeAxis1D();
dates = tAxis1D.getTimeDates();
timeSteps = dates.length;
} else if (gcs.hasTimeAxis()) {
CoordinateAxis tAxis = gcs.getTimeAxis();
timeSteps = tAxis.getSize();
}

CoordinateAxis zAxis = gdt.getCoordinateSystem().getVerticalAxis();
double resolutionZ = Math.abs((double) (zAxis.getMaxValue() - zAxis.getMinValue()) / (double) zAxis.getShape()[0]);
int zint = 0;
if (resolutionZ > 0) {
if ((zAxis.getMinValue() <= z) && (zAxis.getMaxValue() >= z))
zint = Math.abs((int) Math.round((z - zAxis.getMinValue()) / resolutionZ));
}

AnalysisLogger.getLogger().debug("Z index to take: " + zint);

int[] xy = gcs.findXYindexFromLatLon(x, y, null);
for (int j = 0; j < timeSteps; j++) {
try {
Array data = grid.readDataSlice(j, zint, xy[1], xy[0]); // note order is t, z, y, x
Double val = takeFirstDouble(data);
if (!val.isNaN()) {
String date = "" + j;
if (dates != null)
date = dates[j].toString();
valuesMap.put(timePrefix + date, Double.parseDouble("" + val));
}
} catch (Exception e) {
AnalysisLogger.getLogger().debug("Error in getting grid values in (" + x + "," + y + "," + z + ") with zint: " + zint + " resolution: " + resolutionZ + " and shape: " + zAxis.getShape()[0]);
}
}
break;
}
}
return valuesMap;
}
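/*
 * A minimal usage sketch for the method above, assuming the enclosing class is the
 * "ThreddsDataExplorer" named in its log messages; the dataset URL and layer name are
 * placeholders to be replaced with real ones published on the THREDDS instance:
 *
 *   String url = "http://thredds.d4science.org/thredds/dodsC/public/netcdf/<some-file>.nc"; // placeholder
 *   LinkedHashMap<String, Double> series =
 *       ThreddsDataExplorer.manageGridDataset("<layer-name>", url, 10.0, 42.0, 0.0);
 *   for (java.util.Map.Entry<String, Double> e : series.entrySet())
 *       System.out.println(e.getKey() + " -> " + e.getValue());
 *
 * Each map key is timePrefix followed by the time-step date, with one entry per
 * non-NaN time step at the requested (x, y, z) point.
 */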

// Returns the first non-NaN value found in the array, or NaN if none is present
public static Double takeFirstDouble(Array data) {
long datal = data.getSize();
Double val = Double.NaN;
try {
for (int k = 0; k < datal; k++) {
Double testVal = data.getDouble(k);
if (!testVal.isNaN()) {
val = testVal;
break;
}
}
} catch (Exception ee) {
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> WARNING: Error in getting value: " + ee.getLocalizedMessage());
}
return val;
}

// Explores a point-feature dataset: reads the observations closest to (x, y), progressively enlarging the search box
public LinkedHashMap<String, String> managePointsDataset(String layer, String filename, double x, double y) throws Exception {
LinkedHashMap<String, String> valuesMap = new LinkedHashMap<String, String>();
float tolerance = 0.25f;
Formatter errlog = new Formatter();
FeatureDataset fdataset = FeatureDatasetFactoryManager.open(FeatureType.POINT, filename, null, errlog);
PointDatasetImpl ds = (PointDatasetImpl) fdataset;
List<FeatureCollection> lfc = ds.getPointFeatureCollectionList();

for (FeatureCollection fc : lfc) {

StandardPointCollectionImpl spf = (StandardPointCollectionImpl) fc;
PointFeatureIterator iter = null;
while ((y - tolerance > -90) && (x - tolerance > -180) && (y + tolerance < 90) && (x + tolerance < 180)) {
LatLonRect rect = new LatLonRect(new LatLonPointImpl(y - tolerance, x - tolerance), new LatLonPointImpl(y + tolerance, x + tolerance));
PointFeatureCollection coll = spf.subset(rect, null);
iter = coll.getPointFeatureIterator(100 * 1000); // 100Kb buffer
if (iter.getCount() == 0)
iter.finish();
else
break;
tolerance = tolerance + 0.25f;
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> tolerance = " + tolerance);
}

if (iter != null) {
try {
while (iter.hasNext()) {
ucar.nc2.ft.PointFeature pf = iter.next();
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> EarthLoc: " + pf.getLocation());
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> EarthTime: " + pf.getObservationTime());
StructureData sd = pf.getData();
List<Member> mems = sd.getMembers();
for (Member m : mems) {
String unit = m.getUnitsString();
if ((unit != null) && (unit.length() > 0)) {
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> description: " + m.getDescription());
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> data param: " + m.getDataParam());
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> name: " + m.getName());
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> unit: " + m.getUnitsString());
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> type: " + m.getDataType());
Array arr = sd.getArray(m.getName());
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> is Time: " + m.getDataType());
Double val = takeFirstDouble(arr);

AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> extracted value: " + val);
}
}
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> EarthTime: ");
}
} finally {
iter.finish();
}
}
break;
}
return valuesMap;
}

// A GridDatatype is like a specialized Variable that explicitly handles X,Y,Z,T dimensions
public static boolean isGridDataset(String filename) {
try {
AnalysisLogger.getLogger().debug("Analyzing file " + filename);
Formatter errlog = new Formatter();
FeatureDataset fdataset = FeatureDatasetFactoryManager.open(FeatureType.GRID, filename, null, errlog);
if (fdataset == null) {
// System.out.printf("GRID Parse failed --> %s\n", errlog);
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> NOT GRID");
return false;
} else
return true;
} catch (Throwable e) {
return false;
}
}

// Checks whether the file can be opened as a POINT feature dataset
public static boolean isPointDataset(String filename) {
try {
Formatter errlog = new Formatter();
FeatureDataset fdataset = FeatureDatasetFactoryManager.open(FeatureType.POINT, filename, null, errlog);
if (fdataset == null) {
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> NOT POINT");
return false;
} else
return true;
} catch (Exception e) {
return false;
}
}

public static boolean isDataset(String filename) throws Exception {
boolean isdataset = false;
try {
Formatter errlog = new Formatter();
FeatureType[] fts = FeatureType.values();
for (int i = 0; i < fts.length; i++) {
FeatureDataset fdataset = FeatureDatasetFactoryManager.open(fts[i], filename, null, errlog);
if (fdataset == null) {
// System.out.printf(fts[i]+": Parse failed --> %s\n",errlog);
} else {
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> " + fts[i] + " OK!");
isdataset = true;
}
}
} catch (Exception e) {
// ignore: the file simply cannot be opened as any known feature type
}
return isdataset;
}
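/*
 * A minimal dispatch sketch combining the three probes above, assuming the enclosing
 * class is the "ThreddsDataExplorer" named in its log messages; the file URL is a
 * placeholder:
 *
 *   String file = "http://thredds.d4science.org/thredds/dodsC/public/netcdf/<some-file>.nc";
 *   if (ThreddsDataExplorer.isGridDataset(file))
 *       AnalysisLogger.getLogger().debug("treat it as a gridded layer");
 *   else if (ThreddsDataExplorer.isPointDataset(file))
 *       AnalysisLogger.getLogger().debug("treat it as a point-feature collection");
 *   else if (ThreddsDataExplorer.isDataset(file))
 *       AnalysisLogger.getLogger().debug("some other CDM feature type");
 */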

public static double adjX(double x) {
/*
 * if (x < -180) x = -180; if (x > 180) x = 180;
 */
return x;
}

public static double adjY(double y) {
/*
 * if (y < -90) y = -90; if (y > 90) y = 90;
 */
return y;
}

public static double getMinX(GridCoordSystem gcs) {
CoordinateAxis xAxis = gcs.getXHorizAxis();
return adjX(xAxis.getMinValue());
}

public static double getMaxX(GridCoordSystem gcs) {
CoordinateAxis xAxis = gcs.getXHorizAxis();
return adjX(xAxis.getMaxValue());
}

public static double getMinY(GridCoordSystem gcs) {
CoordinateAxis yAxis = gcs.getYHorizAxis();
return adjY(yAxis.getMinValue());
}

public static double getMaxY(GridCoordSystem gcs) {
CoordinateAxis yAxis = gcs.getYHorizAxis();
return adjY(yAxis.getMaxValue());
}
}
@ -0,0 +1,102 @@
package org.gcube.dataanalysis.geo.utils;

import java.io.ByteArrayInputStream;
import java.util.ArrayList;
import java.util.Formatter;
import java.util.List;

import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathFactory;

import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

import ucar.nc2.constants.FeatureType;
import ucar.nc2.ft.FeatureDataset;
import ucar.nc2.ft.FeatureDatasetFactoryManager;

/**
 * Explores the contents of a THREDDS instance and guesses the nature of a file hosted on the server.
 *
 * @author coro
 */
public class ThreddsExplorer {

// http://thredds.research-infrastructures.eu:8080/thredds/catalog/public/netcdf/catalog.xml
public static String timePrefix = "time:";

public static List<String> getFiles(String catalogURL) throws Exception {

String xml = HttpRequest.sendGetRequest(catalogURL, null);
XPath xpath = XPathFactory.newInstance().newXPath();
XPathExpression xPathExpression = xpath.compile("//child::*[local-name()='catalog']/child::*[local-name()='dataset']/child::*[local-name()='dataset']");
InputSource inputSource = new InputSource(new ByteArrayInputStream(xml.getBytes("UTF-8")));
NodeList nodes = (NodeList) xPathExpression.evaluate(inputSource, XPathConstants.NODESET);
List<String> fileNames = new ArrayList<String>();
for (int i = 0; i < nodes.getLength(); i++) {
Node node = nodes.item(i);
String name = node.getAttributes().getNamedItem("name").getNodeValue();
if (name != null)
fileNames.add(name);
}

return fileNames;
}
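/*
 * A minimal usage sketch: list the datasets published under a THREDDS catalog, using
 * the catalog URL quoted in the comment above (adapt it to the target infrastructure):
 *
 *   List<String> files = ThreddsExplorer.getFiles(
 *       "http://thredds.research-infrastructures.eu:8080/thredds/catalog/public/netcdf/catalog.xml");
 *   for (String f : files)
 *       AnalysisLogger.getLogger().debug("dataset: " + f);
 */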

// A GridDatatype is like a specialized Variable that explicitly handles X,Y,Z,T dimensions
public static boolean isGridDataset(String filename) {
try {
AnalysisLogger.getLogger().debug("Analyzing file " + filename);
Formatter errlog = new Formatter();
FeatureDataset fdataset = FeatureDatasetFactoryManager.open(FeatureType.GRID, filename, null, errlog);
if (fdataset == null) {
// System.out.printf("GRID Parse failed --> %s\n", errlog);
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> NOT GRID");
return false;
} else
return true;
} catch (Throwable e) {
return false;
}
}

// Checks whether the file can be opened as a POINT feature dataset
public static boolean isPointDataset(String filename) {
try {
Formatter errlog = new Formatter();
FeatureDataset fdataset = FeatureDatasetFactoryManager.open(FeatureType.POINT, filename, null, errlog);
if (fdataset == null) {
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> NOT POINT");
return false;
} else
return true;
} catch (Exception e) {
return false;
}
}

public static boolean isDataset(String filename) throws Exception {
boolean isdataset = false;
try {
Formatter errlog = new Formatter();
FeatureType[] fts = FeatureType.values();
for (int i = 0; i < fts.length; i++) {
FeatureDataset fdataset = FeatureDatasetFactoryManager.open(fts[i], filename, null, errlog);
if (fdataset == null) {
// System.out.printf(fts[i]+": Parse failed --> %s\n",errlog);
} else {
AnalysisLogger.getLogger().debug("ThreddsDataExplorer-> " + fts[i] + " OK!");
isdataset = true;
}
}
} catch (Exception e) {
// ignore: the file simply cannot be opened as any known feature type
}
return isdataset;
}

}
@ -0,0 +1,53 @@
package org.gcube.dataanalysis.geo.utils;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;

import ucar.ma2.ArrayByte;
import ucar.ma2.ArrayDouble;
import ucar.ma2.IndexIterator;

public class VectorTransformations {

public static ArrayDouble.D2 arrayByte2DArrayDouble(ArrayByte bytes) {
int[] shapeD = bytes.getShape();
int yD = shapeD[0];
int xD = shapeD[1];
AnalysisLogger.getLogger().debug(xD + "X" + yD + "=" + (xD * yD));
ArrayDouble.D2 doublea = new ArrayDouble.D2(yD, xD);

// the IndexIterator walks the source array in canonical (row-major) order,
// so the loops must consume it with the last dimension (x) varying fastest
IndexIterator iterator = bytes.getIndexIterator();
for (int y = 0; y < yD; y++) {
for (int x = 0; x < xD; x++) {
Byte bytex = (Byte) iterator.next();
doublea.set(y, x, bytex.doubleValue());
}
}
return doublea;
}

public static ArrayDouble.D3 arrayByte3DArrayDouble(ArrayByte bytes) {
int[] shapeD = bytes.getShape();
int zD = shapeD[0];
int yD = shapeD[1];
int xD = shapeD[2];

AnalysisLogger.getLogger().debug(xD + "X" + yD + "X" + zD + "=" + (xD * yD * zD));
ArrayDouble.D3 doublea = new ArrayDouble.D3(zD, yD, xD);

// same canonical ordering as above: z is the slowest index, x the fastest
IndexIterator iterator = bytes.getIndexIterator();
for (int z = 0; z < zD; z++) {
for (int y = 0; y < yD; y++) {
for (int x = 0; x < xD; x++) {
Byte bytex = (Byte) iterator.next();
doublea.set(z, y, x, bytex.doubleValue());
}
}
}

return doublea;
}

}
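/*
 * A minimal usage sketch, assuming the NetCDF-Java 4.x API used elsewhere in this
 * project (Array.factory signatures differ in later library versions):
 *
 *   ucar.ma2.ArrayByte bytes = (ucar.ma2.ArrayByte)
 *       ucar.ma2.Array.factory(ucar.ma2.DataType.BYTE, new int[] { 2, 3 });
 *   bytes.setByte(0, (byte) 7); // fill via the flat index, row-major order
 *   ucar.ma2.ArrayDouble.D2 doubles = VectorTransformations.arrayByte2DArrayDouble(bytes);
 *   AnalysisLogger.getLogger().debug("value at (0,0): " + doubles.get(0, 0)); // 7.0
 */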