Adjustments to ASC raster mining and geo-table dumps.
Corrections to the MaxEnt parameter descriptions.

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineGeoSpatialExtension@100760 82a268e6-3cf1-43bd-a215-b396298e98cf
parent 85f6d3c481
commit 168d78ca64
@@ -109,10 +109,10 @@ public class MaxEnt4NicheModellingTransducer implements Transducerer {
@Override
public String getDescription() {
return "A Maximum-Entropy model for species habitat modeling, based on the implementation by Schapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. " +
"In this adaptation for the D4Science infrastructure, the software can accept a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. " +
"The user can also establish the bounding box and the spatial resolution (in deg) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one." +
"In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. " +
"The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one." +
"The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment." +
"Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points to Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO";
"Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt";
}

@Override

@@ -134,16 +134,16 @@ public class MaxEnt4NicheModellingTransducer implements Transducerer {
ColumnType p2 = new ColumnType(OccurrencesTableNameParameter, LatitudeColumn, "The column containing latitude values", "decimallatitude", false);
inputs.add(p2);

IOHelper.addDoubleInput(inputs, z, "Value of Z. Default is 0, that means processing will be at surface level or at the first available Z value in the layer", "0");
IOHelper.addIntegerInput(inputs, t, "Time Index. The default is the first time indexed dataset", "0");

IOHelper.addDoubleInput(inputs, xRes, "Projection resolution on the X axis in degrees", "1");
IOHelper.addDoubleInput(inputs, yRes, "Projection resolution on the Y axis in degrees", "1");
IOHelper.addDoubleInput(inputs, xRes, "Model projection resolution on the X axis in decimal degrees", "1");
IOHelper.addDoubleInput(inputs, yRes, "Model projection resolution on the Y axis in decimal degrees", "1");

// layers to use in the model
PrimitiveTypesList listEnvLayers = new PrimitiveTypesList(String.class.getName(), PrimitiveTypes.STRING, Layers, "The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif", false);
inputs.add(listEnvLayers);

IOHelper.addDoubleInput(inputs, z, "Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first available Z value in the layer", "0");
IOHelper.addIntegerInput(inputs, t, "Time Index. The default is the first time indexed in the input environmental datasets", "0");

DatabaseType.addDefaultDBPars(inputs);
return inputs;
}

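Note: a minimal sketch (not part of this commit) of how the bounding box and the X/Y resolution parameters declared above are typically turned into the sampling grid that the layer connectors fill. The helper and its argument order are taken from other call sites in this changeset (e.g. ProduceASCFile); the bounding box and resolution values are illustrative.

import java.util.List;

import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.utils.VectorOperations;

public class ResolutionGridSketch {
	public static void main(String[] args) {
		double xll = -180, xur = 180, yll = -90, yur = 90; // BBox_* parameters
		double xRes = 1, yRes = 1;                         // resolutions in decimal degrees, as in the inputs above
		double z = 0;                                      // surface level by default
		// one (x, y, z) triplet per grid cell; every layer is then sampled on this grid
		List<Tuple<Double>> grid = VectorOperations.generateCoordinateTripletsInBoundingBox(xll, xur, yll, yur, z, xRes, yRes);
		System.out.println("Grid cells: " + grid.size());
	}
}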
@@ -54,20 +54,6 @@ public class AscDataExplorer {

}

public int longitude2Index (double longitude){
if (dx>0)
return (int)Math.round((longitude-xOrigin)/dx);
else
return (int)Math.round((longitude-xOrigin)/cellsize);
}

public int latitude2Index (double latitude){
if (dy>0)
return (nrows-1) - (int) Math.round((latitude-yOrigin)/dy);
else
return (nrows-1) - (int) Math.round((latitude-yOrigin)/cellsize);
}

public List<Double> retrieveDataFromAsc( List<Tuple<Double>> triplets, int time) throws Exception{
if (time>0)
throw new Exception("No Time Dimension For ASC Files!");

@@ -76,8 +62,8 @@ public class AscDataExplorer {
for (Tuple<Double> triplet:triplets){
double x = triplet.getElements().get(0);
double y = triplet.getElements().get(1);
int j = longitude2Index(x);
int i = latitude2Index(y);
int j = ascFile.longitude2Index(x);
int i = ascFile.latitude2Index(y);

if ((j>ncolumns) || (j<0) || (i>nrows) || (i<0)){
values.add(Double.NaN);

@@ -158,7 +158,11 @@ public class AscRaster {

public double getValue(int row, int column) {
if (row < rows && column < cols)
return data[row][column];
if (data[row][column]!= Double.parseDouble(NDATA))
return data[row][column];
else
return Double.NaN;

return Double.NaN;
}

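Note: the patched getValue now maps cells equal to the grid's NODATA marker to Double.NaN instead of returning the raw sentinel. A self-contained sketch of that rule with hypothetical values (the real method reads NDATA from the .asc header); the comparison assumes the NODATA string parses to a value that matches the stored cells exactly.

public class NoDataSketch {
	public static void main(String[] args) {
		double[][] data = { { 3.5, -9999.0 } };      // one valid cell, one NODATA cell
		double nodata = Double.parseDouble("-9999"); // NODATA_value taken from the grid header
		for (double cell : data[0]) {
			// same rule as AscRaster.getValue above: sentinel values become NaN
			double value = (cell != nodata) ? cell : Double.NaN;
			System.out.println(value);               // prints 3.5, then NaN
		}
	}
}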
@@ -259,10 +263,17 @@ public class AscRaster {
}

public int longitude2Index(double longitude) {
return (int) Math.round((longitude - xll) / cellsize);
double factor = dx;
if (dx<0)
factor = cellsize;
return (int) Math.round((longitude - xll) / factor);
}

public int latitude2Index(double latitude) {
return (rows - 1) - (int) Math.round((latitude - yll) / cellsize);
double factor = dy;
if (dy<0)
factor = cellsize;
return (rows-1) - (int) Math.round((latitude - yll) / factor);

}
}

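Note: with this change the grid step comes from dx/dy when they are set and falls back to the single cellsize otherwise, matching the dx/dy headers the writer can emit. A minimal sketch of the conversion on a hypothetical 1-degree global grid (all values illustrative):

public class AscIndexSketch {
	public static void main(String[] args) {
		double xll = -180, yll = -90, cellsize = 1.0;  // lower-left corner and square cell size
		double dx = -1, dy = -1;                       // unset, so cellsize is used
		int rows = 180;

		double lon = 12.5, lat = 41.9;                 // a point to locate in the grid
		double xFactor = (dx < 0) ? cellsize : dx;     // same fallback as the patched methods
		double yFactor = (dy < 0) ? cellsize : dy;

		int col = (int) Math.round((lon - xll) / xFactor);
		int row = (rows - 1) - (int) Math.round((lat - yll) / yFactor); // row 0 is the northernmost row
		System.out.println("cell = (" + row + ", " + col + ")");        // cell = (47, 193)
	}
}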
@@ -45,8 +45,46 @@ public class AscRasterWriter {
}
o.println( "NODATA_value " + r.getNDATA() );

double[][] values = r.getData();
//for(int k=values.length-1;k>=0;k-- )
for(int k=0;k<values.length;k++ )
{
double[] row =values[k];
StringBuffer b = new StringBuffer();
for( int i = 0; i < row.length; i++ )
{
if( Double.isNaN( row[i] ) ) b.append( r.getNDATA() );
else if( cellFormat != null ) b.append( cellFormat.format( row[i] ));
else b.append( row[i] );
if( i < row.length-1 ) b.append( " " );
}
o.println( b );
}
o.close();
}

public void writeRasterInvertYAxis( String filename, AscRaster r ) throws IOException
{
File f = new File( filename );
if( f.exists() ) f.delete();
if( ! f.createNewFile() ) throw new RuntimeException( "Could not create file for some reason!");
PrintStream o = new PrintStream( f );
o.println( "ncols " + r.getCols() );
o.println( "nrows " + r.getRows() );
o.println( "xllcorner " + r.getXll() );
o.println( "yllcorner " + r.getYll());
if (r.getCellsize()>0)
o.println( "cellsize " + r.getCellsize() );
else
{
o.println( "dx " + r.getdx() );
o.println( "dy " + r.getdy() );
}
o.println( "NODATA_value " + r.getNDATA() );

double[][] values = r.getData();
for(int k=values.length-1;k>=0;k-- )
// for(int k=0;k<values.length;k++ )
{
double[] row =values[k];
StringBuffer b = new StringBuffer();

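Note: ESRI ASCII grids list rows from the top (north) down, so a matrix whose row 0 is the southernmost row has to be written back to front; that is what the new writeRasterInvertYAxis does, while writeRaster keeps the matrix order. A small sketch of the two row orders, using a hypothetical 2x3 matrix:

public class RowOrderSketch {
	public static void main(String[] args) {
		double[][] matrix = {
				{ 1, 2, 3 },  // row 0: southern row
				{ 4, 5, 6 }   // row 1: northern row
		};
		// writeRasterInvertYAxis order: last matrix row first, i.e. north to south
		for (int k = matrix.length - 1; k >= 0; k--) {
			StringBuilder line = new StringBuilder();
			for (int i = 0; i < matrix[k].length; i++) {
				line.append(matrix[k][i]);
				if (i < matrix[k].length - 1)
					line.append(" ");
			}
			System.out.println(line); // prints "4.0 5.0 6.0" then "1.0 2.0 3.0"
		}
	}
}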
@@ -79,6 +117,11 @@ public class AscRasterWriter {
writeRaster( filename, AscRaster.getTempRaster( data, xll, yll, size, ndata ) );
}

public void writeRasterInvertYAxis( String filename, double[][] data, double xll, double yll, double size, String ndata ) throws IOException
{
writeRasterInvertYAxis( filename, AscRaster.getTempRaster( data, xll, yll, size, ndata ) );
}

/**
* Can be used to set a number format for the cells. For example, if they are all integer
* values, you can set an integer format. This should help with roundtrippability for

@@ -60,7 +60,8 @@ public class Table implements GISDataConnector {
AnalysisLogger.getLogger().debug("Error in getting elements for Z ");
throw new Exception("Outside the z boundaries [" + tmr.minZ + ";" + tmr.maxZ + "]");
}


AnalysisLogger.getLogger().debug("Assigning points to grid ");
List<Double> v = VectorOperations.assignPointsValuesToGrid(coordinates3d, time, tuples, tolerance);

// AnalysisLogger.getLogger().debug("VALUES "+v);

@@ -47,7 +47,11 @@ public class WFSDataExplorer {

// String wfsURL = OGCFormatter.getWfsUrl(geoServer, layer, OGCFormatter.buildBoundingBox(xL, yL, xR, yR), 0, "json");
// there is a bug in WFS in the retrieval according to a bounding box: y must be in the range -180;180. then I preferred to take all the features
String wfsURL = OGCFormatter.getWfsUrl(geoServer, layer, null, 0, "json");
//"51,-120,57,-106"
String bbox = null;
// bbox = "60,0,90,180";

String wfsURL = OGCFormatter.getWfsUrl(geoServer, layer, bbox, 0, "json");
AnalysisLogger.getLogger().debug("WFSDataExplorer-> Requesting URL: " + wfsURL);
String returned = null;
try {

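Note: illustrative only — the kind of WFS GetFeature request being assembled here, with the bounding-box filter left out (bbox = null) because of the server-side range issue mentioned in the comment. The exact URL produced by OGCFormatter.getWfsUrl may differ; this mirrors the GetFeature URLs used elsewhere in this changeset.

public class WfsUrlSketch {
	public static void main(String[] args) {
		String geoServer = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver";
		String layer = "aquamaps:worldborders";
		String bbox = null; // e.g. "51,-120,57,-106" to restrict the request to an area
		String url = geoServer + "/ows?service=wfs&version=1.0.0&request=GetFeature"
				+ "&srsName=urn:x-ogc:def:crs:EPSG:4326&typeName=" + layer
				+ "&outputFormat=json"
				+ (bbox != null ? "&bbox=" + bbox : "");
		System.out.println(url);
	}
}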
@@ -102,12 +106,12 @@ public class WFSDataExplorer {
if (poly == null)
poly = new FeaturedPolygon();

LinkedHashMap<String, String> props = (LinkedHashMap<String, String>) propertiesMap.get(properties);
LinkedHashMap<String, Object> props = (LinkedHashMap<String, Object>) propertiesMap.get(properties);
// fill the properties of the fpolygon
for (String keyprop : props.keySet()) {
try {
// fulfill the FeaturedPolygon
String value = props.get(keyprop);
String value = (""+props.get(keyprop)).replace("{", "").replace("}", "");
try {
String lowcaseprop = keyprop.toLowerCase();
// System.out.println(poly.p.getCentroid()+" -> "+value);

@@ -122,6 +126,7 @@ public class WFSDataExplorer {
poly.addFeature(keyprop, value);
}
} catch (Exception e) {
e.printStackTrace();
}
}
} else if (properties.contains("geometry") && !properties.contains("geometry_")) {

@@ -216,7 +221,8 @@ public class WFSDataExplorer {

boolean found = false;
int h = 0;
for (Polygon polnh : polysnoholes) {
while(h<polysnoholes.size()) {
Polygon polnh = polysnoholes.get(h);
boolean covers = false;

try{
@@ -232,11 +238,11 @@ public class WFSDataExplorer {

if (covers) {
// System.out.println("found hole! "+pp+" vs "+polnh);
addDifference(h, polysnoholes, polnh, pp);
h=addDifference(h, polysnoholes, polnh, pp);
found = true;
} else if (pp.covers(polnh)) {
// polysnoholes.set(h, (Polygon) pp.difference(polnh));
addDifference(h, polysnoholes, pp, polnh);
h=addDifference(h, polysnoholes, pp, polnh);
found = true;
}
h++;

@@ -262,7 +268,7 @@ public class WFSDataExplorer {
}


private static void addDifference(int h , List<Polygon> polysnoholes, Polygon polnh, Polygon pp){
private static int addDifference(int h , List<Polygon> polysnoholes, Polygon polnh, Polygon pp){

Geometry mp = polnh.difference(pp);
if (mp instanceof com.vividsolutions.jts.geom.Polygon)

@@ -272,9 +278,16 @@ public class WFSDataExplorer {
int innerpolygons = mup.getNumGeometries();
for (int k = 0; k < innerpolygons; k++) {
Polygon ip = (Polygon) mup.getGeometryN(k);
polysnoholes.set(h, ip);
if (k==0)
polysnoholes.set(h, ip);
else
polysnoholes.add(h, ip);
h++;
}
if (innerpolygons>0)
h--;//set the cursor on the last element
}
return h;

}

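Note: these changes fix the hole-subtraction loop: when polnh.difference(pp) yields a MultiPolygon, the old code overwrote slot h repeatedly so only the last part survived; now the first part replaces the element, the remaining parts are inserted, and the updated cursor is returned so the caller's while loop neither skips nor re-processes entries. A self-contained JTS sketch (hypothetical geometries) of why the difference can have several parts:

import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.io.WKTReader;

public class DifferenceSketch {
	public static void main(String[] args) throws Exception {
		WKTReader reader = new WKTReader();
		// a square minus a band through its middle leaves two disjoint pieces
		Geometry square = reader.read("POLYGON((0 0, 10 0, 10 10, 0 10, 0 0))");
		Geometry band = reader.read("POLYGON((0 4, 10 4, 10 6, 0 6, 0 4))");
		Geometry diff = square.difference(band);
		System.out.println(diff.getGeometryType());   // MultiPolygon
		for (int k = 0; k < diff.getNumGeometries(); k++)
			System.out.println(diff.getGeometryN(k)); // each part must be kept in polysnoholes
	}
}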
@@ -38,7 +38,7 @@ public class ASCConverter {

String outputFile =new File(outFilePath).getAbsolutePath();
AscRasterWriter writer = new AscRasterWriter();
writer.writeRaster(outputFile, raster);
writer.writeRasterInvertYAxis(outputFile, raster);

return outputFile;
} catch (Exception e) {

@ -82,7 +82,7 @@ public class RasterTable {
|
|||
|
||||
public void init(double x1, double x2, double y1, double y2, double z, double time, double xResolution, double yResolution, double[][] values, HashMap<Double, Map<String, String>> valuesPropertiesMap, AlgorithmConfiguration configuration) {
|
||||
this.valuesMatrix = values;
|
||||
if (valuesPropertiesMap!=null && valuesPropertiesMap.size()>0)
|
||||
if (valuesPropertiesMap != null && valuesPropertiesMap.size() > 0)
|
||||
this.valuesPropertiesMap = valuesPropertiesMap;
|
||||
this.configuration = configuration;
|
||||
this.x1 = x1;
|
||||
|
@ -104,18 +104,17 @@ public class RasterTable {
|
|||
// create a table
|
||||
String columnNames = columnsnamesStandard;
|
||||
String emptycolumns = "";
|
||||
if (valuesPropertiesMap == null){
|
||||
if (valuesPropertiesMap == null) {
|
||||
AnalysisLogger.getLogger().debug("Rasterization->No properties to associate");
|
||||
DatabaseFactory.executeSQLUpdate(String.format(createTableStatementStandard, tablename), dbconnection);
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
AnalysisLogger.getLogger().debug("Managing Table with Custom Fields");
|
||||
Map<String, String> valuesMap = valuesPropertiesMap.values().iterator().next();
|
||||
AnalysisLogger.getLogger().debug("Rasterization->Sample of properties: "+valuesMap);
|
||||
AnalysisLogger.getLogger().debug("Rasterization->Sample of properties: " + valuesMap);
|
||||
emptycolumns = generateEmptyValues(valuesMap.size());
|
||||
DatabaseFactory.executeSQLUpdate(String.format(createTableStatementWithFields, tablename, propertiesMapToColumnString(valuesMap, true)), dbconnection);
|
||||
columnNames = String.format(columnsnamesWithFields, propertiesMapToColumnString(valuesMap, false));
|
||||
AnalysisLogger.getLogger().debug("Column names: "+columnNames);
|
||||
AnalysisLogger.getLogger().debug("Column names: " + columnNames);
|
||||
}
|
||||
AnalysisLogger.getLogger().debug("Table " + tablename + " created");
|
||||
if (coordinates == null)
|
||||
|
@ -127,7 +126,7 @@ public class RasterTable {
|
|||
AnalysisLogger.getLogger().debug("Association to values completed - fulfilling buffer");
|
||||
// for each element in the matrix, build the corresponding csquare code
|
||||
StringBuffer sb = new StringBuffer();
|
||||
int rowcounter = 1;
|
||||
int rowcounter = 1;
|
||||
for (int i = 0; i < triplets; i++) {
|
||||
// save the string in a buffer
|
||||
Tuple<Double> cset = coordinates.get(i);
|
||||
|
@ -140,12 +139,11 @@ public class RasterTable {
|
|||
|
||||
if (valuesPropertiesMap == null) {
|
||||
// we do not use NaNs in this case every value will be filled
|
||||
if (value.isNaN()){
|
||||
if (value.isNaN()) {
|
||||
value = 0d;
|
||||
valueForTable = null;
|
||||
}
|
||||
else
|
||||
valueForTable = "'"+value+"'";
|
||||
} else
|
||||
valueForTable = "'" + value + "'";
|
||||
} else {
|
||||
// we do not use NaNs in this case every value will be filled
|
||||
if (value.isNaN())
|
||||
|
@ -171,29 +169,28 @@ public class RasterTable {
|
|||
if (valuesPropertiesMap == null)
|
||||
sb.append("('" + csquare + "'," + x + "," + y + "," + zVal + "," + tVal + "," + valueForTable + ")");
|
||||
else
|
||||
sb.append("('" + csquare + "',"+ x + "," + y + "," + zVal + "," + tVal + "," + valueForTable + ")");
|
||||
sb.append("('" + csquare + "'," + x + "," + y + "," + zVal + "," + tVal + "," + valueForTable + ")");
|
||||
}
|
||||
if (rowcounter % 5000 == 0) {
|
||||
// AnalysisLogger.getLogger().debug("Partial Inserting Buffer of " + sb.length() + " Values");
|
||||
|
||||
if (sb.length() > 0) {
|
||||
String insertStatement = DatabaseUtils.insertFromBuffer(tablename, columnNames, sb);
|
||||
// AnalysisLogger.getLogger().debug("Inserting Buffer " + insertStatement);
|
||||
// AnalysisLogger.getLogger().debug("Inserting Buffer " + insertStatement);
|
||||
DatabaseFactory.executeSQLUpdate(insertStatement, dbconnection);
|
||||
|
||||
}
|
||||
// AnalysisLogger.getLogger().debug("Partial Insertion completed with Success!");
|
||||
sb = new StringBuffer();
|
||||
}
|
||||
else if (valueForTable != null)
|
||||
} else if (valueForTable != null)
|
||||
sb.append(",");
|
||||
|
||||
|
||||
}
|
||||
|
||||
AnalysisLogger.getLogger().debug("Inserting Final Buffer of " + sb.length() + " Values");
|
||||
// AnalysisLogger.getLogger().debug("Inserting Final Buffer " + sb);
|
||||
// save all the strings on the table
|
||||
if (sb.length() > 0) {
|
||||
String insertStatement = DatabaseUtils.insertFromString(tablename, columnNames, sb.substring(0, sb.length()-1));
|
||||
// AnalysisLogger.getLogger().debug("Inserting Buffer " + insertStatement);
|
||||
String insertStatement = DatabaseUtils.insertFromString(tablename, columnNames, sb.substring(0, sb.length() - 1));
|
||||
AnalysisLogger.getLogger().debug("Inserting Buffer " + insertStatement);
|
||||
DatabaseFactory.executeSQLUpdate(insertStatement, dbconnection);
|
||||
AnalysisLogger.getLogger().debug("Insertion completed with Success!");
|
||||
}
|
||||
|
@ -211,11 +208,12 @@ public class RasterTable {
|
|||
StringBuffer sb = new StringBuffer();
|
||||
int m = valuesMap.size();
|
||||
int i = 0;
|
||||
|
||||
for (String value : valuesMap.values()) {
|
||||
if (value.equals("NULL"))
|
||||
sb.append(value);
|
||||
else
|
||||
sb.append("'" + value .replace("'", ""+(char)96)+ "'");
|
||||
sb.append("'" + value.replace("'", "" + (char) 96) + "'");
|
||||
if (i < m - 1)
|
||||
sb.append(",");
|
||||
|
||||
|
@ -230,7 +228,7 @@ public class RasterTable {
|
|||
int m = valuesMap.size();
|
||||
int i = 0;
|
||||
for (String keys : valuesMap.keySet()) {
|
||||
sb.append("f_"+keys);
|
||||
sb.append("f_" + keys);
|
||||
if (withtype)
|
||||
sb.append(" character varying");
|
||||
if (i < m - 1)
|
||||
|
|
|
@ -87,7 +87,7 @@ public static void main3(String[] args) throws Exception{
|
|||
List<Double> currentTimeValues = extractor.currentTimeValues;
|
||||
|
||||
AscRasterWriter writer = new AscRasterWriter();
|
||||
writer.writeRaster("testwritten.asc", new AscRaster(values, 0.08333333, -1, -1, x1, y1));
|
||||
writer.writeRasterInvertYAxis("testwritten.asc", new AscRaster(values, 0.08333333, -1, -1, x1, y1));
|
||||
|
||||
AscDataExplorer ade2 = new AscDataExplorer("./maxentCompleteLayers/layer1.asc");
|
||||
List<Double>features2 = ade2.retrieveDataFromAsc(extractor.currentTuples,0);
|
||||
|
|
|
@ -55,7 +55,11 @@ public class TestMaxEnt {
|
|||
String sep=AlgorithmConfiguration.getListSeparator();
|
||||
// config.setParam("Layers","dfd1bad2-ab00-42ac-8bb2-46a17162f509"+sep+"23646f93-23a8-4be4-974e-aee6bebe1707");
|
||||
//config.setParam("Layers","94ea5767-ae76-41dc-be87-f9a0bdc96419");//temperature 99-09 2D
|
||||
config.setParam("Layers","23646f93-23a8-4be4-974e-aee6bebe1707");//ph
|
||||
// config.setParam("Layers","23646f93-23a8-4be4-974e-aee6bebe1707");//ph
|
||||
// config.setParam("Layers","fc9ac2f4-a2bd-43d1-a361-ac67c5ceac31");//temperature
|
||||
|
||||
config.setParam("Layers","http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/ows?service=wfs&version=1.0.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&TYPENAME=aquamaps:worldborders");
|
||||
|
||||
config.setParam("MaxIterations","10000");
|
||||
config.setParam("DefaultPrevalence","1");
|
||||
|
||||
|
@ -74,8 +78,12 @@ public class TestMaxEnt {
|
|||
config.setParam("BBox_UpperRightLat","10");
|
||||
*/
|
||||
|
||||
// config.setParam("XResolution","0.5");
|
||||
// config.setParam("YResolution","0.5");
|
||||
|
||||
config.setParam("XResolution","1");
|
||||
config.setParam("YResolution","1");
|
||||
|
||||
|
||||
return config;
|
||||
}
|
||||
|
|
|
@ -0,0 +1,148 @@
|
|||
package org.gcube.dataanalysis.geo.test.projections;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.FileReader;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.DistanceCalculator;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.Transformations;
|
||||
|
||||
public class GeolocateCountry {
|
||||
|
||||
static String faoreport = "FAO data.csv";
|
||||
|
||||
// static String faoreport = "C:\\Users\\coro\\Desktop\\allCountries.txt";
|
||||
|
||||
public static void main1(String[] args) throws Exception {
|
||||
BufferedReader fr = new BufferedReader(new FileReader(new File(faoreport)));
|
||||
String line = fr.readLine();
|
||||
long counter = 0;
|
||||
while (line != null) {
|
||||
// System.out.println(line);
|
||||
String[] split = line.split("\t");
|
||||
String country = split[17];
|
||||
String x = split[5];
|
||||
String y = split[4];
|
||||
if (country.contains("Russia"))
|
||||
break;
|
||||
// else
|
||||
// System.out.println("Country:"+country+" "+x+","+y);
|
||||
|
||||
counter++;
|
||||
if (counter % 500000 == 0)
|
||||
System.out.println("Country:" + country + " " + x + "," + y);
|
||||
line = fr.readLine();
|
||||
}
|
||||
|
||||
fr.close();
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
BufferedReader fr = new BufferedReader(new FileReader(new File(faoreport)));
|
||||
String line = fr.readLine();
|
||||
parseCentroidsFile();
|
||||
parseWorldCapitalsFile();
|
||||
line = fr.readLine();
|
||||
HashMap<String, String> yetDone = new HashMap<String, String>();
|
||||
while (line != null) {
|
||||
List<String> p = Transformations.parseCVSString(line, ",");
|
||||
String country = p.get(1);
|
||||
//TO DO rebuild the original CSV file
|
||||
String suggestion = yetDone.get(country);
|
||||
if (suggestion==null){
|
||||
suggestion = getCentroid(country,capitals,0.6);
|
||||
if (suggestion.length()==0)
|
||||
suggestion = getCentroid(country,centroids,0.3);
|
||||
|
||||
yetDone.put(country, suggestion);
|
||||
}
|
||||
|
||||
System.out.println(line+","+suggestion);
|
||||
|
||||
line = fr.readLine();
|
||||
}
|
||||
|
||||
fr.close();
|
||||
}
|
||||
|
||||
static HashMap<String, String> centroids = new HashMap<String, String>();
|
||||
static HashMap<String, String> capitals = new HashMap<String, String>();
|
||||
|
||||
public static void parseCentroidsFile() throws Exception {
|
||||
BufferedReader fr = new BufferedReader(new FileReader(new File("countriescentroids.txt")));
|
||||
String line = fr.readLine();
|
||||
|
||||
while (line != null) {
|
||||
String[] elems = line.split(",");
|
||||
String x = elems[0];
|
||||
String y = elems[1];
|
||||
String cntry_name = elems[2];
|
||||
centroids.put(cntry_name, x + "," + y);
|
||||
line = fr.readLine();
|
||||
}
|
||||
|
||||
fr.close();
|
||||
}
|
||||
|
||||
public static void parseWorldCapitalsFile() throws Exception {
|
||||
BufferedReader fr = new BufferedReader(new FileReader(new File("country-capitals.csv")));
|
||||
String line = fr.readLine();
|
||||
|
||||
while (line != null) {
|
||||
String[] elems = line.split(",");
|
||||
String x = elems[3];
|
||||
String y = elems[2];
|
||||
String cntry_name = elems[0];
|
||||
capitals.put(cntry_name, x + "," + y);
|
||||
line = fr.readLine();
|
||||
}
|
||||
|
||||
fr.close();
|
||||
}
|
||||
|
||||
public static String getCentroid(String country, HashMap<String, String> centroids, double threshold) {
|
||||
|
||||
String c = centroids.get(country);
|
||||
List<String> sb = new ArrayList<String>();
|
||||
List<Double> scores = new ArrayList<Double>();
|
||||
DistanceCalculator dc = new DistanceCalculator();
|
||||
if (c == null) {
|
||||
for (String key : centroids.keySet()) {
|
||||
if (key.length() > 0) {
|
||||
/*
|
||||
if (key.contains(country) || country.contains(key)) {
|
||||
if (sb.length() > 0)
|
||||
sb.append("/");
|
||||
|
||||
sb.append(key + "," + centroids.get(key) + "("+0.8+")"+" ");
|
||||
} else {
|
||||
*/
|
||||
double score = dc.CD(false, country, key,true,false);
|
||||
if (score > threshold) {
|
||||
int i = 0;
|
||||
for (Double cscore : scores){
|
||||
if (cscore<score)
|
||||
break;
|
||||
i++;
|
||||
}
|
||||
|
||||
sb.add(i,key + "," + centroids.get(key) + ","+MathFunctions.roundDecimal(score,2));
|
||||
scores.add(i,score);
|
||||
}
|
||||
|
||||
// }
|
||||
}
|
||||
}
|
||||
if (sb.size()>0)
|
||||
return sb.get(0).toString();
|
||||
else
|
||||
return "";
|
||||
} else
|
||||
return country+","+c+ ","+1;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,143 @@
|
|||
package org.gcube.dataanalysis.geo.test.projections;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileWriter;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
|
||||
import org.gcube.dataanalysis.geo.connectors.asc.AscRasterWriter;
|
||||
import org.gcube.dataanalysis.geo.connectors.table.Table;
|
||||
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
|
||||
import org.gcube.dataanalysis.geo.utils.MapUtils;
|
||||
import org.gcube.dataanalysis.geo.utils.VectorOperations;
|
||||
|
||||
public class ProduceASCFile {
|
||||
|
||||
static String layer = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/ows?service=wfs&version=1.0.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&TYPENAME=aquamaps:worldborders";
|
||||
|
||||
// static String layer = "ed8f77bd-2423-4036-b34d-2f1cb5fcaffc";
|
||||
// static String layer = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/ows?service=wfs&version=1.0.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&TYPENAME=aquamaps:eezall";
|
||||
// static String layer = "http://geo.vliz.be/geoserver/MarineRegions/ows?service=wfs&version=1.0.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&typename=MarineRegions:eez";
|
||||
// static String layer = "aeabfdb5-9ddb-495e-b628-5b7d2cf1d8a2";
|
||||
|
||||
static String field = "f_cat";
|
||||
// static String field = "eez_id";
|
||||
// static String field = "f_eezall";
|
||||
// static String field = "f_eez_id";
|
||||
// static String field = "f_zone";
|
||||
|
||||
static double res = 0.3;
|
||||
static String table = "testextraction4";
|
||||
static String scope = "/gcube/devsec/devVRE";
|
||||
static String databaseUser = "gcube";
|
||||
static String databasePwd = "d4science2";
|
||||
static String databaseURL = "jdbc:postgresql://localhost/testdb";
|
||||
static String databaseDriver = "org.postgresql.Driver";
|
||||
static double xll = -180;
|
||||
static double yll=-90;
|
||||
static double xur=180;
|
||||
static double yur=90;
|
||||
|
||||
static String outASCIIMAP = "producedmap.txt";
|
||||
static String outASCFile = "produced.asc";
|
||||
|
||||
private static AlgorithmConfiguration XYExtractionConfig() {
|
||||
|
||||
AlgorithmConfiguration config = new AlgorithmConfiguration();
|
||||
|
||||
config.setAgent("XYEXTRACTOR");
|
||||
config.setConfigPath("./cfg/");
|
||||
config.setPersistencePath("./");
|
||||
config.setParam("DatabaseUserName", databaseUser);
|
||||
config.setParam("DatabasePassword", databasePwd);
|
||||
config.setParam("DatabaseURL", databaseURL);
|
||||
config.setParam("DatabaseDriver", databaseDriver);
|
||||
config.setGcubeScope(scope);
|
||||
|
||||
config.setParam("Layer", layer);
|
||||
|
||||
config.setParam("Z", "0");
|
||||
config.setParam("TimeIndex", "0");
|
||||
config.setParam("BBox_LowerLeftLat", ""+yll);
|
||||
config.setParam("BBox_LowerLeftLong", ""+xll);
|
||||
config.setParam("BBox_UpperRightLat", ""+yur);
|
||||
config.setParam("BBox_UpperRightLong", ""+xur);
|
||||
config.setParam("XResolution", ""+res);
|
||||
config.setParam("YResolution", ""+res);
|
||||
config.setParam("OutputTableName", table);
|
||||
config.setParam("OutputTableLabel", table);
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration TableExtractionConfig() {
|
||||
|
||||
AlgorithmConfiguration config = new AlgorithmConfiguration();
|
||||
|
||||
config.setConfigPath("./cfg/");
|
||||
config.setPersistencePath("./");
|
||||
config.setParam("DatabaseUserName", databaseUser);
|
||||
config.setParam("DatabasePassword", databasePwd);
|
||||
config.setParam("DatabaseURL", databaseURL);
|
||||
config.setParam("DatabaseDriver", databaseDriver);
|
||||
config.setGcubeScope(scope);
|
||||
config.setParam("BBox_LowerLeftLat", ""+yll);
|
||||
config.setParam("BBox_LowerLeftLong", ""+xll);
|
||||
config.setParam("BBox_UpperRightLat", ""+xur);
|
||||
config.setParam("BBox_UpperRightLong", ""+yur);
|
||||
config.setParam("XResolution", ""+res);
|
||||
config.setParam("YResolution", ""+res);
|
||||
config.setParam("OutputTableName", table);
|
||||
config.setParam("OutputTableLabel", table);
|
||||
|
||||
config.setParam(TableMatrixRepresentation.tableNameParameter, table);
|
||||
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "approx_x");
|
||||
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "approx_y");
|
||||
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "time");
|
||||
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, field);
|
||||
config.setParam(TableMatrixRepresentation.filterParameter, " ");
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws Exception{
|
||||
|
||||
// produce(XYExtractionConfig());
|
||||
AnalysisLogger.setLogger("./cfg/"+AlgorithmConfiguration.defaultLoggerFile);
|
||||
List<Tuple<Double>> tuples = VectorOperations.generateCoordinateTripletsInBoundingBox(xll,xur,yll,yur, 0, res, res);
|
||||
Table connector = new Table(TableExtractionConfig(), res);
|
||||
List<Double> values = connector.getFeaturesInTimeInstantAndArea(null, null, 0, tuples, xll,xur,yll,yur);
|
||||
double[][] matrix = VectorOperations.vectorToMatix(values, xll,xur,yll,yur,res, res);
|
||||
System.out.println(MapUtils.globalASCIIMap(matrix));
|
||||
FileWriter fw = new FileWriter(new File(outASCIIMAP));
|
||||
fw.write(MapUtils.globalASCIIMap(matrix));
|
||||
fw.close();
|
||||
|
||||
AscRasterWriter writer = new AscRasterWriter();
|
||||
writer.writeRasterInvertYAxis(outASCFile, matrix, xll,yll, res, "-9999");
|
||||
}
|
||||
|
||||
|
||||
public static void produce(AlgorithmConfiguration config) throws Exception {
|
||||
|
||||
System.out.println("TEST 1");
|
||||
|
||||
|
||||
AnalysisLogger.getLogger().debug("Executing: "+config.getAgent());
|
||||
List<ComputationalAgent> trans = null;
|
||||
trans = TransducerersFactory.getTransducerers(config);
|
||||
trans.get(0).init();
|
||||
Regressor.process(trans.get(0));
|
||||
StatisticalType st = trans.get(0).getOutput();
|
||||
AnalysisLogger.getLogger().debug("ST:" + st);
|
||||
trans = null;
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,48 @@
|
|||
package org.gcube.dataanalysis.geo.test.projections;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.geo.connectors.wfs.FeaturedPolygon;
|
||||
import org.gcube.dataanalysis.geo.connectors.wfs.WFSDataExplorer;
|
||||
|
||||
import com.vividsolutions.jts.geom.Geometry;
|
||||
import com.vividsolutions.jts.geom.Point;
|
||||
import com.vividsolutions.jts.geom.Polygon;
|
||||
|
||||
public class ProduceCentroids {
|
||||
static String layer = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/ows?service=wfs&version=1.0.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&TYPENAME=aquamaps:worldborders";
|
||||
static String layername = "aquamaps:worldborders";
|
||||
|
||||
public static void main(String[] args) throws Exception{
|
||||
|
||||
List<FeaturedPolygon> featuresInTime = new ArrayList<FeaturedPolygon>();
|
||||
AnalysisLogger.getLogger().debug("taking WFS features from layer: "+layer);
|
||||
featuresInTime = WFSDataExplorer.getFeatures(layer, layername, -180, -90, 180, 90);
|
||||
HashMap<String, Point> centroidsmap = new HashMap<String, Point>();
|
||||
HashMap<String, Geometry> polymap = new HashMap<String, Geometry>();
|
||||
for (FeaturedPolygon fpoly:featuresInTime){
|
||||
// Point centroid = fpoly.p.getCentroid();
|
||||
Geometry prevPoly = polymap.get(fpoly.features.get("cntry_name"));
|
||||
|
||||
if (prevPoly!=null){
|
||||
prevPoly = prevPoly.union(fpoly.p);
|
||||
}
|
||||
else
|
||||
prevPoly = fpoly.p;
|
||||
|
||||
// if ((""+fpoly.features).contains("United States"))
|
||||
// System.out.println("centroid:"+fpoly.p.getCentroid()+" now "+prevPoly.getCentroid());
|
||||
|
||||
polymap.put(fpoly.features.get("cntry_name"),prevPoly);
|
||||
}
|
||||
|
||||
for (String key:polymap.keySet()){
|
||||
Point centroid = polymap.get(key).getCentroid();
|
||||
System.out.println(centroid.getX()+","+centroid.getY()+","+key);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -193,19 +193,23 @@ public class TestXYExtractionAlgorithm {
|
|||
config.setParam("DatabaseDriver","org.postgresql.Driver");
|
||||
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/EGIP");
|
||||
|
||||
config.setParam("Layer","http://repoigg.services.iit.cnr.it/geoserver/IGG/ows?service=WFS&version=1.1.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&TYPENAME=IGG:HeatFlowUnit");
|
||||
//config.setParam("Layer","http://repoigg.services.iit.cnr.it/geoserver/IGG/ows?service=WFS&version=1.1.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&TYPENAME=IGG:HeatFlowUnit");
|
||||
// config.setParam("Layer","http://egip.brgm-rec.fr/wxs/?service=WFS&version=1.1.0&request=GetFeature&typeName=TemperatureUnit&srsName=EPSG:4326");
|
||||
|
||||
config.setParam("Layer","http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/ows?service=wfs&version=1.0.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&TYPENAME=aquamaps:worldborders");
|
||||
|
||||
// https://issue.imarine.research-infrastructures.eu/raw-attachment/ticket/3082/gifgeomap.gif
|
||||
config.setParam("Z","0");
|
||||
config.setParam("TimeIndex","0");
|
||||
config.setParam("BBox_LowerLeftLat","0");
|
||||
config.setParam("BBox_LowerLeftLat","-90");
|
||||
config.setParam("BBox_LowerLeftLong","-180");
|
||||
config.setParam("BBox_UpperRightLat","90");
|
||||
config.setParam("BBox_UpperRightLong","180");
|
||||
config.setParam("XResolution","0.3");
|
||||
config.setParam("YResolution","0.3");
|
||||
config.setParam("OutputTableName","testextraction3");
|
||||
config.setParam("OutputTableLabel","testextraction3");
|
||||
// config.setParam("XResolution","0.3");
|
||||
// config.setParam("YResolution","0.3");
|
||||
config.setParam("XResolution","0.5");
|
||||
config.setParam("YResolution","0.5");
|
||||
config.setParam("OutputTableName","testextraction4");
|
||||
config.setParam("OutputTableLabel","testextraction4");
|
||||
|
||||
return config;
|
||||
}
|
||||
|
|
|
@ -8,8 +8,11 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
|||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
|
||||
import org.gcube.dataanalysis.geo.connectors.asc.AscDataExplorer;
|
||||
import org.gcube.dataanalysis.geo.connectors.asc.AscRasterWriter;
|
||||
import org.gcube.dataanalysis.geo.connectors.geotiff.GeoTiff;
|
||||
import org.gcube.dataanalysis.geo.connectors.netcdf.NetCDF;
|
||||
import org.gcube.dataanalysis.geo.connectors.table.Table;
|
||||
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
|
||||
import org.gcube.dataanalysis.geo.connectors.wcs.WCS;
|
||||
import org.gcube.dataanalysis.geo.connectors.wfs.WFS;
|
||||
import org.gcube.dataanalysis.geo.utils.MapUtils;
|
||||
|
@ -18,80 +21,46 @@ import org.gcube.dataanalysis.geo.utils.VectorOperations;
|
|||
public class TestXYExtractionConnectors {
|
||||
|
||||
static String[] urlToTest3 = {
|
||||
// "http://geoserver3.d4science.research-infrastructures.eu/geoserver"
|
||||
// "http://geoserver2.d4science.research-infrastructures.eu/geoserver"
|
||||
"http://www.fao.org/figis/geoserver/species/ows"
|
||||
};
|
||||
// "http://geoserver3.d4science.research-infrastructures.eu/geoserver"
|
||||
// "http://geoserver2.d4science.research-infrastructures.eu/geoserver"
|
||||
"http://www.fao.org/figis/geoserver/species/ows" };
|
||||
|
||||
static String[] layernamesTest3 = {
|
||||
// "lsoleasolea20121217184934494cet"
|
||||
// "lcarcharodoncarcharias20121217173706733cet"
|
||||
// "lxiphiasgladius20130410182141778cest"
|
||||
// "SPECIES_DIST_BIB"
|
||||
|
||||
"SPECIES_DIST_SWO"
|
||||
};
|
||||
// "lsoleasolea20121217184934494cet"
|
||||
// "lcarcharodoncarcharias20121217173706733cet"
|
||||
// "lxiphiasgladius20130410182141778cest"
|
||||
// "SPECIES_DIST_BIB"
|
||||
|
||||
static String[] urlToTest1 = {
|
||||
"http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridv_OCEANS_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_1366211498692.nc",
|
||||
};
|
||||
"SPECIES_DIST_SWO" };
|
||||
|
||||
static String[] layernamesTest1= {
|
||||
"vomecrty"
|
||||
};
|
||||
static String[] urlToTest1 = { "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridv_OCEANS_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_1366211498692.nc", };
|
||||
|
||||
|
||||
|
||||
static String[] urlToTest2 = {
|
||||
"http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/WOA2005TemperatureAnnual_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_.nc"
|
||||
};
|
||||
static String[] layernamesTest1 = { "vomecrty" };
|
||||
|
||||
|
||||
static String[] layernamesTest2 = {
|
||||
"t00an1"
|
||||
};
|
||||
static String[] urlToTest2 = { "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/WOA2005TemperatureAnnual_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_.nc" };
|
||||
|
||||
static String[] urlToTest = {
|
||||
"https://dl.dropboxusercontent.com/u/12809149/layer1.asc",
|
||||
"http://thredds.research-infrastructures.eu/thredds/fileServer/public/netcdf/ph.asc",
|
||||
"http://thredds.research-infrastructures.eu/thredds/fileServer/public/netcdf/calcite.asc",
|
||||
"https://dl.dropboxusercontent.com/u/12809149/wind1.tif",
|
||||
"http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/WOA2005TemperatureAnnual_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_.nc",
|
||||
"http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/salinity_annual_1deg_ENVIRONMENT_OCEANS_.nc",
|
||||
"http://thredds.d4science.org/thredds/fileServer/public/netcdf/global-reanalysis-phys-001-004-b-ref-fr-mjm95-icemod_ENVIRONMENT_OCEANS_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_1366211441189.nc",
|
||||
"http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/CERSAT-GLO-CLIM_WIND_L4-OBS_FULL_TIME_SERIE_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_1366217956317.nc",
|
||||
"http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/phosphate_seasonal_5deg_ENVIRONMENT_BIOTA_.nc",
|
||||
"http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-analysis-bio-001-008-_a_BIOTA_ENVIRONMENT_1366217546908.nc",
|
||||
"http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/dissolved_oxygen_annual_1deg_ENVIRONMENT_BIOTA_.nc",
|
||||
"http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridv_OCEANS_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_1366211498692.nc",
|
||||
"http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/nitrate_seasonal_5deg_ENVIRONMENT_BIOTA_.nc",
|
||||
"http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-analysis-bio-001-008-a_BIOTA_ENVIRONMENT_1366217608283.nc",
|
||||
"http://thredds.research-infrastructures.eu/thredds/fileServer/public/netcdf/cloudmean.asc",
|
||||
"http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/wcs/wcs?service=wcs&version=1.0.0&request=GetCoverage&coverage=aquamaps:WorldClimBio2&CRS=EPSG:4326&bbox=-180,0,180,90&width=1&height=1&format=geotiff&RESPONSE_CRS=EPSG:4326",
|
||||
"http://geoserver2.d4science.research-infrastructures.eu/geoserver"
|
||||
|
||||
};
|
||||
|
||||
static String[] layernamesTest = {
|
||||
"layer1",
|
||||
"ph",
|
||||
"calcite",
|
||||
"wind",
|
||||
"t00an1",
|
||||
"s_sd",
|
||||
"iicevelu",
|
||||
"wind_speed",
|
||||
"p_mn",
|
||||
"CHL",
|
||||
"o_mn",
|
||||
"vomecrty",
|
||||
"n_mn",
|
||||
"PHYC",
|
||||
"cloud",
|
||||
"aquamaps:WorldClimBio2",
|
||||
"lxiphiasgladius20130410182141778cest"
|
||||
};
|
||||
static String[] layernamesTest2 = { "t00an1" };
|
||||
|
||||
static String[] urlToTest5 = { "./maxent3719990c-7998-4859-9dca-4b0a792f9d2f/layer1.asc" };
|
||||
|
||||
static String[] layernamesTest5 = { "layer1" };
|
||||
|
||||
static String[] urlToTest6 = { "table" };
|
||||
|
||||
static String[] layernamesTest6 = { "table" };
|
||||
|
||||
static String[] urlToTest = { "tableeez" };
|
||||
|
||||
static String[] layernamesTest = { "tableeez" };
|
||||
|
||||
static String[] urlToTest_ = { "https://dl.dropboxusercontent.com/u/12809149/layer1.asc", "http://thredds.research-infrastructures.eu/thredds/fileServer/public/netcdf/ph.asc", "http://thredds.research-infrastructures.eu/thredds/fileServer/public/netcdf/calcite.asc",
|
||||
"https://dl.dropboxusercontent.com/u/12809149/wind1.tif",
|
||||
"http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/WOA2005TemperatureAnnual_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_.nc", "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/salinity_annual_1deg_ENVIRONMENT_OCEANS_.nc", "http://thredds.d4science.org/thredds/fileServer/public/netcdf/global-reanalysis-phys-001-004-b-ref-fr-mjm95-icemod_ENVIRONMENT_OCEANS_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_1366211441189.nc", "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/CERSAT-GLO-CLIM_WIND_L4-OBS_FULL_TIME_SERIE_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_1366217956317.nc", "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/phosphate_seasonal_5deg_ENVIRONMENT_BIOTA_.nc", "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-analysis-bio-001-008-_a_BIOTA_ENVIRONMENT_1366217546908.nc", "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/dissolved_oxygen_annual_1deg_ENVIRONMENT_BIOTA_.nc", "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridv_OCEANS_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_1366211498692.nc", "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/nitrate_seasonal_5deg_ENVIRONMENT_BIOTA_.nc", "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-analysis-bio-001-008-a_BIOTA_ENVIRONMENT_1366217608283.nc", "http://thredds.research-infrastructures.eu/thredds/fileServer/public/netcdf/cloudmean.asc", "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/wcs/wcs?service=wcs&version=1.0.0&request=GetCoverage&coverage=aquamaps:WorldClimBio2&CRS=EPSG:4326&bbox=-180,0,180,90&width=1&height=1&format=geotiff&RESPONSE_CRS=EPSG:4326", "http://geoserver2.d4science.research-infrastructures.eu/geoserver"
|
||||
|
||||
};
|
||||
|
||||
static String[] layernamesTest_ = { "layer1", "ph", "calcite", "wind", "t00an1", "s_sd", "iicevelu", "wind_speed", "p_mn", "CHL", "o_mn", "vomecrty", "n_mn", "PHYC", "cloud", "aquamaps:WorldClimBio2", "lxiphiasgladius20130410182141778cest" };
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
AnalysisLogger.setLogger("./cfg/" + AlgorithmConfiguration.defaultLoggerFile);
|
||||
AlgorithmConfiguration config = new AlgorithmConfiguration();
|
||||
|
@ -103,48 +72,69 @@ public class TestXYExtractionConnectors {
|
|||
config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
|
||||
config.setParam("DatabaseDriver", "org.postgresql.Driver");
|
||||
config.setGcubeScope("/gcube/devsec/devVRE");
|
||||
|
||||
FileWriter fw = new FileWriter(new File("mapsconnectors.txt"));
|
||||
for (int t=0;t<urlToTest.length;t++){
|
||||
|
||||
for (int t = 0; t < urlToTest.length; t++) {
|
||||
|
||||
String layerURL = urlToTest[t];
|
||||
String layerName = layernamesTest[t];
|
||||
AnalysisLogger.getLogger().debug("Processing Layer: "+layerURL);
|
||||
AnalysisLogger.getLogger().debug("Processing Layer: " + layerURL);
|
||||
List<Double> values = null;
|
||||
double step = 2d;
|
||||
List<Tuple<Double>> tuples = VectorOperations.generateCoordinateTripletsInBoundingBox(-180,180,-90,90, 0, step, step);
|
||||
|
||||
if (layerURL.endsWith(".nc")){
|
||||
NetCDF geotiff = new NetCDF(layerURL,layerName);
|
||||
values = geotiff.getFeaturesInTimeInstantAndArea(layerURL,layerName, 0, tuples, -180, 180, -90, 90);
|
||||
}
|
||||
else if (layerURL.endsWith(".asc")){
|
||||
double res = 0.5d;
|
||||
List<Tuple<Double>> tuples = VectorOperations.generateCoordinateTripletsInBoundingBox(-180, 180, -90, 90, 0, res, res);
|
||||
|
||||
if (layerURL.endsWith(".nc")) {
|
||||
NetCDF geotiff = new NetCDF(layerURL, layerName);
|
||||
values = geotiff.getFeaturesInTimeInstantAndArea(layerURL, layerName, 0, tuples, -180, 180, -90, 90);
|
||||
} else if (layerURL.endsWith(".asc")) {
|
||||
AscDataExplorer asc = new AscDataExplorer(layerURL);
|
||||
values = asc.retrieveDataFromAsc(tuples, 0);
|
||||
}
|
||||
else if (layerURL.endsWith("tif")){
|
||||
} else if (layerURL.endsWith("tif")) {
|
||||
GeoTiff geotiff = new GeoTiff(config);
|
||||
values = geotiff.getFeaturesInTimeInstantAndArea(layerURL, layerName, 0, tuples, -180, 180, -90, 90);
|
||||
}
|
||||
else if (layerURL.contains("wcs")){
|
||||
WCS wcs = new WCS(config,layerURL);
|
||||
values=wcs.getFeaturesInTimeInstantAndArea(layerURL,layerName, 0, tuples, -180, 180, -90, 90);
|
||||
}
|
||||
else if (layerURL.contains("geoserver")){
|
||||
values = geotiff.getFeaturesInTimeInstantAndArea(layerURL, layerName, 0, tuples, -180, 180, -90, 90);
|
||||
} else if (layerURL.contains("wcs")) {
|
||||
WCS wcs = new WCS(config, layerURL);
|
||||
values = wcs.getFeaturesInTimeInstantAndArea(layerURL, layerName, 0, tuples, -180, 180, -90, 90);
|
||||
} else if (layerURL.contains("geoserver")) {
|
||||
WFS wfs = new WFS();
|
||||
values=wfs.getFeaturesInTimeInstantAndArea(layerURL,layerName, 0, tuples, -180, 180, -90, 90);
|
||||
values = wfs.getFeaturesInTimeInstantAndArea(layerURL, layerName, 0, tuples, -180, 180, -90, 90);
|
||||
} else if (layerURL.equals("table")) {
|
||||
config.setParam(TableMatrixRepresentation.tableNameParameter, "testextraction4");
|
||||
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "approx_x");
|
||||
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "approx_y");
|
||||
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "time");
|
||||
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "f_cat");
|
||||
config.setParam(TableMatrixRepresentation.filterParameter, " ");
|
||||
|
||||
Table connector = new Table(config, res);
|
||||
values = connector.getFeaturesInTimeInstantAndArea(null, null, 0, tuples, -180, 180, -90, 90);
|
||||
} else if (layerURL.contains("tableeez")) {
|
||||
config.setParam("DatabaseUserName", "postgres");
|
||||
config.setParam("DatabasePassword", "d4science2");
|
||||
config.setParam("DatabaseURL", "jdbc:postgresql://geoserver-dev.d4science-ii.research-infrastructures.eu/aquamapsdb");
|
||||
config.setParam(TableMatrixRepresentation.tableNameParameter, "\"WorldEEZv72012HR\"");
|
||||
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "longitude");
|
||||
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "latitude");
|
||||
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "");
|
||||
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "eez_id");
|
||||
config.setParam(TableMatrixRepresentation.filterParameter, " ");
|
||||
|
||||
Table connector = new Table(config, res);
|
||||
values = connector.getFeaturesInTimeInstantAndArea(null, null, 0, tuples, -180, 180, -90, 90);
|
||||
}
|
||||
|
||||
|
||||
double[][] matrix = VectorOperations.vectorToMatix(values, -180,180,-90,90,step, step);
|
||||
|
||||
// System.out.println(MapUtils.globalASCIIMap(values,step,step));
|
||||
double[][] matrix = VectorOperations.vectorToMatix(values, -180, 180, -90, 90, res, res);
|
||||
|
||||
// System.out.println(MapUtils.globalASCIIMap(values,step,step));
|
||||
System.out.println(MapUtils.globalASCIIMap(matrix));
|
||||
fw.write(MapUtils.globalASCIIMap(matrix));
|
||||
|
||||
|
||||
AscRasterWriter writer = new AscRasterWriter();
|
||||
writer.writeRasterInvertYAxis("testraster.asc", matrix, -180, -90, res, "-9999");
|
||||
|
||||
}
|
||||
|
||||
|
||||
fw.close();
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -6,6 +6,7 @@ import java.util.List;
|
|||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
|
||||
|
||||
import scala.actors.threadpool.Arrays;
|
||||
import ucar.ma2.ArrayByte;
|
||||
import ucar.ma2.ArrayDouble;
|
||||
import ucar.ma2.IndexIterator;
|
||||
|
@ -56,15 +57,20 @@ public class VectorOperations {
|
|||
**/
|
||||
// suggestion: given a resolution R, give Math.sqrt(2)*R/2=0.7*R as tolerance
|
||||
public static List<Double> assignPointsValuesToGrid(List<Tuple<Double>> grid3d, int gridTimeInstant, List<Tuple<Double>> coordinates5d, double tolerance) {
|
||||
List<Double> valuesForGrid = new ArrayList<Double>();
|
||||
// List<Double> valuesForGrid = new ArrayList<Double>();
|
||||
int gridSize = grid3d.size();
|
||||
Double [] valuesForGridd = new Double[gridSize];
|
||||
|
||||
for (int i = 0; i < gridSize; i++) {
|
||||
valuesForGrid.add(Double.NaN);
|
||||
// valuesForGrid.add(Double.NaN);
|
||||
valuesForGridd [i] = Double.NaN;
|
||||
}
|
||||
|
||||
// AnalysisLogger.getLogger().debug("Grid contains: "+grid3d.size()+" values");
|
||||
// AnalysisLogger.getLogger().debug("Dataset contains: "+coordinates5d.size()+" values");
|
||||
int foundmatches = 0;
|
||||
long count = 0;
|
||||
AnalysisLogger.getLogger().debug("Assigning : "+coordinates5d.size()+" elements");
|
||||
for (Tuple<Double> coord5d : coordinates5d) {
|
||||
double rx = coord5d.getElements().get(0);
|
||||
double ry = coord5d.getElements().get(1);
|
||||
|
@ -81,23 +87,31 @@ public class VectorOperations {
|
|||
|
||||
if (d <= tolerance) {
|
||||
// AnalysisLogger.getLogger().debug("Association: distance between grid:("+x+","+y+","+z+","+gridTimeInstant+") and point:("+rx+","+ry+","+rz+","+rt+") is "+d);
|
||||
valuesForGrid.set(gridIdx, rvalue);
|
||||
// valuesForGrid.set(gridIdx, rvalue);
|
||||
valuesForGridd[gridIdx]=rvalue;
|
||||
foundmatches++;
|
||||
}
|
||||
gridIdx++;
|
||||
}
|
||||
count++;
|
||||
if (count%50000==0)
|
||||
AnalysisLogger.getLogger().debug("Vector Operations Assigned: "+count+" elements");
|
||||
}
|
||||
|
||||
AnalysisLogger.getLogger().debug("Association: Found "+foundmatches+" matches between the grid of points and the coordinates");
|
||||
|
||||
List<Double> valuesForGrid = Arrays.asList(valuesForGridd);
|
||||
return valuesForGrid;
|
||||
}
|
||||
|
||||
public static List<Double> assignGridValuesToPoints2D(List<Tuple<Double>> grid3d, List<Double> gridValues, List<Tuple<Double>> coordinates4d, double tolerance) {
|
||||
|
||||
List<Double> valuesForPoints = new ArrayList<Double>();
|
||||
// List<Double> valuesForPoints = new ArrayList<Double>();
|
||||
int gridSize = coordinates4d.size();
|
||||
Double [] valuesForPoints = new Double[gridSize];
|
||||
|
||||
for (int i = 0; i < gridSize; i++) {
|
||||
valuesForPoints.add(Double.NaN);
|
||||
valuesForPoints[i]=Double.NaN;
|
||||
}
|
||||
|
||||
|
||||
|
@ -115,7 +129,7 @@ public class VectorOperations {
|
|||
double d = distance(x, y, 0, 0, rx, ry, 0, 0);
|
||||
if (d <= tolerance) {
|
||||
// AnalysisLogger.getLogger().debug("Association: distance between grid:("+x+","+y+","+z+","+gridTimeInstant+") and point:("+rx+","+ry+","+rz+","+rt+") is "+d);
|
||||
valuesForPoints.set(points, gridValues.get(gridIdx));
|
||||
valuesForPoints[points] = gridValues.get(gridIdx);
|
||||
foundmatches++;
|
||||
break;
|
||||
}
|
||||
|
@ -125,7 +139,8 @@ public class VectorOperations {
|
|||
}
|
||||
|
||||
AnalysisLogger.getLogger().debug("Association: Found "+foundmatches+" matches between the points and the grid");
|
||||
return valuesForPoints;
|
||||
|
||||
return Arrays.asList(valuesForPoints);
|
||||
}
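Note: the two assignment methods above now accumulate results in a plain Double[] that is pre-filled with NaN, written by index, and only wrapped into a List at the end. The diff imports scala.actors.threadpool.Arrays for the asList call; the sketch below uses java.util.Arrays, which behaves the same for this purpose. Keep in mind that Arrays.asList returns a fixed-size view backed by the array, so callers must not add or remove elements. A minimal sketch with hypothetical values:

import java.util.Arrays;
import java.util.List;

public class GridBufferSketch {
	public static void main(String[] args) {
		int gridSize = 5;                        // hypothetical number of grid cells
		Double[] valuesForGrid = new Double[gridSize];
		for (int i = 0; i < gridSize; i++)
			valuesForGrid[i] = Double.NaN;       // default: no match

		valuesForGrid[2] = 7.1;                  // a matched cell written by index

		List<Double> asList = Arrays.asList(valuesForGrid);
		System.out.println(asList);              // [NaN, NaN, 7.1, NaN, NaN]
	}
}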
|
||||
|
||||
public static List<Double> assignGridValuesToPoints(List<Tuple<Double>> grid3d, int gridTimeInstant, List<Double> gridValues, List<Tuple<Double>> coordinates4d, double tolerance) {
|
||||
|
|