git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineGeoSpatialExtension@92161 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
parent
14ad264370
commit
677cabb537
|
@ -1,128 +0,0 @@
|
|||
package org.gcube.dataanalysis.geo.insertion;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
|
||||
import org.gcube.dataanalysis.geo.retrieval.GeoIntersector;
|
||||
import org.gcube.dataanalysis.geo.utils.CSquareCodesConverter;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
/**
|
||||
* transforms a raster map into a table
|
||||
*
|
||||
* @author coro
|
||||
*
|
||||
*/
|
||||
/**
 * Transforms a raster map into a database table.
 *
 * Persists a 2D matrix of values, spread over a geographic bounding box at a
 * fixed resolution, into a newly created table: one row per grid cell holding
 * the cell's CSquare code, its (x, y, z) coordinates and its value (the
 * "probability" column).
 *
 * @author coro
 */
public class RasterTable {

	// Raster values to persist; cell-to-coordinate association is delegated to
	// GeoIntersector.associateValueToCoordinates, so the exact row/column
	// orientation is defined there (not visible in this file).
	private double valuesMatrix[][];
	// Bounding box of the raster: x1..x2 horizontally, y1..y2 vertically,
	// at a single constant depth/elevation z.
	double x1;
	double x2;
	double y1;
	double y2;
	double z;
	// Cell size along each axis, in the same units as the bounding box.
	double xResolution;
	double yResolution;

	// Carries the database connection parameters used by DatabaseUtils.initDBSession.
	private AlgorithmConfiguration configuration;
	// Per-instance random table name: "rstr" + UUID with dashes stripped,
	// keeping it a valid unquoted SQL identifier.
	private String tablename = "rstr" + ("" + UUID.randomUUID()).replace("-", "");
	// %1$s is substituted with the table name via String.format.
	static String createTableStatement = "CREATE TABLE %1$s (id serial, csquarecode character varying, x real, y real, z real, probability real)";
	// Column list used when building the bulk INSERT statements.
	static String columnsnames = "csquarecode, x , y , z , probability";
	// Public column-name constants so callers can query the generated table.
	public static String csquareColumn = "csquarecode";
	public static String probabilityColumn = "probability";
	public static String idColumn = "id";

	/**
	 * Returns the name of the (possibly not yet created) destination table.
	 */
	public String getTablename() {
		return tablename;
	}

	/**
	 * Overrides the randomly generated destination table name.
	 *
	 * @param tablename the table name to use for subsequent dump/delete calls
	 */
	public void setTablename(String tablename) {
		this.tablename = tablename;
	}

	/**
	 * Builds a raster-to-table converter.
	 *
	 * @param x1            minimum x of the bounding box
	 * @param x2            maximum x of the bounding box
	 * @param y1            minimum y of the bounding box
	 * @param y2            maximum y of the bounding box
	 * @param z             constant z value assigned to every cell
	 * @param xResolution   cell width
	 * @param yResolution   cell height
	 * @param values        matrix of values to store
	 * @param configuration database connection configuration
	 */
	public RasterTable(double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution, double[][] values, AlgorithmConfiguration configuration) {
		this.valuesMatrix = values;
		this.configuration = configuration;
		this.x1 = x1;
		this.x2 = x2;
		this.y1 = y1;
		this.y2 = y2;
		this.z = z;
		this.xResolution = xResolution;
		this.yResolution = yResolution;
	}

	/**
	 * Creates the destination table and bulk-inserts one row per raster cell.
	 *
	 * Steps: open a DB session, CREATE TABLE, generate the (x, y, z) coordinate
	 * triplets for the bounding box, associate each triplet with its matrix
	 * value, then stream multi-row INSERTs in batches of roughly 5000 tuples.
	 * NaN values are stored as 0. Exceptions are caught and logged (the method
	 * does not rethrow); the connection is always closed in the finally block.
	 */
	public void dumpGeoTable() {

		// open the connection to the db
		SessionFactory dbconnection = DatabaseUtils.initDBSession(configuration);
		try {
			AnalysisLogger.getLogger().debug("Database Initialized");
			// create a table
			DatabaseFactory.executeSQLUpdate(String.format(createTableStatement, tablename), dbconnection);
			AnalysisLogger.getLogger().debug("Table " + tablename + " created");
			// one Tuple<Double> per grid cell, covering the whole bounding box
			List<Tuple<Double>> coordinates = GeoIntersector.generateCoordinateTriplets(x1, x2, y1, y2, z, xResolution, yResolution);
			int triplets = coordinates.size();
			AnalysisLogger.getLogger().debug("Generated " + triplets + " coordinates triples");
			// values list is parallel to the coordinates list (same index = same cell)
			List<Double> values = GeoIntersector.associateValueToCoordinates(coordinates, valuesMatrix);
			AnalysisLogger.getLogger().debug("Association to values completed - fulfilling buffer");
			// for each element in the matrix, build the corresponding csquare code
			StringBuffer sb = new StringBuffer();
			for (int i = 0; i < triplets; i++) {
				// save the string in a buffer
				Tuple<Double> cset = coordinates.get(i);
				double x = cset.getElements().get(0);
				double y = cset.getElements().get(1);
				// NOTE(review): arguments are passed (y, x) — presumably the
				// converter expects latitude first; confirm against
				// CSquareCodesConverter.convertAtResolution's signature.
				String csquare = CSquareCodesConverter.convertAtResolution(y,x, xResolution);
				Double value = values.get(i);
				//we do not use NaNs in this case every value will be filled
				if (value.isNaN())
					value = 0d;

				// One SQL tuple per cell. The value is quoted ('...') even though
				// the column is real; PostgreSQL accepts quoted numeric literals.
				sb.append("('" + csquare + "'," + x + "," + y + "," + z + ",'" + value + "')");

				// Flush the buffer every 5000 tuples to bound statement size.
				// NOTE(review): the condition fires at i == 0 too, so the very
				// first batch contains a single row — harmless but inefficient;
				// would need (i + 1) % 5000 == 0 (and comma-logic review) to fix.
				if (i % 5000 == 0) {
					// AnalysisLogger.getLogger().debug("Partial Inserting Buffer of " + sb.length() + " Values");
					String insertStatement = DatabaseUtils.insertFromBuffer(tablename, columnsnames, sb);
					DatabaseFactory.executeSQLUpdate(insertStatement, dbconnection);
					// AnalysisLogger.getLogger().debug("Partial Insertion completed with Success!");
					sb = new StringBuffer();
				} else if (i < triplets - 1)
					// separate tuples inside one multi-row INSERT; no trailing
					// comma after the last tuple
					sb.append(",");
			}

			AnalysisLogger.getLogger().debug("Inserting Final Buffer of " + sb.length() + " Values");
			// save all the strings on the table
			if (sb.length() > 0) {
				String insertStatement = DatabaseUtils.insertFromBuffer(tablename, columnsnames, sb);
				DatabaseFactory.executeSQLUpdate(insertStatement, dbconnection);
				AnalysisLogger.getLogger().debug("Insertion completed with Success!");
			}
		} catch (Exception e) {
			// Errors are swallowed after logging: callers get no signal that the
			// dump failed other than the log output.
			e.printStackTrace();
			AnalysisLogger.getLogger().debug("Error in dumping table: " + e.getLocalizedMessage());
		} finally {
			// close the connection
			DatabaseUtils.closeDBConnection(dbconnection);
			AnalysisLogger.getLogger().debug("Raster Geo Table DB closed!");
		}
	}

	/**
	 * Drops the destination table, opening and closing its own DB session.
	 * Failures are logged to stderr and swallowed (best-effort cleanup).
	 */
	public void deleteTable() {
		SessionFactory dbconnection = null;
		try {
			dbconnection = DatabaseUtils.initDBSession(configuration);
			DatabaseFactory.executeSQLUpdate(DatabaseUtils.dropTableStatement(tablename), dbconnection);
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			// closeDBConnection is reached even if initDBSession failed
			// (dbconnection may still be null at this point).
			DatabaseUtils.closeDBConnection(dbconnection);
		}
	}

}
|
|
@ -6,10 +6,11 @@ import java.util.Locale;
|
|||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.geo.connectors.netcdf.NetCDFDataExplorer;
|
||||
import org.gcube.dataanalysis.geo.infrastructure.GeoNetworkInspector;
|
||||
import org.gcube.dataanalysis.geo.meta.NetCDFMetadata;
|
||||
import org.gcube.dataanalysis.geo.meta.OGCFormatter;
|
||||
import org.gcube.dataanalysis.geo.meta.features.FeaturesManager;
|
||||
import org.gcube.dataanalysis.geo.utils.ThreddsDataExplorer;
|
||||
import org.gcube.dataanalysis.geo.utils.ThreddsExplorer;
|
||||
import org.opengis.metadata.Metadata;
|
||||
|
||||
import ucar.nc2.dataset.CoordinateAxis;
|
||||
|
@ -19,18 +20,18 @@ import ucar.nc2.units.DateRange;
|
|||
|
||||
public class ThreddsFetcher {
|
||||
|
||||
private FeaturesManager featurer;
|
||||
private GeoNetworkInspector featurer;
|
||||
public static String NetCDFDateFormat = "time: E MMM dd HH:mm:ss zzz yyyy";
|
||||
public static String HumanDateFormat = "MM-dd-yy HH:mm";
|
||||
|
||||
public ThreddsFetcher(String scope) {
|
||||
featurer = new FeaturesManager();
|
||||
featurer = new GeoNetworkInspector();
|
||||
featurer.setScope(scope);
|
||||
}
|
||||
|
||||
public void fetch(String threddsCatalogURL) throws Exception {
|
||||
|
||||
List<String> filesURL = ThreddsDataExplorer.getFiles(threddsCatalogURL);
|
||||
List<String> filesURL = ThreddsExplorer.getFiles(threddsCatalogURL);
|
||||
for (String filename : filesURL) {
|
||||
if (!filename.endsWith(".nc"))
|
||||
continue;
|
||||
|
@ -38,7 +39,7 @@ public class ThreddsFetcher {
|
|||
// continue;
|
||||
|
||||
String url = OGCFormatter.getOpenDapURL(threddsCatalogURL, filename);
|
||||
if (ThreddsDataExplorer.isGridDataset(url)) {
|
||||
if (ThreddsExplorer.isGridDataset(url)) {
|
||||
// retrieve information
|
||||
GridDataset gds = ucar.nc2.dt.grid.GridDataset.open(url);
|
||||
|
||||
|
@ -110,10 +111,10 @@ public class ThreddsFetcher {
|
|||
metadataInserter.setAbstractField(generateAbstractField(gdt.getFullName(), filename, description, gdt.getUnitsString().trim(), hStartDate, hEndDate, duration, resolution, numberOfDimensions, gds.getTitle(), gds.getDescription()));
|
||||
metadataInserter.setResolution(Math.max(resolutionX, resolutionY));
|
||||
// set Bounding box
|
||||
double minX = ThreddsDataExplorer.getMinX(gdt.getCoordinateSystem());
|
||||
double maxX = ThreddsDataExplorer.getMaxX(gdt.getCoordinateSystem());
|
||||
double minY = ThreddsDataExplorer.getMinY(gdt.getCoordinateSystem());
|
||||
double maxY = ThreddsDataExplorer.getMaxY(gdt.getCoordinateSystem());
|
||||
double minX = NetCDFDataExplorer.getMinX(gdt.getCoordinateSystem());
|
||||
double maxX = NetCDFDataExplorer.getMaxX(gdt.getCoordinateSystem());
|
||||
double minY = NetCDFDataExplorer.getMinY(gdt.getCoordinateSystem());
|
||||
double maxY = NetCDFDataExplorer.getMaxY(gdt.getCoordinateSystem());
|
||||
|
||||
if (gds.getTitle().toUpperCase().contains("WORLD OCEAN ATLAS"))
|
||||
{
|
||||
|
|
Loading…
Reference in New Issue