Adjusted layer projections. Layers are now aligned.
git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineGeoSpatialExtension@100254 82a268e6-3cf1-43bd-a215-b396298e98cf
parent b1bac19530
commit b6c2356e6d
@@ -45,7 +45,10 @@ public abstract class MapsCreator extends StandardLocalExternalAlgorithm {

    private static String columnsInfoNames = " geomid, x , y, info, the_geom";

    private static String addGeometryColumn = "Select AddGeometryColumn('%1$s','the_geom',4326,'POLYGON',2);";
    private static String addPointsColumn = "Select AddGeometryColumn('%1$s','the_geom',4326,'POINT',2);";
    static String makeSquare = "ST_GeomFromText('POLYGON((%1$s ,%2$s, %3$s, %4$s, %1$s))',4326)";
    static String makePoint = "ST_GeomFromText('POINT(%1$s %2$s)',4326)";
    // static String makePoint = "ST_SetSRID(ST_MakePoint((%1$s,%2$s),4326)";

    //changeable parameters for application purposes
    String datastore = "";
@@ -112,7 +115,8 @@ public abstract class MapsCreator extends StandardLocalExternalAlgorithm {
    if (config.getParam(xParameter) != null && config.getParam(yParameter) != null) {
        log("..from coordinates");
        // select the points from the SM DB up to a maximum of 190000 points
        points = DatabaseFactory.executeSQLQuery("select " + config.getParam(xParameter) + "," + config.getParam(yParameter) + "," + infoPar + " from " + config.getParam(inputTableParameter) + " limit " + maxNPoints, smdbconnection);
        String q = "select " + config.getParam(xParameter) + "," + config.getParam(yParameter) + "," + infoPar + " from " + config.getParam(inputTableParameter) + " limit " + maxNPoints;
        points = DatabaseFactory.executeSQLQuery(q, smdbconnection);
    }
    //points from csquares
    else if (config.getParam(csquareParameter) != null) {
@@ -137,8 +141,9 @@ public abstract class MapsCreator extends StandardLocalExternalAlgorithm {
        log("Points built from csquares!");
    }
    //GIS Table creation
    log("Creating GIS table");

    String gisTableName = "stat" + UUID.randomUUID().toString().replace("-", "");
    log("Creating GIS table " + gisTableName);
    status = 30;
    String createTable$ = String.format(createProbTable, gisTableName);
    String columnNames$ = columnsProbNames;
@@ -155,7 +160,11 @@ public abstract class MapsCreator extends StandardLocalExternalAlgorithm {
    }
    //table creation
    DatabaseFactory.executeSQLUpdate(createTable$, gisdbconnection);
    DatabaseFactory.executeSQLQuery(String.format(addGeometryColumn, gisTableName), gisdbconnection);

    if (resolution > 0)
        DatabaseFactory.executeSQLQuery(String.format(addGeometryColumn, gisTableName), gisdbconnection);
    else
        DatabaseFactory.executeSQLQuery(String.format(addPointsColumn, gisTableName), gisdbconnection);
    log("Fulfilling elements");
    log("Resolution:" + resolution);
    //points fulfilling
@@ -168,11 +177,18 @@ public abstract class MapsCreator extends StandardLocalExternalAlgorithm {
    String probS = "" + elements[2];
    double x1 = x - resolution;
    double x2 = x + resolution;
    double y1 = y - resolution;
    double y2 = y + resolution;
    String square = String.format(makeSquare, "" + x1 + " " + y1, x1 + " " + y2, x2 + " " + y2, x2 + " " + y1);

    double y1 = (y) - resolution;
    double y2 = (y) + resolution;

    String geom = "";

    if (resolution == 0)
        geom = String.format(makePoint, x, y);
    else
        geom = String.format(makeSquare, "" + x1 + " " + y1, x1 + " " + y2, x2 + " " + y2, x2 + " " + y1);
    // System.out.println(square);
    String[] selements = { "" + i, "" + x, "" + y, probS, square };
    String[] selements = { "" + i, "" + x, "" + y, probS, geom };
    values.add(selements);
    i++;
    }
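
Note on the hunk above: when the resolution is 0 the new code emits a POINT instead of a degenerate square. A minimal sketch of what the two WKT templates produce, with illustrative values x=10.0, y=45.0, resolution=0.5:

    // point geometry, used when resolution == 0
    String geom = String.format(makePoint, 10.0, 45.0);
    // -> ST_GeomFromText('POINT(10.0 45.0)',4326)

    // square polygon centered on (x, y) with half-side = resolution
    String square = String.format(makeSquare, "9.5 44.5", "9.5 45.5", "10.5 45.5", "10.5 44.5");
    // -> ST_GeomFromText('POLYGON((9.5 44.5 ,9.5 45.5, 10.5 45.5, 10.5 44.5, 9.5 44.5))',4326)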
@@ -0,0 +1,414 @@
package org.gcube.dataanalysis.geo.algorithms;

import java.awt.Image;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.UUID;

import javax.imageio.ImageIO;

import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
import org.gcube.dataanalysis.geo.connectors.asc.ASC;
import org.gcube.dataanalysis.geo.matrixmodel.ASCConverter;
import org.hibernate.SessionFactory;

import density.Maxent;

public class MaxEnt4NicheModellingTransducer implements Transducerer {

    static String t = "TimeIndex";
    static String z = "Z";
    static String yLL = "BBox_LowerLeftLat";
    static String xLL = "BBox_LowerLeftLong";
    static String yUR = "BBox_UpperRightLat";
    static String xUR = "BBox_UpperRightLong";
    static String xRes = "XResolution";
    static String yRes = "YResolution";
    static String tableName = "OutputTableName";
    static String tableLabel = "OutputTableLabel";
    static String speciesLabel = "SpeciesName";
    static String OccurrencesTableNameParameter = "OccurrencesTable";
    static String LongitudeColumn = "LongitudeColumn";
    static String LatitudeColumn = "LatitudeColumn";
    static String Layers = "Layers";
    static String maxIterations = "MaxIterations";
    static String prevalence = "DefaultPrevalence";

    AlgorithmConfiguration config;

    float status;

    public int time;
    public double zValue;
    public double xResValue;
    public double yResValue;
    public String tableNameValue = "";
    public String tableLabelValue = "";
    public double BBxLL = -180;
    public double BBxUR = 180;
    public double BByLL = -90;
    public double BByUR = 90;
    private int maxIterationsValue;
    private double prevalenceValue;
    private double bestThreshold = 0;
    private double prevalenceVal = 0;
    private String variablesContributions = "";
    private String variablesPermutationsImportance = "";
    private String warnings = "";
    private File warningsFile = null;
    private File projectionFile = null;

    LinkedHashMap<String, Image> producedImages = new LinkedHashMap<String, Image>();

    public List<StatisticalType> inputs = new ArrayList<StatisticalType>();
    public LinkedHashMap<String, String> outputParameters = new LinkedHashMap<String, String>();

    private String[] layers;
    private String occurrencesTableName;
    private String speciesName;
    private String longitudeColumn;
    private String latitudeColumn;

    SessionFactory dbconnection = null;

    @Override
    public void init() throws Exception {
        AnalysisLogger.getLogger().debug("MaxEnt: Initialization");

    }

    @Override
    public String getDescription() {
        return "A Maximum-Entropy model for species habitat modeling, based on the implementation by Schapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. " +
                "In this adaptation for the D4Science infrastructure, the software can accept a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. " +
                "Also, the user can establish the bounding box and the spatial resolution (in deg) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one. " +
                "The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, the raw assigned values. " +
                "Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points to Map process).";
    }

    @Override
    public List<StatisticalType> getInputParameters() {

        // output table parameter
        IOHelper.addRandomStringInput(inputs, tableName, "The db name of the table to produce", "maxent_");
        IOHelper.addStringInput(inputs, tableLabel, "The name of the table to produce", "maxent_");
        IOHelper.addStringInput(inputs, speciesLabel, "The name of the species to model and the occurrence records refer to", "generic_species");
        IOHelper.addIntegerInput(inputs, maxIterations, "The number of learning iterations of the MaxEnt algorithm", "1000");
        IOHelper.addDoubleInput(inputs, prevalence, "A priori probability of presence at ordinary occurrence points", "0.5");

        // table parameters
        List<TableTemplates> template = new ArrayList<TableTemplates>();
        template.add(TableTemplates.OCCURRENCE_SPECIES);
        InputTable table = new InputTable(template, OccurrencesTableNameParameter, "A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets", "");
        inputs.add(table);
        ColumnType p1 = new ColumnType(OccurrencesTableNameParameter, LongitudeColumn, "The column containing longitude values", "decimallongitude", false);
        inputs.add(p1);
        ColumnType p2 = new ColumnType(OccurrencesTableNameParameter, LatitudeColumn, "The column containing latitude values", "decimallatitude", false);
        inputs.add(p2);

        IOHelper.addDoubleInput(inputs, z, "Value of Z. Default is 0, that means processing will be at surface level or at the first available Z value in the layer", "0");
        IOHelper.addIntegerInput(inputs, t, "Time Index. The default is the first time indexed dataset", "0");

        IOHelper.addDoubleInput(inputs, xRes, "Projection resolution on the X axis", "1");
        IOHelper.addDoubleInput(inputs, yRes, "Projection resolution on the Y axis", "1");

        // layers to use in the model
        PrimitiveTypesList listEnvLayers = new PrimitiveTypesList(String.class.getName(), PrimitiveTypes.STRING, Layers, "The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff)", false);
        inputs.add(listEnvLayers);

        DatabaseType.addDefaultDBPars(inputs);
        return inputs;
    }

    protected void getParameters() {

        String scope = config.getGcubeScope();
        AnalysisLogger.getLogger().debug("MaxEnt: : Externally set scope " + scope);
        if (scope == null) {
            scope = ScopeProvider.instance.get();
            AnalysisLogger.getLogger().debug("MaxEnt: : Internally set scope " + scope);
            config.setGcubeScope(scope);
        }

        // get input name and list of fields
        occurrencesTableName = IOHelper.getInputParameter(config, OccurrencesTableNameParameter);
        longitudeColumn = IOHelper.getInputParameter(config, LongitudeColumn);
        latitudeColumn = IOHelper.getInputParameter(config, LatitudeColumn);
        speciesName = IOHelper.getInputParameter(config, speciesLabel);

        // get environmental layers
        layers = IOHelper.getInputParameter(config, Layers).trim().split(AlgorithmConfiguration.getListSeparator());
        AnalysisLogger.getLogger().debug("MaxEnt: N. of Layers to take " + layers.length);

        // get time and z values
        String z$ = IOHelper.getInputParameter(config, z);
        String time$ = IOHelper.getInputParameter(config, t);

        time = ((time$ != null) && (time$.trim().length() > 0)) ? Integer.parseInt(time$) : 0;
        if (time < 0)
            time = 0;

        AnalysisLogger.getLogger().debug("MaxEnt: : Time " + time);
        zValue = 0;
        if ((z$ != null) && (z$.trim().length() > 0))
            try {
                zValue = Double.parseDouble(z$);
            } catch (Exception ee) {
            }

        AnalysisLogger.getLogger().debug("MaxEnt: : Z " + zValue);

        // get Bounding Box for the projection
        BByLL = -90;
        BBxLL = -180;
        BByUR = 90;
        BBxUR = 180;

        AnalysisLogger.getLogger().debug("MaxEnt: : yLL " + BByLL);
        AnalysisLogger.getLogger().debug("MaxEnt: : xLL " + BBxLL);
        AnalysisLogger.getLogger().debug("MaxEnt: : yUR " + BByUR);
        AnalysisLogger.getLogger().debug("MaxEnt: : xUR " + BBxUR);

        // get y and x resolutions
        yResValue = Double.parseDouble(IOHelper.getInputParameter(config, yRes));
        AnalysisLogger.getLogger().debug("MaxEnt: : yRes " + yResValue);
        xResValue = Double.parseDouble(IOHelper.getInputParameter(config, xRes));
        AnalysisLogger.getLogger().debug("MaxEnt: : xRes " + xResValue);

        // get output table value
        tableNameValue = IOHelper.getInputParameter(config, tableName);
        tableLabelValue = IOHelper.getInputParameter(config, tableLabel);
        AnalysisLogger.getLogger().debug("MaxEnt: : tableName " + tableNameValue);
        AnalysisLogger.getLogger().debug("MaxEnt: : tableLabel " + tableLabelValue);

        prevalenceValue = Double.parseDouble(IOHelper.getInputParameter(config, prevalence));
        maxIterationsValue = Integer.parseInt(IOHelper.getInputParameter(config, maxIterations));

    }

    @Override
    public void compute() throws Exception {

        Maxent me = null;
        try {
            status = 10;
            AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
            getParameters();
            AnalysisLogger.getLogger().debug("MaxEnt: parameters initialized");
            long t0 = System.currentTimeMillis();
            String localtempFolder = new File(config.getPersistencePath(), "maxent" + UUID.randomUUID()).getAbsolutePath();

            if (!new File(localtempFolder).exists())
                new File(localtempFolder).mkdir();

            String localOccurrencesFile = new File(localtempFolder, occurrencesTableName).getAbsolutePath();
            String localFinalOccurrencesFile = localOccurrencesFile + "_occ.csv";

            AnalysisLogger.getLogger().debug("MaxEnt: local occurrence file to produce " + localFinalOccurrencesFile);
            AnalysisLogger.getLogger().debug("MaxEnt: initializing connection");
            dbconnection = DatabaseUtils.initDBSession(config);

            // prepare input data
            AnalysisLogger.getLogger().debug("MaxEnt: creating local file from remote table " + occurrencesTableName);
            DatabaseUtils.createLocalFileFromRemoteTable(localOccurrencesFile, "(select " + longitudeColumn + " as longitude," + latitudeColumn + " as latitude from " + occurrencesTableName + ")", ",", config.getDatabaseUserName(), config.getDatabasePassword(), config.getDatabaseURL());
            AnalysisLogger.getLogger().debug("MaxEnt: table " + occurrencesTableName + " was dumped in file: " + localOccurrencesFile);
            // write an input file for maxent
            AnalysisLogger.getLogger().debug("MaxEnt: preparing input for maxent in file " + localFinalOccurrencesFile);
            prepareInputForMaxEnt(speciesName, localOccurrencesFile, localFinalOccurrencesFile);
            AnalysisLogger.getLogger().debug("MaxEnt: converting layers ... ");
            // convert all the layers into files
            int layersCount = 1;
            status = 30;
            for (String layer : layers) {
                ASCConverter converter = new ASCConverter(config);
                String layerfile = new File(localtempFolder, "layer" + layersCount + ".asc").getAbsolutePath();
                AnalysisLogger.getLogger().debug("MaxEnt: converting " + layer + " into " + layerfile);

                String converted = converter.convertToASC(layer, layerfile, time, BBxLL, BBxUR, BByLL, BByUR, zValue, xResValue, yResValue);
                AnalysisLogger.getLogger().debug("MaxEnt: converted into ASC file " + converted + " check: " + new File(converted).exists());
                layersCount++;
            }
            status = 70;

            AnalysisLogger.getLogger().debug("MaxEnt: executing MaxEnt");

            //call MaxEnt
            me = new Maxent(localFinalOccurrencesFile, localtempFolder, localtempFolder, maxIterationsValue, prevalenceValue, -9999);
            me.executeMaxEnt();

            AnalysisLogger.getLogger().debug("MaxEnt: OK MaxEnt!");
            try {
                AnalysisLogger.getLogger().debug("MaxEnt: Result: " + me.getResult());
            } catch (Exception e) {
                AnalysisLogger.getLogger().debug("MaxEnt: error in retrieving the result " + e.getLocalizedMessage());
            }

            bestThreshold = me.getBestThr();
            prevalenceVal = me.getPrevalence();
            variablesContributions = me.getVariablesContributions().toString();
            variablesPermutationsImportance = me.getVariablesPermutationsImportance().toString();
            warnings = me.getWarnings();

            String worldFile = me.getWorldPlot();
            String rocFile = me.getROCPlot();
            String omissionsFile = me.getOmissionPlot();

            AnalysisLogger.getLogger().debug("MaxEnt: World plot: " + worldFile);
            AnalysisLogger.getLogger().debug("MaxEnt: ROC plot: " + rocFile);
            AnalysisLogger.getLogger().debug("MaxEnt: Omission/Commission Plot: " + omissionsFile);

            producedImages.put("World Thumbnail", ImageTools.toImage(ImageIO.read(new File(worldFile))));
            producedImages.put("ROC Curve", ImageTools.toImage(ImageIO.read(new File(rocFile))));
            producedImages.put("Omission-Commission Curve", ImageTools.toImage(ImageIO.read(new File(omissionsFile))));

            if (warnings != null && warnings.trim().length() > 0) {
                warningsFile = new File(localtempFolder, "Warnings_" + tableLabelValue + ".txt");
                FileTools.saveString(warningsFile.getAbsolutePath(), warnings, true, "UTF-8");
            }

            projectionFile = new File(me.getResult());
            AnalysisLogger.getLogger().debug("MaxEnt: Best Threshold: " + bestThreshold);
            AnalysisLogger.getLogger().debug("MaxEnt: Prevalence: " + prevalenceVal);
            AnalysisLogger.getLogger().debug("MaxEnt: Variables Contribution: " + variablesContributions);
            AnalysisLogger.getLogger().debug("MaxEnt: Variables Permutations: " + variablesPermutationsImportance);
            if (warningsFile != null)
                AnalysisLogger.getLogger().debug("MaxEnt: Warnings file: " + warningsFile.getAbsolutePath() + " exists " + warningsFile.exists());
            AnalysisLogger.getLogger().debug("MaxEnt: Projection file: " + projectionFile.getAbsolutePath() + " exists " + projectionFile.exists());
            status = 80;
            AnalysisLogger.getLogger().debug("MaxEnt: Writing the table " + tableNameValue);

            //TO DO : write a table

            // me.clean();
            status = 90;
            AnalysisLogger.getLogger().debug("MaxEnt: Elapsed: Whole operation completed in " + ((double) (System.currentTimeMillis() - t0) / 1000d) + "s");
        } catch (Throwable e) {
            e.printStackTrace();
            AnalysisLogger.getLogger().debug("MaxEnt: ERROR!: " + e.getLocalizedMessage());
            throw new Exception(e.getLocalizedMessage());
        } finally {
            shutdown();
            // if (me != null)
            //     me.clean();
            status = 100;
        }

    }

    // from lon lat to species, lon, lat
    private void prepareInputForMaxEnt(String speciesname, String lonlatfile, String outputFile) throws Exception {
        BufferedReader br = null;
        BufferedWriter bw = null;
        try {
            br = new BufferedReader(new FileReader(new File(lonlatfile)));
            bw = new BufferedWriter(new FileWriter(new File(outputFile)));
            bw.write("species,longitude,latitude\n");
            String line = br.readLine();
            while (line != null) {
                bw.write(speciesname + "," + line + "\n");
                line = br.readLine();
            }

        } catch (Exception e) {
            AnalysisLogger.getLogger().debug(e);
        } finally {
            if (br != null)
                br.close();
            if (bw != null)
                bw.close();
        }
    }
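
    // For example (illustrative values): given a dumped line "12.5,44.2" and species
    // "generic_species", the method above emits "generic_species,12.5,44.2" under the
    // "species,longitude,latitude" header that MaxEnt expects for its samples file.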

    @Override
    public void shutdown() {
        AnalysisLogger.getLogger().debug("MaxEnt: Shutdown");
        if (dbconnection != null)
            dbconnection.close();
    }

    @Override
    public StatisticalType getOutput() {
        List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
        templateHspec.add(TableTemplates.GENERIC);
        OutputTable p = new OutputTable(templateHspec, tableLabelValue, tableNameValue, "Output table");
        LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();

        map.put("Best Threshold", new PrimitiveType(String.class.getName(), "" + bestThreshold, PrimitiveTypes.STRING, "Best Threshold", "Best threshold for transforming MaxEnt values into 0/1 probability assignments"));
        map.put("Estimated Prevalence", new PrimitiveType(String.class.getName(), "" + prevalenceVal, PrimitiveTypes.STRING, "Estimated Prevalence", "The a posteriori estimated prevalence of the species"));
        map.put("Variables Contributions", new PrimitiveType(String.class.getName(), variablesContributions, PrimitiveTypes.STRING, "Variables contributions", "The contribution of each variable to the MaxEnt values estimates"));
        map.put("Variables Permutations Importance", new PrimitiveType(String.class.getName(), variablesPermutationsImportance, PrimitiveTypes.STRING, "Variables Permutations Importance", "The importance of the permutations of the variables during the training"));
        if (warningsFile != null)
            map.put("Warnings", new PrimitiveType(File.class.getName(), warningsFile, PrimitiveTypes.FILE, "Warnings", "The warnings from the underlying MaxEnt model"));

        map.put("Projection File", new PrimitiveType(File.class.getName(), projectionFile, PrimitiveTypes.FILE, "Projection file", "The file containing the projection of the model"));

        try {
            AnalysisLogger.getLogger().debug("Test: " + new File(config.getConfigPath(), "testsspecies.png").getAbsolutePath());
            producedImages.put("test", ImageTools.toImage(ImageIO.read(new File(config.getConfigPath(), "testsspecies.png"))));
        } catch (IOException e) {
            e.printStackTrace();
        }

        PrimitiveType images = new PrimitiveType(LinkedHashMap.class.getName(), producedImages, PrimitiveTypes.IMAGES, "Model performance", "Model performance and projection");

        map.put("Images", images);

        map.put("OutputTable", p);
        PrimitiveType outputm = new PrimitiveType(LinkedHashMap.class.getName(), map, PrimitiveTypes.MAP, "ResultsMap", "Results Map");
        return outputm;
    }

    @Override
    public float getStatus() {
        return status;
    }

    @Override
    public INFRASTRUCTURE getInfrastructure() {
        return INFRASTRUCTURE.LOCAL;
    }

    @Override
    public void setConfiguration(AlgorithmConfiguration config) {
        this.config = config;
    }

    protected ResourceFactory resourceManager;

    public String getResourceLoad() {
        if (resourceManager == null)
            resourceManager = new ResourceFactory();
        return resourceManager.getResourceLoad(1);
    }

    public String getResources() {
        return ResourceFactory.getResources(100f);
    }
}
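
A hedged usage sketch for the new transducer (not part of the commit): the parameter names come from getInputParameters() above; the scope, paths, table name and layer reference are placeholders, and setParam, setConfigPath, setPersistencePath and setGcubeScope are assumed setters of AlgorithmConfiguration, whose matching getters appear in the code above.

    AlgorithmConfiguration config = new AlgorithmConfiguration();
    config.setConfigPath("./cfg/");          // folder containing the logger configuration
    config.setPersistencePath("./");         // working directory for the maxent temp folder
    config.setGcubeScope("/gcube/devsec");   // placeholder scope
    config.setParam("OccurrencesTable", "occurrences_table");  // placeholder table
    config.setParam("LongitudeColumn", "decimallongitude");
    config.setParam("LatitudeColumn", "decimallatitude");
    config.setParam("SpeciesName", "generic_species");
    config.setParam("MaxIterations", "1000");
    config.setParam("DefaultPrevalence", "0.5");
    config.setParam("Z", "0");
    config.setParam("TimeIndex", "0");
    config.setParam("XResolution", "1");
    config.setParam("YResolution", "1");
    config.setParam("Layers", "<layer UUID or HTTP link>");
    config.setParam("OutputTableName", "maxent_out");
    config.setParam("OutputTableLabel", "maxent_out");

    MaxEnt4NicheModellingTransducer maxent = new MaxEnt4NicheModellingTransducer();
    maxent.setConfiguration(config);
    maxent.init();
    maxent.compute();
    StatisticalType output = maxent.getOutput();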
@@ -1,5 +1,7 @@
package org.gcube.dataanalysis.geo.algorithms;

import java.io.File;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
@@ -23,6 +25,7 @@ import org.gcube.dataanalysis.geo.connectors.wfs.WFS
import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
import org.gcube.dataanalysis.geo.utils.MapUtils;

public class XYExtraction implements Transducerer {
@@ -159,6 +162,8 @@ public class XYExtraction implements Transducerer {
    outputParameters.put("Max Z value in the Layer", "" + extractor.zmax);

    double[][] matrix = extractor.extractXYGrid(layerNameValue, time, BBxLL, BBxUR, BByLL, BByUR, zValue, xResValue, yResValue);

    HashMap<Double, Map<String, String>> polygonsFeatures = null;
    if (extractor.currentconnector instanceof WFS)
        polygonsFeatures = ((WFS) extractor.currentconnector).getPolygonsFeatures();
@@ -166,6 +171,9 @@ public class XYExtraction implements Transducerer {
    AnalysisLogger.getLogger().debug("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
    AnalysisLogger.getLogger().debug("Extractor: Matrix Extracted");
    AnalysisLogger.getLogger().debug("Extractor: ****Rasterizing grid into table****");

    //TODO: Check the Raster Table to avoid writing blanks and y flipping
    status = 30;
    RasterTable raster = new RasterTable(BBxLL, BBxUR, BByLL, BByUR, zValue, time, xResValue, yResValue, matrix, polygonsFeatures, config);
    raster.setTablename(tableNameValue);
@@ -85,8 +85,7 @@ public class GeothermalDataMetadataInsertDev {

    metadataInserter.setTitle("Surface Heat Flow Map of Italy");
    metadataInserter.setAbstractField("Surface Heat Flow Contour Map of Italy");
    metadataInserter.setCustomTopics("geothermal energy","map","Italy","Energy resources","EGIP","D4Science");
    metadataInserter.setCategoryTypes("_"+TopicCategory.CLIMATOLOGY_METEOROLOGY_ATMOSPHERE.name()+"_"+"_"+TopicCategory.ENVIRONMENT.name()+"_");
    metadataInserter.setCustomTopics("geothermal energy","map","Italy","Energy resources","EGIP","IRENA","D4Science");
    metadataInserter.setCategoryTypes("_"+TopicCategory.CLIMATOLOGY_METEOROLOGY_ATMOSPHERE.name()+"_"+"_"+TopicCategory.ENVIRONMENT.name()+"_");
    metadataInserter.setResolution(0);

    metadataInserter.setLayerName("IGG:hf_1");
@@ -0,0 +1,142 @@
package org.gcube.dataanalysis.geo.batch;

import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
import org.opengis.metadata.identification.TopicCategory;

public class GeothermalDataMetadataInsertItaly {

    static String geonetworkurl = "http://geonetwork.geothermaldata.d4science.org/geonetwork";
    static String geoserverurl = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver";
    static String user = "admin";
    static String password = "d4science2014";

    public static void main(String[] args) throws Exception {

        for (int i = 0; i < titles.length; i++) {
            insertLayer(i);
        }
    }

    static String[] titles = {
            "HeatFlowUnit",
            "HeatFlowLine",
            "TemperatureLine3km",
            "TemperatureLine2km",
            "TemperatureUnit1km",
            "TemperatureUnit3km",
            "TemperatureUnit2km",
            "TemperatureLine1km",
            "Industry",
            "TrainingCenter",
            "Licences",
    };
    static String[] abstracts = {
            "Surface Heat Flow Map of Italy",
            "Surface Heat Flow Contour Map of Italy",
            "Temperature Isoline at 3 km depth (below ground level) of Italy",
            "Temperature Isoline at 2 km depth (below ground level) of Italy",
            "Temperature map at 1km depth (below ground level) of Italy",
            "The temperature map at 3 km depth of Italy has been obtained digitizing the map from scientific paper Cataldi et al. 1995",
            "Temperature map at 2km depth (below ground level) of Italy",
            "Temperature Isoline at 1 km depth",
            "Industries involved in geothermal activities refer to all companies that produce components both for power production and the direct use of heat",
            "List of education and research centres with geothermal courses and lectures: The list is in a table format and includes the type, the name, the location and the URL",
            "Exploration and production licenses and (projected) power production",
    };
    static String[] customTopics = {
            "geothermal energy, map, Italy, Energy resources, EGIP, IRENA, CNR",
            "geothermal energy, map, Italy, Energy resources, EGIP, IRENA, CNR",
            "geothermal energy, map, Italy, Energy resources, EGIP, IRENA, CNR",
            "geothermal energy, map, Italy, Energy resources, EGIP, IRENA, CNR",
            "geothermal energy, map, Italy, Energy resources, EGIP, IRENA, CNR",
            "geothermal energy, map, Italy, Energy resources, EGIP, IRENA, CNR",
            "geothermal energy, map, Italy, Energy resources, EGIP, IRENA, CNR",
            "geothermal energy, map, Italy, Energy resources, EGIP, IRENA, CNR",
            "geothermal energy, map, Italy, Energy resources, EGIP",
            "geothermal energy, map, Italy, Energy resources, EGIP",
            "geothermal energy, map, Italy, Energy resources, resources management, land management and planning, EGIP",
    };
    static String[] categoryTypes = {
            "_"+TopicCategory.CLIMATOLOGY_METEOROLOGY_ATMOSPHERE.name()+"_"+"_"+TopicCategory.ENVIRONMENT.name()+"_",
            "_"+TopicCategory.CLIMATOLOGY_METEOROLOGY_ATMOSPHERE.name()+"_"+"_"+TopicCategory.ENVIRONMENT.name()+"_",
            "_"+TopicCategory.CLIMATOLOGY_METEOROLOGY_ATMOSPHERE.name()+"_"+"_"+TopicCategory.ENVIRONMENT.name()+"_",
            "_"+TopicCategory.CLIMATOLOGY_METEOROLOGY_ATMOSPHERE.name()+"_"+"_"+TopicCategory.ENVIRONMENT.name()+"_",
            "_"+TopicCategory.CLIMATOLOGY_METEOROLOGY_ATMOSPHERE.name()+"_"+"_"+TopicCategory.ENVIRONMENT.name()+"_",
            "_"+TopicCategory.CLIMATOLOGY_METEOROLOGY_ATMOSPHERE.name()+"_"+"_"+TopicCategory.ENVIRONMENT.name()+"_",
            "_"+TopicCategory.CLIMATOLOGY_METEOROLOGY_ATMOSPHERE.name()+"_"+"_"+TopicCategory.ENVIRONMENT.name()+"_",
            "_"+TopicCategory.CLIMATOLOGY_METEOROLOGY_ATMOSPHERE.name()+"_"+"_"+TopicCategory.ENVIRONMENT.name()+"_",
            "_"+TopicCategory.GEOSCIENTIFIC_INFORMATION.name()+"_"+"_"+TopicCategory.UTILITIES_COMMUNICATION.name()+"_",
            "_"+TopicCategory.GEOSCIENTIFIC_INFORMATION.name()+"_"+"_"+TopicCategory.UTILITIES_COMMUNICATION.name()+"_",
            "_"+TopicCategory.GEOSCIENTIFIC_INFORMATION.name()+"_"+"_"+TopicCategory.UTILITIES_COMMUNICATION.name()+"_",
    };
    static String[] layernames = {
            "IGG:HeatFlowUnit",
            "IGG:HeatFlowLine",
            "IGG:TemperatureLine3km",
            "IGG:TemperatureLine2km",
            "IGG:TemperatureUnit1km",
            "IGG:TemperatureUnit3km",
            "IGG:TemperatureUnit2km",
            "IGG:TemperatureLine1km",
            "IGG:Industry",
            "IGG:TrainingCenter",
            "IGG:licence",
    };
    static String[] wmsurls = {
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/wms?service=WMS&version=1.1.1&request=GetMap&layers=IGG:HeatFlowUnit&styles=&bbox=6.66010808944702,36.571231842041,18.6017723083496,47.099250793457&width=512&height=451&crs=EPSG:4326&format=application/openlayers",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/wms?service=WMS&version=1.1.1&request=GetMap&layers=IGG:HeatFlowLine&styles=&bbox=6.66010808944702,36.571231842041,18.6017723083496,47.099250793457&width=512&height=451&crs=EPSG:4326&format=application/openlayers",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/wms?service=WMS&version=1.1.1&request=GetMap&layers=IGG:TemperatureLine3km&styles=&bbox=6.66010808944702,36.571231842041,18.6017723083496,47.099250793457&width=512&height=451&crs=EPSG:4326&format=application/openlayers",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/wms?service=WMS&version=1.1.1&request=GetMap&layers=IGG:TemperatureLine2km&styles=&bbox=6.66010808944702,36.571231842041,18.6017723083496,47.099250793457&width=512&height=451&crs=EPSG:4326&format=application/openlayers",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/wms?service=WMS&version=1.1.1&request=GetMap&layers=IGG:TemperatureUnit1km&styles=&bbox=6.66010808944702,36.571231842041,18.6017723083496,47.099250793457&width=512&height=451&crs=EPSG:4326&format=application/openlayers",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/wms?service=WMS&version=1.1.1&request=GetMap&layers=IGG:TemperatureUnit3km&styles=&bbox=6.66010808944702,36.571231842041,18.6017723083496,47.099250793457&width=512&height=451&crs=EPSG:4326&format=application/openlayers",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/wms?service=WMS&version=1.1.1&request=GetMap&layers=IGG:TemperatureUnit2km&styles=&bbox=6.66010808944702,36.571231842041,18.6017723083496,47.099250793457&width=512&height=451&crs=EPSG:4326&format=application/openlayers",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/wms?service=WMS&version=1.1.1&request=GetMap&layers=IGG:TemperatureLine1km&styles=&bbox=6.66010808944702,36.571231842041,18.6017723083496,47.099250793457&width=512&height=451&crs=EPSG:4326&format=application/openlayers",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/wms?service=WMS&version=1.1.1&request=GetMap&layers=IGG:Industry&styles=&bbox=9.189578001171471,41.909917999980756,12.480876999984194,45.52478199898418&width=512&height=451&srs=EPSG:4326&format=application/openlayers",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/wms?service=WMS&version=1.1.1&request=GetMap&layers=IGG:TrainingCenter&styles=&bbox=6.66010808944702,36.571231842041,18.6017723083496,47.099250793457&width=512&height=451&crs=EPSG:4326&format=application/openlayers",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/wms?service=WMS&version=1.1.1&request=GetMap&layers=IGG:licence&styles=&bbox=6.66010808944702,36.571231842041,18.6017723083496,47.099250793457&width=512&height=451&crs=EPSG:4326&format=application/openlayers",
    };
    static String[] wfsurls = {
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/ows?service=WFS&version=1.1.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&typeName=IGG:HeatFlowUnit",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/ows?service=WFS&version=1.1.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&typeName=IGG:HeatFlowLine",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/ows?service=WFS&version=1.1.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&typeName=IGG:TemperatureLine3km",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/ows?service=WFS&version=1.1.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&typeName=IGG:TemperatureLine2km",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/ows?service=WFS&version=1.1.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&typeName=IGG:TemperatureUnit1km",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/ows?service=WFS&version=1.1.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&typeName=IGG:TemperatureUnit3km",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/ows?service=WFS&version=1.1.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&typeName=IGG:TemperatureUnit2km",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/ows?service=WFS&version=1.1.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&typeName=IGG:TemperatureLine1km",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/ows?service=WFS&version=1.1.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&typeName=IGG:Industry",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/ows?service=WFS&version=1.1.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&typeName=IGG:TrainingCenter",
            "http://repoigg.services.iit.cnr.it/geoserver/IGG/ows?service=WFS&version=1.1.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&typeName=IGG:licence"
    };

    private static void insertLayer(int i) throws Exception {
        GenericLayerMetadata metadataInserter = new GenericLayerMetadata();
        metadataInserter.setGeonetworkUrl(geonetworkurl);
        metadataInserter.setGeonetworkUser(user);
        metadataInserter.setGeonetworkPwd(password);
        metadataInserter.setResolution(0);
        metadataInserter.setXLeftLow(-180);
        metadataInserter.setYLeftLow(-90);
        metadataInserter.setXRightUpper(180);
        metadataInserter.setYRightUpper(90);

        metadataInserter.setTitle(titles[i]);
        metadataInserter.setAbstractField(abstracts[i]);
        metadataInserter.setCustomTopics(customTopics[i].split(","));
        metadataInserter.setCategoryTypes(categoryTypes[i]);
        metadataInserter.setResolution(0);

        metadataInserter.setLayerName(layernames[i]);

        String[] urls = {
                wmsurls[i],
                wfsurls[i]
        };
        String[] protocols = { "WMS", "WFS" };

        if (titles[i].length() > 0)
            metadataInserter.customMetaDataInsert(urls, protocols);
    }

}
@@ -0,0 +1,127 @@
package org.gcube.dataanalysis.geo.batch;

import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
import org.opengis.metadata.identification.TopicCategory;

public class GeothermalDataMetadataInsertSwitzerland {

    static String geonetworkurl = "http://geonetwork.geothermaldata.d4science.org/geonetwork";
    static String geoserverurl = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver";
    static String user = "admin";
    static String password = "d4science2014";

    public static void main(String[] args) throws Exception {

        for (int i = 0; i < titles.length; i++) {
            insertLayer(i);
        }
    }

    static String[] titles = {
            "Industries",
            "Heat Flow Lines",
            "Heat Flow Units",
            "Licenses",
            "Temperature Units",
            "Temperature Lines",
            "Training Centers",
            //"Heatflow",
            //"Geothermal Map of Switzerland 1:500000"
    };
    static String[] abstracts = {
            "Industry data for EGIP",
            "Heat Flow Line data for EGIP",
            "Heat Flow Unit data for EGIP",
            "License data for EGIP",
            "Temperature Units data for EGIP",
            "Temperature Lines data for EGIP",
            "Training Center data for EGIP",
            //"The geothermal map shows the thermal energy that is produced in the subsurface and traverses the Earth's surface within an area of 1 m2. The heat itself is released in the Earth's crust (around 30 km thick) generally by radioactive decay processes. On average, the heat flow in Switzerland is around 90 mW/m2. The highest figures are found in northern Switzerland and the lowest in the Alps/Jura. Differences are caused by deep groundwater circulating in permeable rock strata. The heat flow data is calculated from the temperature gradient (average value approx. 30 K/km) and heat conductivity of the rock (average value approx. 3 W/m/K). Paper map: Ph. Bodmer, 1982. Vector map: 2001.",
            //"The geothermal map shows the thermal energy that is produced in the subsurface and traverses the Earth's surface within an area of 1 m2. The heat itself is released in the Earth's crust (around 30 km thick) generally by radioactive decay processes. On average, the heat flow in Switzerland is around 90 mW/m2. The highest figures are found in northern Switzerland and the lowest in the Alps/Jura. Differences are caused by deep groundwater circulating in permeable rock strata. The heat flow data is calculated from the temperature gradient (average value approx. 30 K/km) and heat conductivity of the rock (average value approx. 3 W/m/K). Paper map: Ph. Bodmer, 1982. Vector map: 2001."
    };
    static String[] customTopics = {
            "geothermal energy, Energy resources, Switzerland, EGIP",
            "geothermal energy, Energy resources, Switzerland, EGIP",
            "geothermal energy, Energy resources, Switzerland, EGIP",
            "geothermal energy, Energy resources, Switzerland, EGIP",
            "geothermal energy, Energy resources, Switzerland, EGIP",
            "geothermal energy, Energy resources, Switzerland, EGIP",
            "geothermal energy, Energy resources, Switzerland, EGIP",
            //"geophysics, geophysical map, geothermal energy, e-geo.ch geoportal, Geology, Energy resources, EGIP",
            //"geophysics, geophysical map, geothermal energy, e-geo.ch geoportal, Geology, Energy resources, EGIP"
    };
    static String[] categoryTypes = {
            "_"+TopicCategory.GEOSCIENTIFIC_INFORMATION.name()+"_"+"_"+TopicCategory.UTILITIES_COMMUNICATION.name()+"_",
            "_"+TopicCategory.CLIMATOLOGY_METEOROLOGY_ATMOSPHERE.name()+"_"+"_"+TopicCategory.ENVIRONMENT.name()+"_",
            "_"+TopicCategory.CLIMATOLOGY_METEOROLOGY_ATMOSPHERE.name()+"_"+"_"+TopicCategory.ENVIRONMENT.name()+"_",
            "_"+TopicCategory.GEOSCIENTIFIC_INFORMATION.name()+"_"+"_"+TopicCategory.UTILITIES_COMMUNICATION.name()+"_",
            "_"+TopicCategory.CLIMATOLOGY_METEOROLOGY_ATMOSPHERE.name()+"_"+"_"+TopicCategory.ENVIRONMENT.name()+"_",
            "_"+TopicCategory.CLIMATOLOGY_METEOROLOGY_ATMOSPHERE.name()+"_"+"_"+TopicCategory.ENVIRONMENT.name()+"_",
            "_"+TopicCategory.GEOSCIENTIFIC_INFORMATION.name()+"_"+"_"+TopicCategory.UTILITIES_COMMUNICATION.name()+"_",
            //"_"+TopicCategory.CLIMATOLOGY_METEOROLOGY_ATMOSPHERE.name()+"_"+"_"+TopicCategory.ENVIRONMENT.name()+"_",
            //"_"+TopicCategory.CLIMATOLOGY_METEOROLOGY_ATMOSPHERE.name()+"_"+"_"+TopicCategory.ENVIRONMENT.name()+"_",
    };
    static String[] layernames = {
            "swisstopo:industryTest",
            "swisstopo:heatFlowLineTest_WGS84",
            "swisstopo:heatFlowUnitTest_WGS84",
            "swisstopo:license",
            "swisstopo:tempDummyPoly",
            "swisstopo:tempDummyLine",
            "swisstopo:trainingCenter",
            //"Heatflow",
            //"Geothermal Map of Switzerland 1:500000"
    };
    static String[] wmsurls = {
            "http://swisstopo.geops.ch/geoserver/swisstopo/wms?service=WMS&version=1.1.0&request=GetMap&layers=swisstopo:industryTest&styles=&bbox=5.0,44.0,10.0,48.0&width=512&height=409&srs=EPSG:4326&format=application/openlayers",
            "http://swisstopo.geops.ch/geoserver/swisstopo/wms?service=WMS&version=1.1.0&request=GetMap&layers=swisstopo:heatFlowLineTest_WGS84&styles=&bbox=6.536429259689217,45.96452289074837,9.88179989473991,47.68507871081211&width=641&height=330&srs=EPSG:4326&format=application/openlayers",
            "http://swisstopo.geops.ch/geoserver/swisstopo/wms?service=WMS&version=1.1.0&request=GetMap&layers=swisstopo:heatFlowUnitTest_WGS84&styles=&bbox=6.495394739540089,45.92430483245075,9.923892462894338,47.67826328791616&width=645&height=330&srs=EPSG:4326&format=application/openlayers",
            "http://swisstopo.geops.ch/geoserver/swisstopo/wms?service=WMS&version=1.1.0&request=GetMap&layers=swisstopo:license&styles=&bbox=5.83103329189459,45.66406238270901,10.980959404235438,47.8584385534728&width=774&height=330&srs=EPSG:4326&format=application/openlayers",
            "http://swisstopo.geops.ch/geoserver/swisstopo/wms?service=WMS&version=1.1.0&request=GetMap&layers=swisstopo:tempDummyPoly&styles=&bbox=5.83103329189459,45.66406238270901,10.980959404235438,47.8584385534728&width=774&height=330&srs=EPSG:4326&format=application/openlayers",
            "http://swisstopo.geops.ch/geoserver/swisstopo/wms?service=WMS&version=1.1.0&request=GetMap&layers=swisstopo:tempDummyLine&styles=&bbox=5.83103329189459,45.66406238270901,10.980959404235438,47.8584385534728&width=774&height=330&srs=EPSG:4326&format=application/openlayers",
            "http://swisstopo.geops.ch/geoserver/swisstopo/wms?service=WMS&version=1.1.0&request=GetMap&layers=swisstopo:trainingCenter&styles=&bbox=5.83103329189459,45.66406238270901,10.980959404235438,47.8584385534728&width=774&height=330&srs=EPSG:4326&format=application/openlayers",
            //"Heatflow",
            //"Geothermal Map of Switzerland 1:500000"
    };
    static String[] wfsurls = {
            "http://swisstopo.geops.ch/geoserver/swisstopo/ows?service=WFS&version=1.0.0&request=GetFeature&typeName=swisstopo:industryTest",
            "http://swisstopo.geops.ch/geoserver/swisstopo/ows?service=WFS&version=1.0.0&request=GetFeature&typeName=swisstopo:heatFlowLineTest_WGS84",
            "http://swisstopo.geops.ch/geoserver/swisstopo/ows?service=WFS&version=1.0.0&request=GetFeature&typeName=swisstopo:heatFlowUnitTest_WGS84",
            "http://swisstopo.geops.ch/geoserver/swisstopo/ows?service=WFS&version=1.0.0&request=GetFeature&typeName=swisstopo:license",
            "http://swisstopo.geops.ch/geoserver/swisstopo/ows?service=WFS&version=1.0.0&request=GetFeature&typeName=swisstopo:tempDummyPoly",
            "http://swisstopo.geops.ch/geoserver/swisstopo/ows?service=WFS&version=1.0.0&request=GetFeature&typeName=swisstopo:tempDummyLine",
            "http://swisstopo.geops.ch/geoserver/swisstopo/ows?service=WFS&version=1.0.0&request=GetFeature&typeName=swisstopo:trainingCenter",
            //"Heatflow",
            //"Geothermal Map of Switzerland 1:500000"
    };

    private static void insertLayer(int i) throws Exception {
        GenericLayerMetadata metadataInserter = new GenericLayerMetadata();
        metadataInserter.setGeonetworkUrl(geonetworkurl);
        metadataInserter.setGeonetworkUser(user);
        metadataInserter.setGeonetworkPwd(password);
        metadataInserter.setResolution(0);
        metadataInserter.setXLeftLow(-180);
        metadataInserter.setYLeftLow(-90);
        metadataInserter.setXRightUpper(180);
        metadataInserter.setYRightUpper(90);

        metadataInserter.setTitle(titles[i]);
        metadataInserter.setAbstractField(abstracts[i]);
        metadataInserter.setCustomTopics(customTopics[i].split(","));
        metadataInserter.setCategoryTypes(categoryTypes[i]);
        metadataInserter.setResolution(0);

        metadataInserter.setLayerName(layernames[i]);

        String[] urls = {
                wmsurls[i],
                wfsurls[i]
        };
        String[] protocols = { "WMS", "WFS" };

        if (titles[i].length() > 0)
            metadataInserter.customMetaDataInsert(urls, protocols);
    }

}
@@ -18,7 +18,7 @@ public class AscDataExplorer {
    public double dx;
    public double dy;

    AscRaster ascFile;
    public AscRaster ascFile;

    public AscDataExplorer(String file) throws Exception {
        AnalysisLogger.getLogger().debug("Managing Asc File: " + file);
@@ -63,9 +63,9 @@ public class AscDataExplorer {

    public int latitude2Index(double latitude) {
        if (dy > 0)
            return (int) Math.round((latitude - yOrigin) / dy);
            return (nrows - 1) - (int) Math.round((latitude - yOrigin) / dy);
        else
            return (int) Math.round((latitude - yOrigin) / cellsize);
            return (nrows - 1) - (int) Math.round((latitude - yOrigin) / cellsize);
    }
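
    // Note on the hunk above (the flipped-index variants appear to be the replacements):
    // ASC grids store row 0 at the top while yOrigin is the lower-left corner, so the row
    // index is flipped. E.g. with nrows=180, yOrigin=-90 and dy=1 (illustrative values),
    // latitude 89.0 now maps to (180-1) - round((89-(-90))/1) = 0, the top row, not row 179.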

    public List<Double> retrieveDataFromAsc(List<Tuple<Double>> triplets, int time) throws Exception {
@@ -16,9 +16,6 @@ import org.gcube.data.transfer.common.TransferUtil;

/**
 * A class which reads an ESRI ASCII raster file into a Raster
 *
 * @author dmrust
 *
 */
public class AscRasterReader {
    String noData = AscRaster.DEFAULT_NODATA;
@@ -0,0 +1,91 @@
package org.gcube.dataanalysis.geo.connectors.asc;

import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.text.NumberFormat;

/**
 * Modified from java-esri-ascii
 * @author coro
 *
 */

public class AscRasterWriter {

    NumberFormat cellFormat = null;
    String nodataString = AscRaster.DEFAULT_NODATA;

    /**
     * Writes out the given Raster object to the given filename.
     *
     * Throws the exceptions associated with filehandling
     * @param filename
     * @param r
     * @throws IOException
     */
    public void writeRaster(String filename, AscRaster r) throws IOException {
        File f = new File(filename);
        if (f.exists()) f.delete();
        if (!f.createNewFile()) throw new RuntimeException("Could not create file for some reason!");
        PrintStream o = new PrintStream(f);
        o.println("ncols " + r.getCols());
        o.println("nrows " + r.getRows());
        o.println("xllcorner " + r.getXll());
        o.println("yllcorner " + r.getYll());
        if (r.getCellsize() > 0)
            o.println("cellsize " + r.getCellsize());
        else {
            o.println("dx " + r.getdx());
            o.println("dy " + r.getdy());
        }
        o.println("NODATA_value " + r.getNDATA());

        for (double[] row : r.getData()) {
            StringBuffer b = new StringBuffer();
            for (int i = 0; i < row.length; i++) {
                if (Double.isNaN(row[i])) b.append(r.getNDATA());
                else if (cellFormat != null) b.append(cellFormat.format(row[i]));
                else b.append(row[i]);
                if (i < row.length - 1) b.append(" ");
            }
            o.println(b);
        }
        o.close();
    }

    /**
     * Shortcut method, if you just have some data and want to write it out as a Raster.
     *
     * There is no error checking at the moment (e.g. about equal size rows)
     * @param filename
     * @param data
     * @param xll
     * @param yll
     * @param size
     * @param ndata
     * @throws IOException
     */
    public void writeRaster(String filename, double[][] data, double xll, double yll, double size, String ndata) throws IOException {
        writeRaster(filename, AscRaster.getTempRaster(data, xll, yll, size, ndata));
    }

    /**
     * Can be used to set a number format for the cells. For example, if they are all integer
     * values, you can set an integer format. This should help with roundtrippability for
     * existing Raster files
     * @param format
     */
    public void setCellFormat(NumberFormat format) {
        cellFormat = format;
    }

}
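
A minimal usage sketch for the writer above (illustrative path and values), using the double[][] shortcut; NaN cells are written out as the given NODATA string:

    AscRasterWriter writer = new AscRasterWriter();
    double[][] grid = { { 1.0, 2.0 }, { 3.0, Double.NaN } };
    writer.writeRaster("/tmp/example.asc", grid, -10.0, 35.0, 0.5, "-9999");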
@@ -20,7 +20,7 @@ public class GeoTiff implements GISDataConnector {
    public String persistenceDir;
    public String geoTiffUrl;

    public GeoTiff(AlgorithmConfiguration config, String geoTiffURL) throws Exception {
    public GeoTiff(AlgorithmConfiguration config) throws Exception {
        persistenceDir = config.getPersistencePath();
    }
@@ -46,7 +46,7 @@ public class GeoTiff implements GISDataConnector {
    downloadutil.performTransfer(new URI(layerURL), uuid);

    AnalysisLogger.getLogger().debug("Converting to ASCII file: " + uuid);
    ascFile = GdalConverter.convertToASC(uuid);
    ascFile = GdalConverter.convertToASC(uuid, 0);
    AnalysisLogger.getLogger().debug("Conversion to ASCII complete: " + ascFile);
    ASC asc = new ASC();
    List<Double> points = asc.getFeaturesInTimeInstantAndArea(ascFile, layerName, time, coordinates3d, BBxL, BBxR, BByL, BByR);
@@ -24,7 +24,7 @@ public class NetCDF implements GISDataConnector {
    if (layerURL == null)
        return null;

    return netcdffile.retrieveDataFromNetCDF(layerURL, layerName, time, coordinates3d, BBxL, BBxR, BByL, BByR);
    return netcdffile.retrieveDataFromNetCDF(layerURL, layerName, time, coordinates3d);

}
@@ -31,6 +31,7 @@ import ucar.nc2.ft.PointFeatureCollection;
import ucar.nc2.ft.PointFeatureIterator;
import ucar.nc2.ft.point.PointDatasetImpl;
import ucar.nc2.ft.point.standard.StandardPointCollectionImpl;
import ucar.unidata.geoloc.LatLonPoint;
import ucar.unidata.geoloc.LatLonPointImpl;
import ucar.unidata.geoloc.LatLonRect;
@@ -39,16 +40,16 @@ public class NetCDFDataExplorer {
    // http://thredds.research-infrastructures.eu:8080/thredds/catalog/public/netcdf/catalog.xml
    public static String timePrefix = "time:";

    public NetCDFDataExplorer(String openDapLink, String layer){
    public NetCDFDataExplorer(String openDapLink, String layer) {
        calcZRange(openDapLink, layer);
    }

    public List<Double> retrieveDataFromNetCDF(String openDapLink, String layer, int time, List<Tuple<Double>> triplets, double xL, double xR, double yL, double yR) {

    public List<Double> retrieveDataFromNetCDF(String openDapLink, String layer, int time, List<Tuple<Double>> triplets) {
        try {
            List<Double> values = new ArrayList<Double>();
            if (isGridDataset(openDapLink)) {
                AnalysisLogger.getLogger().debug("Managing Grid File");
                return manageGridDataset(layer, openDapLink, time, triplets, xL, xR, yL, yR);
                return manageGridDataset(layer, openDapLink, time, triplets);
            }
            /*
             * else if (isPointDataset(openDapLink)) { AnalysisLogger.getLogger().debug("Managing Points File"); }
@@ -59,47 +60,48 @@ public class NetCDFDataExplorer {
        } catch (Exception e) {
            AnalysisLogger.getLogger().debug("ERROR: " + e.getMessage());
            AnalysisLogger.getLogger().debug(e);
            // e.printStackTrace();
            // e.printStackTrace();
            return null;
        }
    }

    public double minZ=0;
    public double maxZ=0;

    public double minZ = 0;
    public double maxZ = 0;

    public void calcZRange(String openDapLink, String layer) {
        try{
        if (isGridDataset(openDapLink)){
        try {
            if (isGridDataset(openDapLink)) {
                gds = ucar.nc2.dt.grid.GridDataset.open(openDapLink);
                List<GridDatatype> gridTypes = gds.getGrids();
                for (GridDatatype gdt : gridTypes) {

                    // AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getFullName());

                    // AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getFullName());
                    AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getName());
                    if (layer.equalsIgnoreCase(gdt.getName())) {
                        CoordinateAxis zAxis = gdt.getCoordinateSystem().getVerticalAxis();
                        minZ=zAxis.getMinValue();
                        maxZ=zAxis.getMaxValue();
                        minZ = zAxis.getMinValue();
                        maxZ = zAxis.getMaxValue();
                        break;
                    }
                }
            }
        }catch(Exception e){
            AnalysisLogger.getLogger().debug("NetCDF Explorer Error:"+e.getLocalizedMessage());
        } catch (Exception e) {
            AnalysisLogger.getLogger().debug("NetCDF Explorer Error:" + e.getLocalizedMessage());
        }
    }

    // A GridDatatype is like a specialized Variable that explicitly handles X,Y,Z,T dimensions
    GridDataset gds;
    public List<Double> manageGridDataset(String layer, String filename, int time, List<Tuple<Double>> triplets, double xL, double xR, double yL, double yR) throws Exception {

    public List<Double> manageGridDataset(String layer, String filename, int time, List<Tuple<Double>> triplets) throws Exception {
        List<Double> values = new ArrayList<Double>();
        if (gds==null)
        if (gds == null)
            gds = ucar.nc2.dt.grid.GridDataset.open(filename);

        List<GridDatatype> gridTypes = gds.getGrids();
        for (GridDatatype gdt : gridTypes) {
            AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getName()+" layer to find "+layer);
            //if the layer is an HTTP link then take the first inner layer
            AnalysisLogger.getLogger().debug("Inside File - layer name: " + gdt.getName() + " layer to find " + layer);
            // if the layer is an HTTP link then take the first inner layer
            if (layer.equalsIgnoreCase(gdt.getName()) || layer.toLowerCase().startsWith("http:")) {
                AnalysisLogger.getLogger().debug("Found layer " + layer + " inside file");
                GridDatatype grid = gds.findGridDatatype(gdt.getName());
@ -107,13 +109,13 @@ public class NetCDFDataExplorer {

CoordinateAxis xAxis = gdt.getCoordinateSystem().getXHorizAxis();
CoordinateAxis yAxis = gdt.getCoordinateSystem().getYHorizAxis();
double resolutionZ = 0;
try{
try {
resolutionZ = Math.abs((double) (zAxis.getMaxValue() - zAxis.getMinValue()) / (double) zAxis.getShape()[0]);
AnalysisLogger.getLogger().debug("Zmin:"+ zAxis.getMinValue()+" Zmax:"+zAxis.getMaxValue());
}catch(Exception e){};
double resolutionX = Math.abs((double) (xAxis.getMaxValue() - xAxis.getMinValue()) / (double) xAxis.getShape()[0]);
double resolutionY = Math.abs((double) (yAxis.getMaxValue() - yAxis.getMinValue()) / (double) yAxis.getShape()[0]);
AnalysisLogger.getLogger().debug("Zmin:" + zAxis.getMinValue() + " Zmax:" + zAxis.getMaxValue());
} catch (Exception e) {
}

GridCoordSystem gcs = grid.getCoordinateSystem();
int tsize = triplets.size();
long t01 = System.currentTimeMillis();
LatLonRect llr = null;
@ -124,35 +126,38 @@ public class NetCDFDataExplorer {

int zD = 0;
int xD = 0;
int yD = 0;
if (shapeD.length>2)
{
zD=shapeD[0];
yD=shapeD[1];
xD=shapeD[2];
if (shapeD.length > 2) {
zD = shapeD[0];
yD = shapeD[1];
xD = shapeD[2];
}
else if (shapeD.length>1)
{
yD=shapeD[0];
xD=shapeD[1];
else if (shapeD.length > 1) {
yD = shapeD[0];
xD = shapeD[1];
}
AnalysisLogger.getLogger().debug("Shape: Z:"+zD+" X:"+ xD+" Y:"+yD);

// double resolutionX = Math.abs((double) (xAxis.getMaxValue() - xAxis.getMinValue()) / (double) xAxis.getShape()[0]);
// double resolutionY = Math.abs((double) (yAxis.getMaxValue() - yAxis.getMinValue()) / (double) yAxis.getShape()[0]);
double resolutionX = Math.abs((double) (xAxis.getMaxValue() - xAxis.getMinValue()) / (double) xD);
double resolutionY = Math.abs((double) (yAxis.getMaxValue() - yAxis.getMinValue()) / (double) yD);

AnalysisLogger.getLogger().debug("Shape: Z:" + zD + " X:" + xD + " Y:" + yD);

AnalysisLogger.getLogger().debug("Layer Information Retrieval ELAPSED Time: " + (System.currentTimeMillis() - t01));
int rank = data.getRank();
AnalysisLogger.getLogger().debug("Rank of the layer: " + rank);

ArrayFloat.D3 data3Float = null;
ArrayDouble.D3 data3Double = null;
ArrayInt.D3 data3Int = null;
ArrayLong.D3 data3Long = null;
ArrayFloat.D2 data2Float = null;
ArrayDouble.D2 data2Double= null;
ArrayDouble.D2 data2Double = null;
ArrayInt.D2 data2Int = null;
ArrayLong.D2 data2Long = null;

if (data.getRank() == 3){
if (data.getRank() == 3) {
if (data instanceof ArrayFloat.D3)
data3Float = (ArrayFloat.D3) data;
else if (data instanceof ArrayInt.D3)
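The cell size is now derived from the data array's own shape (xD, yD) instead of the axis shape, so a mismatch between axis metadata and the actual grid cannot skew the resolution. A minimal plain-Java sketch of the resolution/index arithmetic (the axis extent and cell count are made-up values, and a regular grid is assumed):

public class ResolutionSketch {
    public static void main(String[] args) {
        double axisMin = -180, axisMax = 180; // assumed axis extent
        int cells = 360;                      // assumed cell count along the axis (the xD of the diff)
        double resolution = Math.abs(axisMax - axisMin) / (double) cells;
        // map a coordinate to the nearest cell index and clamp it into range
        double coord = 12.75;
        int idx = (int) Math.round((coord - axisMin) / resolution);
        idx = Math.max(0, Math.min(idx, cells - 1));
        System.out.println("resolution=" + resolution + " index=" + idx);
    }
}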
@ -164,11 +169,10 @@ public class NetCDFDataExplorer {

else if (data instanceof ArrayLong.D3)
data3Long = (ArrayLong.D3) data;
else if (data instanceof ArrayByte.D3)
data3Double = (ArrayDouble.D3)VectorOperations.arrayByte3DArrayDouble((ArrayByte)data);
data3Double = (ArrayDouble.D3) VectorOperations.arrayByte3DArrayDouble((ArrayByte) data);
else
throw new Exception("Layer data format not supported");
}
else{
} else {
if (data instanceof ArrayFloat.D2)
data2Float = (ArrayFloat.D2) data;
else if (data instanceof ArrayInt.D2)
@ -178,20 +182,31 @@ public class NetCDFDataExplorer {

else if (data instanceof ArrayLong.D2)
data2Long = (ArrayLong.D2) data;
else if (data instanceof ArrayByte.D2)
data2Double = (ArrayDouble.D2)VectorOperations.arrayByte2DArrayDouble((ArrayByte)data);
data2Double = (ArrayDouble.D2) VectorOperations.arrayByte2DArrayDouble((ArrayByte) data);
else
throw new Exception("Layer data format not supported");
}

double xmin = xAxis.getMinValue();
double xmax = xAxis.getMaxValue();
if (((xmax==360) && (xmin==0)) || ((xmax==359.5) && (xmin==0.5))){
double ymax = yAxis.getMaxValue();
double ymin = yAxis.getMinValue();
int xmaxidx = (int) Math.round((xmax - xmin) / resolutionX);
int ymaxidx = (int) Math.round((ymax - ymin) / resolutionY);

boolean is0_360 = false;
// if (((xmax == 360) && (xmin == 0)) || ((xmax == 359.5) && (xmin == 0.5))) {
// if ((xmin>=0) || (ymin == -77.0104751586914 && ymax==89.94786834716797)) {

AnalysisLogger.getLogger().debug("X dimension: " + xD + " Xmin:" + xmin + " Xmax:" + xmax + " Xmaxidx:" + xmaxidx + " XRes: " + resolutionX);
AnalysisLogger.getLogger().debug("Y dimension: " + yD + " Ymin:" + ymin + " Ymax:" + ymax + " Ymaxidx:" + ymaxidx + " YRes: " + resolutionY);

if ((xmin >= 0)) {
xmax = 180;
xmin=-180;
xmin = -180;
is0_360 = true;
}
AnalysisLogger.getLogger().debug("X dimension: "+xD+" Xmin:"+ xmax+" Xmax:"+xmin);

for (int i = 0; i < tsize; i++) {
int zint = 0;
int xint = 0;
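Grids whose longitude axis starts at or above 0 are now flagged as 0-360 grids (is0_360) and their declared extent is re-labelled to -180..180 before lookups. A small sketch of the wrap-around remapping such grids need (an illustrative helper, not the class's actual method):

public class LonWrap {
    // Remap a longitude from [-180,180] onto a 0-360 axis (illustrative helper).
    static double to0_360(double lon) {
        return lon < 0 ? lon + 360.0 : lon;
    }
    public static void main(String[] args) {
        System.out.println(to0_360(-170.0)); // 190.0 on the 0-360 axis
        System.out.println(to0_360(20.0));   // 20.0, unchanged
    }
}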
@ -203,31 +218,65 @@ public class NetCDFDataExplorer {

x = -180;
if (y == 90)
y = -90;

double z = 0;

if (triplet.getElements().size() > 1)
z = triplet.getElements().get(2);
if (resolutionZ > 0) {
if ((zAxis.getMinValue() <= z) && (zAxis.getMaxValue() >= z))
zint = Math.abs((int) Math.round((z - zAxis.getMinValue()) / resolutionZ));
}

if (y < ymin)
y = ymin;
if (x < xmin)
x = xmin;
if (y > ymax)
y = ymax;
if (x > xmax)
x = xmax;

// AnalysisLogger.getLogger().debug("Z Index: "+zint);
int[] idxbb = gcs.findXYindexFromLatLon(75,10, null);
int[] idxo = gcs.findXYindexFromLatLon(0,0, null);
LatLonPoint inverseOrigin = gcs.getLatLon(idxo[0],idxo[1]);
// LatLonPoint inverseBB = gcs.getLatLon(idxbb[0],idxbb[1]);
// correction to origin offset
x = x - inverseOrigin.getLongitude();
y = y - inverseOrigin.getLatitude();

if (i==0)
AnalysisLogger.getLogger().debug("bb: " + idxbb[0] +","+idxbb[1]+" origin: "+idxo[0]+","+idxo[1]+" middle "+xD/2+","+yD/2+" shift "+(idxo[0]-(xD/2))+" inverse shift on origin "+inverseOrigin);
int[] idx = gcs.findXYindexFromLatLon(y,x, null);

xint = idx[0];
yint = idx[1];

if (yint < 0) {
yint = 0;
}
if (xint < 0) {
xint = 0;
}
if (xint > xD - 1)
xint = xD - 1;
if (yint > yD - 1)
yint = yD - 1;

/*
GridCoordSystem gcs = grid.getCoordinateSystem();
int[] xy = gcs.findXYindexFromLatLon(x, y, null);
Array datas=grid.readDataSlice(time, zint, xy[1], xy[0]);
*/
if ((xmin <= x) && (xmax >= x))
xint = (int) Math.round((x - xmin) / resolutionX);
if ((yAxis.getMinValue() <= y) && (yAxis.getMaxValue() >= y))
yint = (int) Math.round((y - yAxis.getMinValue()) / resolutionY);
/*
* if ((xmin <= x) && (xmax >= x)) // xint = (int) Math.round((x - xmin) / resolutionX); { if (is0_360) { if (x < 0) xint = (int) Math.round((x - xmin + xmax) / resolutionX); else xint = (int) Math.round((x) / resolutionX); } else { xint = (int) Math.round((x-xmin) / resolutionX); } }
*
* if ((yAxis.getMinValue() <= y) && (yAxis.getMaxValue() >= y)) { yint = (int) Math.round((ymax - y) / resolutionY); }
*
* if (xint > xD - 1) xint = xD - 1; if (yint > yD - 1) yint = yD - 1;
*/
Double val = Double.NaN;
if (xint > xD-1)
xint = xD-1;
if (yint > yD-1)
yint = yD-1;
if (zint>zD-1)
zint = zD-1;
if (zint > zD - 1)
zint = zD - 1;

if (data3Float != null)
val = Double.valueOf(data3Float.get(zint, yint, xint));
else if (data3Int != null)
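The lat/lon-to-index mapping is now delegated to the grid's coordinate system, the returned indices are clamped into the array bounds, and the inverse origin is subtracted to compensate for grids whose cell (0,0) is not anchored at lon 0 / lat 0. A rough plain-Java sketch of the clamp step alone (names are illustrative):

public class IndexClamp {
    // Clamp a computed grid index into [0, size-1] (illustrative helper).
    static int clamp(int idx, int size) {
        return Math.max(0, Math.min(idx, size - 1));
    }
    public static void main(String[] args) {
        int xD = 720, yD = 360;
        // pretend the coordinate system returned slightly out-of-range indices
        int xint = clamp(723, xD);
        int yint = clamp(-2, yD);
        System.out.println(xint + "," + yint); // 719,0
    }
}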
@ -236,10 +285,10 @@ public class NetCDFDataExplorer {

val = Double.valueOf(data3Double.get(zint, yint, xint));
else if (data3Long != null)
val = Double.valueOf(data3Long.get(zint, yint, xint));
else if (data2Float != null)
val = Double.valueOf(data2Float.get(yint, xint));
else if (data2Int != null)
val = Double.valueOf(data2Int.get(yint, xint));
else if (data2Double != null)
val = Double.valueOf(data2Double.get(yint, xint));

@ -254,6 +303,13 @@ public class NetCDFDataExplorer {

return values;
}

private boolean detIsPositive(double x0, double y0, double x1, double y1, double x2, double y2) {
double det = (x1 * y2 - y1 * x2 - x0 * y2 + y0 * x2 + x0 * y1 - y0 * x1);
if (det == 0)
System.out.printf("determinant = 0%n");
return det > 0;
}

// A GridDatatype is like a specialized Variable that explicitly handles X,Y,Z,T dimensions
public static LinkedHashMap<String, Double> manageGridDataset(String layer, String filename, double x, double y, double z) throws Exception {
LinkedHashMap<String, Double> valuesMap = new LinkedHashMap<String, Double>();
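detIsPositive is the standard 2-D orientation test: the expression equals (x1 - x0)(y2 - y0) - (x2 - x0)(y1 - y0), the signed double area of the triangle (p0, p1, p2), positive for a counter-clockwise turn and zero for collinear points. A one-file check of the equivalence:

public class OrientationCheck {
    public static void main(String[] args) {
        double x0 = 0, y0 = 0, x1 = 1, y1 = 0, x2 = 0, y2 = 1; // a CCW triangle
        double detA = x1 * y2 - y1 * x2 - x0 * y2 + y0 * x2 + x0 * y1 - y0 * x1;
        double detB = (x1 - x0) * (y2 - y0) - (x2 - x0) * (y1 - y0);
        System.out.println(detA + " == " + detB + " -> " + (detA > 0 ? "CCW" : "CW or collinear"));
    }
}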
@ -123,7 +123,9 @@ public class TableMatrixRepresentation {

double x = Double.parseDouble("" + row[0]);
double y = Double.parseDouble("" + row[1]);
double z = Double.parseDouble("" + row[2]);
double value = Double.parseDouble("" + row[4]);
double value = Double.NaN;
if (row[4]!=null && ((""+row[4]).trim().length()>0))
value = Double.parseDouble("" + row[4]);

// transform time into a sequence
String time = "" + row[3];
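The value column is now parsed defensively: a missing or blank cell yields NaN instead of a NumberFormatException or NullPointerException. A small sketch of the same guard factored into a helper (the helper name is hypothetical):

public class SafeParse {
    // Parse a possibly-null or blank database cell into a double, NaN when absent.
    static double parseOrNaN(Object cell) {
        if (cell == null) return Double.NaN;
        String s = cell.toString().trim();
        return s.isEmpty() ? Double.NaN : Double.parseDouble(s);
    }
    public static void main(String[] args) {
        System.out.println(parseOrNaN(null));   // NaN
        System.out.println(parseOrNaN(" "));    // NaN
        System.out.println(parseOrNaN("3.14")); // 3.14
    }
}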
@ -154,7 +154,7 @@ public class WCS implements GISDataConnector {

AnalysisLogger.getLogger().debug("Resolution parameters: resx: " + resolutionx + " resy: " + resolutiony);

String url = OGCFormatter.getWcsUrl(baseURL, coverage, crs, responseCRS, "" + BBxL + "," + BByL + "," + BBxR + "," + BByR, width, height, depth, format, resolutionx, resolutiony, resz, "" + time, parameters);
String url = OGCFormatter.getWcsUrl(baseURL, coverage, crs, responseCRS, "" + BBxL + "," + BByL + "," + BBxR + "," + BByR, null, null, depth, format, resolutionx, resolutiony, resz, "" + time, parameters);
AnalysisLogger.getLogger().debug("Retrieving Z parameters: " + url);
int urlCheck = HttpRequest.checkUrl(url, null, null);
AnalysisLogger.getLogger().debug("Checking url: " + urlCheck);

@ -166,7 +166,7 @@ public class WCS implements GISDataConnector {

downloadutil.setTransferTimeout(120000);
downloadutil.performTransfer(new URI(url), uuid);
AnalysisLogger.getLogger().debug("Converting to ASCII file: " + uuid);
ascFile = GdalConverter.convertToASC(uuid);
ascFile = GdalConverter.convertToASC(uuid,255);
AnalysisLogger.getLogger().debug("Conversion to ASCII complete: " + ascFile);
ASC asc = new ASC();
List<Double> points = asc.getFeaturesInTimeInstantAndArea(ascFile, layerName, time, coordinates3d, BBxL, BBxR, BByL, BByR);
@ -31,7 +31,7 @@ public class WFS implements GISDataConnector {

return null;

featuresInTime = new ArrayList<FeaturedPolygon>();
AnalysisLogger.getLogger().debug("taking WFS features");
AnalysisLogger.getLogger().debug("taking WFS features from layer: "+layerURL);
featuresInTime = WFSDataExplorer.getFeatures(layerURL, layerName, BBxL, BByL, BBxR, BByR);
polygonsFeatures=new HashMap<Double, Map<String,String>>();
int tsize = coordinates3d.size();

@ -43,6 +43,8 @@ public class WFS implements GISDataConnector {

GeometryFactory factory = new GeometryFactory(new PrecisionModel(), 4326);
for (Tuple<Double> triplet : coordinates3d) {
ArrayList<Double> elements = triplet.getElements();

// add correction in WFS projection: y axis is inverted!
CoordinateArraySequence pcoords = new CoordinateArraySequence(new Coordinate[] { new Coordinate(elements.get(0), elements.get(1)), });
Point po = new Point(pcoords, factory);
boolean found = false;
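For each (x,y) tuple a JTS point is built in EPSG:4326 and then matched against the WFS polygons. A compact sketch of the same construction using the factory's createPoint shorthand, assuming the JTS geometry library already used elsewhere in this module is on the classpath:

import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.PrecisionModel;

public class PointBuild {
    public static void main(String[] args) {
        // SRID 4326 marks the geometry as WGS84 lon/lat
        GeometryFactory factory = new GeometryFactory(new PrecisionModel(), 4326);
        Point po = factory.createPoint(new Coordinate(12.5, 44.1)); // x = lon, y = lat
        System.out.println(po);
        // a polygon.contains(po) test would then pick the enclosing WFS feature
    }
}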
@ -497,7 +497,7 @@ public class GeoNetworkInspector {

try {
meta = checkForMetadatabyTitle(GeoNetworkInspector.treatTitleForGN(layerUUIDorTitle), layerUUIDorTitle);
} catch (Exception e) {
e.printStackTrace();
// e.printStackTrace();
throw new Exception("Layer does not exist "+e.getLocalizedMessage());
}
}
@ -0,0 +1,83 @@

package org.gcube.dataanalysis.geo.matrixmodel;

import java.io.File;
import java.util.ArrayList;
import java.util.List;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.connectors.asc.AscDataExplorer;
import org.gcube.dataanalysis.geo.connectors.asc.AscRaster;
import org.gcube.dataanalysis.geo.connectors.asc.AscRasterWriter;

public class ASCConverter {

XYExtractor extractor;
AlgorithmConfiguration config;

public ASCConverter(AlgorithmConfiguration config) {
extractor = new XYExtractor(config);
this.config = config;
}

public String convertToASC(String layerTitle, String outFilePath, int timeInstant, double z, double xResolution, double yResolution) throws Exception {
return convertToASC(layerTitle, outFilePath, timeInstant, -180, 180, -90, 90, z, xResolution, yResolution);
}

public String convertToASC(String layerTitle, String outFilePath, int timeInstant, double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution) throws Exception {
try {
double[][] values = extractor.extractXYGrid(layerTitle, timeInstant, x1, x2, y1, y2, z, xResolution, yResolution);
AscRaster raster = null;
if (xResolution == yResolution)
raster = new AscRaster(values, xResolution, -1, -1, x1, y1);
else
raster = new AscRaster(values, -1, xResolution, yResolution, x1, y1);

String outputFile = new File(outFilePath).getAbsolutePath();
AscRasterWriter writer = new AscRasterWriter();
writer.writeRaster(outputFile, raster);

return outputFile;
} catch (Exception e) {
e.printStackTrace();
AnalysisLogger.getLogger().debug("Error in converting to ASC : " + e.getLocalizedMessage());
AnalysisLogger.getLogger().debug(e);
throw e;
}
}

public static void main(String[] args) throws Exception {

AlgorithmConfiguration config = new AlgorithmConfiguration();

config.setAgent("XYEXTRACTOR");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setGcubeScope("/gcube/devsec/devVRE");

String layername = "dfd1bad2-ab00-42ac-8bb2-46a17162f509";
float z = 0;
int time = 0;
float xres = 1f;
float yres = 1f;
float xll = -11.080947f;
float yll = 31.695501f;
float xur = 23.152451f;
float yur = 51.265385f;

AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
ASCConverter converter = new ASCConverter(config);
String converted = converter.convertToASC(layername, "./test.asc", time, xll, xur, yll, yur, z, xres, yres);

AnalysisLogger.getLogger().debug("ASC : " + converted);
}

}
@ -8,6 +8,7 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.connectors.asc.ASC;
import org.gcube.dataanalysis.geo.connectors.asc.AscDataExplorer;
import org.gcube.dataanalysis.geo.connectors.geotiff.GeoTiff;
import org.gcube.dataanalysis.geo.connectors.netcdf.NetCDF;

@ -18,6 +19,7 @@ import org.gcube.dataanalysis.geo.connectors.wfs.WFS;

import org.gcube.dataanalysis.geo.infrastructure.GeoNetworkInspector;
import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
import org.gcube.dataanalysis.geo.meta.GenericLayerMetadata;
import org.gcube.dataanalysis.geo.utils.VectorOperations;
import org.opengis.metadata.Metadata;
import org.opengis.metadata.identification.Identification;
@ -33,7 +35,7 @@ public class MatrixExtractor {

public List<Tuple<Double>> currentTuples;

public MatrixExtractor(AlgorithmConfiguration configuration) {
AnalysisLogger.getLogger().debug("Matrix Extractor: setting GeoNetwork search scope to "+configuration.getGcubeScope());
AnalysisLogger.getLogger().debug("Matrix Extractor: setting GeoNetwork search scope to " + configuration.getGcubeScope());
gnInspector = new GeoNetworkInspector();
gnInspector.setScope(configuration.getGcubeScope());
this.configuration = configuration;

@ -94,7 +96,7 @@ public class MatrixExtractor {

connector = new ASC();
} else if (gnInspector.isWFS(meta)) {
AnalysisLogger.getLogger().debug("found a Geo Layer with reference " + layerURL + " and layer name " + layerName);
// layerURL = gnInspector.getGeoserverLink(meta);
layerURL = gnInspector.getWFSLink(meta);
connector = new WFS();
} else if (gnInspector.isWCS(meta)) {

@ -104,12 +106,12 @@ public class MatrixExtractor {

} else if (gnInspector.isGeoTiff(meta)) {
layerURL = gnInspector.getGeoTiffLink(meta);
AnalysisLogger.getLogger().debug("found a GeoTiff with reference " + layerURL + " and layer name " + layerName);
connector = new GeoTiff(configuration, layerURL);
connector = new GeoTiff(configuration);
} else {
// treat as geotiff
layerURL = layerTitle;
AnalysisLogger.getLogger().debug("guessing a GeoTiff with reference " + layerURL + " and layer name " + layerName);
connector = new GeoTiff(configuration, layerURL);
connector = new GeoTiff(configuration);
}
}
}

@ -122,6 +124,7 @@ public class MatrixExtractor {

* Extract raw values in a time instant according to a set of grid points and a bounding box
*/
public GISDataConnector connector;

public List<Double> getRawValuesInTimeInstantAndBoundingBox(String layerTitle, int time, List<Tuple<Double>> coordinates3d, double xL, double xR, double yL, double yR, double resolution, boolean saveLayer) throws Exception {
connector = getConnector(layerTitle, resolution);
// execute connector
@ -131,25 +134,26 @@ public class MatrixExtractor {

throw new Exception("ERROR: Connector not found for layer " + layerTitle + " - Cannot Rasterize!");

}
public GISDataConnector getCurrentConnector(){

public GISDataConnector getCurrentConnector() {
return connector;
}

public double zmin;
public double zmax;

public double correctZ(double zValue, String layerURL, double resolution) throws Exception{
public double correctZ(double zValue, String layerURL, double resolution) throws Exception {
GISDataConnector connector = getConnector(layerURL, resolution);
zmin = connector.getMinZ(layerURL, layerName);
zmax = connector.getMaxZ(layerURL, layerName);
if (zValue<zmin)
if (zValue < zmin)
zValue = zmin;
else if (zValue>zmax)
zValue = zmax;
else if (zValue > zmax)
zValue = zmax;

return zValue;
}

/**
* Extract a grid of XY points with fixed time and z
*

@ -181,32 +185,6 @@ public class MatrixExtractor {

AnalysisLogger.getLogger().debug("MatrixExtractor->ERROR: BAD BOUNDING BOX!!!");
return new double[0][0];
}
int ysteps = (int) ((y2 - y1) / yResolution);
int xsteps = (int) ((x2 - x1) / xResolution);

double[][] slice = new double[ysteps + 1][xsteps + 1];
List<Tuple<Double>> tuples = new ArrayList<Tuple<Double>>();

if (log) {
AnalysisLogger.getLogger().debug("MatrixExtractor->Building the points grid according to YRes:" + yResolution + " and XRes:" + xResolution);
AnalysisLogger.getLogger().debug("MatrixExtractor->Points to reassign:" + (ysteps * xsteps));
}

// build the tuples according to the desired resolution
for (int i = 0; i < ysteps + 1; i++) {
double y = (i * yResolution) + y1;
if (i == ysteps)
y = y2;
for (int j = 0; j < xsteps + 1; j++) {
double x = (j * xResolution) + x1;
if (j == xsteps)
x = x2;
tuples.add(new Tuple<Double>(x, y, z));
}
}

if (log)
AnalysisLogger.getLogger().debug("Taking " + (ysteps + 1) + " values per " + (xsteps + 1) + "=" + (ysteps + 1 * xsteps + 1) + "...");

// adjust the BB in the case of one single point
if (x2 == x1) {
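The hand-rolled grid builder removed above is superseded by VectorOperations.generateCoordinateTripletsInBoundingBox, used in the next hunk. A minimal plain-Java sketch of what such a generator produces, assuming inclusive bounds, a regular step, and a row-major scan order:

import java.util.ArrayList;
import java.util.List;

public class GridTuples {
    public static void main(String[] args) {
        double x1 = -1, x2 = 1, y1 = -1, y2 = 1, z = 0, xRes = 1, yRes = 1;
        int ysteps = (int) ((y2 - y1) / yRes), xsteps = (int) ((x2 - x1) / xRes);
        List<double[]> tuples = new ArrayList<double[]>();
        // row-major scan, forcing the last row/column onto the exact boundary
        for (int i = 0; i <= ysteps; i++) {
            double y = (i == ysteps) ? y2 : y1 + i * yRes;
            for (int j = 0; j <= xsteps; j++) {
                double x = (j == xsteps) ? x2 : x1 + j * xRes;
                tuples.add(new double[] { x, y, z });
            }
        }
        System.out.println(tuples.size() + " triplets"); // 9 triplets for this 3x3 grid
    }
}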
@ -218,43 +196,26 @@ public class MatrixExtractor {

y2 = y2 + (yResolution / 2d);
y1 = y1 - (yResolution / 2d);
}

if (log)
AnalysisLogger.getLogger().debug("Bounding box: (" + x1 + "," + x2 + ";" + y1 + "," + y2 + ")");

List<Tuple<Double>> tuples = VectorOperations.generateCoordinateTripletsInBoundingBox(x1, x2, y1, y2, z, xResolution, yResolution);

if (log) {
AnalysisLogger.getLogger().debug("MatrixExtractor->Building the points grid according to YRes:" + yResolution + " and XRes:" + xResolution);
}

// long t0=System.currentTimeMillis();
currentTimeValues = getRawValuesInTimeInstantAndBoundingBox(layerTitle, timeInstant, tuples, x1, x2, y1, y2, currentResolution, cachelayer);
currentTuples = tuples;
// AnalysisLogger.getLogger().debug("Elapsed:"+(System.currentTimeMillis()-t0));

if (log)
double[][] slice = VectorOperations.vectorToMatix(currentTimeValues, x1, x2, y1, y2, xResolution, yResolution);

if (log){
AnalysisLogger.getLogger().debug("Taken " + currentTimeValues.size() + " values");

// build back the values matrix
int k = 0;
int g = 0;
int ntriplets = currentTimeValues.size();
// cycle on all the triplets to reconstruct the matrix
for (int t = 0; t < ntriplets; t++) {
// take the corresponding (time,value) pair
Double value = currentTimeValues.get(t);
// if there is a value, then set it, otherwise set NaN:
// the layer is undefined in that point and a value must be generated
// assign a value to the matrix

// WARNING: PATCH FOR FAO LAYERS: probability can be equal to 2 for uncertainty
if (faolayer && (value > 1)) {
AnalysisLogger.getLogger().debug("APPLYING FAO PATCH!");
slice[k][g] = 0.5;
} else
slice[k][g] = value;
// increase the x step according to the matrix
if (g == xsteps) {
g = 0;
k++;
} else
g++;
AnalysisLogger.getLogger().debug("MatrixExtractor->Reassigned:" + (slice.length * slice[0].length));
}

// applyNearestNeighbor();

if (log)
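The hand-written k/g bookkeeping is replaced by VectorOperations.vectorToMatix, which folds the flat row-major value list back into a [rows][cols] matrix. A plain-Java sketch of that fold, assuming the list was produced by the row-major grid scan above:

import java.util.Arrays;
import java.util.List;

public class VectorToMatrix {
    // Fold a row-major flat list into a rows x cols matrix (illustrative version).
    static double[][] fold(List<Double> flat, int rows, int cols) {
        double[][] m = new double[rows][cols];
        for (int t = 0; t < flat.size(); t++)
            m[t / cols][t % cols] = flat.get(t);
        return m;
    }
    public static void main(String[] args) {
        double[][] m = fold(Arrays.asList(1d, 2d, 3d, 4d, 5d, 6d), 2, 3);
        System.out.println(Arrays.deepToString(m)); // [[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]
    }
}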
@ -140,9 +140,12 @@ public class RasterTable {

if (valuesPropertiesMap == null) {
// we do not use NaNs in this case every value will be filled
if (value.isNaN())
if (value.isNaN()){
value = 0d;
valueForTable = "" + value;
valueForTable = null;
}
else
valueForTable = "'"+value+"'";
} else {
// we do not use NaNs in this case every value will be filled
if (value.isNaN())

@ -166,7 +169,7 @@ public class RasterTable {

if (valueForTable != null) {
rowcounter++;
if (valuesPropertiesMap == null)
sb.append("('" + csquare + "'," + x + "," + y + "," + zVal + "," + tVal + ",'" + valueForTable + "')");
sb.append("('" + csquare + "'," + x + "," + y + "," + zVal + "," + tVal + "," + valueForTable + ")");
else
sb.append("('" + csquare + "'," + x + "," + y + "," + zVal + "," + tVal + "," + valueForTable + ")");
}

@ -209,7 +212,10 @@ public class RasterTable {

int m = valuesMap.size();
int i = 0;
for (String value : valuesMap.values()) {
sb.append("'" + value.replace("'", ""+(char)96) + "'");
if (value.equals("NULL"))
sb.append(value);
else
sb.append("'" + value.replace("'", ""+(char)96) + "'");
if (i < m - 1)
sb.append(",");
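NaN cells are now emitted as an unquoted SQL NULL instead of a quoted string, so the database stores a real missing value rather than the text '0.0' or 'NULL'. A small sketch of the formatting rule (plain Java, the helper name is illustrative):

public class SqlValueFormat {
    // Render a cell for a SQL INSERT: unquoted NULL for missing values, quoted literal otherwise.
    static String toSqlValue(Double value) {
        if (value == null || value.isNaN()) return "NULL";
        return "'" + value + "'";
    }
    public static void main(String[] args) {
        System.out.println(toSqlValue(Double.NaN)); // NULL
        System.out.println(toSqlValue(0.5));        // '0.5'
    }
}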
@ -27,7 +27,12 @@ public class OGCFormatter {

if (!wcsURL.endsWith("?"))
wcsURL+="/wcs?";

wcsURL+="service=wcs&version=1.0.0"+"&request=GetCoverage&coverage=" + coverage+"&CRS="+crs+ "&bbox=" + boundingbox+"&width="+width+"&height="+height+"&format="+format;
wcsURL+="service=wcs&version=1.0.0"+"&request=GetCoverage&coverage=" + coverage+"&CRS="+crs+ "&bbox=" + boundingbox+"&format="+format;

if (width!=null && width.trim().length()>0)
wcsURL+="&width="+width;
if (height!=null && height.trim().length()>0)
wcsURL+="&height="+height;
if (responsecrs!=null && responsecrs.trim().length()>0)
wcsURL+="&RESPONSE_CRS="+responsecrs;
if (depth!=null && depth.trim().length()>0)
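width and height become optional query parameters: they are appended only when non-blank, which lets callers pass null (as WCS.java now does) and rely on resx/resy instead. A compact sketch of the append-if-present pattern (the endpoint URL is hypothetical):

public class UrlParams {
    // Append "&name=value" only when the value is non-null and non-blank.
    static String appendIfPresent(String url, String name, String value) {
        if (value != null && value.trim().length() > 0)
            return url + "&" + name + "=" + value;
        return url;
    }
    public static void main(String[] args) {
        String url = "http://example.org/wcs?service=wcs&version=1.0.0"; // hypothetical endpoint
        url = appendIfPresent(url, "width", null);   // skipped
        url = appendIfPresent(url, "height", "300"); // appended
        System.out.println(url);
    }
}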
@ -0,0 +1,50 @@

package org.gcube.dataanalysis.geo.test;

import java.io.File;
import java.io.FileWriter;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
import org.gcube.dataanalysis.geo.utils.MapUtils;

public class TestExtractionXYMatrix {

static String[] layers = {
"94ea5767-ae76-41dc-be87-f9a0bdc96419", // temp
"9196c9cf-47c9-413e-8a34-04fadde48e63", // salinity 3d
"23646f93-23a8-4be4-974e-aee6bebe1707", // ph
"46b16749-88c1-4d35-a60a-8ad328cc320c", // oxygen
"229c135f-2379-4712-bdd6-89baa8637a27", // nitrate
"3fb7fd88-33d4-492d-b241-4e61299c44bb", // latimeria
"4aa10e73-5bda-4eac-a059-792b240ef759", // cloud fraction
"fao-rfb-map-ccsbt", // tuna
"889d67b4-32f5-4159-b01f-9c9662176434" // carcharodon
};

public static void main(String[] args) throws Exception {
AnalysisLogger.setLogger("./cfg/" + AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();

config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "gcube");
config.setParam("DatabasePassword", "d4science2");
config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
double resolution = 1;
FileWriter fw = new FileWriter(new File("maps.txt"));
for (String layer : layers) {
XYExtractor extractor = new XYExtractor(config);
double[][] matrix = extractor.extractXYGrid(layer, 0, -180, 180, -90, 90, 0, resolution, resolution);
String map = MapUtils.globalASCIIMap(matrix);
fw.write(map);
}

fw.close();
System.out.println("DONE!");
}

}
@ -1,14 +1,25 @@

package org.gcube.dataanalysis.geo.test;

import java.util.ArrayList;
import java.util.List;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.ecoengine.utils.Operations;
import org.gcube.dataanalysis.ecoengine.utils.Transformations;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.connectors.asc.ASC;
import org.gcube.dataanalysis.geo.connectors.asc.AscDataExplorer;
import org.gcube.dataanalysis.geo.connectors.asc.AscRaster;
import org.gcube.dataanalysis.geo.connectors.asc.AscRasterWriter;
import org.gcube.dataanalysis.geo.matrixmodel.ASCConverter;
import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;

public class TestPointslice {

static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
public static void main1(String[] args) throws Exception{
String layertitle = "Statistical Mean in [07-01-01 01:00] (3D) {World Ocean Atlas 09: Sea Water Temperature - annual: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/temperature_annual_1deg_ENVIRONMENT_OCEANS_.nc}";
// String layertitle = "Mass Concentration of Chlorophyll in Sea Water in [03-30-13 01:00] (3D) {Mercator Ocean BIOMER1V1R1: Data extracted from dataset http://atoll-mercator.vlandata.cls.fr:44080/thredds/dodsC/global-analysis-bio-001-008-a}";
// String layertitle = "Objectively Analyzed Climatology in [07-01-01 01:00] (3D) {World Ocean Atlas 09: Sea Water Temperature - annual: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/temperature_annual_1deg_ENVIRONMENT_OCEANS_.nc}";

@ -27,4 +38,144 @@ public class TestPointslice {

System.out.println("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
System.out.println("Output: "+output[0][0]);
}

public static void main2(String[] args) throws Exception{

List<Tuple<Double>> tuples = new ArrayList<Tuple<Double>>();
for (int j=0;j<100;j++){
double randomx = ((180) * Math.random()) -180;
double randomy = ((90) * Math.random()) -90;
tuples.add(new Tuple<Double>(randomx,randomy,0d));
}

AscDataExplorer ade1 = new AscDataExplorer("./maxentfd4c59b3-2c65-4c4e-a235-84093d58230d/layer1.asc");
// AscDataExplorer ade1 = new AscDataExplorer("./maxenttestfolder/nitrate.asc");

List<Double> features = ade1.retrieveDataFromAsc(tuples,0);

AscDataExplorer ade2 = new AscDataExplorer("./maxentCompleteLayers/layer1.asc");
List<Double> features2 = ade2.retrieveDataFromAsc(tuples,0);

for (int i=0;i<features.size();i++){
if (features.get(i)-features2.get(i)!=0)
if ((features.get(i).isNaN() && !features2.get(i).isNaN()) || (!features.get(i).isNaN() && features2.get(i).isNaN()))
System.out.println(tuples.get(i)+":"+features.get(i)+" vs "+features2.get(i)+" - "+(features.get(i)-features2.get(i)));
}

System.out.println("Finished");
}

public static void main3(String[] args) throws Exception{

AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setGcubeScope("/gcube/devsec/devVRE");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
XYExtractor extractor = new XYExtractor(config);
double x1 = -60d;
double x2 = 60d;
double y1 = -10d;
double y2 = 10d;

double[][] values = extractor.extractXYGrid("dfd1bad2-ab00-42ac-8bb2-46a17162f509", 0, x1,x2,y1,y2,0d, 0.08333333, 0.08333333);

List<Double> currentTimeValues = extractor.currentTimeValues;

AscRasterWriter writer = new AscRasterWriter();
writer.writeRaster("testwritten.asc", new AscRaster(values, 0.08333333, -1, -1, x1, y1));

AscDataExplorer ade2 = new AscDataExplorer("./maxentCompleteLayers/layer1.asc");
List<Double> features2 = ade2.retrieveDataFromAsc(extractor.currentTuples,0);
int g = 0;
int k = 0;

for (int i=0;i<currentTimeValues.size();i++){
System.out.println("1-"+extractor.currentTuples.get(i)+":"+currentTimeValues.get(i)+" vs "+features2.get(i)+" - "+(currentTimeValues.get(i)-features2.get(i)));
System.out.println("2-"+extractor.currentTuples.get(i)+":"+values[k][g]+" vs "+currentTimeValues.get(i)+" - "+(values[k][g]-currentTimeValues.get(i)));

g++;
if (g>=values[0].length){
g = 0;
k++;
}
}

System.out.println("Finished");
}

public static void main4(String[] args) throws Exception{

AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setGcubeScope("/gcube/devsec/devVRE");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
XYExtractor extractor = new XYExtractor(config);
double x1 = -60d;
double x2 = 60d;
double y1 = -10d;
double y2 = 10d;

double[][] values = extractor.extractXYGrid("dfd1bad2-ab00-42ac-8bb2-46a17162f509", 0, x1,x2,y1,y2,0d, 0.08333333, 0.08333333);
List<Double> currentTimeValues = extractor.currentTimeValues;

AscDataExplorer ade1 = new AscDataExplorer("./testwritten.asc");
List<Double> features1 = ade1.retrieveDataFromAsc(extractor.currentTuples,0);

AscDataExplorer ade2 = new AscDataExplorer("./maxentCompleteLayers/layer1.asc");
List<Double> features2 = ade2.retrieveDataFromAsc(extractor.currentTuples,0);

for (int i=0;i<currentTimeValues.size();i++){
System.out.println("1-"+extractor.currentTuples.get(i)+":"+features1.get(i)+" vs "+features2.get(i)+" - "+(features1.get(i)-features2.get(i)));
}

System.out.println("Finished");
}

public static void main(String[] args) throws Exception{

AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setGcubeScope("/gcube/devsec/devVRE");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);

double x1 = -60d;
double x2 = 60d;
double y1 = -10d;
double y2 = 10d;

List<Tuple<Double>> tuples = new ArrayList<Tuple<Double>>();
for (int j=0;j<100;j++){
double randomx = ((x1-x2) * Math.random()) +x2;
double randomy = ((y1-y2) * Math.random()) +y2;
tuples.add(new Tuple<Double>(randomx,randomy,0d));
}

// AscDataExplorer ade1 = new AscDataExplorer("./testwritten.asc");
AscDataExplorer ade1 = new AscDataExplorer("./maxent93db29d5-6a38-4598-9c66-5a814f4a9f36/layer1.asc");

List<Double> features1 = ade1.retrieveDataFromAsc(tuples,0);

// AscDataExplorer ade2 = new AscDataExplorer("./maxentCompleteLayers/layer1.asc");
AscDataExplorer ade2 = new AscDataExplorer("./maxenttestfolder/nitrate.asc");

List<Double> features2 = ade2.retrieveDataFromAsc(tuples,0);

for (int i=0;i<tuples.size();i++){
System.out.println("1-"+tuples.get(i)+":"+features1.get(i)+" vs "+features2.get(i)+" - "+(features1.get(i)-features2.get(i)));
}

System.out.println("Finished");
}

}
@ -16,8 +16,9 @@ public class TestExtraction {

// static AlgorithmConfiguration[] configs = { testXYExtractionTable2()};
// static AlgorithmConfiguration[] configs = { testDirectExtraction()};
// static AlgorithmConfiguration[] configs = { testXYExtractionAquaMaps()};
static AlgorithmConfiguration[] configs = { testXYExtractionGeotermia()};
// static AlgorithmConfiguration[] configs = { testXYExtractionGeotermia()};
// static AlgorithmConfiguration[] configs = { testXYExtractionFAO()};
static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF()};
public static void main(String[] args) throws Exception {

System.out.println("TEST 1");

@ -140,8 +141,8 @@ public class TestExtraction {

config.setParam("BBox_UpperRightLong","50");
config.setParam("XResolution","0.5");
config.setParam("YResolution","0.5");
config.setParam("OutputTableName","testextraction");
config.setParam("OutputTableLabel","testextraction");
config.setParam("OutputTableName","testextraction2");
config.setParam("OutputTableLabel","testextraction2");

return config;
}
@ -0,0 +1,84 @@

package org.gcube.dataanalysis.geo.test.infra;

import java.util.List;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;

public class TestMaxEnt {

static AlgorithmConfiguration[] configs = { testMaxentTemperature()};

public static void main(String[] args) throws Exception {

System.out.println("TEST 1");

for (int i = 0; i < configs.length; i++) {
AnalysisLogger.getLogger().debug("Executing: "+configs[i].getAgent());
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(configs[i]);
trans.get(0).init();
Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
AnalysisLogger.getLogger().debug("ST:" + st);
trans = null;
}
}

private static AlgorithmConfiguration testMaxentTemperature() {

AlgorithmConfiguration config = new AlgorithmConfiguration();

config.setAgent("MAX_ENT_NICHE_MODELLING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");

config.setParam("OutputTableName","maxenttest");
config.setParam("OutputTableLabel","maxenttest");
config.setParam("SpeciesName","testsspecies");

config.setParam("OccurrencesTable","occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam("LongitudeColumn","decimallongitude");
config.setParam("LatitudeColumn","decimallatitude");
config.setParam("ScientificNameColumn","scientificname");

String sep=AlgorithmConfiguration.getListSeparator();
// config.setParam("Layers","dfd1bad2-ab00-42ac-8bb2-46a17162f509"+sep+"23646f93-23a8-4be4-974e-aee6bebe1707");
// config.setParam("Layers","94ea5767-ae76-41dc-be87-f9a0bdc96419"); // temperature 99-09 2D
config.setParam("Layers","23646f93-23a8-4be4-974e-aee6bebe1707"); // ph
config.setParam("MaxIterations","10000");
config.setParam("DefaultPrevalence","1");

config.setParam("Z","0");
config.setParam("TimeIndex","0");

config.setParam("BBox_LowerLeftLong","-180");
config.setParam("BBox_UpperRightLong","180");
config.setParam("BBox_LowerLeftLat","-90");
config.setParam("BBox_UpperRightLat","90");

/*
config.setParam("BBox_LowerLeftLong","-60");
config.setParam("BBox_UpperRightLong","60");
config.setParam("BBox_LowerLeftLat","-10");
config.setParam("BBox_UpperRightLat","10");
*/

config.setParam("XResolution","1");
config.setParam("YResolution","1");

return config;
}

}
@ -2,36 +2,42 @@ package org.gcube.dataanalysis.geo.test.maps;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.PointsMapsCreator;
import org.gcube.dataanalysis.geo.algorithms.PolygonMapsCreator;

public class TestMapCreation {

static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
String layertitle2 = "4e5c1bbf-f5ce-4b66-a67c-14d7d9920aa0";
String layertitle = "38b2eb74-1c07-4569-8a81-36ac2f973146";
public static void main1(String[] args) throws Exception{

AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(cfg);
config.setGcubeScope("/gcube/devsec/statVRE");
// config.setGcubeScope("/gcube/devsec/statVRE");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setPersistencePath("./");

config.setParam("MapName","Test Polygonal Map");
config.setParam("MapName","Test Polygonal Map Ph 6");
/*
config.setParam("InputTable","occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam("xDimension","decimallongitude");
config.setParam("yDimension","decimallatitude");
config.setParam("Info","recordedby") ;
config.setParam("Resolution","0.5");
*/
config.setParam("InputTable","generic_idbc699da3_a4d5_40fb_80ff_666dbf1316d5");
config.setParam("xDimension","x");
config.setParam("yDimension","y");
config.setParam("Info","fvalue") ;

config.setParam("DatabaseUserName","utente");
config.setParam("DatabasePassword","d4science");
config.setParam("DatabaseURL","jdbc:postgresql://statistical-manager.d.d4science.org/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setParam("Layer_1",layertitle);
config.setParam("Layer_2",layertitle2);
config.setParam("ValuesComparisonThreshold","0.1");

config.setParam("Z","0");

config.setParam("user", "postgres");

@ -39,11 +45,61 @@ public class TestMapCreation {

config.setParam("STOREURL","jdbc:postgresql://geoserver-test.d4science-ii.research-infrastructures.eu/timeseriesgisdb");
config.setParam("driver", "org.postgresql.Driver");
config.setParam("dialect", "org.hibernatespatial.postgis.PostgisDialect");

/*
PolygonMapsCreator mc = new PolygonMapsCreator();
*/
PointsMapsCreator mc = new PointsMapsCreator();
mc.setConfiguration(config);
mc.init();
mc.compute();

}

public static void main(String[] args) throws Exception{

AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(cfg);
// config.setGcubeScope("/gcube/devsec/statVRE");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setPersistencePath("./");

config.setParam("MapName","Test Polygonal Map Ph 7");
/*
config.setParam("InputTable","occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam("xDimension","decimallongitude");
config.setParam("yDimension","decimallatitude");
config.setParam("Info","recordedby") ;
config.setParam("Resolution","0.5");
*/
config.setParam("InputTable","testextraction2");
config.setParam("xDimension","x");
config.setParam("yDimension","y");
config.setParam("Info","fvalue") ;

config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");

config.setParam("Z","0");

config.setParam("user", "postgres");
config.setParam("password", "d4science2");
config.setParam("STOREURL","jdbc:postgresql://geoserver-test.d4science-ii.research-infrastructures.eu/timeseriesgisdb");
config.setParam("driver", "org.postgresql.Driver");
config.setParam("dialect", "org.hibernatespatial.postgis.PostgisDialect");
/*
PolygonMapsCreator mc = new PolygonMapsCreator();
*/
PointsMapsCreator mc = new PointsMapsCreator();
mc.setConfiguration(config);
mc.init();
mc.compute();

}

}
@ -38,7 +38,8 @@ public class TestMapsComparisonAquaMapsvsFAO {

config.setParam("KThreshold","0.5");

config.setParam("Z","0");
config.setGcubeScope(null);
config.setGcubeScope("/gcube/devsec/devVRE");

MapsComparator mc = new MapsComparator();
mc.setConfiguration(config);

@ -24,7 +24,7 @@ public class TestMapsComparisonTemperatureWOA {

config.setParam("Layer_2",layertitle2);
config.setParam("ValuesComparisonThreshold",""+0.1);
config.setParam("Z","0");
config.setGcubeScope(null);
config.setGcubeScope("/gcube/devsec/devVRE");

MapsComparator mc = new MapsComparator();
mc.setConfiguration(config);
@ -0,0 +1,93 @@

package org.gcube.dataanalysis.geo.test.projections;

import java.io.File;
import java.io.FileWriter;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
import org.gcube.dataanalysis.geo.utils.MapUtils;

public class TestExtractionXYMatrixFromTable {

public static void sliceTableAquaMaps(AlgorithmConfiguration config) throws Exception {
// latimeria chalumnae
config.setParam(TableMatrixRepresentation.tableNameParameter, "testextractionaquamaps");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "approx_x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "approx_y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "time");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "f_probability");
config.setParam(TableMatrixRepresentation.filterParameter, "");
}

public static void sliceTablePhImported(AlgorithmConfiguration config) throws Exception {
// ph
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.org/testdb");
config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_idbc699da3_a4d5_40fb_80ff_666dbf1316d5");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "time");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "fvalue");
config.setParam(TableMatrixRepresentation.filterParameter, "");
}

public static void sliceTablePh(AlgorithmConfiguration config) throws Exception {
// ph
config.setParam(TableMatrixRepresentation.tableNameParameter, "testextraction");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "time");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "fvalue");
config.setParam(TableMatrixRepresentation.filterParameter, "");
}

public static void sliceMapCreated(AlgorithmConfiguration config) throws Exception {

config.setParam(TableMatrixRepresentation.tableNameParameter, "testextraction2");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "time");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "fvalue");
config.setParam(TableMatrixRepresentation.filterParameter, "");
}

public static void sliceMapCreated2(AlgorithmConfiguration config) throws Exception {

config.setParam(TableMatrixRepresentation.tableNameParameter, "testextraction2");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "approx_x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "approx_y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "time");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "f_temp");
config.setParam(TableMatrixRepresentation.filterParameter, "");
}

public static void main(String[] args) throws Exception {
AnalysisLogger.setLogger("./cfg/" + AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();

config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "gcube");
config.setParam("DatabasePassword", "d4science2");
config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");
sliceMapCreated2(config);

double resolution = 1;
FileWriter fw = new FileWriter(new File("maps.txt"));

XYExtractor extractor = new XYExtractor(config);
double[][] matrix = extractor.extractXYGrid(null, 0, -180, 180, -90, 90, 0, resolution, resolution);
String map = MapUtils.globalASCIIMap(matrix);
fw.write(map);

fw.close();
System.out.println("DONE!");
}

}
@ -0,0 +1,60 @@

package org.gcube.dataanalysis.geo.test.projections;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.PointsMapsCreator;
import org.gcube.dataanalysis.geo.algorithms.PolygonMapsCreator;

public class TestMapCreation {

static String cfg = "./cfg/";

public static void main(String[] args) throws Exception{

AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(cfg);
// config.setGcubeScope("/gcube/devsec/statVRE");
config.setGcubeScope("/gcube/devsec/devVRE");
config.setPersistencePath("./");

config.setParam("MapName","Test Polygonal Map Ph 8");
/*
config.setParam("InputTable","occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam("xDimension","decimallongitude");
config.setParam("yDimension","decimallatitude");
config.setParam("Info","recordedby") ;
config.setParam("Resolution","0.5");
*/

config.setParam("InputTable","testextraction2");
config.setParam("xDimension","x");
config.setParam("yDimension","y");
config.setParam("Info","fvalue") ;

config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");

config.setParam("Z","0");

config.setParam("user", "postgres");
config.setParam("password", "d4science2");
config.setParam("STOREURL","jdbc:postgresql://geoserver-test.d4science-ii.research-infrastructures.eu/timeseriesgisdb");
config.setParam("driver", "org.postgresql.Driver");
config.setParam("dialect", "org.hibernatespatial.postgis.PostgisDialect");
/*
PolygonMapsCreator mc = new PolygonMapsCreator();
*/
PointsMapsCreator mc = new PointsMapsCreator();
mc.setConfiguration(config);
mc.init();
mc.compute();

}

}
@ -0,0 +1,281 @@
|
|||
package org.gcube.dataanalysis.geo.test.projections;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
|
||||
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
|
||||
|
||||
public class TestXYExtractionAlgorithm {
|
||||
|
||||
// static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF(),testXYExtractionAquaMaps(),testXYExtractionTable(),testXYExtractionTable2(),testDirectExtraction()};
|
||||
// static AlgorithmConfiguration[] configs = { testXYExtractionTable2()};
|
||||
// static AlgorithmConfiguration[] configs = { testDirectExtraction()};
|
||||
// static AlgorithmConfiguration[] configs = { testXYExtractionAquaMaps()};
|
||||
// static AlgorithmConfiguration[] configs = { testXYExtractionGeotermia()};
|
||||
// static AlgorithmConfiguration[] configs = { testXYExtractionFAO()};
|
||||
// static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF()};
|
||||
static AlgorithmConfiguration[] configs = { testXYExtractionWFS11()};

    public static void main(String[] args) throws Exception {

        System.out.println("TEST 1");

        for (int i = 0; i < configs.length; i++) {
            AnalysisLogger.getLogger().debug("Executing: " + configs[i].getAgent());
            List<ComputationalAgent> trans = null;
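            // The transducer factory resolves the agent named in the configuration
            // (here XYEXTRACTOR / XYEXTRACTOR_TABLE) into a runnable computational agent.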
            trans = TransducerersFactory.getTransducerers(configs[i]);
            trans.get(0).init();
            Regressor.process(trans.get(0));
            StatisticalType st = trans.get(0).getOutput();
            AnalysisLogger.getLogger().debug("ST:" + st);
            trans = null;
        }
    }

    private static AlgorithmConfiguration testXYExtractionProd() {

        AlgorithmConfiguration config = new AlgorithmConfiguration();

        config.setAgent("XYEXTRACTOR");
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");
        config.setParam("DatabaseUserName", "gcube");
        config.setParam("DatabasePassword", "d4science2");
        config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
        config.setParam("DatabaseDriver", "org.postgresql.Driver");
        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");

        config.setParam("Layer", "0aac424b-5f5b-4fa6-97d6-4b4deee62b97");
        config.setParam("Z", "0");
        config.setParam("TimeIndex", "0");
        config.setParam("BBox_LowerLeftLat", "-60");
        config.setParam("BBox_LowerLeftLong", "-50");
        config.setParam("BBox_UpperRightLat", "60");
        config.setParam("BBox_UpperRightLong", "50");
        config.setParam("XResolution", "0.5");
        config.setParam("YResolution", "0.5");
        config.setParam("OutputTableName", "testextractionprod");
        config.setParam("OutputTableLabel", "testextractionprod");

        return config;
    }

    private static AlgorithmConfiguration testXYExtractionGeotermia() {

        AlgorithmConfiguration config = new AlgorithmConfiguration();

        config.setAgent("XYEXTRACTOR");
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");
        config.setParam("DatabaseUserName", "gcube");
        config.setParam("DatabasePassword", "d4science2");
        config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
        config.setParam("DatabaseDriver", "org.postgresql.Driver");
        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");

        config.setParam("Layer", "http://repoigg.services.iit.cnr.it:8080/geoserver/IGG/ows?service=WFS&version=1.0.0&request=GetFeature&typeName=IGG:area_temp_1000&maxFeatures=50");
        config.setParam("Z", "-1000");
        config.setParam("TimeIndex", "0");
        config.setParam("BBox_LowerLeftLat", "34.46");
        config.setParam("BBox_LowerLeftLong", "5.85");
        config.setParam("BBox_UpperRightLat", "49");
        config.setParam("BBox_UpperRightLong", "21.41");
        config.setParam("XResolution", "0.01");
        config.setParam("YResolution", "0.01");
        config.setParam("OutputTableName", "testextractiongeotermia");
        config.setParam("OutputTableLabel", "testextractiongeotermia");

        return config;
    }

    private static AlgorithmConfiguration testXYExtractionFAO() {

        AlgorithmConfiguration config = new AlgorithmConfiguration();

        config.setAgent("XYEXTRACTOR");
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");
        config.setParam("DatabaseUserName", "gcube");
        config.setParam("DatabasePassword", "d4science2");
        config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
        config.setParam("DatabaseDriver", "org.postgresql.Driver");
        config.setGcubeScope("/gcube/devsec/devVRE");

        config.setParam("Layer", "20c06241-f00f-4cb0-82a3-4e5ec97a0d0a");
        config.setParam("Z", "0");
        config.setParam("TimeIndex", "0");
        config.setParam("BBox_LowerLeftLat", "-90");
        config.setParam("BBox_LowerLeftLong", "-180");
        config.setParam("BBox_UpperRightLat", "90");
        config.setParam("BBox_UpperRightLong", "180");
        config.setParam("XResolution", "0.2");
        config.setParam("YResolution", "0.2");
        config.setParam("OutputTableName", "testextractionfao");
        config.setParam("OutputTableLabel", "testextractionfao");

        return config;
    }

    private static AlgorithmConfiguration testXYExtractionNetCDF() {

        AlgorithmConfiguration config = new AlgorithmConfiguration();

        config.setAgent("XYEXTRACTOR");
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");
        config.setParam("DatabaseUserName", "gcube");
        config.setParam("DatabasePassword", "d4science2");
        config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
        config.setParam("DatabaseDriver", "org.postgresql.Driver");
        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");

        config.setParam("Layer", "c565e32c-c5b3-4964-b44f-06dc620563e9");
        config.setParam("Z", "0");
        config.setParam("TimeIndex", "0");
        config.setParam("BBox_LowerLeftLat", "-60");
        config.setParam("BBox_LowerLeftLong", "-50");
        config.setParam("BBox_UpperRightLat", "60");
        config.setParam("BBox_UpperRightLong", "50");
        config.setParam("XResolution", "0.5");
        config.setParam("YResolution", "0.5");
        config.setParam("OutputTableName", "testextraction2");
        config.setParam("OutputTableLabel", "testextraction2");

        return config;
    }

    private static AlgorithmConfiguration testXYExtractionWFS11() {

        AlgorithmConfiguration config = new AlgorithmConfiguration();

        config.setAgent("XYEXTRACTOR");
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");
        config.setParam("DatabaseUserName", "gcube");
        config.setParam("DatabasePassword", "d4science2");
        config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
        config.setParam("DatabaseDriver", "org.postgresql.Driver");
        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/EGIP");

        config.setParam("Layer", "3f16f87a-68db-49ca-bfc7-affcd83ae274");
        config.setParam("Z", "0");
        config.setParam("TimeIndex", "0");
        config.setParam("BBox_LowerLeftLat", "-60");
        config.setParam("BBox_LowerLeftLong", "-70");
        config.setParam("BBox_UpperRightLat", "60");
        config.setParam("BBox_UpperRightLong", "70");
        config.setParam("XResolution", "0.5");
        config.setParam("YResolution", "0.5");
        config.setParam("OutputTableName", "testextraction2");
        config.setParam("OutputTableLabel", "testextraction2");

        return config;
    }

    private static AlgorithmConfiguration testDirectExtraction() {

        AlgorithmConfiguration config = new AlgorithmConfiguration();

        config.setAgent("XYEXTRACTOR");
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");
        config.setParam("DatabaseUserName", "gcube");
        config.setParam("DatabasePassword", "d4science2");
        config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
        config.setParam("DatabaseDriver", "org.postgresql.Driver");
        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");

        config.setParam("Layer", "https://dl.dropboxusercontent.com/u/12809149/geoserver-GetCoverage.image.asc");
        config.setParam("Z", "0");
        config.setParam("TimeIndex", "0");
        config.setParam("BBox_LowerLeftLat", "-60");
        config.setParam("BBox_LowerLeftLong", "-50");
        config.setParam("BBox_UpperRightLat", "60");
        config.setParam("BBox_UpperRightLong", "50");
        config.setParam("XResolution", "0.5");
        config.setParam("YResolution", "0.5");
        config.setParam("OutputTableName", "testextractiondirect");
        config.setParam("OutputTableLabel", "testextractiondirect");

        return config;
    }

    private static AlgorithmConfiguration testXYExtractionAquaMaps() {

        AlgorithmConfiguration config = testXYExtractionNetCDF();
        config.setParam("Layer", "04e61cb8-3c32-47fe-823c-80ac3d417a0b");
        config.setParam("OutputTableName", "testextractionaquamaps");

        return config;
    }

    private static AlgorithmConfiguration testXYExtractionTable() {

        AlgorithmConfiguration config = testXYExtractionNetCDF();
        config.setAgent("XYEXTRACTOR_TABLE");

        config.setParam("OutputTableName", "testextractiontable");
        config.setParam("DatabaseUserName", "utente");
        config.setParam("DatabasePassword", "d4science");
        config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
        config.setParam("DatabaseDriver", "org.postgresql.Driver");
        // vessels
        /*
        config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
        config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
        config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
        config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
        config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
        config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
        */
        config.setParam(TableMatrixRepresentation.tableNameParameter, "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
        config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "decimallongitude");
        config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "decimallatitude");
        config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, " ");
        config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "decimallatitude");
        config.setParam(TableMatrixRepresentation.filterParameter, " ");

        return config;
    }

    private static AlgorithmConfiguration testXYExtractionTable2() {

        AlgorithmConfiguration config = testXYExtractionNetCDF();
        config.setAgent("XYEXTRACTOR_TABLE");

        config.setParam("OutputTableName", "testextractiontable2");
        config.setParam("DatabaseUserName", "utente");
        config.setParam("DatabasePassword", "d4science");
        config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
        config.setParam("DatabaseDriver", "org.postgresql.Driver");
        // vessels
        /*
        config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
        config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
        config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
        config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
        config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
        config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
        */
        config.setParam(TableMatrixRepresentation.tableNameParameter, "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
        config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "decimallongitude");
        config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "decimallatitude");
        config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "modified");
        config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "decimallatitude");
        config.setParam(TableMatrixRepresentation.filterParameter, " ");
        config.setParam("Z", "0");
        config.setParam("TimeIndex", "1");

        return config;
    }

}
@ -0,0 +1,142 @@
package org.gcube.dataanalysis.geo.test.projections;

import java.util.ArrayList;
import java.util.List;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.connectors.asc.AscDataExplorer;
import org.gcube.dataanalysis.geo.connectors.geotiff.GeoTiff;
import org.gcube.dataanalysis.geo.connectors.netcdf.NetCDF;
import org.gcube.dataanalysis.geo.connectors.wcs.WCS;
import org.gcube.dataanalysis.geo.connectors.wfs.WFS;
import org.gcube.dataanalysis.geo.utils.MapUtils;
import org.gcube.dataanalysis.geo.utils.VectorOperations;

public class TestXYExtractionConnectors {

    static String[] urlToTest3 = {
        // "http://geoserver3.d4science.research-infrastructures.eu/geoserver"
        // "http://geoserver2.d4science.research-infrastructures.eu/geoserver"
        "http://www.fao.org/figis/geoserver/species/ows"
    };

    static String[] layernamesTest3 = {
        // "lsoleasolea20121217184934494cet"
        // "lcarcharodoncarcharias20121217173706733cet"
        // "lxiphiasgladius20130410182141778cest"
        // "SPECIES_DIST_BIB"
        "SPECIES_DIST_SWO"
    };

    static String[] urlToTest1 = {
        "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridv_OCEANS_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_1366211498692.nc",
    };

    static String[] layernamesTest1 = {
        "vomecrty"
    };

    static String[] urlToTest2 = {
        "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/WOA2005TemperatureAnnual_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_.nc"
    };

    static String[] layernamesTest2 = {
        "t00an1"
    };

    static String[] urlToTest = {
        "http://thredds.research-infrastructures.eu/thredds/fileServer/public/netcdf/ph.asc",
        "http://thredds.research-infrastructures.eu/thredds/fileServer/public/netcdf/calcite.asc",
        "https://dl.dropboxusercontent.com/u/12809149/wind1.tif",
        "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/WOA2005TemperatureAnnual_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_.nc",
        "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/salinity_annual_1deg_ENVIRONMENT_OCEANS_.nc",
        "http://thredds.d4science.org/thredds/fileServer/public/netcdf/global-reanalysis-phys-001-004-b-ref-fr-mjm95-icemod_ENVIRONMENT_OCEANS_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_1366211441189.nc",
        "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/CERSAT-GLO-CLIM_WIND_L4-OBS_FULL_TIME_SERIE_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_1366217956317.nc",
        "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/phosphate_seasonal_5deg_ENVIRONMENT_BIOTA_.nc",
        "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-analysis-bio-001-008-_a_BIOTA_ENVIRONMENT_1366217546908.nc",
        "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/dissolved_oxygen_annual_1deg_ENVIRONMENT_BIOTA_.nc",
        "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-reanalysis-phys-001-004-b-ref-fr-mjm95-gridv_OCEANS_CLIMATOLOGY_METEOROLOGY_ATMOSPHERE_1366211498692.nc",
        "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/nitrate_seasonal_5deg_ENVIRONMENT_BIOTA_.nc",
        "http://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/global-analysis-bio-001-008-a_BIOTA_ENVIRONMENT_1366217608283.nc",
        "http://thredds.research-infrastructures.eu/thredds/fileServer/public/netcdf/cloudmean.asc",
        "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/wcs/wcs?service=wcs&version=1.0.0&request=GetCoverage&coverage=aquamaps:WorldClimBio2&CRS=EPSG:4326&bbox=-180,0,180,90&width=1&height=1&format=geotiff&RESPONSE_CRS=EPSG:4326",
        "http://geoserver2.d4science.research-infrastructures.eu/geoserver"
    };

    static String[] layernamesTest = {
        "ph",
        "calcite",
        "wind",
        "t00an1",
        "s_sd",
        "iicevelu",
        "wind_speed",
        "p_mn",
        "CHL",
        "o_mn",
        "vomecrty",
        "n_mn",
        "PHYC",
        "cloud",
        "aquamaps:WorldClimBio2",
        "lxiphiasgladius20130410182141778cest"
    };

    public static void main(String[] args) throws Exception {
        AnalysisLogger.setLogger("./cfg/" + AlgorithmConfiguration.defaultLoggerFile);
        AlgorithmConfiguration config = new AlgorithmConfiguration();

        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");
        config.setParam("DatabaseUserName", "gcube");
        config.setParam("DatabasePassword", "d4science2");
        config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
        config.setParam("DatabaseDriver", "org.postgresql.Driver");
        config.setGcubeScope("/gcube/devsec/devVRE");

        for (int t = 0; t < urlToTest.length; t++) {

            String layerURL = urlToTest[t];
            String layerName = layernamesTest[t];
            AnalysisLogger.getLogger().debug("Processing Layer: " + layerURL);
            List<Double> values = null;
            double step = 2d;
            List<Tuple<Double>> tuples = VectorOperations.generateCoordinateTripletsInBoundingBox(-180, 180, -90, 90, 0, step, step);
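
            // 2-degree global grid of (x,y,z) triplets; the connector below is
            // chosen by URL pattern: .nc -> NetCDF/OPeNDAP, .asc -> ASC grid,
            // tif -> GeoTIFF, "wcs" -> WCS, "geoserver" -> WFS.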
            if (layerURL.endsWith(".nc")) {
                NetCDF netcdf = new NetCDF(layerURL, layerName);
                values = netcdf.getFeaturesInTimeInstantAndArea(layerURL, layerName, 0, tuples, -180, 180, -90, 90);
            }
            else if (layerURL.endsWith(".asc")) {
                AscDataExplorer asc = new AscDataExplorer(layerURL);
                values = asc.retrieveDataFromAsc(tuples, 0);
            }
            else if (layerURL.endsWith("tif")) {
                GeoTiff geotiff = new GeoTiff(config);
                values = geotiff.getFeaturesInTimeInstantAndArea(layerURL, layerName, 0, tuples, -180, 180, -90, 90);
            }
            else if (layerURL.contains("wcs")) {
                WCS wcs = new WCS(config, layerURL);
                values = wcs.getFeaturesInTimeInstantAndArea(layerURL, layerName, 0, tuples, -180, 180, -90, 90);
            }
            else if (layerURL.contains("geoserver")) {
                WFS wfs = new WFS();
                values = wfs.getFeaturesInTimeInstantAndArea(layerURL, layerName, 0, tuples, -180, 180, -90, 90);
            }

            double[][] matrix = VectorOperations.vectorToMatix(values, -180, 180, -90, 90, step, step);
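            // Re-fold the flat value list into a [y][x] grid so the ASCII map
            // prints one row of latitude per line.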
            // System.out.println(MapUtils.globalASCIIMap(values, step, step));
            System.out.println(MapUtils.globalASCIIMap(matrix));
        }
    }
}
@ -16,10 +16,10 @@ public class RegressionXYSlice {
        AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);
        config.setPersistencePath("./");

        sliceWFS();
        sliceNetCDF();
        // sliceWFS();
        // sliceNetCDF();
        sliceASC();
        sliceTable();
        // sliceTable();
    }

    public static void sliceWFS() throws Exception {
@ -52,7 +52,7 @@ public class GdalConverter {
        return geoTiffFile;
    }

    public static String convertToASC(String fullPathToFile){
    public static String convertToASC(String fullPathToFile, int nodata){
        String gdalConverter = "";
@ -69,7 +69,7 @@ public class GdalConverter {
        String ascTiffFile = fullPathToFile.substring(0, pointIndex) + ".asc";

        String executionResult = ExecuteGetLine(gdalConverter + " -of AAIGrid " + fullPathToFile + " " + ascTiffFile);
        String executionResult = ExecuteGetLine(gdalConverter + " -of AAIGrid " + fullPathToFile + " -a_nodata " + nodata + " " + ascTiffFile);
        if (executionResult.equalsIgnoreCase("error"))
            return null;
        else
@ -0,0 +1,44 @@
package org.gcube.dataanalysis.geo.utils;

import java.util.List;

public class MapUtils {
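
    // Renders one character per grid cell: '_' where the layer has a value and
    // ' ' where it is NaN; a quick visual check that layers share the same grid.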
    public static String globalASCIIMap(List<Double> values, double xstep, double ystep) {
        int k = 0;
        StringBuffer sb = new StringBuffer();
        for (double i = -90; i < 90; i += ystep) {
            for (double j = -180; j < 180; j += xstep) {
                double value = values.get(k);
                if (Double.isNaN(value))
                    sb.append(" ");
                else
                    sb.append("_");
                k++;
            }
            sb.append("\n");
        }

        return sb.toString();
    }

    // values map with inverted y axis
    public static String globalASCIIMap(double[][] values) {

        StringBuffer sb = new StringBuffer();

        for (int i = values.length - 1; i >= 0; i--) {
            for (int j = 0; j < values[0].length; j++) {
                double value = values[i][j];
                if (Double.isNaN(value))
                    sb.append(" ");
                else
                    sb.append("_");
            }
            sb.append("\n");
        }

        return sb.toString();
    }

}
@ -173,7 +173,7 @@ public class VectorOperations {

    }

    public static List<Tuple<Double>> generateCoordinateTripletsInBoundingBox(double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution) {
    public static List<Tuple<Double>> generateCoordinateTripletsInBoundingBox_old(double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution) {

        int ysteps = (int) ((y2 - y1) / yResolution);
        int xsteps = (int) ((x2 - x1) / xResolution);
@ -194,6 +194,56 @@ public class VectorOperations {
        return tuples;
    }

    public static List<Tuple<Double>> generateCoordinateTripletsInBoundingBox(double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution) {

        List<Tuple<Double>> tuples = new ArrayList<Tuple<Double>>();
        for (double y = y1; y <= y2; y += yResolution) {
            for (double x = x1; x <= x2; x += xResolution) {
                tuples.add(new Tuple<Double>(x, y, z));
            }
        }

        return tuples;
    }
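
    // Minimal usage sketch (values are illustrative): a 2-degree global grid,
    // produced row-major (y outer, x inner), which is the ordering vectorToMatix
    // below assumes when folding the flat value list back into a matrix.
    // List<Tuple<Double>> grid = generateCoordinateTripletsInBoundingBox(-180, 180, -90, 90, 0, 2, 2);
    // -> 181 x 91 triplets, from (-180,-90) up to (180,90)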

    public static double[][] vectorToMatix(List<Double> values, double x1, double x2, double y1, double y2, double xResolution, double yResolution) {

        int ntriplets = values.size();
        int ysteps = 0;
        for (double y = y1; y <= y2; y += yResolution) {
            ysteps++;
        }
        int xsteps = 0;
        for (double x = x1; x <= x2; x += xResolution) {
            xsteps++;
        }

        double[][] slice = new double[ysteps][xsteps];
        int k = 0;
        int g = 0;

        // cycle on all the triplets to reconstruct the matrix
        for (int t = 0; t < ntriplets; t++) {
            // take the value corresponding to the t-th triplet
            Double value = values.get(t);
            // assign it to the matrix; NaN marks points where the layer is undefined
            slice[k][g] = value;
            // advance along x, wrapping to the next row at the end of each line
            if (g == xsteps - 1) {
                g = 0;
                k++;
            } else
                g++;
        }

        return slice;
    }

    public void applyNearestNeighbor() {

        /*