Gianpaolo Coro 2014-03-04 19:01:27 +00:00
parent 56c76241db
commit 9ccff2229a
19 changed files with 1308 additions and 104 deletions

View File

@ -0,0 +1,241 @@
package org.gcube.dataanalysis.geo.algorithms;
import java.util.ArrayList;
import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
import org.gcube.dataanalysis.geo.utils.VectorOperations;
import org.hibernate.SessionFactory;
/**
 * Transducer that enriches a table of species occurrence points with values
 * taken from a list of environmental layers (NETCDF, WFS, WCS, ASC, GeoTiff
 * sources indexed on GeoNetwork). For each occurrence (x, y, scientific name)
 * it associates one value per layer at the requested spatial resolution and
 * writes the resulting rows into an output database table.
 */
public class OccurrenceEnrichment implements Transducerer {

	// Input/output parameter names as exposed to the caller
	static String OccurrencesTableNameParameter = "OccurrenceTable";
	static String LongitudeColumn = "LongitudeColumn";
	static String LatitudeColumn = "LatitudeColumn";
	static String ScientificNameColumn = "ScientificNameColumn";
	static String OutputTableLabelParameter = "OutputTableName";
	static String OutputTableDBNameParameter = "OutputTableDBName";
	static String FilterParameter = "OptionalFilter";
	static String Resolution = "Resolution";
	static String Layers = "Layers";
	// Bounding box parameter names: Lower-Left / Upper-Right corners
	static String yLL = "BBox_LowerLeftLat";
	static String xLL = "BBox_LowerLeftLong";
	static String yUR = "BBox_UpperRightLat";
	static String xUR = "BBox_UpperRightLong";

	AlgorithmConfiguration config;
	float status;

	// Values parsed from the configuration by getParameters()
	private String[] layers;
	private String occurrencesTableName;
	private String longitudeColumn;
	private String latitudeColumn;
	private String scientificnameColumn;
	private String filter;
	private float resolution;
	private String outputTableLabel;
	private String outputTableDBName;
	// Bounding box corner coordinates
	private double BBxLL;
	private double BByLL;
	private double BBxUR;
	private double BByUR;

	@Override
	public List<StatisticalType> getInputParameters() {
		List<StatisticalType> inputs = new ArrayList<StatisticalType>();
		List<TableTemplates> template = new ArrayList<TableTemplates>();
		template.add(TableTemplates.OCCURRENCE_SPECIES);
		InputTable table = new InputTable(template, OccurrencesTableNameParameter, "A geospatial table containing at least x,y information", "");
		inputs.add(table);
		ColumnType p1 = new ColumnType(OccurrencesTableNameParameter, LongitudeColumn, "column with longitude values", "decimallongitude", false);
		inputs.add(p1);
		ColumnType p2 = new ColumnType(OccurrencesTableNameParameter, LatitudeColumn, "column with latitude values", "decimallatitude", false);
		inputs.add(p2);
		ColumnType p3 = new ColumnType(OccurrencesTableNameParameter, ScientificNameColumn, "column with Scientific Names", "scientificname", false);
		inputs.add(p3);
		IOHelper.addStringInput(inputs, FilterParameter, "A filter on one of the columns (e.g. basisofrecord='HumanObservation')", " ");
		IOHelper.addDoubleInput(inputs, Resolution, "The spatial resolution of the association between observations and environmental features.", "0.5");
		IOHelper.addRandomStringInput(inputs, OutputTableDBNameParameter, "The db name of the table to produce", "enrich_");
		IOHelper.addStringInput(inputs, OutputTableLabelParameter, "The name of the output table", "enrich_");
		PrimitiveTypesList listEnvLayers = new PrimitiveTypesList(String.class.getName(), PrimitiveTypes.STRING, Layers, "The list of environmental layers to use for enriching the points. Each entry is a Layer Title or UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", false);
		inputs.add(listEnvLayers);
		IOHelper.addDoubleInput(inputs, xLL, "Lower Left Longitude of the Bounding Box", "-180");
		IOHelper.addDoubleInput(inputs, yLL, "Lower Left Latitute of the Bounding Box", "-90");
		IOHelper.addDoubleInput(inputs, xUR, "Upper Right Longitude of the Bounding Box", "180");
		IOHelper.addDoubleInput(inputs, yUR, "Upper Right Latitute of the Bounding Box", "90");
		DatabaseType.addDefaultDBPars(inputs);
		return inputs;
	}

	/**
	 * Reads all user parameters from the configuration into fields,
	 * applying defaults (empty filter, 0.5 degrees resolution) and
	 * resolving the gCube scope when not set externally.
	 */
	protected void getParameters() {
		layers = IOHelper.getInputParameter(config, Layers).split(AlgorithmConfiguration.getListSeparator());
		AnalysisLogger.getLogger().debug("Layers to take " + layers.length);
		occurrencesTableName = IOHelper.getInputParameter(config, OccurrencesTableNameParameter);
		longitudeColumn = IOHelper.getInputParameter(config, LongitudeColumn);
		latitudeColumn = IOHelper.getInputParameter(config, LatitudeColumn);
		scientificnameColumn = IOHelper.getInputParameter(config, ScientificNameColumn);
		filter = IOHelper.getInputParameter(config, FilterParameter);
		if (filter == null)
			filter = "";
		filter = filter.trim();
		resolution = IOHelper.getInputParameter(config, Resolution) == null ? 0.5f : Float.parseFloat(IOHelper.getInputParameter(config, Resolution));
		outputTableLabel = IOHelper.getInputParameter(config, OutputTableLabelParameter);
		outputTableDBName = IOHelper.getInputParameter(config, OutputTableDBNameParameter);
		BBxLL = Double.parseDouble(IOHelper.getInputParameter(config, xLL));
		BByLL = Double.parseDouble(IOHelper.getInputParameter(config, yLL));
		BBxUR = Double.parseDouble(IOHelper.getInputParameter(config, xUR));
		BByUR = Double.parseDouble(IOHelper.getInputParameter(config, yUR));
		String scope = config.getGcubeScope();
		AnalysisLogger.getLogger().debug("Extraction: Externally set scope " + scope);
		if (scope == null) {
			scope = ScopeProvider.instance.get();
			AnalysisLogger.getLogger().debug("Extraction: Internally set scope " + scope);
			config.setGcubeScope(scope);
		}
	}

	@Override
	public void init() throws Exception {
		AnalysisLogger.getLogger().debug("Occurrence Enrichment Initialization");
	}

	@Override
	public void shutdown() {
		AnalysisLogger.getLogger().debug("Occurrence Enrichment Shutdown");
	}

	@Override
	public String getDescription() {
		return "An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layers from the e-infrastructure GeoNetwork (through the GeoExplorer application). Produces one table reporting the set of environmental values associated to the occurrence points.";
	}

	@Override
	public void compute() throws Exception {
		// TODO: report times
		SessionFactory dbconnection = null;
		try {
			status = 10;
			getParameters();
			dbconnection = DatabaseUtils.initDBSession(config);
			String columns = longitudeColumn + "," + latitudeColumn + "," + scientificnameColumn;
			// take the occurrence points
			List<Object> rows = (List<Object>) DatabaseFactory.executeSQLQuery(DatabaseUtils.getDinstictElements(occurrencesTableName, columns, filter), dbconnection);
			if (rows == null || rows.size() == 0)
				throw new Exception("Could not find occurrence data");
			List<Tuple<Double>> coordinates4d = new ArrayList<Tuple<Double>>();
			List<String[]> enrichment = new ArrayList<String[]>();
			int elementsfromoccurrences = 3;
			// FIX: each row must host one value per layer, not a fixed 4 slots
			int elementstoreport = elementsfromoccurrences + layers.length;
			for (Object row : rows) {
				Object[] elements = (Object[]) row;
				double x = elements[0] == null ? 0 : Double.parseDouble("" + elements[0]);
				double y = elements[1] == null ? 0 : Double.parseDouble("" + elements[1]);
				String species = elements[2] == null ? "" : "" + elements[2];
				Tuple<Double> el = new Tuple<Double>(x, y);
				coordinates4d.add(el);
				String[] singlerow = new String[elementstoreport];
				singlerow[0] = "" + x;
				singlerow[1] = "" + y;
				singlerow[2] = species;
				// FIX: the row was built but never stored, leaving enrichment
				// empty and causing enrichment.get(k) to fail below
				enrichment.add(singlerow);
			}
			status = 30;
			// take the layers matrices
			int layeridx = 0;
			for (String layerID : layers) {
				// for each layer extract the grid at the requested resolution
				XYExtractor extractor = new XYExtractor(config);
				extractor.extractXYGrid(layerID, 0, BBxLL, BBxUR, BByLL, BByUR, 0, resolution, resolution);
				List<Double> gridValues = extractor.currentTimeValues;
				List<Tuple<Double>> grid3d = extractor.currentTuples;
				int time = 0;
				// associate a grid value to each occurrence point
				List<Double> enriched = VectorOperations.assignGridValuesToPoints(grid3d, time, gridValues, coordinates4d, resolution);
				int k = 0;
				for (Double value : enriched) {
					String[] singlerow = enrichment.get(k);
					singlerow[layeridx + elementsfromoccurrences] = "" + value;
					k++;
				}
				layeridx++;
			}
			status = 70;
			// FIX: the column list must match the row width (3 occurrence
			// columns plus one generated column per layer)
			StringBuffer columnsToProduce = new StringBuffer(longitudeColumn + "," + latitudeColumn + "," + scientificnameColumn);
			for (int i = 0; i < layers.length; i++)
				columnsToProduce.append(",layer" + (i + 1));
			// write the complete association into the db
			DatabaseFactory.executeSQLQuery(DatabaseUtils.dropTableStatement(outputTableDBName), dbconnection);
			// TODO: create table before inserting
			DatabaseUtils.insertChunksIntoTable(outputTableDBName, columnsToProduce.toString(), enrichment, 5000, dbconnection);
		} catch (Exception e) {
			AnalysisLogger.getLogger().debug("OccurrenceEnrichment: ERROR " + e.getLocalizedMessage());
			// FIX: the failure was silently swallowed; rethrow so callers see it
			throw e;
		} finally {
			status = 100;
			// FIX: the connection was only closed on failure, leaking on success
			if (dbconnection != null)
				dbconnection.close();
		}
	}

	@Override
	public StatisticalType getOutput() {
		List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
		templateHspec.add(TableTemplates.GENERIC);
		OutputTable p = new OutputTable(templateHspec, OutputTableLabelParameter, OutputTableDBNameParameter, "Output table");
		return p;
	}

	@Override
	public float getStatus() {
		return status;
	}

	@Override
	public INFRASTRUCTURE getInfrastructure() {
		return INFRASTRUCTURE.LOCAL;
	}

	@Override
	public void setConfiguration(AlgorithmConfiguration config) {
		this.config = config;
	}

	protected ResourceFactory resourceManager;

	public String getResourceLoad() {
		if (resourceManager == null)
			resourceManager = new ResourceFactory();
		return resourceManager.getResourceLoad(1);
	}

	public String getResources() {
		return ResourceFactory.getResources(100f);
	}
}

View File

@ -0,0 +1,249 @@
package org.gcube.dataanalysis.geo.algorithms;
import java.awt.Image;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.ecoengine.signals.SignalConverter;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
/**
 * Transducer that extracts a time series of values at one (x, y, z) point
 * from a geospatial features repository (NETCDF, ASC, GeoTiff, ...),
 * automatically searches the series for hidden periodicities and produces
 * a chart of the signal, a table of the values and, when available, the
 * spectrogram.
 */
public class TimeExtraction extends XYExtraction {

	// Input parameter names specific to time extraction
	public static String x = "X";
	public static String y = "Y";
	public static String resolution = "Resolution";
	public static String samplingFrequency = "SamplingFreq";
	public static String minFrequency = "MinFrequency";
	public static String maxFrequency = "MaxFrequency";
	public static String expectedFrequencyError = "FrequencyError";

	// Parsed parameter values
	public double xValue;
	public double yValue;
	public double resolutionValue;
	public int samplingFrequencyValue;
	public double minFrequencyValue;
	public double maxFrequencyValue;
	public double expectedFrequencyErrorValue;

	// Periodicity analysis state; pd stays null when the signal is too short
	public PeriodicityDetector pd;
	public double signal[];
	public double timeline[];

	@Override
	public String getDescription() {
		return "An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram.";
	}

	@Override
	public List<StatisticalType> getInputParameters() {
		// reuse the layer/table/z parameters of the parent and add the
		// point coordinates plus signal-analysis parameters
		List<StatisticalType> previnputs = super.getInputParameters();
		inputs = new ArrayList<StatisticalType>();
		inputs.add(previnputs.get(0));
		inputs.add(previnputs.get(5));
		inputs.add(previnputs.get(6));
		IOHelper.addDoubleInput(inputs, x, "X coordinate", "0");
		IOHelper.addDoubleInput(inputs, y, "Y coordinate", "0");
		inputs.add(previnputs.get(7));
		IOHelper.addDoubleInput(inputs, resolution, "Extraction point resolution", "0.5");
		IOHelper.addIntegerInput(inputs, samplingFrequency, "Sampling frequency in Hz. Leave it to -1 if unknown or under 1", "-1");
		IOHelper.addDoubleInput(inputs, minFrequency, "Minimum expected frequency in Hz. Can be decimal.", "-1");
		IOHelper.addDoubleInput(inputs, maxFrequency, "Maximum expected frequency in Hz. Can be decimal.", "-1");
		IOHelper.addDoubleInput(inputs, expectedFrequencyError, "Expected precision on periodicity detection in Hz. Can be decimal and depends on the signal length.", "0.1");
		DatabaseType.addDefaultDBPars(inputs);
		return inputs;
	}

	/**
	 * Reads all user parameters into fields and resolves the gCube scope
	 * when it was not set externally.
	 */
	@Override
	protected void getParameters() {
		layerNameValue = IOHelper.getInputParameter(config, layerName);
		AnalysisLogger.getLogger().debug("Extraction: Layer " + layerNameValue);
		zValue = Double.parseDouble(IOHelper.getInputParameter(config, z));
		xValue = Double.parseDouble(IOHelper.getInputParameter(config, x));
		yValue = Double.parseDouble(IOHelper.getInputParameter(config, y));
		resolutionValue = Double.parseDouble(IOHelper.getInputParameter(config, resolution));
		samplingFrequencyValue = Integer.parseInt(IOHelper.getInputParameter(config, samplingFrequency));
		minFrequencyValue = Double.parseDouble(IOHelper.getInputParameter(config, minFrequency));
		maxFrequencyValue = Double.parseDouble(IOHelper.getInputParameter(config, maxFrequency));
		expectedFrequencyErrorValue = Double.parseDouble(IOHelper.getInputParameter(config, expectedFrequencyError));
		AnalysisLogger.getLogger().debug("Extraction: Z " + zValue);
		AnalysisLogger.getLogger().debug("Extraction: X " + xValue);
		AnalysisLogger.getLogger().debug("Extraction: Y " + yValue);
		AnalysisLogger.getLogger().debug("Extraction: Res " + resolutionValue);
		// FIX: was logging the parameter-name constant instead of the value
		AnalysisLogger.getLogger().debug("Extraction: SamplingF " + samplingFrequencyValue);
		AnalysisLogger.getLogger().debug("Extraction: minF " + minFrequencyValue);
		AnalysisLogger.getLogger().debug("Extraction: maxF " + maxFrequencyValue);
		AnalysisLogger.getLogger().debug("Extraction: expectedError " + expectedFrequencyErrorValue);
		tableNameValue = IOHelper.getInputParameter(config, tableName);
		tableLabelValue = IOHelper.getInputParameter(config, tableLabel);
		AnalysisLogger.getLogger().debug("Extraction: tableName " + tableNameValue);
		AnalysisLogger.getLogger().debug("Extraction: tableLabel " + tableLabelValue);
		String scope = config.getGcubeScope();
		AnalysisLogger.getLogger().debug("Extraction: Externally set scope " + scope);
		if (scope == null) {
			scope = ScopeProvider.instance.get();
			config.setGcubeScope(scope);
		}
	}

	// Rendered charts, published by getOutput()
	Image signalimage;
	Image spectrogramImage;

	@Override
	public void compute() throws Exception {
		try {
			status = 10;
			AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
			getParameters();
			AnalysisLogger.getLogger().debug("Extracting Time Series from layer");
			TimeSeriesExtractor intersector = new TimeSeriesExtractor(config);
			long t0 = System.currentTimeMillis();
			signal = intersector.extractT(layerNameValue, xValue, yValue, zValue, resolutionValue);
			AnalysisLogger.getLogger().debug("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
			AnalysisLogger.getLogger().debug("Signal: " + signal.length);
			status = 30;
			// units of measure default to samples when the sampling
			// frequency is unknown
			String uom = "samples";
			String frequom = "1/samples";
			if (samplingFrequencyValue > 0) {
				uom = "s";
				frequom = "Hz";
			}
			if (signal.length > 1) {
				AnalysisLogger.getLogger().debug("Detecting Periodicity..");
				this.pd = new PeriodicityDetector();
				double F = -1;
				// use the bounded search only when all bounds were provided
				if (samplingFrequencyValue > 0 && minFrequencyValue > 0 && maxFrequencyValue > 0 && expectedFrequencyErrorValue > 0)
					F = pd.detectFrequency(signal, samplingFrequencyValue, (float) minFrequencyValue, (float) maxFrequencyValue, (float) expectedFrequencyErrorValue, false);
				else
					F = pd.detectFrequency(signal, false);
				AnalysisLogger.getLogger().debug("Detected Frequency:" + F + " indecision [" + pd.lowermeanF + " , " + pd.uppermeanF + "]");
				AnalysisLogger.getLogger().debug("Detected Period:" + pd.meanPeriod + " indecision [" + pd.lowermeanPeriod + " , " + pd.uppermeanPeriod + "]");
				AnalysisLogger.getLogger().debug("Detected Periodicity Strength:" + pd.periodicityStrength);
				AnalysisLogger.getLogger().debug("Extractor: MatrixExtractor initialized");
				if (pd.periodicityStrength > 0) {
					outputParameters.put("Detected Periodicity", "" + MathFunctions.roundDecimal(pd.meanPeriod, 2) + " (" + uom + ") " + " indecision [" + MathFunctions.roundDecimal(pd.lowermeanPeriod, 2) + " , " + MathFunctions.roundDecimal(pd.uppermeanPeriod, 2) + "]");
					outputParameters.put("Periodicity Strength", "" + MathFunctions.roundDecimal(pd.periodicityStrength, 2) + " (" + pd.getPeriodicityStregthInterpretation() + ")");
					outputParameters.put("Detected Frequency", "" + MathFunctions.roundDecimal(F, 2) + " (" + frequom + ") " + " indecision [" + MathFunctions.roundDecimal(pd.lowermeanF, 2) + " , " + MathFunctions.roundDecimal(pd.uppermeanF, 2) + "]");
				}
				else
				{
					outputParameters.put("Detected Periodicity", "No Periodicities");
					outputParameters.put("Periodicity Strength", "-" + MathFunctions.roundDecimal(pd.periodicityStrength, 2) + " (" + pd.getPeriodicityStregthInterpretation() + ")");
					outputParameters.put("Detected Frequency", "-");
				}
				outputParameters.put("Maximum Frequency in the Spectrogram", "" + MathFunctions.roundDecimal(pd.maxFrequency, 2) + " (" + frequom + ") ");
				outputParameters.put("Minimum Frequency in the Spectrogram", "" + MathFunctions.roundDecimal(pd.minFrequency, 2) + " (" + frequom + ") ");
			}
			else
				AnalysisLogger.getLogger().debug("Extractor: Signal is only one point!");
			status = 70;
			AnalysisLogger.getLogger().debug("Extractor: Matrix Extracted");
			AnalysisLogger.getLogger().debug("Extractor: ****Rasterizing grid into table****");
			// dump the signal as a one-row raster table
			double matrix[][] = new double[1][];
			matrix[0] = signal;
			RasterTable raster = new RasterTable(xValue, xValue, yValue, yValue, zValue, resolutionValue, resolutionValue, matrix, config);
			int signalRate = 1;
			if (samplingFrequencyValue > 0)
				signalRate = samplingFrequencyValue;
			timeline = SignalConverter.signalTimeLine(signal.length, signalRate);
			List<Tuple<Double>> coordinates = new ArrayList<Tuple<Double>>();
			for (int i = 0; i < timeline.length; i++)
				coordinates.add(new Tuple<Double>(xValue, yValue, zValue, timeline[i]));
			raster.setTablename(tableNameValue);
			raster.setCoordinates(coordinates);
			raster.deleteTable();
			raster.dumpGeoTable();
			signalimage = SignalProcessing.renderSignalWithGenericTime(signal, timeline, "Signal");
			// FIX: pd is null when the signal has at most one sample; guard
			// against the NullPointerException the unconditional call caused
			if (pd != null)
				spectrogramImage = SignalProcessing.renderSignalSpectrogram2(pd.currentspectrum);
			AnalysisLogger.getLogger().debug("Extractor: Map was dumped in table: " + tableNameValue);
			status = 80;
			AnalysisLogger.getLogger().debug("Extractor: Elapsed: Whole operation completed in " + ((double) (System.currentTimeMillis() - t0) / 1000d) + "s");
		} catch (Exception e) {
			e.printStackTrace();
			AnalysisLogger.getLogger().debug("Extractor: ERROR!: " + e.getLocalizedMessage());
			throw e;
		} finally {
			status = 100;
		}
	}

	@Override
	public StatisticalType getOutput() {
		LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
		List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
		templateHspec.add(TableTemplates.GENERIC);
		OutputTable p = new OutputTable(templateHspec, tableLabelValue, tableNameValue, "Output table");
		map.put("OutputTable", p);
		// images and periodicity figures are only available when the
		// analysis actually ran (signal longer than one sample)
		if (pd != null && signal != null && signal.length > 0) {
			HashMap<String, Image> producedImages = new HashMap<String, Image>();
			producedImages.put("Time Series Visualization", signalimage);
			producedImages.put("Spectrogram", spectrogramImage);
			PrimitiveType images = new PrimitiveType("Images", producedImages, PrimitiveTypes.IMAGES, "Signal Processing", "Visualization of the signal and spectrogram");
			for (String key : outputParameters.keySet()) {
				String value = outputParameters.get(key);
				PrimitiveType val = new PrimitiveType(String.class.getName(), "" + value, PrimitiveTypes.STRING, key, key);
				map.put(key, val);
			}
			map.put("Images", images);
		}
		// generate a primitive type for the collection
		PrimitiveType outputm = new PrimitiveType(HashMap.class.getName(), map, PrimitiveTypes.MAP, "ResultsMap", "Results Map");
		return outputm;
	}
}

View File

@ -0,0 +1,56 @@
package org.gcube.dataanalysis.geo.algorithms;
import java.util.ArrayList;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
/**
 * Variant of {@link TimeExtraction} that reads the signal from a geospatial
 * database table (x, y, optional z, time and value columns) instead of a
 * layer repository.
 */
public class TimeExtractionTable extends TimeExtraction {

	@Override
	public String getDescription() {
		return "An algorithm to extract a time series of values associated to a table containing geospatial information. " +
				"The algorithm analyses the time series and automatically searches for hidden periodicities. " +
				"It produces one chart of the time series, one table containing the time series values and possibly the spectrogram.";
	}

	@Override
	public List<StatisticalType> getInputParameters() {
		List<StatisticalType> inputs = new ArrayList<StatisticalType>();
		List<TableTemplates> template = new ArrayList<TableTemplates>();
		template.add(TableTemplates.GENERIC);
		InputTable table = new InputTable(template, TableMatrixRepresentation.tableNameParameter, "A geospatial table containing at least x,y information", "");
		inputs.add(table);
		ColumnType columnx = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.xDimensionColumnParameter, "The column containing x (longitude) information", "x", false);
		inputs.add(columnx);
		ColumnType columny = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.yDimensionColumnParameter, "The column containing y (latitude) information", "y", false);
		inputs.add(columny);
		// FIX: description and default were copy-pasted from the y column;
		// this parameter selects the time dimension column
		ColumnType columnt = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.timeDimensionColumnParameter, "The column containing time information", "time", false);
		inputs.add(columnt);
		ColumnType columnvalue = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.valueDimensionColumnParameter, "A column containing real valued features", "value", false);
		inputs.add(columnvalue);
		inputs.add(new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, TableMatrixRepresentation.filterParameter, "A filter on one of the columns (e.g. speed=2)", " "));
		IOHelper.addStringInput(inputs, TableMatrixRepresentation.zDimensionColumnParameter, "The column containing z (altitude or depth) information (optional)", "z");
		// append the parent's analysis parameters, dropping its layer input
		// which is replaced here by the table inputs above
		List<StatisticalType> previnputs = super.getInputParameters();
		previnputs.remove(0);
		inputs.addAll(previnputs);
		DatabaseType.addDefaultDBPars(inputs);
		return inputs;
	}
}

View File

@ -18,7 +18,7 @@ import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
import org.gcube.dataanalysis.geo.matrixmodel.XYExtractor;
public class XYExtraction implements Transducerer{
public class XYExtraction implements Transducerer {
static String layerName = "Layer";
static String t = "TimeIndex";
@ -33,9 +33,9 @@ public class XYExtraction implements Transducerer{
static String tableLabel = "OutputTableLabel";
AlgorithmConfiguration config;
float status;
public String layerNameValue;
public int time;
public double zValue;
@ -53,27 +53,27 @@ public class XYExtraction implements Transducerer{
@Override
public void init() throws Exception {
AnalysisLogger.getLogger().debug("XY Extraction Initialization");
AnalysisLogger.getLogger().debug("Extraction Initialization");
}
@Override
public String getDescription() {
return "An algorithm to extract associated to an environmental feature repository (e.g. NETCDF, ASC files, Tables etc. ). A grid of points is specified by the user and values are associated to the points from the environmental repository. " + "It accepts as two geospatial repositoried (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) " + "and the specification about time and space. Produces a table containing the values associated to the selected bounding box.";
return "An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. " + "It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) " + "and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box.";
}
@Override
public List<StatisticalType> getInputParameters() {
IOHelper.addStringInput(inputs, layerName, "Layer Title or UUID: The title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer", "");
IOHelper.addDoubleInput(inputs, yLL, "Lower Left Latitute of the Bounding Box", "-90");
IOHelper.addDoubleInput(inputs, xLL, "Lower Left Longitude of the Bounding Box", "-180");
IOHelper.addDoubleInput(inputs, yUR, "Upper Right Latitute of the Bounding Box", "90");
IOHelper.addDoubleInput(inputs, xUR, "Upper Right Longitude of the Bounding Box", "180");
IOHelper.addRandomStringInput(inputs, tableName, "the db name of the table to produce", "extr_");
IOHelper.addStringInput(inputs, layerName, "Layer Title or UUID: The title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", "");
IOHelper.addDoubleInput(inputs, yLL, "Lower Left Latitute of the Bounding Box", "-60");
IOHelper.addDoubleInput(inputs, xLL, "Lower Left Longitude of the Bounding Box", "-50");
IOHelper.addDoubleInput(inputs, yUR, "Upper Right Latitute of the Bounding Box", "60");
IOHelper.addDoubleInput(inputs, xUR, "Upper Right Longitude of the Bounding Box", "50");
IOHelper.addRandomStringInput(inputs, tableName, "The db name of the table to produce", "extr_");
IOHelper.addStringInput(inputs, tableLabel, "The name of the table to produce", "extr_");
IOHelper.addDoubleInput(inputs, z, "value of Z. Default is 0, that means processing will be at surface level", "0");
IOHelper.addDoubleInput(inputs, z, "Value of Z. Default is 0, that means processing will be at surface level", "0");
IOHelper.addIntegerInput(inputs, t, "Time Index. The default is the first", "0");
IOHelper.addDoubleInput(inputs, xRes, "Projection resolution on the X axis", "0.5");
@ -102,31 +102,32 @@ public class XYExtraction implements Transducerer{
}
AnalysisLogger.getLogger().debug("Extraction: Z " + zValue);
BByLL = Double.parseDouble(IOHelper.getInputParameter(config, yLL));
BBxLL = Double.parseDouble(IOHelper.getInputParameter(config, xLL));
BByUR = Double.parseDouble(IOHelper.getInputParameter(config, yUR));
BBxUR = Double.parseDouble(IOHelper.getInputParameter(config, xUR));
AnalysisLogger.getLogger().debug("Extraction: yLL " + BByLL);
AnalysisLogger.getLogger().debug("Extraction: xLL " + BBxLL);
AnalysisLogger.getLogger().debug("Extraction: yUR " + BByUR);
AnalysisLogger.getLogger().debug("Extraction: xUR " + BBxUR);
yResValue = Double.parseDouble(IOHelper.getInputParameter(config, yRes));
AnalysisLogger.getLogger().debug("Extraction: yRes " + yResValue);
xResValue = Double.parseDouble(IOHelper.getInputParameter(config, xRes));
AnalysisLogger.getLogger().debug("Extraction: xRes " + xResValue);
tableNameValue = IOHelper.getInputParameter(config, tableName);
tableLabelValue = IOHelper.getInputParameter(config, tableLabel);
AnalysisLogger.getLogger().debug("Extraction: tableName " + tableNameValue);
AnalysisLogger.getLogger().debug("Extraction: tableLabel " + tableLabelValue);
String scope = config.getGcubeScope();
AnalysisLogger.getLogger().debug("Extraction: Externally set scope " + scope);
if (scope == null) {
scope = ScopeProvider.instance.get();
AnalysisLogger.getLogger().debug("Extraction: Internally set scope " + scope);
config.setGcubeScope(scope);
}
@ -134,30 +135,31 @@ public class XYExtraction implements Transducerer{
@Override
public void compute() throws Exception {
try{
status = 0;
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
getParameters();
AnalysisLogger.getLogger().debug("Extractor: MatrixExtractor initialized");
long t0 = System.currentTimeMillis();
XYExtractor extractor = new XYExtractor(config);
double[][] matrix = extractor.extractXYGrid(layerNameValue,time,BBxLL,BBxUR,BByLL,BByUR,zValue,xResValue,yResValue);
System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
AnalysisLogger.getLogger().debug("Extractor: Matrix Extracted");
AnalysisLogger.getLogger().debug("Extractor: ****Rasterizing grid into table****");
status = 30;
RasterTable raster = new RasterTable(BBxLL, BBxUR, BByLL, BByUR, zValue, xResValue, yResValue, matrix, config);
raster.setTablename(tableNameValue);
raster.deleteTable();
raster.dumpGeoTable();
AnalysisLogger.getLogger().debug("Extractor: Map was dumped in table: " + tableNameValue);
status = 80;
AnalysisLogger.getLogger().debug("Extractor: Elapsed: Whole operation completed in " + ((double) (System.currentTimeMillis() - t0) / 1000d) + "s");
try {
status = 10;
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
getParameters();
AnalysisLogger.getLogger().debug("Extractor: MatrixExtractor initialized");
long t0 = System.currentTimeMillis();
XYExtractor extractor = new XYExtractor(config);
double[][] matrix = extractor.extractXYGrid(layerNameValue, time, BBxLL, BBxUR, BByLL, BByUR, zValue, xResValue, yResValue);
System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
AnalysisLogger.getLogger().debug("Extractor: Matrix Extracted");
AnalysisLogger.getLogger().debug("Extractor: ****Rasterizing grid into table****");
status = 30;
RasterTable raster = new RasterTable(BBxLL, BBxUR, BByLL, BByUR, zValue, xResValue, yResValue, matrix, config);
raster.setTablename(tableNameValue);
raster.deleteTable();
raster.dumpGeoTable();
AnalysisLogger.getLogger().debug("Extractor: Map was dumped in table: " + tableNameValue);
status = 80;
AnalysisLogger.getLogger().debug("Extractor: Elapsed: Whole operation completed in " + ((double) (System.currentTimeMillis() - t0) / 1000d) + "s");
} catch (Exception e) {
e.printStackTrace();
AnalysisLogger.getLogger().debug("Extractor: ERROR!: " + e.getLocalizedMessage());
throw e;
} finally {
status = 100;
}
@ -174,7 +176,7 @@ public class XYExtraction implements Transducerer{
/**
 * Declares the output of the extraction: a single generic-template table
 * (labelled {@code tableLabelValue}, stored as {@code tableNameValue})
 * holding the rasterized grid values.
 *
 * @return an {@link OutputTable} descriptor for the produced table
 */
public StatisticalType getOutput() {
    List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
    templateHspec.add(TableTemplates.GENERIC);
    // Merge residue duplicated this declaration (a compile error); keep a single one.
    OutputTable p = new OutputTable(templateHspec, tableLabelValue, tableNameValue, "Output table");
    return p;
}
@ -190,10 +192,11 @@ public class XYExtraction implements Transducerer{
@Override
public void setConfiguration(AlgorithmConfiguration config) {
    // Store the configuration for later use by getParameters()/compute().
    // The previous version assigned it twice (leftover of a merge diff); once suffices.
    this.config = config;
}
protected ResourceFactory resourceManager;
public String getResourceLoad() {
if (resourceManager == null)
resourceManager = new ResourceFactory();

View File

@ -4,39 +4,54 @@ import java.util.ArrayList;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
public class XYExtractionTable extends XYExtraction{
@Override
public String getDescription() {
    // Human-readable summary shown to users when they select this algorithm
    // in the statistical-manager UI; purely informational, no runtime effect.
    return "An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. " +
    "It accepts as one geospatial table " + "and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box.";
}
/**
 * Builds the input-parameter descriptors for the table-based XY extraction:
 * the source table, its x/y/value columns, an optional row filter, optional
 * z and time column names, the inherited spatial parameters (minus the layer
 * name, which does not apply to tables) and the default database parameters.
 * <p>
 * The previous version carried merge residue: {@code columnx}, {@code columny}
 * and {@code columnvalue} were each declared twice (a compile error), both the
 * old and the new filter input were added, and stale index-based
 * {@code previnputs.get(n)} additions duplicated the {@code addAll} call.
 *
 * @return the ordered list of input descriptors for this algorithm
 */
@Override
public List<StatisticalType> getInputParameters() {
    List<StatisticalType> inputs = new ArrayList<StatisticalType>();
    List<TableTemplates> template = new ArrayList<TableTemplates>();
    template.add(TableTemplates.GENERIC);
    InputTable table = new InputTable(template, TableMatrixRepresentation.tableNameParameter, "A geospatial table containing at least x,y information", "");
    inputs.add(table);
    ColumnType columnx = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.xDimensionColumnParameter, "The column containing x (longitude) information", "x", false);
    inputs.add(columnx);
    ColumnType columny = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.yDimensionColumnParameter, "The column containing y (latitude) information", "y", false);
    inputs.add(columny);
    ColumnType columnvalue = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.valueDimensionColumnParameter, "A column containing real valued features", "value", false);
    inputs.add(columnvalue);
    inputs.add(new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, TableMatrixRepresentation.filterParameter, "A filter on one of the columns (e.g. speed=2)", " "));
    // z and time are free-text column names so they can be left blank (optional).
    IOHelper.addStringInput(inputs, TableMatrixRepresentation.zDimensionColumnParameter, "The column containing z (altitude or depth) information (optional)", "z");
    IOHelper.addStringInput(inputs, TableMatrixRepresentation.timeDimensionColumnParameter, "The column containing time (optional)", "datetime");
    List<StatisticalType> previnputs = super.getInputParameters();
    // Drop the first inherited input (the layer name): the source here is a table, not a layer.
    previnputs.remove(0);
    inputs.addAll(previnputs);
    DatabaseType.addDefaultDBPars(inputs);
    return inputs;
}

View File

@ -0,0 +1,183 @@
package org.gcube.dataanalysis.geo.algorithms;
import java.awt.Image;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
import org.gcube.dataanalysis.geo.matrixmodel.ZExtractor;
/**
 * Transducer that extracts the vertical (Z) profile of a geospatial layer at a
 * fixed (x, y) point and time index, dumps the profile into a database table
 * via {@link RasterTable} and renders it as a chart image.
 * Inherits table/label/layer plumbing and status handling from XYExtraction.
 */
public class ZExtraction extends XYExtraction{

    @Override
    public String getDescription() {
        // User-facing description of the algorithm; informational only.
        return "An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). " +
        "The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. " +
        "It produces one chart of the Z values and one table containing the values.";
    }

    // Names of the user-supplied input parameters (looked up in the configuration).
    public static String x = "X";
    public static String y = "Y";
    public static String resolution = "Resolution";
    // Parsed parameter values, populated by getParameters().
    public double xValue;
    public double yValue;
    public double resolutionValue;
    // The extracted Z profile, filled by compute().
    public double signal[];

    @Override
    public List<StatisticalType> getInputParameters() {
        // Reuse selected inherited inputs by POSITION (indices 0, 5, 6, 8 of the
        // parent's list) and interleave the point coordinates and Z step.
        // NOTE(review): this depends on the exact ordering of
        // XYExtraction.getInputParameters(), which is not visible here — confirm.
        List<StatisticalType> previnputs = super.getInputParameters();
        inputs = new ArrayList<StatisticalType>();
        //layername
        inputs.add(previnputs.get(0));
        inputs.add(previnputs.get(5));
        inputs.add(previnputs.get(6));
        IOHelper.addDoubleInput(inputs, x, "X coordinate", "0");
        IOHelper.addDoubleInput(inputs, y, "Y coordinate", "0");
        inputs.add(previnputs.get(8));
        IOHelper.addDoubleInput(inputs, resolution, "Step for Z values", "100");
        DatabaseType.addDefaultDBPars(inputs);
        return inputs;
    }

    /**
     * Reads and parses all inputs from the configuration (layer, time index,
     * point coordinates, Z step, output table name/label) and falls back to the
     * ScopeProvider scope when none was set externally.
     */
    protected void getParameters() {
        layerNameValue = IOHelper.getInputParameter(config, layerName);
        AnalysisLogger.getLogger().debug("Extraction: Layer " + layerNameValue);
        time = Integer.parseInt(IOHelper.getInputParameter(config, t));
        xValue = Double.parseDouble(IOHelper.getInputParameter(config, x));
        yValue = Double.parseDouble(IOHelper.getInputParameter(config, y));
        resolutionValue=Double.parseDouble(IOHelper.getInputParameter(config, resolution));
        AnalysisLogger.getLogger().debug("Extraction: T " + time);
        AnalysisLogger.getLogger().debug("Extraction: X " + xValue);
        AnalysisLogger.getLogger().debug("Extraction: Y " + yValue);
        AnalysisLogger.getLogger().debug("Extraction: Res " + resolutionValue);
        tableNameValue = IOHelper.getInputParameter(config, tableName);
        tableLabelValue = IOHelper.getInputParameter(config, tableLabel);
        AnalysisLogger.getLogger().debug("Extraction: tableName " + tableNameValue);
        AnalysisLogger.getLogger().debug("Extraction: tableLabel " + tableLabelValue);
        String scope = config.getGcubeScope();
        AnalysisLogger.getLogger().debug("Extraction: Externally set scope " + scope);
        if (scope == null) {
            // No scope supplied in the configuration: take the thread-local one.
            scope = ScopeProvider.instance.get();
            config.setGcubeScope(scope);
        }
    }

    // Chart of the extracted Z profile, produced by compute(), consumed by getOutput().
    Image signalimage;
    // NOTE(review): never assigned in this class — presumably reserved for a
    // spectrogram feature; confirm before relying on it.
    Image spectrogramImage;

    /**
     * Runs the extraction: pulls the Z profile, writes it (with its z/t
     * coordinates) into the output table and renders the profile chart.
     * Progress is reported through the inherited {@code status} field;
     * any failure is logged and rethrown, and status always ends at 100.
     */
    @Override
    public void compute() throws Exception {
        try {
            status = 30;
            AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
            getParameters();
            AnalysisLogger.getLogger().debug("Extracting Time Series from layer");
            ZExtractor extractor = new ZExtractor(config);
            long t0 = System.currentTimeMillis();
            signal = extractor.extractZ(layerNameValue, xValue,yValue, time, resolutionValue);
            AnalysisLogger.getLogger().debug("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
            AnalysisLogger.getLogger().debug("Signal: "+signal.length);
            status = 30;
            if (signal.length==1)
                AnalysisLogger.getLogger().debug("Extractor: Signal is only one point!");
            status = 70;
            AnalysisLogger.getLogger().debug("Extractor: Matrix Extracted");
            AnalysisLogger.getLogger().debug("Extractor: ****Rasterizing grid into table****");
            // Wrap the 1-D profile as a single-row matrix for RasterTable.
            double matrix[][] = new double[1][];
            matrix[0] = signal;
            // Degenerate bounding box: the profile sits at a single (x, y) point.
            RasterTable raster = new RasterTable(xValue, xValue, yValue, yValue, zValue, resolutionValue, resolutionValue, matrix, config);
            int signalRate = 1; // NOTE(review): unused — candidate for removal.
            // Rebuild the z axis from the extractor's min/max at the chosen step.
            // NOTE(review): assumes this loop iterates exactly signal.length times
            // (i.e. extractZ used the same zmin/zmax/step) — confirm, else j could
            // overrun zline.
            double zline[] = new double[signal.length];
            int j=0;
            for (double z=extractor.zmin;z<=extractor.zmax;z=z+resolutionValue){
                zline[j]=z;
                j++;
            }
            // One (x, y, z, t) tuple per sample so the dump carries real z values.
            List<Tuple<Double>> coordinates=new ArrayList<Tuple<Double>>();
            for (int i=0;i<zline.length;i++)
                coordinates.add(new Tuple<Double>(xValue,yValue,zline[i],(double)time));
            raster.setTablename(tableNameValue);
            raster.setCoordinates(coordinates);
            raster.deleteTable();
            raster.dumpGeoTable();
            // Render the Z profile chart (z values on the generic-time axis).
            signalimage = SignalProcessing.renderSignalWithGenericTime(signal, zline, "Z");
            AnalysisLogger.getLogger().debug("Extractor: Map was dumped in table: " + tableNameValue);
            status = 80;
            AnalysisLogger.getLogger().debug("Extractor: Elapsed: Whole operation completed in " + ((double) (System.currentTimeMillis() - t0) / 1000d) + "s");
        } catch (Exception e) {
            e.printStackTrace();
            AnalysisLogger.getLogger().debug("Extractor: ERROR!: " + e.getLocalizedMessage());
            throw e;
        } finally {
            status = 100;
        }
    }

    /**
     * Packs the results in an ordered map: the output table and, when the
     * chart was rendered, the profile image.
     *
     * @return a MAP-typed PrimitiveType wrapping the named results
     */
    @Override
    public StatisticalType getOutput() {
        LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
        List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
        templateHspec.add(TableTemplates.GENERIC);
        OutputTable p = new OutputTable(templateHspec, tableLabelValue, tableNameValue, "Output table");
        map.put("OutputTable", p);
        if (signalimage!=null){
            HashMap<String, Image> producedImages = new HashMap<String, Image>();
            producedImages.put("Z Modulations Visualization", signalimage);
            PrimitiveType images = new PrimitiveType("Images", producedImages, PrimitiveTypes.IMAGES, "Modulations of Z", "The modulations of Z");
            map.put("Images", images);
        }
        // generate a primitive type for the collection
        PrimitiveType outputm = new PrimitiveType(HashMap.class.getName(), map, PrimitiveTypes.MAP, "ResultsMap", "Results Map");
        return outputm;
    }
}

View File

@ -0,0 +1,56 @@
package org.gcube.dataanalysis.geo.algorithms;
import java.util.ArrayList;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
/**
 * Z extraction backed by a database table instead of a GIS layer: the user
 * names the table and its x/y/z/value columns, and the inherited ZExtraction
 * machinery performs the profile extraction through the table connector.
 */
public class ZExtractionTable extends ZExtraction{

    @Override
    public String getDescription() {
        // NOTE(review): this text talks about time series and periodicities —
        // it looks copied from a time-extraction algorithm; confirm it is the
        // intended description for a Z (depth/altitude) extraction.
        return "An algorithm to extract a time series of values associated to a table containing geospatial information. " +
        "The algorithm analyses the time series and automatically searches for hidden periodicities. " +
        "It produces one chart of the time series, one table containing the time series values and possibly the spectrogram.";
    }

    /**
     * Declares the inputs: the source table, its x/y columns, the z column,
     * the value column, a free-text row filter, an optional time column, then
     * the inherited parameters (minus the layer name, dropped because the
     * source is a table) and the default database parameters.
     * NOTE(review): the ordering matters — subclasses and callers select
     * inherited inputs by index.
     */
    @Override
    public List<StatisticalType> getInputParameters() {
        List<StatisticalType> inputs = new ArrayList<StatisticalType>();
        List<TableTemplates> template= new ArrayList<TableTemplates>();
        template.add(TableTemplates.GENERIC);
        InputTable table = new InputTable(template,TableMatrixRepresentation.tableNameParameter,"A geospatial table containing at least x,y information","");
        inputs.add(table);
        ColumnType columnx = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.xDimensionColumnParameter, "The column containing x (longitude) information", "x", false);
        inputs.add(columnx);
        ColumnType columny = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.yDimensionColumnParameter, "The column containing y (latitude) information", "y", false);
        inputs.add(columny);
        // NOTE(review): despite the variable name "columnt", this binds the Z
        // dimension column, and its default value is "datetime" — looks like
        // copy-paste from the time variant; confirm the intended default.
        ColumnType columnt = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.zDimensionColumnParameter, "The column containing z information", "datetime", false);
        inputs.add(columnt);
        ColumnType columnvalue = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.valueDimensionColumnParameter, "A column containing real valued features", "value", false);
        inputs.add(columnvalue);
        inputs.add(new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, TableMatrixRepresentation.filterParameter, "A filter on one of the columns (e.g. speed=2)", " "));
        IOHelper.addStringInput(inputs, TableMatrixRepresentation.timeDimensionColumnParameter, "The column containing time information (optional).", "time");
        List<StatisticalType> previnputs = super.getInputParameters();
        // Drop the first inherited input (the layer name): not applicable to tables.
        previnputs.remove(0);
        inputs.addAll(previnputs);
        DatabaseType.addDefaultDBPars(inputs);
        return inputs;
    }
}

View File

@ -13,11 +13,13 @@ public class Table implements GISDataConnector {
AlgorithmConfiguration config;
TableMatrixRepresentation tmr;
public Table(AlgorithmConfiguration config) throws Exception {
double defaultresolution;
public Table(AlgorithmConfiguration config,double resolution) throws Exception {
this.config = config;
tmr = new TableMatrixRepresentation();
tmr.build5DTuples(config, true);
this.defaultresolution=resolution;
}
@Override
@ -28,7 +30,7 @@ public class Table implements GISDataConnector {
List<Tuple<Double>> tuples = tmr.currentcoordinates5d.get((double) time);
// AnalysisLogger.getLogger().debug("TUPLES "+tuples);
double resolution = 0;
double resolution = defaultresolution;
if (coordinates3d.size() > 1)
resolution = Math.abs(coordinates3d.get(0).getElements().get(0) - coordinates3d.get(1).getElements().get(0));

View File

@ -45,23 +45,23 @@ public class TableMatrixRepresentation {
String dbtuple = "";
if (xField != null)
if (xField != null && xField.trim().length()>0)
dbtuple += xField + ",";
else
dbtuple += "0 as x,";
if (yField != null)
if (yField != null && yField.trim().length()>0)
dbtuple += yField + ",";
else
dbtuple += "0 as y,";
if (zField != null)
if (zField != null && zField.trim().length()>0)
dbtuple += zField + ",";
else
dbtuple += "0 as z,";
if (tField != null)
if (tField != null && tField.trim().length()>0)
dbtuple += tField + " as time,";
else
dbtuple += "0 as time,";
if (valueField != null)
if (valueField != null && valueField.trim().length()>0)
dbtuple += valueField;
else
dbtuple += "0 as v";

View File

@ -9,6 +9,7 @@ import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.connectors.asc.ASC;
import org.gcube.dataanalysis.geo.connectors.geotiff.GeoTiff;
import org.gcube.dataanalysis.geo.connectors.netcdf.NetCDF;
import org.gcube.dataanalysis.geo.connectors.table.Table;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.connectors.wcs.WCS;
@ -25,7 +26,9 @@ public class MatrixExtractor {
public static int maxSignalLength = 100000;
public static int maxzLength = 100000;
boolean log = true;
protected double currentResolution=0.5;
public List<Double> currentTimeValues;
public List<Tuple<Double>> currentTuples;
public MatrixExtractor(AlgorithmConfiguration configuration) {
gnInspector = new GeoNetworkInspector();
gnInspector.setScope(configuration.getGcubeScope());
@ -43,8 +46,8 @@ public class MatrixExtractor {
return false;
}
protected List<Double> getRawValuesInTimeInstantAndBoundingBox(String layerTitle, int time, List<Tuple<Double>> coordinates3d, double xL, double xR, double yL, double yR) throws Exception {
return getRawValuesInTimeInstantAndBoundingBox(layerTitle, time, coordinates3d, xL, xR, yL, yR, false);
protected List<Double> getRawValuesInTimeInstantAndBoundingBox(String layerTitle, int time, List<Tuple<Double>> coordinates3d, double xL, double xR, double yL, double yR, double resolution) throws Exception {
return getRawValuesInTimeInstantAndBoundingBox(layerTitle, time, coordinates3d, xL, xR, yL, yR, resolution, false);
}
@ -52,7 +55,7 @@ public class MatrixExtractor {
public String layerName;
public String layerURL;
public GISDataConnector getConnector(String layerTitle) throws Exception {
public GISDataConnector getConnector(String layerTitle, double resolution) throws Exception {
// get the layer
Metadata meta = null;
GISDataConnector connector = null;
@ -61,7 +64,7 @@ public class MatrixExtractor {
else {
if (isTable()) {
AnalysisLogger.getLogger().debug("Extracting grid from table " + configuration.getParam(TableMatrixRepresentation.tableNameParameter));
connector = new Table(configuration);
connector = new Table(configuration,resolution);
currentconnector = connector;
} else
meta = gnInspector.getGNInfobyUUIDorName(layerTitle);
@ -105,8 +108,8 @@ public class MatrixExtractor {
/**
* Extract raw values in a time instant according to a set of grid points and a bounding box
*/
protected List<Double> getRawValuesInTimeInstantAndBoundingBox(String layerTitle, int time, List<Tuple<Double>> coordinates3d, double xL, double xR, double yL, double yR, boolean saveLayer) throws Exception {
GISDataConnector connector = getConnector(layerTitle);
public List<Double> getRawValuesInTimeInstantAndBoundingBox(String layerTitle, int time, List<Tuple<Double>> coordinates3d, double xL, double xR, double yL, double yR, double resolution, boolean saveLayer) throws Exception {
GISDataConnector connector = getConnector(layerTitle,resolution);
//execute connector
if (connector != null)
return connector.getFeaturesInTimeInstantAndArea(layerURL, layerName, time, coordinates3d, xL, xR, yL, yR);
@ -132,7 +135,11 @@ public class MatrixExtractor {
*/
public double[][] extractXYGridWithFixedTZ(String layerTitle, int timeInstant, double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution, boolean cachelayer) throws Exception {
currentResolution = (double)(xResolution+yResolution)/2d;
boolean faolayer = false;
if (layerTitle==null)
layerTitle="";
if (layerTitle.toLowerCase().contains("MatrixExtractor->FAO aquatic species distribution map")) {
AnalysisLogger.getLogger().debug("MatrixExtractor->FAO DISTRIBUTION LAYER ... TO APPY PATCH!");
faolayer = true;
@ -182,21 +189,22 @@ public class MatrixExtractor {
AnalysisLogger.getLogger().debug("Bounding box: (" + x1 + "," + x2 + ";" + y1 + "," + y2 + ")");
// long t0=System.currentTimeMillis();
List<Double> timeValues = getRawValuesInTimeInstantAndBoundingBox(layerTitle, timeInstant, tuples, x1, x2, y1, y2, cachelayer);
currentTimeValues = getRawValuesInTimeInstantAndBoundingBox(layerTitle, timeInstant, tuples, x1, x2, y1, y2, currentResolution, cachelayer);
currentTuples=tuples;
// AnalysisLogger.getLogger().debug("Elapsed:"+(System.currentTimeMillis()-t0));
if (log)
AnalysisLogger.getLogger().debug("Taken " + timeValues.size() + " values");
AnalysisLogger.getLogger().debug("Taken " + currentTimeValues.size() + " values");
// build back the values matrix
int k = 0;
int g = 0;
int ntriplets = timeValues.size();
int ntriplets = currentTimeValues.size();
// cycle on all the triplets to recontruct the matrix
for (int t = 0; t < ntriplets; t++) {
// take the corresponding (time,value) pair
Double value = timeValues.get(t);
Double value = currentTimeValues.get(t);
// if there is value, then set it, otherwise set NaN
// the layer is undefined in that point and a value must be generated
// assign a value to the matrix

View File

@ -27,13 +27,15 @@ public class RasterTable {
double y1;
double y2;
double z;
double time;
double xResolution;
double yResolution;
List<Tuple<Double>> coordinates;
private AlgorithmConfiguration configuration;
private String tablename = "rstr" + ("" + UUID.randomUUID()).replace("-", "");
static String createTableStatement = "CREATE TABLE %1$s (id serial, csquarecode character varying, x real, y real, z real, fvalue real)";
static String columnsnames = "csquarecode, x , y , z , fvalue";
static String createTableStatement = "CREATE TABLE %1$s (id serial, csquarecode character varying, x real, y real, z real, t real, fvalue real)";
static String columnsnames = "csquarecode, x , y , z , t, fvalue";
public static String csquareColumn = "csquarecode";
public static String valuesColumn = "fvalue";
public static String idColumn = "id";
@ -45,8 +47,24 @@ public class RasterTable {
public void setTablename(String tablename) {
this.tablename = tablename;
}
public List<Tuple<Double>> getCoordinates(){
return coordinates;
}
public void setCoordinates(List<Tuple<Double>> coordinates) {
this.coordinates=coordinates;
}
public RasterTable(double x1, double x2, double y1, double y2, double z, double xResolution, double yResolution, double[][] values, AlgorithmConfiguration configuration) {
init(x1, x2, y1, y2, z, 0, xResolution, yResolution, values, configuration);
}
public RasterTable(double x1, double x2, double y1, double y2, double z, double time, double xResolution, double yResolution, double[][] values, AlgorithmConfiguration configuration) {
init(x1, x2, y1, y2, z, time, xResolution, yResolution, values, configuration);
}
public void init(double x1, double x2, double y1, double y2, double z, double time, double xResolution, double yResolution, double[][] values, AlgorithmConfiguration configuration){
this.valuesMatrix = values;
this.configuration = configuration;
this.x1 = x1;
@ -54,10 +72,10 @@ public class RasterTable {
this.y1 = y1;
this.y2 = y2;
this.z = z;
this.time = time;
this.xResolution = xResolution;
this.yResolution = yResolution;
}
public void dumpGeoTable() {
// open the connection to the db
@ -67,7 +85,9 @@ public class RasterTable {
// create a table
DatabaseFactory.executeSQLUpdate(String.format(createTableStatement, tablename), dbconnection);
AnalysisLogger.getLogger().debug("Table " + tablename + " created");
List<Tuple<Double>> coordinates = VectorOperations.generateCoordinateTripletsInBoundingBox(x1, x2, y1, y2, z, xResolution, yResolution);
if (coordinates==null)
coordinates = VectorOperations.generateCoordinateTripletsInBoundingBox(x1, x2, y1, y2, z, xResolution, yResolution);
int triplets = coordinates.size();
AnalysisLogger.getLogger().debug("Generated " + triplets + " coordinates triples");
List<Double> values = associateValueToCoordinates(coordinates, valuesMatrix);
@ -85,8 +105,20 @@ public class RasterTable {
if (value.isNaN())
value = 0d;
sb.append("('" + csquare + "'," + x + "," + y + "," + z + ",'" + value + "')");
double zVal = z;
if (cset.getElements().size()>2)
zVal = cset.getElements().get(2);
String tVal = ""+time;
if (cset.getElements().size()>3){
tVal = ""+cset.getElements().get(3);
if (Double.isNaN(cset.getElements().get(3)) || (Double.isInfinite(cset.getElements().get(3))))
tVal="NULL";
}
sb.append("('" + csquare + "'," + x + "," + y + "," + zVal + "," + tVal +",'" + value + "')");
if (i % 5000 == 0) {
// AnalysisLogger.getLogger().debug("Partial Inserting Buffer of " + sb.length() + " Values");
String insertStatement = DatabaseUtils.insertFromBuffer(tablename, columnsnames, sb);
@ -98,6 +130,7 @@ public class RasterTable {
}
AnalysisLogger.getLogger().debug("Inserting Final Buffer of " + sb.length() + " Values");
// AnalysisLogger.getLogger().debug("Inserting Final Buffer " + sb);
// save all the strings on the table
if (sb.length() > 0) {
String insertStatement = DatabaseUtils.insertFromBuffer(tablename, columnsnames, sb);
@ -114,6 +147,7 @@ public class RasterTable {
}
}
public void deleteTable() {
SessionFactory dbconnection = null;
try {

View File

@ -10,22 +10,28 @@ public class ZExtractor extends MatrixExtractor{
super(configuration);
}
public double zmin;
public double zmax;
public double[] extractZ(String layerTitle, double x, double y, int timeIndex, double resolution) throws Exception {
double[] signal = new double[maxzLength];
GISDataConnector connector = getConnector(layerTitle);
if (layerTitle==null)
layerTitle="";
double z0 = connector.getMinZ(layerURL, layerName);
double z1 = connector.getMaxZ(layerURL, layerName);
GISDataConnector connector = getConnector(layerTitle,resolution);
AnalysisLogger.getLogger().debug("ZExtractor: minimum Z "+z0+" maximum Z:"+z1+" step: "+resolution);
zmin = connector.getMinZ(layerURL, layerName);
zmax = connector.getMaxZ(layerURL, layerName);
AnalysisLogger.getLogger().debug("ZExtractor: minimum Z "+zmin+" maximum Z:"+zmax+" step: "+resolution);
int zcounter=0;
if (resolution==0)
resolution=1;
for (double z=z0;z<=z1;z=z+resolution){
for (double z=zmin;z<=zmax;z=z+resolution){
try {
if (z%100==0)
AnalysisLogger.getLogger().debug("Matrix Extractor-> Extracting Z value " + z);

View File

@ -27,7 +27,7 @@ public class TestSignal {
// String layertitle = "afd54b39-30f7-403a-815c-4f91c6c74c26";
// String layertitle = "6411b110-7572-457a-a662-a16e4ff09e4e";
//wind stress
String layertitle = "255b5a95-ad28-4fec-99e0-5d48112dd6ab";
// String layertitle = "255b5a95-ad28-4fec-99e0-5d48112dd6ab";
//wind speed
// layertitle = "a116c9bc-9380-4d40-8374-aa0e376a6820";
//nitrates
@ -38,7 +38,7 @@ public class TestSignal {
//WFS
// String layertitle = "0aac424b-5f5b-4fa6-97d6-4b4deee62b97";
//Chlorophyll
// String layertitle = "c565e32c-c5b3-4964-b44f-06dc620563e9";
String layertitle = "c565e32c-c5b3-4964-b44f-06dc620563e9";
long t0 = System.currentTimeMillis();
AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);

View File

@ -12,8 +12,8 @@ import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
public class TestExtraction {
static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF(),testXYExtractionAquaMaps(),testXYExtractionTable()};
// static AlgorithmConfiguration[] configs = { testXYExtractionNetCDF(),testXYExtractionAquaMaps(),testXYExtractionTable()};
static AlgorithmConfiguration[] configs = { testXYExtractionTable()};
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
@ -35,7 +35,7 @@ public class TestExtraction {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("XYEXTRACTOR");
config.setAgent("XYEXTRACTION");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
@ -47,10 +47,10 @@ public class TestExtraction {
config.setParam("Layer","c565e32c-c5b3-4964-b44f-06dc620563e9");
config.setParam("Z","0");
config.setParam("TimeIndex","0");
config.setParam("BBox_LowerLeftLat","-90");
config.setParam("BBox_LowerLeftLong","-180");
config.setParam("BBox_UpperRightLat","90");
config.setParam("BBox_UpperRightLong","180");
config.setParam("BBox_LowerLeftLat","-60");
config.setParam("BBox_LowerLeftLong","-50");
config.setParam("BBox_UpperRightLat","60");
config.setParam("BBox_UpperRightLong","50");
config.setParam("XResolution","0.5");
config.setParam("YResolution","0.5");
config.setParam("OutputTableName","testextraction");
@ -81,12 +81,20 @@ public class TestExtraction {
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
/*
config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
*/
config.setParam(TableMatrixRepresentation.tableNameParameter, "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "decimallongitude");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "decimallatitude");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, " ");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "decimallatitude");
config.setParam(TableMatrixRepresentation.filterParameter, " ");
return config;
}

View File

@ -0,0 +1,143 @@
package org.gcube.dataanalysis.geo.test.infra;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
public class TestTimeExtraction {
static AlgorithmConfiguration[] configs = { testTimeExtractionTable(), testTimeExtractionNetCDF(),testTimeExtractionTable2()};
public static void main(String[] args) throws Exception {
    // Drive every prepared configuration through its transducer chain and log the output.
    System.out.println("TEST 1");
    for (AlgorithmConfiguration cfg : configs) {
        AnalysisLogger.getLogger().debug("Executing: " + cfg.getAgent());
        List<ComputationalAgent> agents = TransducerersFactory.getTransducerers(cfg);
        ComputationalAgent agent = agents.get(0);
        agent.init();
        Regressor.process(agent);
        StatisticalType result = agent.getOutput();
        AnalysisLogger.getLogger().debug("ST:" + result);
        agents = null;
    }
}
/**
 * Builds the TIMEEXTRACTION configuration that reads from a NetCDF/GeoNetwork
 * layer (local test database, BiodiversityLab scope, no frequency analysis).
 */
private static AlgorithmConfiguration testTimeExtractionNetCDF() {
    AlgorithmConfiguration cfg = new AlgorithmConfiguration();
    cfg.setAgent("TIMEEXTRACTION");
    cfg.setConfigPath("./cfg/");
    cfg.setPersistencePath("./");
    // Local test-database credentials.
    cfg.setParam("DatabaseUserName","gcube");
    cfg.setParam("DatabasePassword","d4science2");
    cfg.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
    cfg.setParam("DatabaseDriver","org.postgresql.Driver");
    cfg.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
    // Layer UUID to extract from (the alternative one is kept for manual switching).
    cfg.setParam("Layer","c565e32c-c5b3-4964-b44f-06dc620563e9");
    // config.setParam("Layer","dffa504b-dbc8-4553-896e-002549f8f5d3");
    cfg.setParam("OutputTableName","testtimeextraction");
    cfg.setParam("OutputTableLabel","testtimeextraction");
    // Extraction point and resolution.
    cfg.setParam("Z","0");
    cfg.setParam("X","0");
    cfg.setParam("Y","0");
    cfg.setParam("Resolution","0.5");
    // -1 disables the signal-frequency analysis options.
    cfg.setParam("SamplingFreq","-1");
    cfg.setParam("MinFrequency","-1");
    cfg.setParam("MaxFrequency","-1");
    cfg.setParam("FrequencyError","-1");
    return cfg;
}
private static AlgorithmConfiguration testTimeExtractionTable() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIMEEXTRACTION");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setParam("Layer","c565e32c-c5b3-4964-b44f-06dc620563e9");
config.setParam("OutputTableName","testtimeextraction2");
config.setParam("OutputTableLabel","testtimeextraction2");
config.setParam("Z","0");
config.setParam("X","-47.97");
config.setParam("Y","43.42");
config.setParam("Resolution","0.5");
config.setParam("SamplingFreq","-1");
config.setParam("MinFrequency","-1");
config.setParam("MaxFrequency","-1");
config.setParam("FrequencyError","-1");
config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
return config;
}
private static AlgorithmConfiguration testTimeExtractionTable2() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIMEEXTRACTION");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
config.setParam("OutputTableName","testtimeextraction2");
config.setParam("OutputTableLabel","testtimeextraction2");
config.setParam("Z","0");
config.setParam("X","18.61669921875");
config.setParam("Y","-34.1833000183105");
config.setParam("Resolution","10");
config.setParam("SamplingFreq","-1");
config.setParam("MinFrequency","-1");
config.setParam("MaxFrequency","-1");
config.setParam("FrequencyError","-1");
config.setParam(TableMatrixRepresentation.tableNameParameter, "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "decimallongitude");
config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "decimallatitude");
config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "eventdate");
config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "decimallongitude");
config.setParam(TableMatrixRepresentation.filterParameter, " ");
return config;
}
}

View File

@ -0,0 +1,165 @@
package org.gcube.dataanalysis.geo.test.infra;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
public class TestZExtraction {

	// Only the NetCDF Z-extraction is active; the other builders below are kept as
	// ready-made alternates to swap into this array.
	// static AlgorithmConfiguration[] configs = { testTimeExtractionTable(), testTimeExtractionNetCDF(),testTimeExtractionTable2()};
	static AlgorithmConfiguration[] configs = { testZExtractionNetCDF() };

	/**
	 * Runs every configuration through its transducer and logs the resulting
	 * StatisticalType. Failures propagate as exceptions.
	 */
	public static void main(String[] args) throws Exception {
		System.out.println("TEST 1");
		for (int i = 0; i < configs.length; i++) {
			AnalysisLogger.getLogger().debug("Executing: " + configs[i].getAgent());
			List<ComputationalAgent> trans = TransducerersFactory.getTransducerers(configs[i]);
			trans.get(0).init();
			Regressor.process(trans.get(0));
			StatisticalType st = trans.get(0).getOutput();
			AnalysisLogger.getLogger().debug("ST:" + st);
			trans = null;
		}
	}

	/**
	 * Builds the configuration skeleton shared by every test: agent, local paths,
	 * database credentials and infrastructure scope.
	 * NOTE(review): credentials are hard-coded test values; externalize them if this
	 * code is ever promoted beyond regression testing.
	 */
	private static AlgorithmConfiguration baseConfig(String agent, String user, String password, String url, String scope) {
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setAgent(agent);
		config.setConfigPath("./cfg/");
		config.setPersistencePath("./");
		config.setParam("DatabaseUserName", user);
		config.setParam("DatabasePassword", password);
		config.setParam("DatabaseURL", url);
		config.setParam("DatabaseDriver", "org.postgresql.Driver");
		config.setGcubeScope(scope);
		return config;
	}

	/** Adds the signal-analysis parameters used by the time-extraction alternates (-1 = use defaults). */
	private static void setSignalParams(AlgorithmConfiguration config) {
		config.setParam("SamplingFreq", "-1");
		config.setParam("MinFrequency", "-1");
		config.setParam("MaxFrequency", "-1");
		config.setParam("FrequencyError", "-1");
	}

	/** Z extraction from a NetCDF layer at (0, 0), fine resolution. Currently unused alternate. */
	private static AlgorithmConfiguration testZExtractionLongNetCDF() {
		AlgorithmConfiguration config = baseConfig("ZEXTRACTION", "gcube", "d4science2",
				"jdbc:postgresql://localhost/testdb",
				"/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
		config.setParam("Layer", "6411b110-7572-457a-a662-a16e4ff09e4e");
		// config.setParam("Layer","dffa504b-dbc8-4553-896e-002549f8f5d3");
		config.setParam("OutputTableName", "testtimeextraction");
		config.setParam("OutputTableLabel", "testtimeextraction");
		config.setParam("TimeIndex", "0");
		config.setParam("X", "0");
		config.setParam("Y", "0");
		config.setParam("Resolution", "0.5");
		return config;
	}

	/** Z extraction from a NetCDF layer at (0, 0) in the devsec scope, coarse resolution. */
	private static AlgorithmConfiguration testZExtractionNetCDF() {
		AlgorithmConfiguration config = baseConfig("ZEXTRACTION", "gcube", "d4science2",
				"jdbc:postgresql://localhost/testdb",
				"/gcube/devsec");
		config.setParam("Layer", "7f90e153-0c5c-4d45-a498-a6374593e68d");
		config.setParam("OutputTableName", "testtimeextraction");
		config.setParam("OutputTableLabel", "testtimeextraction");
		config.setParam("TimeIndex", "0");
		config.setParam("X", "0");
		config.setParam("Y", "0");
		config.setParam("Resolution", "100");
		return config;
	}

	/** Time extraction from a generic table — unused alternate copied from TestTimeExtraction. */
	private static AlgorithmConfiguration testTimeExtractionTable() {
		AlgorithmConfiguration config = baseConfig("TIMEEXTRACTION", "utente", "d4science",
				"jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb",
				"/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
		config.setParam("Layer", "c565e32c-c5b3-4964-b44f-06dc620563e9");
		config.setParam("OutputTableName", "testtimeextraction2");
		config.setParam("OutputTableLabel", "testtimeextraction2");
		config.setParam("Z", "0");
		config.setParam("X", "-47.97");
		config.setParam("Y", "43.42");
		config.setParam("Resolution", "0.5");
		setSignalParams(config);
		config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
		config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
		config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
		config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
		config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
		config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
		return config;
	}

	/** Time extraction from an occurrence-species table — unused alternate copied from TestTimeExtraction. */
	private static AlgorithmConfiguration testTimeExtractionTable2() {
		AlgorithmConfiguration config = baseConfig("TIMEEXTRACTION", "utente", "d4science",
				"jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb",
				"/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
		config.setParam("OutputTableName", "testtimeextraction2");
		config.setParam("OutputTableLabel", "testtimeextraction2");
		config.setParam("Z", "0");
		config.setParam("X", "18.61669921875");
		config.setParam("Y", "-34.1833000183105");
		config.setParam("Resolution", "10");
		setSignalParams(config);
		config.setParam(TableMatrixRepresentation.tableNameParameter, "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
		config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "decimallongitude");
		config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "decimallatitude");
		config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "eventdate");
		config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "decimallongitude");
		config.setParam(TableMatrixRepresentation.filterParameter, " ");
		return config;
	}
}

View File

@ -1,17 +1,10 @@
package org.gcube.dataanalysis.geo.test.regression;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;
public class RegressionPeriodicity {

View File

@ -46,16 +46,16 @@ public class RegressionTestMapsComparison {
// config.setParam("Layer_2","c9a31223-cc00-4acd-bc5b-a0c76a7f79c7"); //humbolt squid
//FAO vs AquaMaps
config.setParam("Layer_1","b040894b-c5db-47fc-ba9c-d4fafcdcf620");
config.setParam("Layer_2","c9a31223-cc00-4acd-bc5b-a0c76a7f79c7");
// config.setParam("Layer_1","b040894b-c5db-47fc-ba9c-d4fafcdcf620");
// config.setParam("Layer_2","c9a31223-cc00-4acd-bc5b-a0c76a7f79c7");
//NetCDF vs NETCDF WOA
// config.setParam("Layer_1","e0dbbcc0-8364-4087-8bcb-c7d95b2f55c8"); //statistical mean oxygen
// config.setParam("Layer_2","49f5a5a1-80ff-4a00-8c84-dac29bda1a23");//statistical mean phosphate
//Eleutheronema tetradactylum
// config.setParam("Layer_1","fao-species-map-fot");
// config.setParam("Layer_2","c492f5d3-1cfc-44e3-b8d2-8530fec3e7e7");
config.setParam("Layer_1","fao-species-map-fot");
config.setParam("Layer_2","c492f5d3-1cfc-44e3-b8d2-8530fec3e7e7");

View File

@ -64,7 +64,7 @@ public class VectorOperations {
// AnalysisLogger.getLogger().debug("Grid contains: "+grid3d.size()+" values");
// AnalysisLogger.getLogger().debug("Dataset contains: "+coordinates5d.size()+" values");
int foundmatches = 0;
for (Tuple<Double> coord5d : coordinates5d) {
double rx = coord5d.getElements().get(0);
double ry = coord5d.getElements().get(1);
@ -82,14 +82,56 @@ public class VectorOperations {
if (d <= tolerance) {
// AnalysisLogger.getLogger().debug("Association: distance between grid:("+x+","+y+","+z+","+gridTimeInstant+") and point:("+rx+","+ry+","+rz+","+rt+") is "+d);
valuesForGrid.set(gridIdx, rvalue);
foundmatches++;
}
gridIdx++;
}
}
AnalysisLogger.getLogger().debug("Association: Found "+foundmatches+" matches between the grid of points and the coordinates");
return valuesForGrid;
}
public static List<Double> assignGridValuesToPoints(List<Tuple<Double>> grid3d, int gridTimeInstant, List<Double> gridValues, List<Tuple<Double>> coordinates4d, double tolerance) {
List<Double> valuesForPoints = new ArrayList<Double>();
int gridSize = coordinates4d.size();
for (int i = 0; i < gridSize; i++) {
valuesForPoints.add(Double.NaN);
}
int foundmatches = 0;
int points=0;
for (Tuple<Double> coord4d : coordinates4d) {
double rx = coord4d.getElements().get(0);
double ry = coord4d.getElements().get(1);
double rz = coord4d.getElements().get(2);
double rt = coord4d.getElements().get(3);
int gridIdx = 0;
for (Tuple<Double> gridElement : grid3d) {
double x = gridElement.getElements().get(0);
double y = gridElement.getElements().get(1);
double z = gridElement.getElements().get(2);
double d = distance(x, y, z, gridTimeInstant, rx, ry, rz, rt);
if (d <= tolerance) {
// AnalysisLogger.getLogger().debug("Association: distance between grid:("+x+","+y+","+z+","+gridTimeInstant+") and point:("+rx+","+ry+","+rz+","+rt+") is "+d);
valuesForPoints.set(points, gridValues.get(gridIdx));
foundmatches++;
break;
}
gridIdx++;
}
points++;
}
AnalysisLogger.getLogger().debug("Association: Found "+foundmatches+" matches between the points and the grid");
return valuesForPoints;
}
public static double distance(double x1, double y1, double z1, double t1, double x2, double y2, double z2, double t2) {
return Math.sqrt(((x1 - x2) * (x1 - x2)) + ((y1 - y2) * (y1 - y2)) + ((z1 - z2) * (z1 - z2)) + ((t1 - t2) * (t1 - t2)));