Compare commits

...

19 Commits

Author SHA1 Message Date
Giancarlo Panichi b5622f59f3 ref 20971: ShortLink - Check for obsolete short urls
https://support.d4science.org/issues/20971

Fixed ShortLink urls.
2021-05-25 11:41:48 +02:00
Giancarlo Panichi f2bba79a02 ref 20971: ShortLink - Check for obsolete short urls
https://support.d4science.org/issues/20971

Fixed ShortLink urls.
2021-05-25 11:33:51 +02:00
Giancarlo Panichi 933c22182e ref 20971: ShortLink - Check for obsolete short urls
https://support.d4science.org/issues/20971

Fixed ShortLink urls.
2021-05-25 11:32:06 +02:00
Roberto Cirillo 93613b2724 Update 'CHANGELOG.md'
fix changelog (1.5.2)
2021-01-20 16:47:10 +01:00
Roberto Cirillo 0d875a3320 Update 'CHANGELOG.md'
update changelog entry 1.5.2
2021-01-20 15:54:48 +01:00
Roberto Cirillo d55a1000a8 Update 'CHANGELOG.md'
update changelog
2021-01-20 15:53:36 +01:00
Roberto Cirillo 69e4c483a3 Update 'pom.xml'
update ecological-engine-smart-executor lower bound
2021-01-20 15:52:27 +01:00
Giancarlo Panichi 8abb4d2f7a Updated for Release 2021-01-19 16:25:09 +01:00
Giancarlo Panichi 95697b28e0 Removed marytts-d4science #20135 2021-01-19 16:23:16 +01:00
Giancarlo Panichi 0e12b0890d Removed marytts-d4science #20135 2021-01-19 16:19:30 +01:00
Giancarlo Panichi 2feee0196b Removed marytts-d4science #20135 2021-01-19 16:17:17 +01:00
Giancarlo Panichi 49ff55c145 Removed marytts-d4science #20135 2021-01-19 16:16:17 +01:00
Roberto Cirillo d82ca437d8 Update 'pom.xml'
update ecological engine lower bound
2021-01-19 16:00:44 +01:00
Roberto Cirillo 429b7e5d67 Update 'pom.xml'
removed SNAPSHOT from version
2021-01-19 09:32:26 +01:00
Roberto Cirillo f1f1aef6fb Update 'pom.xml'
added SNAPSHOT in order to perform a SNAPSHOT build
2021-01-19 09:30:43 +01:00
Roberto Cirillo 1b19ab5777 Update 'CHANGELOG.md'
added 1.5.2 entry to CHANGELOG
2021-01-18 18:05:23 +01:00
Roberto Cirillo 20ef0ac106 Update 'pom.xml'
update gis-interface lower bound range.
osgeo repo moved to the new url: https://nexus.d4science.org/nexus/content/repositories/osgeo/
version updated to 1.5.2
2021-01-18 18:00:33 +01:00
Roberto Cirillo e5388dd35e Update 'CHANGELOG.md'
update changelog to 1.5.2-SNAPSHOT version
2020-11-16 17:16:04 +01:00
Roberto Cirillo 2b80c2ea46 Update 'pom.xml'
replace repositories defined in the pom with the new nexus url (nexus.d4science.org). update version to 1.5.2-SNAPSHOT
2020-11-16 17:14:21 +01:00
14 changed files with 31 additions and 917 deletions

CHANGELOG.md

@@ -1,5 +1,23 @@
+# Changelog
+This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
-# Changelog for "ecological-engine-geospatial-extensions"
+## [v1.5.3] - 2021-05-24
+### Fixes
+- Fixed obsolete short urls [#20971]
+## [v1.5.2] - 2021-01-18
+### Features
+- update gis-interface lower bound range
+- replaced repositories defined in the pom with the new nexus url (nexus.d4science.org). update version to 1.5.2-SNAPSHOT
+- update ecological-engine-smart-executor lower bound range
+- removed maryTTS classes #20135
 ## [v1.5.1] [r4.24.0] - 2020-06-10

pom.xml

@@ -9,7 +9,7 @@
    </parent>
    <groupId>org.gcube.dataanalysis</groupId>
    <artifactId>ecological-engine-geospatial-extensions</artifactId>
-   <version>1.5.1</version>
+   <version>1.5.3</version>
    <name>ecological-engine-geospatial-extensions</name>
    <description>ecological-engine-geospatial-extension</description>
@@ -51,7 +51,7 @@
        <dependency>
            <groupId>org.gcube.dataanalysis</groupId>
            <artifactId>ecological-engine-smart-executor</artifactId>
-           <version>[1.6.2,2.0.0-SNAPSHOT)</version>
+           <version>[1.6.5-SNAPSHOT,2.0.0-SNAPSHOT)</version>
            <exclusions>
                <exclusion>
                    <artifactId>common-utils-encryption</artifactId>
@@ -66,7 +66,7 @@
        <dependency>
            <groupId>org.gcube.dataanalysis</groupId>
            <artifactId>ecological-engine</artifactId>
-           <version>[1.12.0,2.0.0-SNAPSHOT)</version>
+           <version>[1.14.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
        </dependency>
        <dependency>
            <groupId>geoutils-custom</groupId>
@@ -103,7 +103,7 @@
        <dependency>
            <groupId>org.gcube.spatial.data</groupId>
            <artifactId>gis-interface</artifactId>
-           <version>[2.4.1,3.0.0-SNAPSHOT)</version>
+           <version>[2.4.6,3.0.0-SNAPSHOT)</version>
            <exclusions>
                <exclusion>
                    <artifactId>geotk-coverageio-netcdf</artifactId>
@@ -147,7 +147,7 @@
        <repository>
            <id>osgeo</id>
            <name>Open Source Geospatial Foundation Repository Mirror</name>
-           <url>http://maven.research-infrastructures.eu/nexus/content/repositories/osgeo//</url>
+           <url>https://nexus.d4science.org/nexus/content/repositories/osgeo/</url>
        </repository>
        <!--repository>
            <id>geotoolkit</id>
@@ -156,22 +156,22 @@
        </repository-->
        <repository>
            <id>geotoolkit</id>
-           <url>http://maven.research-infrastructures.eu:8081/nexus/content/repositories/geotoolkit/</url>
+           <url>https://nexus.d4science.org/nexus/content/repositories/geotoolkit//</url>
        </repository>
        <repository>
            <id>52north-releases</id>
            <name>52north-releases</name>
-           <url>http://maven.research-infrastructures.eu/nexus/content/repositories/52north-releases/</url>
+           <url>https://nexus.d4science.org/nexus/content/repositories/52north-releases/</url>
        </repository>
        <repository>
            <id>GeoSolutions</id>
            <name>GeoSolutions</name>
-           <url>http://maven.research-infrastructures.eu/nexus/content/repositories/geo-solutions-snapshots/</url>
+           <url>https://nexus.d4science.org/nexus/content/repositories/geo-solutions-snapshots/</url>
        </repository>
        <repository>
            <id>GeoSolutions-releases</id>
            <name>GeoSolutions-releases</name>
-           <url>http://maven.research-infrastructures.eu/nexus/content/repositories/geo-solutions/</url>
+           <url>https://nexus.d4science.org/nexus/content/repositories/geo-solutions/</url>
        </repository>
    </repositories>
    <build>

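The dependency updates above widen Maven version ranges. In this notation a square bracket is an inclusive bound and a parenthesis an exclusive one, so [1.6.5-SNAPSHOT,2.0.0-SNAPSHOT) accepts 1.6.5-SNAPSHOT and anything newer, but rejects 2.0.0-SNAPSHOT itself. A minimal sketch of how such a spec resolves, using the maven-artifact library's VersionRange (assumed available on the classpath):

import org.apache.maven.artifact.versioning.DefaultArtifactVersion;
import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
import org.apache.maven.artifact.versioning.VersionRange;

public class VersionRangeDemo {

    public static void main(String[] args) throws InvalidVersionSpecificationException {
        // The new lower bound declared in this pom for ecological-engine-smart-executor.
        VersionRange range = VersionRange.createFromVersionSpec("[1.6.5-SNAPSHOT,2.0.0-SNAPSHOT)");

        // 1.6.2 no longer satisfies the range; 1.6.5 and 1.7.0 do.
        System.out.println(range.containsVersion(new DefaultArtifactVersion("1.6.2"))); // false
        System.out.println(range.containsVersion(new DefaultArtifactVersion("1.6.5"))); // true
        System.out.println(range.containsVersion(new DefaultArtifactVersion("1.7.0"))); // true

        // The upper bound is exclusive, so 2.0.0-SNAPSHOT itself is rejected.
        System.out.println(range.containsVersion(new DefaultArtifactVersion("2.0.0-SNAPSHOT"))); // false
    }
}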
MaxEnt4NicheModellingTransducer.java

@@ -112,7 +112,7 @@ public class MaxEnt4NicheModellingTransducer implements Transducerer {
        "In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. " +
        "The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one." +
        "The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment." +
-       "Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt";
+       "Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Instructions http://wiki.i-marine.eu/index.php/MaxEnt";
    }

    @Override

TimeExtraction.java

@@ -1,227 +0,0 @@
package org.gcube.dataanalysis.geo.algorithms;

import java.awt.Image;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.ecoengine.signals.SignalConverter;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.connectors.wfs.WFS;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;

public class TimeExtraction extends XYExtraction {

    public static String x = "X";
    public static String y = "Y";
    public static String resolution = "Resolution";
    public static String samplingFrequency = "SamplingFreq";
    public static String minFrequency = "MinFrequency";
    public static String maxFrequency = "MaxFrequency";
    public static String expectedFrequencyError = "FrequencyError";
    public static String FFTSamplesParam = "FFTSamples";

    public double xValue;
    public double yValue;
    public double resolutionValue;
    public int samplingFrequencyValue;
    public double minFrequencyValue;
    public double maxFrequencyValue;
    public double expectedFrequencyErrorValue;
    public int FFTSamples;

    public PeriodicityDetector pd;
    public double signal[];
    public double timeline[];

    @Override
    public String getDescription() {
        return "An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram.";
    }

    @Override
    public List<StatisticalType> getInputParameters() {
        List<StatisticalType> previnputs = super.getInputParameters();
        inputs = new ArrayList<StatisticalType>();
        inputs.add(previnputs.get(0));
        inputs.add(previnputs.get(5));
        inputs.add(previnputs.get(6));
        IOHelper.addDoubleInput(inputs, x, "X coordinate", "0");
        IOHelper.addDoubleInput(inputs, y, "Y coordinate", "0");
        inputs.add(previnputs.get(7));
        IOHelper.addDoubleInput(inputs, resolution, "Extraction point resolution", "0.5");
        IOHelper.addIntegerInput(inputs, samplingFrequency, "Sampling frequency in Hz. Leave it to -1 if unknown or under 1", "-1");
        // IOHelper.addDoubleInput(inputs, minFrequency, "Minimum expected frequency in Hz. Can be decimal", "-1");
        // IOHelper.addDoubleInput(inputs, maxFrequency, "Maximum expected frequency in Hz. Can be decimal", "-1");
        // IOHelper.addDoubleInput(inputs, expectedFrequencyError, "Expected precision on periodicity detection in Hz or 1/samples. Can be decimal and depends on the signal length. Default is 0.1", "0.1");
        // IOHelper.addIntegerInput(inputs, FFTSamplesParam, "Number of samples to use in the Fourier Analysis. All samples will be used at maximum.", "100");
        DatabaseType.addDefaultDBPars(inputs);
        return inputs;
    }

    protected void getParameters() {
        layerNameValue = IOHelper.getInputParameter(config, layerName);
        AnalysisLogger.getLogger().debug("Extraction: Layer " + layerNameValue);
        zValue = Double.parseDouble(IOHelper.getInputParameter(config, z));
        xValue = Double.parseDouble(IOHelper.getInputParameter(config, x));
        yValue = Double.parseDouble(IOHelper.getInputParameter(config, y));
        resolutionValue = Double.parseDouble(IOHelper.getInputParameter(config, resolution));
        samplingFrequencyValue = Integer.parseInt(IOHelper.getInputParameter(config, samplingFrequency));
        // minFrequencyValue = Double.parseDouble(IOHelper.getInputParameter(config, minFrequency));
        // maxFrequencyValue = Double.parseDouble(IOHelper.getInputParameter(config, maxFrequency));
        expectedFrequencyErrorValue = -1;
        AnalysisLogger.getLogger().debug("Extraction: Z " + zValue);
        AnalysisLogger.getLogger().debug("Extraction: X " + xValue);
        AnalysisLogger.getLogger().debug("Extraction: Y " + yValue);
        AnalysisLogger.getLogger().debug("Extraction: Res " + resolutionValue);
        AnalysisLogger.getLogger().debug("Extraction: SamplingF " + samplingFrequency);
        AnalysisLogger.getLogger().debug("Extraction: minF " + minFrequencyValue);
        AnalysisLogger.getLogger().debug("Extraction: maxF " + maxFrequencyValue);
        AnalysisLogger.getLogger().debug("Extraction: expectedError " + expectedFrequencyErrorValue);
        tableNameValue = IOHelper.getInputParameter(config, tableName);
        tableLabelValue = IOHelper.getInputParameter(config, tableLabel);
        AnalysisLogger.getLogger().debug("Extraction: tableName " + tableNameValue);
        AnalysisLogger.getLogger().debug("Extraction: tableLabel " + tableLabelValue);
        String scope = config.getGcubeScope();
        AnalysisLogger.getLogger().debug("Extraction: Externally set scope " + scope);
        if (scope == null) {
            scope = ScopeProvider.instance.get();
            config.setGcubeScope(scope);
        }
    }

    Image signalimage;
    Image spectrogramImage;

    @Override
    public void compute() throws Exception {
        try {
            status = 10;
            AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
            getParameters();
            AnalysisLogger.getLogger().debug("Extracting Time Series from layer");
            TimeSeriesExtractor intersector = new TimeSeriesExtractor(config);
            long t0 = System.currentTimeMillis();
            // take best z
            zValue = intersector.correctZ(zValue, layerNameValue, resolutionValue);
            AnalysisLogger.getLogger().debug("TimeExtraction->Best Z for this reference layer: " + zValue);
            outputParameters.put("Matching Z value in the layer", "" + zValue);
            outputParameters.put("Min Z value in the Layer", "" + intersector.zmin);
            outputParameters.put("Max Z value in the Layer", "" + intersector.zmax);
            AnalysisLogger.getLogger().debug("Z allowed to be: " + zValue);
            signal = intersector.extractT(layerNameValue, xValue, yValue, zValue, resolutionValue);
            AnalysisLogger.getLogger().debug("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
            AnalysisLogger.getLogger().debug("Signal: " + signal.length);
            status = 30;
            AnalysisLogger.getLogger().debug("Extractor: Matrix Extracted");
            AnalysisLogger.getLogger().debug("Extractor: ****Rasterizing grid into table****");
            double matrix[][] = new double[1][];
            matrix[0] = signal;
            HashMap<Double, Map<String, String>> polygonsFeatures = null;
            if (intersector.currentconnector instanceof WFS)
                polygonsFeatures = ((WFS) intersector.currentconnector).getPolygonsFeatures();
            RasterTable raster = new RasterTable(xValue, xValue, yValue, yValue, zValue, resolutionValue, resolutionValue, matrix, polygonsFeatures, config);
            int signalRate = 1;
            if (samplingFrequencyValue > 0)
                signalRate = samplingFrequencyValue;
            timeline = SignalConverter.signalTimeLine(signal.length, signalRate);
            List<Tuple<Double>> coordinates = new ArrayList<Tuple<Double>>();
            for (int i = 0; i < timeline.length; i++)
                coordinates.add(new Tuple<Double>(xValue, yValue, zValue, timeline[i]));
            raster.setTablename(tableNameValue);
            raster.setCoordinates(coordinates);
            raster.deleteTable();
            raster.dumpGeoTable();
            signalimage = SignalProcessing.renderSignalWithGenericTime(signal, timeline, "Signal");
            AnalysisLogger.getLogger().debug("Extractor: Map was dumped in table: " + tableNameValue);
            status = 80;
            AnalysisLogger.getLogger().debug("Extractor: Elapsed: Whole operation completed in " + ((double) (System.currentTimeMillis() - t0) / 1000d) + "s");
        } catch (Exception e) {
            e.printStackTrace();
            AnalysisLogger.getLogger().debug("Extractor: ERROR!: " + e.getLocalizedMessage());
            throw e;
        } finally {
            status = 100;
        }
    }

    @Override
    public StatisticalType getOutput() {
        LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
        List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
        templateHspec.add(TableTemplates.TIMESERIES);
        OutputTable p = new OutputTable(templateHspec, tableLabelValue, tableNameValue, "Output table");
        map.put("OutputTable", p);
        if (signal != null && signal.length > 0) {
            HashMap<String, Image> producedImages = new HashMap<String, Image>();
            if (signalimage != null)
                producedImages.put("Time Series Visualization", signalimage);
            if (spectrogramImage != null)
                producedImages.put("Spectrogram", spectrogramImage);
            /*
            try {
                ImageIO.write(ImageTools.toBufferedImage(signalimage), "png", new File("signal.png"));
                ImageIO.write(ImageTools.toBufferedImage(spectrogramImage), "png", new File("spectrogram.png"));
            } catch (IOException e) {
                e.printStackTrace();
            }
            */
            PrimitiveType images = new PrimitiveType("Images", producedImages, PrimitiveTypes.IMAGES, "Signal Processing", "Visualization of the signal and spectrogram");
            for (String key : outputParameters.keySet()) {
                String value = outputParameters.get(key);
                PrimitiveType val = new PrimitiveType(String.class.getName(), "" + value, PrimitiveTypes.STRING, key, key);
                map.put(key, val);
            }
            map.put("Images", images);
        }
        else
            map.put("Note", new PrimitiveType(String.class.getName(), "The signal contains only one point. The charts will not be displayed.", PrimitiveTypes.STRING, "Note", "Note about the signal"));
        // generate a primitive type for the collection
        PrimitiveType outputm = new PrimitiveType(HashMap.class.getName(), map, PrimitiveTypes.MAP, "ResultsMap", "Results Map");
        return outputm;
    }
}

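The deleted TimeExtraction delegated the hidden-periodicity search to PeriodicityDetector from ecological-engine, whose signal-processing internals relied on the marytts code removed under #20135. The following is a rough, dependency-free sketch of the core idea (finding the dominant frequency of a uniformly sampled signal with a discrete Fourier transform); it is an illustration, not the PeriodicityDetector implementation:

public class DominantFrequency {

    /**
     * Returns the dominant frequency (in cycles per sample) of a real signal,
     * found as the peak-magnitude bin of a naive O(n^2) DFT over the positive bins.
     */
    public static double dominantFrequency(double[] signal) {
        int n = signal.length;
        double bestMagnitude = -1;
        int bestBin = 1;
        for (int k = 1; k <= n / 2; k++) { // skip bin 0 (the DC component)
            double re = 0, im = 0;
            for (int t = 0; t < n; t++) {
                double angle = 2 * Math.PI * k * t / n;
                re += signal[t] * Math.cos(angle);
                im -= signal[t] * Math.sin(angle);
            }
            double magnitude = Math.hypot(re, im);
            if (magnitude > bestMagnitude) {
                bestMagnitude = magnitude;
                bestBin = k;
            }
        }
        return (double) bestBin / n; // frequency in cycles per sample
    }

    public static void main(String[] args) {
        // A synthetic signal with a period of 25 samples: expected frequency 0.04.
        double[] signal = new double[500];
        for (int t = 0; t < signal.length; t++)
            signal[t] = Math.sin(2 * Math.PI * t / 25.0);
        double f = dominantFrequency(signal);
        System.out.println("Frequency: " + f + ", period: " + (1.0 / f) + " samples");
    }
}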
TimeExtractionTable.java

@@ -1,56 +0,0 @@
package org.gcube.dataanalysis.geo.algorithms;

import java.util.ArrayList;
import java.util.List;

import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;

public class TimeExtractionTable extends TimeExtraction {

    @Override
    public String getDescription() {
        return "An algorithm to extract a time series of values associated to a table containing geospatial information. " +
                "The algorithm analyses the time series and automatically searches for hidden periodicities. " +
                "It produces one chart of the time series, one table containing the time series values and possibly the spectrogram.";
    }

    @Override
    public List<StatisticalType> getInputParameters() {
        List<StatisticalType> inputs = new ArrayList<StatisticalType>();
        List<TableTemplates> template = new ArrayList<TableTemplates>();
        template.add(TableTemplates.GENERIC);
        InputTable table = new InputTable(template, TableMatrixRepresentation.tableNameParameter, "A geospatial table containing at least x,y information", "");
        inputs.add(table);
        ColumnType columnx = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.xDimensionColumnParameter, "The column containing x (longitude) information", "x", false);
        inputs.add(columnx);
        ColumnType columny = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.yDimensionColumnParameter, "The column containing y (latitude) information", "y", false);
        inputs.add(columny);
        ColumnType columnt = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.timeDimensionColumnParameter, "The column containing time information", "datetime", false);
        inputs.add(columnt);
        ColumnType columnvalue = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.valueDimensionColumnParameter, "A column containing real valued features", "value", false);
        inputs.add(columnvalue);
        inputs.add(new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, TableMatrixRepresentation.filterParameter, "A filter on one of the columns (e.g. speed=2)", " "));
        IOHelper.addStringInput(inputs, TableMatrixRepresentation.zDimensionColumnParameter, "The column containing z (altitude or depth) information (optional)", "z");
        List<StatisticalType> previnputs = super.getInputParameters();
        previnputs.remove(0);
        inputs.addAll(previnputs);
        DatabaseType.addDefaultDBPars(inputs);
        return inputs;
    }
}

ZExtraction.java

@@ -1,193 +0,0 @@
package org.gcube.dataanalysis.geo.algorithms;

import java.awt.Image;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.connectors.wfs.WFS;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
import org.gcube.dataanalysis.geo.matrixmodel.ZExtractor;

public class ZExtraction extends XYExtraction {

    @Override
    public String getDescription() {
        return "An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). " +
                "The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. " +
                "It produces one chart of the Z values and one table containing the values.";
    }

    public static String x = "X";
    public static String y = "Y";
    public static String resolution = "Resolution";

    public double xValue;
    public double yValue;
    public double resolutionValue;
    public double signal[];

    @Override
    public List<StatisticalType> getInputParameters() {
        List<StatisticalType> previnputs = super.getInputParameters();
        inputs = new ArrayList<StatisticalType>();
        // layername
        inputs.add(previnputs.get(0));
        inputs.add(previnputs.get(5));
        inputs.add(previnputs.get(6));
        IOHelper.addDoubleInput(inputs, x, "X coordinate", "0");
        IOHelper.addDoubleInput(inputs, y, "Y coordinate", "0");
        inputs.add(previnputs.get(8));
        IOHelper.addDoubleInput(inputs, resolution, "Step for Z values", "100");
        DatabaseType.addDefaultDBPars(inputs);
        return inputs;
    }

    protected void getParameters() {
        layerNameValue = IOHelper.getInputParameter(config, layerName);
        AnalysisLogger.getLogger().debug("Extraction: Layer " + layerNameValue);
        time = Integer.parseInt(IOHelper.getInputParameter(config, t));
        xValue = Double.parseDouble(IOHelper.getInputParameter(config, x));
        yValue = Double.parseDouble(IOHelper.getInputParameter(config, y));
        resolutionValue = Double.parseDouble(IOHelper.getInputParameter(config, resolution));
        AnalysisLogger.getLogger().debug("Extraction: T " + time);
        AnalysisLogger.getLogger().debug("Extraction: X " + xValue);
        AnalysisLogger.getLogger().debug("Extraction: Y " + yValue);
        AnalysisLogger.getLogger().debug("Extraction: Res " + resolutionValue);
        tableNameValue = IOHelper.getInputParameter(config, tableName);
        tableLabelValue = IOHelper.getInputParameter(config, tableLabel);
        AnalysisLogger.getLogger().debug("Extraction: tableName " + tableNameValue);
        AnalysisLogger.getLogger().debug("Extraction: tableLabel " + tableLabelValue);
        String scope = config.getGcubeScope();
        AnalysisLogger.getLogger().debug("Extraction: Externally set scope " + scope);
        if (scope == null) {
            scope = ScopeProvider.instance.get();
            config.setGcubeScope(scope);
        }
    }

    Image signalimage;
    Image spectrogramImage;

    @Override
    public void compute() throws Exception {
        try {
            status = 30;
            AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
            getParameters();
            AnalysisLogger.getLogger().debug("Extracting Time Series from layer");
            ZExtractor extractor = new ZExtractor(config);
            extractor.correctZ(0, layerNameValue, resolutionValue);
            long t0 = System.currentTimeMillis();
            signal = extractor.extractZ(layerNameValue, xValue, yValue, time, resolutionValue);
            AnalysisLogger.getLogger().debug("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
            AnalysisLogger.getLogger().debug("Signal: " + signal.length);
            status = 30;
            if (signal.length == 1)
                AnalysisLogger.getLogger().debug("Extractor: Signal is only one point!");
            status = 70;
            AnalysisLogger.getLogger().debug("Extractor: Matrix Extracted");
            AnalysisLogger.getLogger().debug("Extractor: ****Rasterizing grid into table****");
            double matrix[][] = new double[1][];
            matrix[0] = signal;
            HashMap<Double, Map<String, String>> polygonsFeatures = null;
            if (extractor.currentconnector instanceof WFS)
                polygonsFeatures = ((WFS) extractor.currentconnector).getPolygonsFeatures();
            RasterTable raster = new RasterTable(xValue, xValue, yValue, yValue, zValue, time, resolutionValue, resolutionValue, matrix, polygonsFeatures, config);
            int signalRate = 1;
            double zline[] = new double[signal.length];
            int j = 0;
            for (double z = extractor.zmin; z <= extractor.zmax; z = z + resolutionValue) {
                zline[j] = z;
                j++;
            }
            List<Tuple<Double>> coordinates = new ArrayList<Tuple<Double>>();
            for (int i = 0; i < zline.length; i++)
                coordinates.add(new Tuple<Double>(xValue, yValue, zline[i], (double) time));
            raster.setTablename(tableNameValue);
            raster.setCoordinates(coordinates);
            raster.deleteTable();
            raster.dumpGeoTable();
            signalimage = SignalProcessing.renderSignalWithGenericTime(signal, zline, "Z");
            AnalysisLogger.getLogger().debug("Extractor: Map was dumped in table: " + tableNameValue);
            status = 80;
            AnalysisLogger.getLogger().debug("Extractor: Elapsed: Whole operation completed in " + ((double) (System.currentTimeMillis() - t0) / 1000d) + "s");
        } catch (Exception e) {
            e.printStackTrace();
            AnalysisLogger.getLogger().debug("Extractor: ERROR!: " + e.getLocalizedMessage());
            throw e;
        } finally {
            status = 100;
        }
    }

    @Override
    public StatisticalType getOutput() {
        LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
        List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
        templateHspec.add(TableTemplates.TIMESERIES);
        OutputTable p = new OutputTable(templateHspec, tableLabelValue, tableNameValue, "Output table");
        map.put("OutputTable", p);
        if (signalimage != null) {
            HashMap<String, Image> producedImages = new HashMap<String, Image>();
            producedImages.put("Z Modulations Visualization", signalimage);
            PrimitiveType images = new PrimitiveType("Images", producedImages, PrimitiveTypes.IMAGES, "Modulations of Z", "The modulations of Z");
            map.put("Images", images);
        }
        // generate a primitive type for the collection
        PrimitiveType outputm = new PrimitiveType(HashMap.class.getName(), map, PrimitiveTypes.MAP, "ResultsMap", "Results Map");
        return outputm;
    }
}

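The zline loop in the deleted compute() reconstructs the sampled depth axis, starting at extractor.zmin and stepping by the requested resolution, one tick per extracted sample. A minimal index-driven restatement of that bookkeeping (assuming signal.length ticks, and sidestepping the floating-point stop condition of the original loop):

public class DepthAxisSketch {

    /** Builds the depth axis for a water-column signal: zmin, zmin + step, ..., one tick per sample. */
    static double[] depthAxis(double zmin, double step, int samples) {
        double[] zline = new double[samples];
        for (int i = 0; i < samples; i++)
            zline[i] = zmin + i * step;
        return zline;
    }

    public static void main(String[] args) {
        // E.g. 5 samples downwards from the surface at a 100 m step: 0, 100, 200, 300, 400.
        for (double z : depthAxis(0, 100, 5))
            System.out.print(z + " ");
    }
}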
ZExtractionTable.java

@@ -1,56 +0,0 @@
package org.gcube.dataanalysis.geo.algorithms;

import java.util.ArrayList;
import java.util.List;

import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;

public class ZExtractionTable extends ZExtraction {

    @Override
    public String getDescription() {
        return "An algorithm to extract a time series of values associated to a table containing geospatial information. " +
                "The algorithm analyses the time series and automatically searches for hidden periodicities. " +
                "It produces one chart of the time series, one table containing the time series values and possibly the spectrogram.";
    }

    @Override
    public List<StatisticalType> getInputParameters() {
        List<StatisticalType> inputs = new ArrayList<StatisticalType>();
        List<TableTemplates> template = new ArrayList<TableTemplates>();
        template.add(TableTemplates.GENERIC);
        InputTable table = new InputTable(template, TableMatrixRepresentation.tableNameParameter, "A geospatial table containing at least x,y information", "");
        inputs.add(table);
        ColumnType columnx = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.xDimensionColumnParameter, "The column containing x (longitude) information", "x", false);
        inputs.add(columnx);
        ColumnType columny = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.yDimensionColumnParameter, "The column containing y (latitude) information", "y", false);
        inputs.add(columny);
        ColumnType columnt = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.zDimensionColumnParameter, "The column containing z information", "z", false);
        inputs.add(columnt);
        ColumnType columnvalue = new ColumnType(TableMatrixRepresentation.tableNameParameter, TableMatrixRepresentation.valueDimensionColumnParameter, "A column containing real valued features", "value", false);
        inputs.add(columnvalue);
        inputs.add(new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, TableMatrixRepresentation.filterParameter, "A filter on one of the columns (e.g. speed=2)", " "));
        IOHelper.addStringInput(inputs, TableMatrixRepresentation.timeDimensionColumnParameter, "The column containing time information (optional).", "time");
        List<StatisticalType> previnputs = super.getInputParameters();
        previnputs.remove(0);
        inputs.addAll(previnputs);
        DatabaseType.addDefaultDBPars(inputs);
        return inputs;
    }
}

EstimateFishingActivity.java

@@ -60,8 +60,7 @@ public class EstimateFishingActivity extends GridCWP2Coordinates{
        "adds bathymetry information to the table and classifies (point-by-point) fishing activity of the involved vessels according to two algorithms: " +
        "one based on speed (activity_class_speed output column) and the other based on speed and bathymetry (activity_class_speed_bath output column). " +
        "The algorithm produces new columns containing this information. " +
-       "This algorithm is based on the paper 'Deriving Fishing Monthly Effort and Caught Species' (Coro et al. 2013, in proc. of OCEANS - Bergen, 2013 MTS/IEEE). " +
-       "Example of input table (NAFO anonymised data): http://goo.gl/3auJkM";
+       "This algorithm is based on the paper 'Deriving Fishing Monthly Effort and Caught Species' (Coro et al. 2013, in proc. of OCEANS - Bergen, 2013 MTS/IEEE). ";
    }

    @Override

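Only the obsolete goo.gl example link was dropped from the description; the classification logic is untouched by this diff. For orientation, a sketch of point-by-point classification of vessel activity by speed follows; the thresholds below are hypothetical placeholders, not the cutoffs used by EstimateFishingActivity or the cited paper:

public class SpeedClassifierSketch {

    enum Activity { HAULING, FISHING, STEAMING }

    /**
     * Classifies a trajectory point by vessel speed (knots).
     * The thresholds are illustrative assumptions only.
     */
    static Activity classifyBySpeed(double speedKnots) {
        if (speedKnots < 2.0) return Activity.HAULING;  // near-stationary
        if (speedKnots < 5.0) return Activity.FISHING;  // trawling-range speed
        return Activity.STEAMING;                        // transit
    }

    public static void main(String[] args) {
        double[] speeds = {0.5, 3.2, 11.0};
        for (double s : speeds)
            System.out.println(s + " kn -> " + classifyBySpeed(s));
    }
}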
EstimateMonthlyFishingEffort.java

@@ -56,8 +56,7 @@ public class EstimateMonthlyFishingEffort extends GridCWP2Coordinates{
    return "An algorithm that estimates fishing exploitation at 0.5 degrees resolution from activity-classified vessels trajectories. " +
        "Produces a table with csquare codes, latitudes, longitudes and resolution and associated overall fishing hours in the time frame of the vessels activity. " +
        "Requires each activity point to be classified as Fishing or other. " +
-       "This algorithm is based on the paper 'Deriving Fishing Monthly Effort and Caught Species' (Coro et al. 2013, in proc. of OCEANS - Bergen, 2013 MTS/IEEE). " +
-       "Example of input table (NAFO anonymised data): http://goo.gl/3auJkM";
+       "This algorithm is based on the paper 'Deriving Fishing Monthly Effort and Caught Species' (Coro et al. 2013, in proc. of OCEANS - Bergen, 2013 MTS/IEEE).";
    }

    @Override

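Effort estimation at 0.5 degrees amounts to snapping each activity-classified point to its grid cell and summing fishing hours per cell. A minimal sketch of that aggregation step (the real algorithm additionally emits c-square codes, which are not reproduced here):

import java.util.HashMap;
import java.util.Map;

public class HalfDegreeGridSketch {

    /** Snaps a coordinate to the centre of its 0.5-degree cell. */
    static double toCellCentre(double coord) {
        return Math.floor(coord / 0.5) * 0.5 + 0.25;
    }

    public static void main(String[] args) {
        // (lat, lon, fishing hours) triples for three classified points.
        double[][] points = { {44.52, -47.14, 1.5}, {44.70, -47.30, 2.0}, {45.10, -46.90, 0.5} };
        Map<String, Double> effort = new HashMap<>();
        for (double[] p : points) {
            String cell = toCellCentre(p[0]) + "," + toCellCentre(p[1]);
            effort.merge(cell, p[2], Double::sum); // accumulate hours per cell
        }
        effort.forEach((cell, hours) -> System.out.println(cell + " -> " + hours + " h"));
    }
}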
TestGeoTiff.java

@@ -1,91 +0,0 @@
package org.gcube.dataanalysis.geo.test;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;

public class TestGeoTiff {

    static String cfg = "./cfg/";

    public static void main(String[] args) throws Exception {
        String layertitle = "WorldClimBioWCS2";
        long t0 = System.currentTimeMillis();
        AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);
        AlgorithmConfiguration config = new AlgorithmConfiguration();
        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
        config.setConfigPath(cfg);
        TimeSeriesExtractor intersector = new TimeSeriesExtractor(config);
        // intersector.takeTimeSlice(layertitle, 0, -180, 180, -10, 10, 0, 1, 1);
        // intersector.takeTimeSlice(layertitle, 0, -10, 10, -10, 10, 0, 1, 1);
        // intersector.takeTimeInstantMatrix(layertitle, 0, -180, 180, -90, 90, 0, 0.5, 0.5);
        double signal[] = intersector.extractT(layertitle, 0d, 0d, 0d, 0.5);
        // SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
        System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
        System.out.println("Signal: " + signal.length);
        BufferedWriter bw = new BufferedWriter(new FileWriter(new File("signal.txt")));
        for (double si : signal) {
            bw.write(si + ",");
        }
        bw.close();
        PeriodicityDetector pd = new PeriodicityDetector();
        double F = pd.detectFrequency(signal, true);
        System.out.println("Detected Frequency:" + F + " indecision [" + pd.lowermeanF + " , " + pd.uppermeanF + "]");
        System.out.println("Detected Period:" + pd.meanPeriod + " indecision [" + pd.lowermeanPeriod + " , " + pd.uppermeanPeriod + "]");
        System.out.println("Detected Periodicity Strength:" + pd.periodicityStrength + " " + pd.getPeriodicityStregthInterpretation());
    }

    public static void main1(String[] args) throws Exception {
        takeSignal();
    }

    public static void takeSignal() throws Exception {
        BufferedReader br = new BufferedReader(new FileReader(new File("signal.txt")));
        String line = br.readLine();
        double[] signal = null;
        while (line != null) {
            String[] el = line.split(",");
            signal = new double[el.length];
            int i = 0;
            for (String e : el) {
                signal[i] = Double.parseDouble(e);
                i++;
            }
            line = null;
        }
        br.close();
        // SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
        PeriodicityDetector pd = new PeriodicityDetector();
        // signal = pd.produceNoisySignal(2000, 1, 0.1f, 0f);
        // float freq = 1; // signal.length;
        // double F = pd.detectFrequency(signal, (int) freq, 0, freq, 1f, true);
        double F = pd.detectFrequency(signal, true);
        System.out.println("Detected Frequency:" + F + " indecision [" + pd.lowermeanF + " , " + pd.uppermeanF + "]");
        System.out.println("Detected Period:" + pd.meanPeriod + " indecision [" + pd.lowermeanPeriod + " , " + pd.uppermeanPeriod + "]");
        System.out.println("Detected Periodicity Strength:" + pd.periodicityStrength);
    }
}

TestSignal.java

@@ -1,111 +0,0 @@
package org.gcube.dataanalysis.geo.test;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.geo.matrixmodel.MatrixExtractor;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;

public class TestSignal {

    static String cfg = "./cfg/";

    public static void main(String[] args) throws Exception {
        // String layertitle = "Statistical Mean in [07-01-01 01:00] (3D) {World Ocean Atlas 09: Sea Water Temperature - annual: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/temperature_annual_1deg_ENVIRONMENT_OCEANS_.nc}";
        // String layertitle = "Mass Concentration of Chlorophyll in Sea Water in [03-30-13 01:00] (3D) {Mercator Ocean BIOMER1V1R1: Data extracted from dataset http://atoll-mercator.vlandata.cls.fr:44080/thredds/dodsC/global-analysis-bio-001-008-a}";
        // String layertitle = "Objectively Analyzed Climatology in [07-01-01 01:00] (3D) {World Ocean Atlas 09: Sea Water Temperature - annual: dods://thredds.research-infrastructures.eu/thredds/dodsC/public/netcdf/temperature_annual_1deg_ENVIRONMENT_OCEANS_.nc}";
        // temperature anomaly: long signal
        // String layertitle = "be24800d-7583-4efa-b925-e0d8760e0fd3";
        // temperature short periodic signal
        // String layertitle = "dffa504b-dbc8-4553-896e-002549f8f5d3";
        // String layertitle = "afd54b39-30f7-403a-815c-4f91c6c74c26";
        // String layertitle = "6411b110-7572-457a-a662-a16e4ff09e4e";
        // wind stress
        // String layertitle = "255b5a95-ad28-4fec-99e0-5d48112dd6ab";
        // wind speed
        // layertitle = "a116c9bc-9380-4d40-8374-aa0e376a6820";
        // nitrates
        // layertitle = "b1cd9549-d9d0-4c77-9532-b161a69fbd44";
        // ASC
        // String layertitle = "2c2304d1-681a-4f3a-8409-e8cdb5ed447f";
        // WFS
        // String layertitle = "0aac424b-5f5b-4fa6-97d6-4b4deee62b97";
        // Chlorophyll
        String layertitle = "c565e32c-c5b3-4964-b44f-06dc620563e9";
        long t0 = System.currentTimeMillis();
        AnalysisLogger.setLogger(cfg + AlgorithmConfiguration.defaultLoggerFile);
        AlgorithmConfiguration config = new AlgorithmConfiguration();
        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
        config.setConfigPath(cfg);
        TimeSeriesExtractor intersector = new TimeSeriesExtractor(config);
        // intersector.takeTimeSlice(layertitle, 0, -180, 180, -10, 10, 0, 1, 1);
        // intersector.takeTimeSlice(layertitle, 0, -10, 10, -10, 10, 0, 1, 1);
        // intersector.takeTimeInstantMatrix(layertitle, 0, -180, 180, -90, 90, 0, 0.5, 0.5);
        double signal[] = intersector.extractT(layertitle, 0d, 0d, 0d, 0.5);
        // SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
        System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
        System.out.println("Signal: " + signal.length);
        BufferedWriter bw = new BufferedWriter(new FileWriter(new File("signal.txt")));
        for (double si : signal) {
            bw.write(si + ",");
        }
        bw.close();
        PeriodicityDetector pd = new PeriodicityDetector();
        double F = pd.detectFrequency(signal, true);
        System.out.println("Detected Frequency:" + F + " indecision [" + pd.lowermeanF + " , " + pd.uppermeanF + "]");
        System.out.println("Detected Period:" + pd.meanPeriod + " indecision [" + pd.lowermeanPeriod + " , " + pd.uppermeanPeriod + "]");
        System.out.println("Detected Periodicity Strength:" + pd.periodicityStrength + " " + pd.getPeriodicityStregthInterpretation());
    }

    public static void main1(String[] args) throws Exception {
        takeSignal();
    }

    public static void takeSignal() throws Exception {
        BufferedReader br = new BufferedReader(new FileReader(new File("signal.txt")));
        String line = br.readLine();
        double[] signal = null;
        while (line != null) {
            String[] el = line.split(",");
            signal = new double[el.length];
            int i = 0;
            for (String e : el) {
                signal[i] = Double.parseDouble(e);
                i++;
            }
            line = null;
        }
        br.close();
        // SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
        PeriodicityDetector pd = new PeriodicityDetector();
        // signal = pd.produceNoisySignal(2000, 1, 0.1f, 0f);
        // float freq = 1; // signal.length;
        // double F = pd.detectFrequency(signal, (int) freq, 0, freq, 1f, true);
        double F = pd.detectFrequency(signal, true);
        System.out.println("Detected Frequency:" + F + " indecision [" + pd.lowermeanF + " , " + pd.uppermeanF + "]");
        System.out.println("Detected Period:" + pd.meanPeriod + " indecision [" + pd.lowermeanPeriod + " , " + pd.uppermeanPeriod + "]");
        System.out.println("Detected Periodicity Strength:" + pd.periodicityStrength);
    }
}

TestSignalTable.java

@@ -1,95 +0,0 @@
package org.gcube.dataanalysis.geo.test;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;

public class TestSignalTable {

    static String cfg = "./cfg/";

    public static void main(String[] args) throws Exception {
        AlgorithmConfiguration config = new AlgorithmConfiguration();
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");
        config.setParam("DatabaseUserName", "utente");
        config.setParam("DatabasePassword", "d4science");
        config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
        config.setParam("DatabaseDriver", "org.postgresql.Driver");
        // vessels
        config.setParam(TableMatrixRepresentation.tableNameParameter, "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
        // config.setParam(TableMatrixRepresentation.xDimensionColumnParameter, "x");
        // config.setParam(TableMatrixRepresentation.yDimensionColumnParameter, "y");
        // config.setParam(zDimensionColumnParameter, "");
        config.setParam(TableMatrixRepresentation.timeDimensionColumnParameter, "datetime");
        config.setParam(TableMatrixRepresentation.valueDimensionColumnParameter, "speed");
        config.setParam(TableMatrixRepresentation.filterParameter, "speed<2");
        AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
        config.setConfigPath(cfg);
        TimeSeriesExtractor intersector = new TimeSeriesExtractor(config);
        long t0 = System.currentTimeMillis();
        // double signal[] = intersector.takeSignalInTime("table", -47.14, 44.52, 0d, 0.5);
        double signal[] = intersector.extractT("table");
        // SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
        System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
        System.out.println("Signal: " + signal.length);
        PeriodicityDetector pd = new PeriodicityDetector();
        double F = pd.detectFrequency(signal, true);
        System.out.println("Detected Frequency:" + F + " indecision [" + pd.lowermeanF + " , " + pd.uppermeanF + "]");
        System.out.println("Detected Period:" + pd.meanPeriod + " indecision [" + pd.lowermeanPeriod + " , " + pd.uppermeanPeriod + "]");
        System.out.println("Detected Periodicity Strength:" + pd.periodicityStrength);
    }

    public static void main1(String[] args) throws Exception {
        takeSignal();
    }

    public static void takeSignal() throws Exception {
        BufferedReader br = new BufferedReader(new FileReader(new File("signal.txt")));
        String line = br.readLine();
        double[] signal = null;
        while (line != null) {
            String[] el = line.split(",");
            signal = new double[el.length];
            int i = 0;
            for (String e : el) {
                signal[i] = Double.parseDouble(e);
                i++;
            }
            line = null;
        }
        br.close();
        // SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
        PeriodicityDetector pd = new PeriodicityDetector();
        // signal = pd.produceNoisySignal(2000, 1, 0.1f, 0f);
        // float freq = 1; // signal.length;
        // double F = pd.detectFrequency(signal, (int) freq, 0, freq, 1f, true);
        double F = pd.detectFrequency(signal, true);
        System.out.println("Detected Frequency:" + F + " indecision [" + pd.lowermeanF + " , " + pd.uppermeanF + "]");
        System.out.println("Detected Period:" + pd.meanPeriod + " indecision [" + pd.lowermeanPeriod + " , " + pd.uppermeanPeriod + "]");
        System.out.println("Detected Period Strength:" + pd.periodicityStrength);
    }
}

TestWaterColumnGeoTiff.java

@@ -1,36 +0,0 @@
package org.gcube.dataanalysis.geo.test;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.ZExtractor;

public class TestWaterColumnGeoTiff {

    public static void main(String[] args) throws Exception {
        AlgorithmConfiguration config = new AlgorithmConfiguration();
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");
        String layertitle = "WorldClimBioGeoTiffTest2";
        // layertitle = "WorldClimBioWCS2";
        AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
        ZExtractor extractor = new ZExtractor(config);
        long t0 = System.currentTimeMillis();
        double watercolumn[] = extractor.extractZ(layertitle, 18.620429d, 20.836419d, 0, 0);
        System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
        System.out.println("Signal: " + watercolumn.length);
        System.out.println("Signal first element: " + watercolumn[0]);
        SignalProcessing.displaySignalWithGenericTime(watercolumn, 0, 1, "signal");
    }
}

TestWaterColumnTemperature.java

@@ -1,37 +0,0 @@
package org.gcube.dataanalysis.geo.test;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;
import org.gcube.dataanalysis.geo.matrixmodel.ZExtractor;

public class TestWaterColumnTemperature {

    public static void main(String[] args) throws Exception {
        AlgorithmConfiguration config = new AlgorithmConfiguration();
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");
        String layertitle = "6411b110-7572-457a-a662-a16e4ff09e4e";
        /*
        layertitle = "be24800d-7583-4efa-b925-e0d8760e0fd3";
        layertitle = "320652c8-e986-4428-9306-619d9014822a";
        layertitle = "0aac424b-5f5b-4fa6-97d6-4b4deee62b97";
        */
        AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
        config.setGcubeScope("/d4science.research-infrastructures.eu/gCubeApps/BiodiversityLab");
        ZExtractor extractor = new ZExtractor(config);
        long t0 = System.currentTimeMillis();
        double watercolumn[] = extractor.extractZ(layertitle, 0, 0, 0, 100);
        System.out.println("ELAPSED TIME: " + (System.currentTimeMillis() - t0));
        System.out.println("Signal: " + watercolumn.length);
        SignalProcessing.displaySignalWithGenericTime(watercolumn, 0, 1, "signal");
    }
}