Geospatial analysis now produces TimeSeries tables
git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineGeoSpatialExtension@99087 82a268e6-3cf1-43bd-a215-b396298e98cf
parent eba1ae56ec
commit b8cbe95376
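In short: the getOutput() methods of TimeExtraction, XYExtraction and ZExtraction now declare their output table with the TIMESERIES table template, and RasterTable renames its temporal column from t to time, so downstream components recognize the result as a time series. The flattened diff below has lost its +/- markers, so the direction of each change is inferred from the commit title and line order; a minimal sketch of the inferred new output declaration, reusing only names that appear in the hunks below:

// Sketch only: inferred new state of the getOutput() table declaration (see the hunks below).
List<TableTemplates> templates = new ArrayList<TableTemplates>();
templates.add(TableTemplates.TIMESERIES);
OutputTable output = new OutputTable(templates, tableLabelValue, tableNameValue, "Output table");
map.put("OutputTable", output);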
@@ -8,7 +8,6 @@ import java.util.List;
import java.util.Map;

import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
@@ -23,7 +22,6 @@ import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.gcube.dataanalysis.geo.connectors.wfs.WFS;
import org.gcube.dataanalysis.geo.interfaces.GISDataConnector;
import org.gcube.dataanalysis.geo.matrixmodel.RasterTable;
import org.gcube.dataanalysis.geo.matrixmodel.TimeSeriesExtractor;

@@ -36,6 +34,7 @@ public class TimeExtraction extends XYExtraction{
public static String minFrequency = "MinFrequency";
public static String maxFrequency = "MaxFrequency";
public static String expectedFrequencyError = "FrequencyError";
public static String FFTSamplesParam = "FFTSamples";

public double xValue;
public double yValue;
@@ -44,6 +43,7 @@ public class TimeExtraction extends XYExtraction{
public double minFrequencyValue;
public double maxFrequencyValue;
public double expectedFrequencyErrorValue;
public int FFTSamples;
public PeriodicityDetector pd;
public double signal[];
public double timeline[];
@@ -66,9 +66,10 @@ public class TimeExtraction extends XYExtraction{
IOHelper.addDoubleInput(inputs, resolution, "Extraction point resolution", "0.5");

IOHelper.addIntegerInput(inputs, samplingFrequency, "Sampling frequency in Hz. Leave it to -1 if unknown or under 1", "-1");
IOHelper.addDoubleInput(inputs, minFrequency, "Minimum expected frequency in Hz. Can be decimal", "-1");
IOHelper.addDoubleInput(inputs, maxFrequency, "Maximum expected frequency in Hz. Can be decimal", "-1");
IOHelper.addDoubleInput(inputs, expectedFrequencyError, "Expected precision on periodicity detection in Hz or 1/samples. Can be decimal and depends on the signal length. Default is 0.1", "0.1");
// IOHelper.addDoubleInput(inputs, minFrequency, "Minimum expected frequency in Hz. Can be decimal", "-1");
// IOHelper.addDoubleInput(inputs, maxFrequency, "Maximum expected frequency in Hz. Can be decimal", "-1");
//IOHelper.addDoubleInput(inputs, expectedFrequencyError, "Expected precision on periodicity detection in Hz or 1/samples. Can be decimal and depends on the signal length. Default is 0.1", "0.1");
// IOHelper.addIntegerInput(inputs, FFTSamplesParam, "Number of samples to use in the Fourier Analysis. All samples will be used at maximum.", "100");

DatabaseType.addDefaultDBPars(inputs);

@@ -84,9 +85,9 @@ public class TimeExtraction extends XYExtraction{
yValue = Double.parseDouble(IOHelper.getInputParameter(config, y));
resolutionValue=Double.parseDouble(IOHelper.getInputParameter(config, resolution));
samplingFrequencyValue=Integer.parseInt(IOHelper.getInputParameter(config, samplingFrequency));
minFrequencyValue=Double.parseDouble(IOHelper.getInputParameter(config, minFrequency));
maxFrequencyValue=Double.parseDouble(IOHelper.getInputParameter(config, maxFrequency));
expectedFrequencyErrorValue=Double.parseDouble(IOHelper.getInputParameter(config, expectedFrequencyError));
// minFrequencyValue=Double.parseDouble(IOHelper.getInputParameter(config, minFrequency));
// maxFrequencyValue=Double.parseDouble(IOHelper.getInputParameter(config, maxFrequency));
expectedFrequencyErrorValue=-1;

AnalysisLogger.getLogger().debug("Extraction: Z " + zValue);
AnalysisLogger.getLogger().debug("Extraction: X " + xValue);
@@ -136,50 +137,10 @@ public class TimeExtraction extends XYExtraction{
AnalysisLogger.getLogger().debug("ELAPSED TIME: "+(System.currentTimeMillis()-t0));
AnalysisLogger.getLogger().debug("Signal: "+signal.length);
status = 30;
if (signal.length>1){
AnalysisLogger.getLogger().debug("Detecting Periodicity..");
this.pd = new PeriodicityDetector();
double F = -1;
if (samplingFrequencyValue>0&&minFrequencyValue>0&&maxFrequencyValue>0&&expectedFrequencyErrorValue>0)
F = pd.detectFrequency(signal,samplingFrequencyValue,(float)minFrequencyValue,(float)maxFrequencyValue,(float)expectedFrequencyErrorValue,-1,false);
else
F = pd.detectFrequency(signal,false);

AnalysisLogger.getLogger().debug("Detected Frequency:"+F+" indecision ["+pd.lowermeanF+" , "+pd.uppermeanF+"]");
AnalysisLogger.getLogger().debug("Detected Period:"+pd.meanPeriod+" indecision ["+pd.lowermeanPeriod+" , "+pd.uppermeanPeriod+"]");
AnalysisLogger.getLogger().debug("Detected Periodicity Strength:"+pd.periodicityStrength);
AnalysisLogger.getLogger().debug("Extractor: MatrixExtractor initialized");

String uom = "samples";
if (samplingFrequencyValue>0)
uom = "s";
String frequom = "1/samples";
if (samplingFrequencyValue>0)
frequom = "Hz";

if (pd.periodicityStrength>0){
outputParameters.put("Detected Periodicity", ""+MathFunctions.roundDecimal(pd.meanPeriod,2)+" ("+uom+") "+" indecision ["+MathFunctions.roundDecimal(pd.lowermeanPeriod,2)+" , "+MathFunctions.roundDecimal(pd.uppermeanPeriod,2)+"]");
outputParameters.put("Periodicity Strength", ""+MathFunctions.roundDecimal(pd.periodicityStrength,2)+" ("+pd.getPeriodicityStregthInterpretation()+")");
outputParameters.put("Detected Frequency",""+MathFunctions.roundDecimal(F,2)+" ("+frequom+") "+" indecision ["+MathFunctions.roundDecimal(pd.lowermeanF,2)+" , "+MathFunctions.roundDecimal(pd.uppermeanF,2)+"]");
}
else
{
outputParameters.put("Detected Periodicity", "No Periodicities");
outputParameters.put("Periodicity Strength", "-"+MathFunctions.roundDecimal(pd.periodicityStrength,2)+" ("+pd.getPeriodicityStregthInterpretation()+")");
outputParameters.put("Detected Frequency","-");
}
outputParameters.put("Maximum Frequency in the Spectrogram", ""+MathFunctions.roundDecimal(pd.maxFrequency,2) +" ("+frequom+") ");
outputParameters.put("Minimum Frequency in the Spectrogram", ""+MathFunctions.roundDecimal(pd.minFrequency,2) + " ("+frequom+") ");


}
else
AnalysisLogger.getLogger().debug("Extractor: Signal is only one point!");

status = 70;

AnalysisLogger.getLogger().debug("Extractor: Matrix Extracted");
AnalysisLogger.getLogger().debug("Extractor: ****Rasterizing grid into table****");

double matrix[][] = new double[1][];
matrix[0] = signal;
HashMap<Double,Map<String, String>> polygonsFeatures = null;
@@ -203,12 +164,8 @@ public class TimeExtraction extends XYExtraction{
raster.deleteTable();
raster.dumpGeoTable();


// spectrogramImage = SignalProcessing.renderSignalSpectrogram(signal, timeline, pd.currentSamplingRate, pd.currentWindowAnalysisSamples, pd.currentWindowShiftSamples);
if (pd!=null && pd.currentspectrum!=null && pd.currentspectrum.length>0){
signalimage = SignalProcessing.renderSignalWithGenericTime(signal, timeline, "Signal");
spectrogramImage = SignalProcessing.renderSignalSpectrogram2(pd.currentspectrum);
}
signalimage = SignalProcessing.renderSignalWithGenericTime(signal, timeline, "Signal");

AnalysisLogger.getLogger().debug("Extractor: Map was dumped in table: " + tableNameValue);
status = 80;
AnalysisLogger.getLogger().debug("Extractor: Elapsed: Whole operation completed in " + ((double) (System.currentTimeMillis() - t0) / 1000d) + "s");
@@ -229,10 +186,10 @@ public class TimeExtraction extends XYExtraction{
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();

List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
templateHspec.add(TableTemplates.GENERIC);
templateHspec.add(TableTemplates.TIMESERIES);
OutputTable p = new OutputTable(templateHspec, tableLabelValue, tableNameValue, "Output table");
map.put("OutputTable", p);
if (pd!=null && signal!=null && signal.length>0){
if (signal!=null && signal.length>0){
HashMap<String, Image> producedImages = new HashMap<String, Image>();
if (signalimage!=null)
producedImages.put("Time Series Visualization", signalimage);
@@ -193,7 +193,7 @@ public class XYExtraction implements Transducerer {
@Override
public StatisticalType getOutput() {
List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
templateHspec.add(TableTemplates.GENERIC);
templateHspec.add(TableTemplates.TIMESERIES);
OutputTable p = new OutputTable(templateHspec, tableLabelValue, tableNameValue, "Output table");
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();

@@ -169,7 +169,7 @@ public class ZExtraction extends XYExtraction{
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();

List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
templateHspec.add(TableTemplates.GENERIC);
templateHspec.add(TableTemplates.TIMESERIES);

OutputTable p = new OutputTable(templateHspec, tableLabelValue, tableNameValue, "Output table");
map.put("OutputTable", p);
@@ -26,6 +26,7 @@ public class GeothermalDataMetadataInsertDev {
//Temperature map at 3 km depth

public static void main(String[] args) throws Exception{
/*
TemperatureIsolineat3kmdepth();
SurfaceHeatFlowMapofItaly();
TemperatureIsolineat2kmdepth();
@@ -34,6 +35,12 @@ public class GeothermalDataMetadataInsertDev {
SurfaceHeatFlowContourMapofItaly();
Temperaturemapat2kmdepth();
Temperaturemapat3kmdepth();
Energy();
Licenses();

*/
Industry();
TrainingCenter();
}

private static void TemperatureIsolineat3kmdepth() throws Exception{
@@ -260,4 +267,132 @@ public class GeothermalDataMetadataInsertDev {
metadataInserter.customMetaDataInsert(urls,protocols);
}

private static void Energy() throws Exception{
GenericLayerMetadata metadataInserter = new GenericLayerMetadata();
metadataInserter.setGeonetworkUrl(geonetworkurl);
metadataInserter.setGeonetworkUser(user);
metadataInserter.setGeonetworkPwd(password);
metadataInserter.setResolution(0);
metadataInserter.setXLeftLow(-180);
metadataInserter.setYLeftLow(-90);
metadataInserter.setXRightUpper(180);
metadataInserter.setYRightUpper(90);

metadataInserter.setTitle("GeothermalManagementArea_ERANET");
metadataInserter.setAbstractField("GeothermalManagementArea_ERANET");
metadataInserter.setCustomTopics("geothermal energy","map","Italy","Energy resources","EGIP","D4Science");
metadataInserter.setCategoryTypes("_"+TopicCategory.GEOSCIENTIFIC_INFORMATION.name()+"_"+"_"+TopicCategory.UTILITIES_COMMUNICATION.name()+"_");
metadataInserter.setResolution(0);

metadataInserter.setLayerName("IGG:GeothermalManagementArea_ERANET");

/*
String [] urls = {
"http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/timeseriesws/wms?service=WMS&version=1.1.0&request=GetMap&layers=timeseriesws:GeothermalManagementArea2&styles=&bbox=-24.5465240478516,35.8154258728027,44.8349914550781,66.5346374511719&width=745&height=330&srs=EPSG:4326",
"http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/timeseriesws/wfs?service=wfs&version=1.1.0&REQUEST=GetFeature&TYPENAME=timeseriesws:GeothermalManagementArea2&srsName=urn:x-ogc:def:crs:EPSG:4326"
};
*/


String [] urls = {
"http://repoigg.services.iit.cnr.it/geoserver/IGG/wms?service=WMS&version=1.1.0&request=GetMap&layers=IGG:GeothermalManagementArea_ERANET&styles=&bbox=-24.546524000000005,35.49220699999999,44.83498800000001,66.563774&width=736&height=330&srs=EPSG:4326&format=application/openlayers",
"http://repoigg.services.iit.cnr.it/geoserver/IGG/ows?service=WFS&version=1.1.0&request=GetFeature&TYPENAME=IGG:GeothermalManagementArea_ERANET&srsName=urn:x-ogc:def:crs:EPSG:4326"
};

String [] protocols = {"WMS","WFS"};

metadataInserter.customMetaDataInsert(urls,protocols);
}

private static void Licenses() throws Exception{
GenericLayerMetadata metadataInserter = new GenericLayerMetadata();
metadataInserter.setGeonetworkUrl(geonetworkurl);
metadataInserter.setGeonetworkUser(user);
metadataInserter.setGeonetworkPwd(password);
metadataInserter.setResolution(0);
metadataInserter.setXLeftLow(-180);
metadataInserter.setYLeftLow(-90);
metadataInserter.setXRightUpper(180);
metadataInserter.setYRightUpper(90);

metadataInserter.setTitle("Licences");
metadataInserter.setAbstractField("Exploration and production licenses and (projected) power production");
metadataInserter.setCustomTopics("geothermal energy","map","Italy","Licenses","EGIP","D4Science");
metadataInserter.setCategoryTypes("_"+TopicCategory.GEOSCIENTIFIC_INFORMATION.name()+"_"+"_"+TopicCategory.UTILITIES_COMMUNICATION.name()+"_");
metadataInserter.setResolution(0);

metadataInserter.setLayerName("IGG:licence");


String [] urls = {
"http://repoigg.services.iit.cnr.it/geoserver/IGG/wms?service=WMS&version=1.1.1&request=GetMap&layers=IGG:licence&styles=&bbox=8.519806711445952,36.75219999995809,15.243165,45.612201456761284&width=388&height=512&srs=EPSG:4326&format=application/openlayers",
"http://repoigg.services.iit.cnr.it/geoserver/IGG/ows?service=WFS&version=1.1.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&typeName=IGG:licence"
};

String [] protocols = {"WMS","WFS"};

metadataInserter.customMetaDataInsert(urls,protocols);
}


private static void Industry() throws Exception{
GenericLayerMetadata metadataInserter = new GenericLayerMetadata();
metadataInserter.setGeonetworkUrl(geonetworkurl);
metadataInserter.setGeonetworkUser(user);
metadataInserter.setGeonetworkPwd(password);
metadataInserter.setResolution(0);
metadataInserter.setXLeftLow(-180);
metadataInserter.setYLeftLow(-90);
metadataInserter.setXRightUpper(180);
metadataInserter.setYRightUpper(90);

metadataInserter.setTitle("Industry");
metadataInserter.setAbstractField("Industries involved in geothermal activities refer to all companies that produce components both for power production and the direct use of heat");
metadataInserter.setCustomTopics("geothermal energy","map","Italy","Industry","EGIP","D4Science");
metadataInserter.setCategoryTypes("_"+TopicCategory.GEOSCIENTIFIC_INFORMATION.name()+"_"+"_"+TopicCategory.UTILITIES_COMMUNICATION.name()+"_");
metadataInserter.setResolution(0);

metadataInserter.setLayerName("IGG:Industry");


String [] urls = {
"http://repoigg.services.iit.cnr.it/geoserver/IGG/wms?service=WMS&version=1.1.1&request=GetMap&layers=IGG:Industry&styles=&bbox=9.189578001171471,41.909917999980756,12.480876999984194,45.52478199898418&width=466&height=512&srs=EPSG:4326&format=application/openlayers",
"http://repoigg.services.iit.cnr.it/geoserver/IGG/ows?service=WFS&version=1.1.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&typeName=IGG:Industry"
};

String [] protocols = {"WMS","WFS"};

metadataInserter.customMetaDataInsert(urls,protocols);
}

private static void TrainingCenter() throws Exception{
GenericLayerMetadata metadataInserter = new GenericLayerMetadata();
metadataInserter.setGeonetworkUrl(geonetworkurl);
metadataInserter.setGeonetworkUser(user);
metadataInserter.setGeonetworkPwd(password);
metadataInserter.setResolution(0);
metadataInserter.setXLeftLow(-180);
metadataInserter.setYLeftLow(-90);
metadataInserter.setXRightUpper(180);
metadataInserter.setYRightUpper(90);

metadataInserter.setTitle("TrainingCenter");
metadataInserter.setAbstractField("List of education and research centres with geothermal courses and lectures: The list is in a table format and includes the type, the name, the location and the URL");
metadataInserter.setCustomTopics("geothermal energy","map","Italy","TrainingCenter","EGIP","D4Science");
metadataInserter.setCategoryTypes("_"+TopicCategory.GEOSCIENTIFIC_INFORMATION.name()+"_"+"_"+TopicCategory.UTILITIES_COMMUNICATION.name()+"_");
metadataInserter.setResolution(0);

metadataInserter.setLayerName("IGG:TrainingCenter");


String [] urls = {
"http://repoigg.services.iit.cnr.it/geoserver/IGG/wms?service=WMS&version=1.1.1&request=GetMap&layers=IGG:TrainingCenter&styles=&bbox=7.673140015606858,37.50289999999999,16.861828000003374,45.635315999999726&width=512&height=453&srs=EPSG:4326&format=application/openlayers",
"http://repoigg.services.iit.cnr.it/geoserver/IGG/ows?service=WFS&version=1.1.0&request=GetFeature&srsName=urn:x-ogc:def:crs:EPSG:4326&typeName=IGG:TrainingCenter"
};

String [] protocols = {"WMS","WFS"};

metadataInserter.customMetaDataInsert(urls,protocols);
}

}
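The four new registration methods above (Energy, Licenses, Industry, TrainingCenter) all follow the same GenericLayerMetadata pattern: GeoNetwork credentials, a global bounding box, title/abstract/topics/category, the GeoServer layer name, and finally customMetaDataInsert with paired WMS/WFS endpoints. A condensed sketch of that pattern, with the illustrative values taken from Industry() and the URLs abbreviated as placeholders:

// Condensed sketch of the registration pattern used by the four methods above (values from Industry()).
GenericLayerMetadata metadataInserter = new GenericLayerMetadata();
metadataInserter.setGeonetworkUrl(geonetworkurl);
metadataInserter.setGeonetworkUser(user);
metadataInserter.setGeonetworkPwd(password);
metadataInserter.setXLeftLow(-180);
metadataInserter.setYLeftLow(-90);
metadataInserter.setXRightUpper(180);
metadataInserter.setYRightUpper(90);
metadataInserter.setResolution(0);
metadataInserter.setTitle("Industry");
metadataInserter.setCustomTopics("geothermal energy","map","Italy","Industry","EGIP","D4Science");
metadataInserter.setCategoryTypes("_"+TopicCategory.GEOSCIENTIFIC_INFORMATION.name()+"_"+"_"+TopicCategory.UTILITIES_COMMUNICATION.name()+"_");
metadataInserter.setLayerName("IGG:Industry");
String[] urls = { "<WMS GetMap URL>", "<WFS GetFeature URL>" };   // placeholders; full URLs are listed above
String[] protocols = {"WMS","WFS"};
metadataInserter.customMetaDataInsert(urls, protocols);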
@@ -38,11 +38,11 @@ public class RasterTable {
private AlgorithmConfiguration configuration;
private String tablename = "rstr" + ("" + UUID.randomUUID()).replace("-", "");
// static String createTableStatement = "CREATE TABLE %1$s (id serial, csquarecode character varying, x real, y real, z real, t real, fvalue real)";
static String createTableStatementStandard = "CREATE TABLE %1$s (id serial, csquarecode character varying, x real, y real, z real, t real, fvalue character varying)";
static String createTableStatementWithFields = "CREATE TABLE %1$s (id serial, csquarecode character varying, approx_x real, approx_y real, z real, t real, %2$s)";
static String createTableStatementStandard = "CREATE TABLE %1$s (id serial, csquarecode character varying, x real, y real, z real, time real, fvalue character varying)";
static String createTableStatementWithFields = "CREATE TABLE %1$s (id serial, csquarecode character varying, approx_x real, approx_y real, z real, time real, %2$s)";

static String columnsnamesStandard = "csquarecode, x , y , z , t, fvalue";
static String columnsnamesWithFields = "csquarecode, approx_x , approx_y , z , t , %1$s";
static String columnsnamesStandard = "csquarecode, x , y , z , time, fvalue";
static String columnsnamesWithFields = "csquarecode, approx_x , approx_y , z , time , %1$s";

public static String csquareColumn = "csquarecode";
public static String valuesColumn = "fvalue";
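The DDL strings above are format templates whose %1$s placeholder receives the generated table name. Assuming the time-named variants are the new ones (the flattened diff no longer shows +/- markers, so this is inferred from line order and the commit title), a hedged expansion of the standard template would look like this; the String.format call is illustrative and not part of this hunk:

// Illustrative expansion of the (apparently new) standard DDL template:
// the temporal column is now named "time" rather than "t".
String tablename = "rstr" + ("" + UUID.randomUUID()).replace("-", "");
String ddl = String.format(
    "CREATE TABLE %1$s (id serial, csquarecode character varying, x real, y real, z real, time real, fvalue character varying)",
    tablename);
// e.g. CREATE TABLE rstr0f3e... (id serial, csquarecode character varying, x real, y real, z real, time real, fvalue character varying)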
@@ -8,6 +8,7 @@ import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.ecoengine.transducers.TimeSeriesAnalysis;
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
import org.gcube.dataanalysis.geo.connectors.table.TableMatrixRepresentation;

@@ -15,12 +16,13 @@ public class TestTimeExtraction {

// static AlgorithmConfiguration[] configs = { testTimeExtractionTable(), testTimeExtractionNetCDF(),testTimeExtractionTable2()};
// static AlgorithmConfiguration[] configs = { testTimeExtractionNetCDF()};
static AlgorithmConfiguration[] configs = { testTExtractionAquamaps()};
// static AlgorithmConfiguration[] configs = { testTExtractionAquamaps()};
static AlgorithmConfiguration[] configs = { testTExtractionTemperature()};

public static void main(String[] args) throws Exception {

System.out.println("TEST 1");

TimeSeriesAnalysis.display=true;
for (int i = 0; i < configs.length; i++) {
AnalysisLogger.getLogger().debug("Executing: "+configs[i].getAgent());
List<ComputationalAgent> trans = null;
@@ -34,6 +36,38 @@ public class TestTimeExtraction {
}




private static AlgorithmConfiguration testTExtractionTemperature() {

AlgorithmConfiguration config = new AlgorithmConfiguration();

config.setAgent("TIMEEXTRACTION");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setGcubeScope("/gcube/devsec/devVRE");

config.setParam("Layer","b875403f-1e63-44e1-a1c0-2296d3e147a6");

config.setParam("OutputTableName","testtextractiontemp2");
config.setParam("OutputTableLabel","testtextractiontemp2");

config.setParam("X","0");
config.setParam("Y","0");
config.setParam("Resolution","0.5");
config.setParam("Z","0");
config.setParam("SamplingFreq","-1");
// config.setParam("MinFrequency","-1");
// config.setParam("MaxFrequency","-1");
// config.setParam("FFTSamples","100");

return config;
}

private static AlgorithmConfiguration testTExtractionAquamaps() {

AlgorithmConfiguration config = new AlgorithmConfiguration();
@@ -0,0 +1,49 @@
package org.gcube.dataanalysis.geo.test.maps;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.geo.algorithms.PolygonMapsCreator;

public class TestMapCreation {


static String cfg = "./cfg/";
public static void main(String[] args) throws Exception{
String layertitle2 = "4e5c1bbf-f5ce-4b66-a67c-14d7d9920aa0";
String layertitle = "38b2eb74-1c07-4569-8a81-36ac2f973146";

AnalysisLogger.setLogger(cfg+AlgorithmConfiguration.defaultLoggerFile);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath(cfg);
config.setGcubeScope("/gcube/devsec/statVRE");
config.setPersistencePath("./");

config.setParam("MapName","Test Polygonal Map");
config.setParam("InputTable","occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam("xDimension","decimallongitude");
config.setParam("yDimension","decimallatitude");
config.setParam("Info","recordedby");
config.setParam("Resolution","0.5");

config.setParam("DatabaseUserName","utente");
config.setParam("DatabasePassword","d4science");
config.setParam("DatabaseURL","jdbc:postgresql://statistical-manager.d.d4science.org/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setParam("Layer_1",layertitle);
config.setParam("Layer_2",layertitle2);
config.setParam("ValuesComparisonThreshold","0.1");
config.setParam("Z","0");

config.setParam("user", "postgres");
config.setParam("password", "d4science2");
config.setParam("STOREURL","jdbc:postgresql://geoserver-test.d4science-ii.research-infrastructures.eu/timeseriesgisdb");
config.setParam("driver", "org.postgresql.Driver");
config.setParam("dialect", "org.hibernatespatial.postgis.PostgisDialect");

PolygonMapsCreator mc = new PolygonMapsCreator();
mc.setConfiguration(config);
mc.init();
mc.compute();

}
}