git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngine@58808 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
parent e6fe58bb2a
commit ab15cf8e6d
@@ -0,0 +1,158 @@
package org.gcube.dataanalysis.ecoengine.test.checks;

import java.util.List;

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.InterpolateTables.INTERPOLATIONFUNCTIONS;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;

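// Manual checks for the Transducerer algorithms exposed by TransducerersFactory:
// each testConfigLocalN() below builds the AlgorithmConfiguration for one agent,
// and Regressor.process() executes the selected transducer.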
public class TestTransducers {

    public static void main(String[] args) throws Exception {

        System.out.println("TEST 1");
        List<Transducerer> trans = null;

        /*
        // Tests for the bioclimate, interpolation, merging and sea/land-filter agents
        // are currently disabled; only the duplicate deleter below is exercised.
        trans = TransducerersFactory.getTransducerers(testConfigLocal());
        trans.get(0).init();
        Regressor.process(trans.get(0));
        trans = null;

        trans = TransducerersFactory.getTransducerers(testConfigLocal2());
        trans.get(0).init();
        Regressor.process(trans.get(0));
        trans = null;

        trans = TransducerersFactory.getTransducerers(testConfigLocal3());
        trans.get(0).init();
        Regressor.process(trans.get(0));
        trans = null;

        trans = TransducerersFactory.getTransducerers(testConfigLocal4());
        trans.get(0).init();
        Regressor.process(trans.get(0));
        trans = null;

        trans = TransducerersFactory.getTransducerers(testConfigLocal5());
        trans.get(0).init();
        Regressor.process(trans.get(0));
        trans = null;

        trans = TransducerersFactory.getTransducerers(testConfigLocal6());
        trans.get(0).init();
        Regressor.process(trans.get(0));
        trans = null;
        */

        // Test 7: occurrence duplicate deletion (the only test currently enabled)
        trans = TransducerersFactory.getTransducerers(testConfigLocal7());
        trans.get(0).init();
        Regressor.process(trans.get(0));
        trans = null;
    }

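    // Configuration builders, one per Transducerer agent under test.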
    private static AlgorithmConfiguration testConfigLocal() {

        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("BIOCLIMATE_HSPEC");
        config.setParam("HSPEC_TABLE_LIST", "hspec_validation" + AlgorithmConfiguration.getListSeparator() + "hspec_validation2");
        config.setParam("HSPEC_TABLE_NAMES", "test" + AlgorithmConfiguration.getListSeparator() + "test");
        config.setParam("Threshold", "0.5");

        return config;
    }

    private static AlgorithmConfiguration testConfigLocal2() {

        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("BIOCLIMATE_HCAF");
        config.setParam("HCAF_TABLE_LIST", "hcaf_d" + AlgorithmConfiguration.getListSeparator() + "hcaf_d_2016_linear_01332632269756" + AlgorithmConfiguration.getListSeparator() + "hcaf_d_2016_linear_01336062995861" + AlgorithmConfiguration.getListSeparator() + "hcaf_d_2050");
        config.setParam("HCAF_TABLE_NAMES", "test" + AlgorithmConfiguration.getListSeparator() + "test" + AlgorithmConfiguration.getListSeparator() + "test" + AlgorithmConfiguration.getListSeparator() + "test");

        return config;
    }

    private static AlgorithmConfiguration testConfigLocal3() {

        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("BIOCLIMATE_HSPEN");
        config.setParam("HSPEN_TABLE_LIST", "hspen" + AlgorithmConfiguration.getListSeparator() + "hspen_2016" + AlgorithmConfiguration.getListSeparator() + "hspen_2020" + AlgorithmConfiguration.getListSeparator() + "hspen_2050");
        config.setParam("HSPEN_TABLE_NAMES", "test" + AlgorithmConfiguration.getListSeparator() + "test" + AlgorithmConfiguration.getListSeparator() + "test" + AlgorithmConfiguration.getListSeparator() + "test");

        return config;
    }

    private static AlgorithmConfiguration testConfigLocal4() {

        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("HCAF_INTERPOLATION");

        config.setParam("FirstHCAF", "hcaf_d");
        config.setParam("SecondHCAF", "hcaf_d_2050");
        config.setParam("YearStart", "2012");
        config.setParam("YearEnd", "2050");
        config.setParam("NumberOfInterpolations", "2");
        config.setParam("InterpolationFunction", INTERPOLATIONFUNCTIONS.LINEAR.name());

        return config;
    }

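    // OCCURRENCES_MERGER: merges the left and right white-shark occurrence tables into
    // finalTableName, using the column mappings, spatial tolerance and confidence below.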
    private static AlgorithmConfiguration testConfigLocal5() {

        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("OCCURRENCES_MERGER");

        config.setParam("longitudeColumn", "decimallongitude");
        config.setParam("latitudeColumn", "decimallatitude");
        config.setParam("recordedByColumn", "recordedby");
        config.setParam("scientificNameColumn", "scientificname");
        config.setParam("eventDateColumn", "eventdate");
        config.setParam("lastModificationColumn", "modified");
        config.setParam("rightTableName", "whitesharkoccurrences2");
        config.setParam("leftTableName", "whitesharkoccurrences1");
        config.setParam("finalTableName", "whitesharkoccurrencesmerged");
        config.setParam("spatialTolerance", "0.5");
        config.setParam("confidence", "80");

        return config;
    }

    private static AlgorithmConfiguration testConfigLocal6() {

        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("OCCURRENCES_INSEAS_ONEARTH");

        config.setParam("longitudeColumn", "decimallongitude");
        config.setParam("latitudeColumn", "decimallatitude");
        config.setParam("OccurrencePointsTableName", "whitesharkoccurrences2");
        config.setParam("finalTableName", "whitesharkoccurrencesfilteredseas");
        config.setParam("FilterType", "IN_THE_WATER");
        // config.setParam("FilterType", "ON_EARTH");

        return config;
    }

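    // OCCURRENCES_DUPLICATE_DELETER: removes near-duplicate records from
    // OccurrencePointsTableName and writes the cleaned set to finalTableName.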
    private static AlgorithmConfiguration testConfigLocal7() {

        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("OCCURRENCES_DUPLICATE_DELETER");

        config.setParam("longitudeColumn", "decimallongitude");
        config.setParam("latitudeColumn", "decimallatitude");
        config.setParam("recordedByColumn", "recordedby");
        config.setParam("scientificNameColumn", "scientificname");
        config.setParam("eventDateColumn", "eventdate");
        config.setParam("lastModificationColumn", "modified");
        config.setParam("OccurrencePointsTableName", "occurrencetestduplicates");
        config.setParam("finalTableName", "occurrencesnoduplicates");
        config.setParam("spatialTolerance", "0.5");
        config.setParam("confidence", "50");

        return config;
    }

}

@@ -113,7 +113,7 @@ public static void main(String[] args) throws Exception {
         config.setParam("leftTableName", "whitesharkoccurrences1");
         config.setParam("finalTableName", "whitesharkoccurrencesmerged");
         config.setParam("spatialTolerance", "0.5");
-        config.setParam("confidence", "0.8");
+        config.setParam("confidence", "80");
 
 
         return config;
@@ -148,7 +148,7 @@ public static void main(String[] args) throws Exception {
         config.setParam("OccurrencePointsTableName", "whitesharkoccurrences2");
         config.setParam("finalTableName", "whitesharkoccurrencesnoduplicates");
         config.setParam("spatialTolerance", "0.5");
-        config.setParam("confidence", "0.8");
+        config.setParam("confidence", "80");
 
         return config;
     }
@@ -41,7 +41,7 @@ public class OccurrencePointsDuplicatesDeleter extends OccurrencePointsMerger{
         ColumnType p8 = new ColumnType(leftTableNameF, lastModificationColumn, "column with Modified values", "modified", false);
         ServiceType p9 = new ServiceType(ServiceParameters.RANDOMSTRING, finalTableNameF, "Name of the resulting table", "processedOccurrences_");
         PrimitiveType p10 = new PrimitiveType(Float.class.getName(), null, PrimitiveTypes.NUMBER, spatialTolerance, "The tolerance in degree for assessing that two points could be the same", "0.5");
-        PrimitiveType p11 = new PrimitiveType(Float.class.getName(), null, PrimitiveTypes.NUMBER, confidence, "The overall acceptance similarity threshold over which two points are the same", "0.8");
+        PrimitiveType p11 = new PrimitiveType(Float.class.getName(), null, PrimitiveTypes.NUMBER, confidence, "The overall acceptance similarity threshold over which two points are the same - from 0 to 100", "80");
 
         List<StatisticalType> inputs = new ArrayList<StatisticalType>();
         inputs.add(p1);
@@ -202,7 +202,7 @@ public class OccurrencePointsMerger implements Transducerer {
         ColumnType p8 = new ColumnType(leftTableNameF, lastModificationColumn, "column with Modified values", "modified", false);
         ServiceType p9 = new ServiceType(ServiceParameters.RANDOMSTRING, finalTableNameF, "Name of the resulting table", "processedOccurrences_");
         PrimitiveType p10 = new PrimitiveType(Float.class.getName(), null, PrimitiveTypes.NUMBER, spatialTolerance, "The tolerance in degree for assessing that two points could be the same", "0.5");
-        PrimitiveType p11 = new PrimitiveType(Float.class.getName(), null, PrimitiveTypes.NUMBER, confidence, "The overall acceptance similarity threshold over which two points are the same", "0.8");
+        PrimitiveType p11 = new PrimitiveType(Float.class.getName(), null, PrimitiveTypes.NUMBER, confidence, "The overall acceptance similarity threshold over which two points are the same - from 0 to 100", "80");
 
         List<StatisticalType> inputs = new ArrayList<StatisticalType>();
         inputs.add(p1);