Added output description management and changed the time series windowing parameter

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngine@95916 82a268e6-3cf1-43bd-a215-b396298e98cf
Gianpaolo Coro 2014-05-22 10:06:51 +00:00
parent d947394cda
commit 9a9cfdb909
16 changed files with 693 additions and 46 deletions
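For orientation, a minimal client-side sketch (not part of this commit) of the new output-description lookups introduced below, assuming the ./cfg/ configuration directory and the algorithm names used in the bundled tests; the package of GeneratorsFactory is assumed to match that of TransducerersFactory.

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;    // package assumed
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;

public class OutputDescriptionsSketch {
	public static void main(String[] args) throws Exception {
		// static lookup: resolves the algorithm class from the algorithms file and asks it for its output
		StatisticalType generatorOutput = GeneratorsFactory.getAlgorithmOutput("./cfg/", "AQUAMAPS_SUITABLE");
		System.out.println("AQUAMAPS_SUITABLE output: " + generatorOutput);
		// transducerers take a configuration, because names unknown to the static file
		// fall back to the dynamic transducerers factory
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setConfigPath("./cfg/");
		StatisticalType transducerOutput = TransducerersFactory.getTransducerOutput(config, "BIOCLIMATE_HSPEC");
		System.out.println("BIOCLIMATE_HSPEC output: " + transducerOutput);
	}
}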

View File

@@ -5,59 +5,68 @@ import java.util.LinkedHashMap;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
public class PrimitiveType extends StatisticalType {
public class PrimitiveType extends StatisticalType{
public PrimitiveType(String className, Object content, PrimitiveTypes type, String name, String description, String defaultValue, boolean optional) {
super(name, description, defaultValue, optional);
this.className=className;
this.content=content;
this.type=type;
this.className = className;
this.content = content;
this.type = type;
}
public PrimitiveType(String className, Object content, PrimitiveTypes type,String name, String description, String defaultValue) {
public PrimitiveType(String className, Object content, PrimitiveTypes type, String name, String description, String defaultValue) {
super(name, description, defaultValue);
this.className=className;
this.content=content;
this.type=type;
this.className = className;
this.content = content;
this.type = type;
}
public PrimitiveType(String className, Object content, PrimitiveTypes type,String name, String description) {
public PrimitiveType(String className, Object content, PrimitiveTypes type, String name, String description) {
super(name, description);
this.className=className;
this.content=content;
this.type=type;
this.className = className;
this.content = content;
this.type = type;
}
protected String className;
public String getClassName() {
return className;
}
public void setClassName(String className) {
this.className = className;
}
protected Object content;
public Object getContent() {
return content;
}
public void setContent(Object content) {
this.content = content;
}
protected PrimitiveTypes type;
public PrimitiveTypes getType() {
return type;
}
public void setType(PrimitiveTypes type) {
this.type = type;
}
public static LinkedHashMap<String,StatisticalType> stringMap2StatisticalMap(HashMap<String,String> stringmap){
LinkedHashMap<String,StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
for (String key:stringmap.keySet()){
String value = stringmap.get(key);
PrimitiveType string = new PrimitiveType(String.class.getName(), value, PrimitiveTypes.STRING, key,key);
map.put(key,string);
public static LinkedHashMap<String, StatisticalType> stringMap2StatisticalMap(HashMap<String, String> stringmap) {
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
if (stringmap != null) {
for (String key : stringmap.keySet()) {
String value = stringmap.get(key);
PrimitiveType string = new PrimitiveType(String.class.getName(), value, PrimitiveTypes.STRING, key, key);
map.put(key, string);
}
}
return map;
}
}
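A usage note on the change above, as a hypothetical caller (package of PrimitiveType assumed to be org.gcube.dataanalysis.ecoengine.datatypes): stringMap2StatisticalMap now tolerates a null input and returns an empty map instead of throwing a NullPointerException.

import java.util.HashMap;
import java.util.LinkedHashMap;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;     // package assumed
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;

public class StringMapSketch {
	public static void main(String[] args) {
		HashMap<String, String> outputs = new HashMap<String, String>();
		outputs.put("Original Time Series Length", "120");
		// each entry becomes a STRING PrimitiveType keyed (and described) by its map key
		LinkedHashMap<String, StatisticalType> asTypes = PrimitiveType.stringMap2StatisticalMap(outputs);
		// with the new guard, a null map yields an empty result rather than an exception
		LinkedHashMap<String, StatisticalType> empty = PrimitiveType.stringMap2StatisticalMap(null);
		System.out.println(asTypes.size() + " / " + empty.size());  // prints 1 / 0
	}
}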

View File

@@ -26,6 +26,11 @@ public class ClusterersFactory {
return inputs;
}
public static StatisticalType getClustererOutput(String configPath, String algorithmName) throws Exception {
StatisticalType output = ProcessorsFactory.getOutputDescriptions(configPath + AlgorithmConfiguration.clusterersFile, algorithmName);
return output;
}
public static String getDescription(String configPath, String algorithmName) throws Exception{
String input = ProcessorsFactory.getDescription(configPath + AlgorithmConfiguration.clusterersFile, algorithmName);
return input;

View File

@@ -27,6 +27,11 @@ public class EvaluatorsFactory {
return inputs;
}
public static StatisticalType getEvaluatorOutput(String configPath, String algorithmName) throws Exception {
StatisticalType output = ProcessorsFactory.getOutputDescriptions(configPath + AlgorithmConfiguration.evaluatorsFile, algorithmName);
return output;
}
public static String getDescription(String configPath, String algorithmName) throws Exception{
String input = ProcessorsFactory.getDescription(configPath + AlgorithmConfiguration.evaluatorsFile, algorithmName);
return input;

View File

@@ -81,6 +81,11 @@ public class GeneratorsFactory {
return inputs;
}
public static StatisticalType getAlgorithmOutput(String configPath, String algorithmName) throws Exception {
StatisticalType output = ProcessorsFactory.getOutputDescriptions(configPath + AlgorithmConfiguration.algorithmsFile, algorithmName);
return output;
}
public static Generator getGenerator(AlgorithmConfiguration config) throws Exception {
return (Generator)ProcessorsFactory.getProcessor(config, config.getConfigPath() + AlgorithmConfiguration.generatorsFile);
}

View File

@@ -30,12 +30,16 @@ public class ModelersFactory {
return modelers;
}
public static List<StatisticalType> getModelParameters(String configPath, String algorithmName) throws Exception{
List<StatisticalType> inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.modelsFile, algorithmName);
return inputs;
}
public static StatisticalType getModelOutput(String configPath, String algorithmName) throws Exception {
StatisticalType output = ProcessorsFactory.getOutputDescriptions(configPath + AlgorithmConfiguration.modelsFile, algorithmName);
return output;
}
public static String getDescription(String configPath, String algorithmName) throws Exception{
String input = ProcessorsFactory.getDescription(configPath + AlgorithmConfiguration.modelsFile, algorithmName);
return input;

View File

@@ -102,6 +102,37 @@ public class ProcessorsFactory {
}
public static StatisticalType getOutputDescriptions(String file, String algorithmName) {
try{
Properties p = AlgorithmConfiguration.getProperties(file);
String algorithmclass = p.getProperty(algorithmName);
if (algorithmclass==null) return null;
Object algclass = Class.forName(algorithmclass).newInstance();
// if the algorithm is a generator itself then take it
if (algclass instanceof Generator) {
return ((Generator) algclass).getOutput();
} else if (algclass instanceof Modeler) {
return ((Modeler) algclass).getOutput();
} else if (algclass instanceof Model) {
return ((Model) algclass).getOutput();
}
else if (algclass instanceof Evaluator) {
return ((Evaluator) algclass).getOutput();
}
else if (algclass instanceof Clusterer) {
return ((Clusterer) algclass).getOutput();
}
else if (algclass instanceof Transducerer) {
return ((Transducerer) algclass).getOutput();
}
else
return ((GenericAlgorithm) algclass).getOutput();
}catch (Exception e){
return null;
}
}
public static ComputationalAgent getProcessor(AlgorithmConfiguration config, String file) throws Exception {
return getProcessor(config, file,null);
}
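A fragment-level sketch of how the new getOutputDescriptions is meant to be called (the algorithm name and mapping line are illustrative, not taken from this commit): the method resolves the class name from the given properties file, instantiates it reflectively, dispatches on the agent interface to call getOutput(), and swallows any failure into a null return, which callers such as TransducerersFactory use to trigger the dynamic fallback.

// the properties file maps algorithm names to classes, conceptually:
//   TIME_SERIES_PROCESSING=org.gcube.dataanalysis.ecoengine.transducers.TimeSeriesAnalysis
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
StatisticalType output = ProcessorsFactory.getOutputDescriptions(
		config.getConfigPath() + AlgorithmConfiguration.transducererFile, "TIME_SERIES_PROCESSING");
if (output == null) {
	// null means the name is missing from the file or instantiation failed;
	// TransducerersFactory reacts by falling back to DynamicTransducerersFactory
	AnalysisLogger.getLogger().debug("no static output description found");
}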

View File

@@ -44,6 +44,25 @@ public class TransducerersFactory {
}
}
public static StatisticalType getTransducerOutput(AlgorithmConfiguration config, String algorithmName) throws Exception {
StatisticalType output = ProcessorsFactory.getOutputDescriptions(config.getConfigPath()+ AlgorithmConfiguration.transducererFile, algorithmName);
if (output != null)
return output;
else {
config.setAgent(algorithmName);
output = DynamicTransducerersFactory.getTransducerer(config).getOutput();
AnalysisLogger.getLogger().debug("Dynamic Output:"+output);
return output;
}
}
public static StatisticalType getModelOutput(String configPath, String algorithmName) throws Exception {
StatisticalType output = ProcessorsFactory.getOutputDescriptions(configPath + AlgorithmConfiguration.modelsFile, algorithmName);
return output;
}
public static String getDescription(AlgorithmConfiguration config, String algorithmName) throws Exception {
String input = ProcessorsFactory.getDescription(config.getConfigPath()+ AlgorithmConfiguration.transducererFile, algorithmName);
if (input!=null)

View File

@@ -57,7 +57,7 @@ public class PeriodicityDetector {
double[] signal = produceNoisySignal(defaultSignalLengthTimeinSec, defaultSamplingRate, defaultHiddenFrequency, defaultSNratio);
AnalysisLogger.getLogger().debug("Signal samples: " + signal.length);
double F = detectFrequency(signal, defaultSamplingRate, defaultMinPossibleFreq, defaultMaxPossibleFreq, defaultFreqError, true);
double F = detectFrequency(signal, defaultSamplingRate, defaultMinPossibleFreq, defaultMaxPossibleFreq, defaultFreqError, -1, true);
AnalysisLogger.getLogger().debug("Detected F:" + F + " indecision [" + lowermeanF + " , " + uppermeanF + "]");
}
@@ -81,7 +81,7 @@ public class PeriodicityDetector {
public double detectFrequency(double[] signal, boolean display) throws Exception {
return detectFrequency(signal, 1, 0, 1, 1f, display);
return detectFrequency(signal, 1, 0, 1, 1f, -1, display);
}
public double detectFrequency(double[] signal) throws Exception {
@@ -89,17 +89,32 @@ public class PeriodicityDetector {
return detectFrequency(signal, false);
}
public double detectFrequency(double[] signal, int samplingRate, float minPossibleFreq, float maxPossibleFreq, float wantedFreqError, boolean display) throws Exception {
public double detectFrequency(double[] signal, int samplingRate, float minPossibleFreq, float maxPossibleFreq, float wantedFreqError, int FFTnsamples, boolean display) throws Exception {
// estimate the best samples based on the error we want
long pow = Math.round(Math.log((float) samplingRate / wantedFreqError) / Math.log(2));
int wLength = 0;
long pow = 0;
if (wantedFreqError>-1){
pow = Math.round(Math.log((float) samplingRate / wantedFreqError) / Math.log(2));
if (pow <= 1)
pow = Math.round(Math.log((float) signal.length / (float) ("" + signal.length).length()) / Math.log(2));
int wLength = (int) Math.pow(2, pow);
AnalysisLogger.getLogger().debug("Suggested pow for window length=" + pow);
}
//adjust FFT Samples to be even
else{
if (FFTnsamples<2)
FFTnsamples=2;
else if (FFTnsamples>signal.length)
FFTnsamples=signal.length;
pow = Math.round(Math.log((float) FFTnsamples) / Math.log(2));
}
wLength = (int) Math.pow(2, pow);
AnalysisLogger.getLogger().debug("Suggested windows length (samples)=" + wLength);
AnalysisLogger.getLogger().debug("Suggested windows length (s)=" + ((float) wLength / (float) samplingRate) + " s");
int windowAnalysisSamples = (int) Math.pow(2, 14);// (int)
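A worked example of the new windowing parameter, derived from the branch above: when wantedFreqError is passed as -1, the caller-supplied FFTnsamples is clamped to [2, signal.length] and rounded to the nearest power of two; the old samplingRate/wantedFreqError rule is kept when wantedFreqError > -1.

// sketch of the sample-driven branch (wantedFreqError == -1), values illustrative
int FFTnsamples = 70;                                                  // e.g. the temperature test configuration
long pow = Math.round(Math.log((float) FFTnsamples) / Math.log(2));   // round(6.13) = 6
int wLength = (int) Math.pow(2, pow);                                  // 64 samples per FFT window
// with samplingRate = 1 the method also logs this as a 64 s window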

View File

@@ -306,7 +306,7 @@ public class SignalConverter {
}
if (overfirstmin) {
if (ele > (max + (Math.abs(max) * tolerance))) {
AnalysisLogger.getLogger().debug(">max up:"+ele +">" +(max + (Math.abs(max) * tolerance))+" at idx "+k);
// AnalysisLogger.getLogger().debug(">max up:"+ele +">" +(max + (Math.abs(max) * tolerance))+" at idx "+k);
max = ele;
bestidx = k;
}

View File

@@ -0,0 +1,43 @@
package org.gcube.dataanalysis.ecoengine.test;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.AliasToEntityOrderedMapResultTransformer;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
public class GenericTests {
public static void main(String[] args) throws Exception{
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
SessionFactory dbconnection = DatabaseUtils.initDBSession(config);
String query = "select * from absence_data_baskingshark";
Session ss = dbconnection.getCurrentSession();
ss.beginTransaction();
Query qr = null;
qr = ss.createSQLQuery(query);
qr.setResultTransformer(AliasToEntityOrderedMapResultTransformer.INSTANCE);
List<Object> result = qr.list();
ss.getTransaction().commit();
// List<Map<String, Object>> aliasToValueMapList = qr.list();
// // to get column names
// Object[] ColNames = (Object[]) aliasToValueMapList.get(0).keySet()
// .toArray();
for (Object row : result){
// System.out.println((LinkedHashMap<String, Object>) row);
ArrayList<String> listKeys = new ArrayList<String>(((LinkedHashMap<String, Object>) row).keySet());
ArrayList<Object> listvalues = new ArrayList<Object>(((LinkedHashMap<String, Object>) row).values());
System.out.println(listvalues);
}
}
}

View File

@@ -0,0 +1,321 @@
package org.gcube.dataanalysis.ecoengine.test;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class TestSimpleSignal {
// static AlgorithmConfiguration[] configs = {periodicSignalConfig(), russianSignalConfig(),simpleSignalConfig(), occurrencePointsSignalConfig(),hugeSignalConfig()};
//static AlgorithmConfiguration[] configs = {periodicSignalConfig(), russianSignalConfig(),simpleSignalConfig()};
// static AlgorithmConfiguration[] configs = {NAFOSignalConfig()};
// static AlgorithmConfiguration[] configs = {largeCustomSignalConfig()};
// static AlgorithmConfiguration[] configs = {temperatureSignalConfig()};
// static AlgorithmConfiguration[] configs = {periodicSignalConfig()};
// static AlgorithmConfiguration[] configs = {simpleSignalConfig()};
// static AlgorithmConfiguration[] configs = {sawSignalConfig()};
static AlgorithmConfiguration[] configs = {temperatureSignalConfig()};
// static AlgorithmConfiguration[] configs = {russianSignalConfig()};
// static AlgorithmConfiguration[] configs = {largeCustomSignalConfig()};
// static AlgorithmConfiguration[] configs = {occurrencePointsSignalConfig()};
// static AlgorithmConfiguration[] configs = {hugeSignalConfig()};
public static void main(String[] args) throws Exception {
int wLength = (int) Math.pow(2, 1);
System.out.println("L:"+wLength);
for (int i = 0; i < configs.length; i++) {
System.out.println("*****************TEST "+i+" *****************");
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(configs[i]);
trans.get(0).init();
Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
AnalysisLogger.getLogger().debug("ST:" + st);
trans = null;
System.out.println("*****************END TEST*****************");
}
}
public static AlgorithmConfiguration simpleSignalConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "timeseries_id4dd368bf_63fb_4d19_8e31_20ced63a477d");
config.setParam("ValueColum", "quantity");
config.setParam("SSA_Window_in_Samples", "30");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
config.setConfigPath("./cfg");
return config;
}
public static AlgorithmConfiguration russianSignalConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "generic_ideb9efbe0_61ad_4eea_b0ee_95e64ce11b28");
config.setParam("ValueColum", "quantity");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
config.setConfigPath("./cfg");
return config;
}
public static AlgorithmConfiguration occurrencePointsSignalConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
config.setParam("ValueColum", "speed");
config.setParam("TimeColum", "datetime");
config.setParam("AggregationFunction", "AVG");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
config.setConfigPath("./cfg");
return config;
}
public static AlgorithmConfiguration periodicSignalConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "timeseries_ide814eb07_c13b_41b3_a240_aa99446db831");
config.setParam("ValueColum", "quantity");
config.setParam("FrequencyResolution", "0.01");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
config.setConfigPath("./cfg");
return config;
}
public static AlgorithmConfiguration hugeSignalConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "generic_id634a660c_4d1a_410c_aa45_eb6e4c5afdf9");
config.setParam("ValueColum", "quantity");
config.setParam("TimeColum", "years");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
config.setConfigPath("./cfg");
return config;
}
public static AlgorithmConfiguration NAFOSignalConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "timeseries_id39c6c28f_2484_421c_8ffb_9c2cc2330c62");
config.setParam("ValueColum", "speed");
config.setParam("SSA_Window_in_Samples", "30");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
config.setConfigPath("./cfg");
return config;
}
public static AlgorithmConfiguration sawSignalConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "timeseries_ide814eb07_c13b_41b3_a240_aa99446db831");
config.setParam("ValueColum", "quantity");
config.setParam("FrequencyResolution", "0.01");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
config.setConfigPath("./cfg");
return config;
}
public static AlgorithmConfiguration largeCustomSignalConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "timeseries_idd3dd174e_242c_4f8b_920a_faa79691ca43");
config.setParam("ValueColum", "quantity");
config.setParam("FFT_Window_Samples", "14");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
config.setConfigPath("./cfg");
return config;
}
public static AlgorithmConfiguration earthquakesSignalConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "timeseries_id0f44b131_de55_4839_b07f_2721574e2b9d");
config.setParam("ValueColum", "magnitude");
config.setParam("FFT_Window_Samples", "14");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
config.setConfigPath("./cfg");
return config;
}
public static AlgorithmConfiguration temperatureSignalConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "timeseries_id08b3abb9_c7b0_4b82_8117_64b69055416f");
config.setParam("ValueColum", "fvalue");
config.setParam("FFT_Window_Samples", "70");
config.setParam("SSA_Window_in_Samples", "10");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
config.setConfigPath("./cfg");
return config;
}
}

View File

@@ -28,6 +28,19 @@ public static void main(String[] args) throws Exception {
List<StatisticalType> map;
// List<StatisticalType> map = GeneratorsFactory.getAlgorithmParameters("./cfg/","DUMMY");
// System.out.println("input for DUMMY algorithm: "+map);
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
StatisticalType type = GeneratorsFactory.getAlgorithmOutput("./cfg/","AQUAMAPS_SUITABLE");
System.out.println("output for AQUAMAPS_SUITABLE algorithm: "+type);
type = ModelersFactory.getModelOutput("./cfg/","HSPEN");
System.out.println("output for HSPEN algorithm: "+type);
type = EvaluatorsFactory.getEvaluatorOutput("./cfg/","HRS");
System.out.println("output for HRS algorithm: "+type);
type = ClusterersFactory.getClustererOutput("./cfg/","DBSCAN");
System.out.println("output for DBSCAN algorithm: "+type);
type = TransducerersFactory.getTransducerOutput(config,"BIOCLIMATE_HSPEC");
System.out.println("output for BIOCLIMATE_HSPEC algorithm: "+type);
map = GeneratorsFactory.getAlgorithmParameters("./cfg/","AQUAMAPS_SUITABLE");
System.out.println("input for AQUAMAPS_SUITABLE algorithm: "+map);
@@ -68,8 +81,7 @@ public static void main(String[] args) throws Exception {
System.out.println("Database Default Values: "+eval);
System.out.println("\n***TEST 12- Get All Supported features***");
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
HashMap<String,List<String>> features = ProcessorsFactory.getAllFeatures(config);
System.out.println("Database Default Values: "+features);

View File

@@ -0,0 +1,121 @@
package org.gcube.dataanalysis.ecoengine.test.signalprocessing;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.ecoengine.transducers.TimeSeriesAnalysis;
public class TestIOTCSignals {
static AlgorithmConfiguration[] configs = {IOTClongitudeConfig()};
public static void main(String[] args) throws Exception {
int wLength = (int) Math.pow(2, 1);
System.out.println("L:"+wLength);
for (int i = 0; i < configs.length; i++) {
TimeSeriesAnalysis.display=true;
System.out.println("*****************TEST "+i+" *****************");
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(configs[i]);
trans.get(0).init();
Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
AnalysisLogger.getLogger().debug("ST:" + st);
trans = null;
System.out.println("*****************END TEST*****************");
}
}
public static AlgorithmConfiguration IOTCSignalConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "timeseries_idb73029b9_226e_4d0f_b828_24854d0b7b44 ");
config.setParam("ValueColum", "fishing_hours");
config.setParam("FFT_Window_Samples", "128");
config.setParam("SSA_Window_in_Samples", "80");
config.setParam("SSA_EigenvaluesThreshold", "0.07");
config.setParam("SSA_Points_to_Forecast", "24");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
config.setConfigPath("./cfg");
return config;
}
public static AlgorithmConfiguration IOTCLatitudeConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "timeseries_idb73029b9_226e_4d0f_b828_24854d0b7b44 ");
config.setParam("ValueColum", "latitude");
config.setParam("FFT_Window_Samples", "256");
config.setParam("SSA_Window_in_Samples", "200");
config.setParam("SSA_EigenvaluesThreshold", "0.07");
config.setParam("SSA_Points_to_Forecast", "12");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
config.setConfigPath("./cfg");
return config;
}
public static AlgorithmConfiguration IOTClongitudeConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "timeseries_idb73029b9_226e_4d0f_b828_24854d0b7b44 ");
config.setParam("ValueColum", "longitude");
config.setParam("FFT_Window_Samples", "256");
config.setParam("SSA_Window_in_Samples", "200");
config.setParam("SSA_EigenvaluesThreshold", "0.07");
config.setParam("SSA_Points_to_Forecast", "12");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
config.setConfigPath("./cfg");
return config;
}
}

View File

@@ -18,9 +18,9 @@ public class TestSimpleSignal {
// static AlgorithmConfiguration[] configs = {largeCustomSignalConfig()};
// static AlgorithmConfiguration[] configs = {temperatureSignalConfig()};
// static AlgorithmConfiguration[] configs = {periodicSignalConfig()};
static AlgorithmConfiguration[] configs = {simpleSignalConfig()};
// static AlgorithmConfiguration[] configs = {simpleSignalConfig()};
// static AlgorithmConfiguration[] configs = {sawSignalConfig()};
// static AlgorithmConfiguration[] configs = {temperatureSignalConfig()};
static AlgorithmConfiguration[] configs = {temperatureSignalConfig()};
// static AlgorithmConfiguration[] configs = {russianSignalConfig()};
// static AlgorithmConfiguration[] configs = {largeCustomSignalConfig()};
// static AlgorithmConfiguration[] configs = {occurrencePointsSignalConfig()};
@@ -245,7 +245,7 @@ public class TestSimpleSignal {
config.setParam("ValueColum", "quantity");
config.setParam("FrequencyResolution", "0.01");
config.setParam("FFT_Window_Samples", "14");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
@@ -274,7 +274,7 @@ public class TestSimpleSignal {
config.setParam("ValueColum", "magnitude");
config.setParam("FrequencyResolution", "1");
config.setParam("FFT_Window_Samples", "14");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
@@ -304,7 +304,7 @@ public class TestSimpleSignal {
config.setParam("ValueColum", "fvalue");
config.setParam("FrequencyResolution", "0.01");
config.setParam("FFT_Window_Samples", "70");
config.setParam("SSA_Window_in_Samples", "10");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");

View File

@@ -37,7 +37,7 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
private static String timeSeriesTable = "TimeSeriesTable";
private static String valuesColumn = "ValueColum";
private static String timeColumn = "TimeColum";
private static String frequencyResolution = "FrequencyResolution";
private static String fftwindowsamples = "FFT_Window_Samples";
private static String aggregationFunction = "AggregationFunction";
private static String SSAAnalysisWindowSamples = "SSA_Window_in_Samples";
private static String SSAEigenvaluesThreshold = "SSA_EigenvaluesThreshold";
@@ -74,19 +74,19 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
String valuescolum = config.getParam(valuesColumn);
String timecolumn = config.getParam(timeColumn);
String aggregationFunc = config.getParam(aggregationFunction);
String frequencyRes = config.getParam(frequencyResolution);
String fftwindowsamplesS = config.getParam(fftwindowsamples);
int windowLength = Integer.parseInt(config.getParam(SSAAnalysisWindowSamples));
float eigenvaluespercthr = Float.parseFloat(config.getParam(SSAEigenvaluesThreshold));
int pointsToReconstruct = Integer.parseInt(config.getParam(SSAPointsToForecast));
float frequencyResDouble = 1;
int fftWindowSamplesDouble = 1;
if (timecolumn == null)
timecolumn = "time";
if (aggregationFunc == null)
aggregationFunc = "SUM";
if (frequencyRes != null) {
if (fftwindowsamplesS != null) {
try {
frequencyResDouble = Float.parseFloat(frequencyRes);
fftWindowSamplesDouble = Integer.parseInt(fftwindowsamplesS);
} catch (Exception e) {
}
}
@@ -95,7 +95,7 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Time Column: " + timecolumn);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Values Column: " + valuescolum);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Aggregation: " + aggregationFunc);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Frequency Resolution: " + frequencyRes);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->FFT Window Samples: " + fftWindowSamplesDouble);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->SSA Window Samples: " + windowLength);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->SSA Eigenvalues threshold: " + eigenvaluespercthr);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->SSA Points to Reconstruct: " + pointsToReconstruct);
@@ -153,7 +153,7 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
// spectrum and signal processing
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Detecting periodicity");
PeriodicityDetector pd = new PeriodicityDetector();
double F = pd.detectFrequency(ts.getValues(), 1, 0.01f, 0.5f, frequencyResDouble, display);
double F = pd.detectFrequency(ts.getValues(), 1, 0.01f, 0.5f, -1, fftWindowSamplesDouble, display);
outputParameters.put("Original Time Series Length", ""+originalSignalLength);
outputParameters.put("Uniformly Samples Time Series Length", ""+ts.getValues().length);
outputParameters.put("Spectral Analysis Window Length", ""+pd.currentWindowAnalysisSamples);
@@ -260,7 +260,8 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
inputs.add(p);
ColumnType p1 = new ColumnType(timeSeriesTable, valuesColumn, "The column containing the values of the time series", "values", false);
inputs.add(p1);
addDoubleInput(frequencyResolution, "The precision in detecting the period. The lower this number the less the number of points in the Spectrogram (higher number of samples used at each step). Reducing this, the spectrogram will be finer and sharper, but you should tune it. Too many samples will make the Spectrogram noisy.", "1");
// addDoubleInput(fftwindowsamples, "The number of samples precision in detecting the period. The lower this number the less the number of points in the Spectrogram (higher number of samples used at each step). Reducing this, the spectrogram will be finer and sharper, but you should tune it. Too many samples will make the Spectrogram noisy.", "1");
addDoubleInput(fftwindowsamples, "The number of samples N on which the Fourier Transform (FFT) will be extracted. The FFT will be calculated every N/2 samples, taking N samples each time. The spectrogram will display the FFT on the slices of N samples.", "12");
addEnumerateInput(AggregationFunctions.values(), aggregationFunction, "Function to apply to samples with the same time instant", AggregationFunctions.SUM.name());
addIntegerInput(SSAAnalysisWindowSamples, "The number of samples in the produced uniformly sampled signal, to use in the SSA algorithm. Must be strictly less than the Time Series length. This number should identify a portion of the signal long enough to make the system guess the nature of the trend", "20");
addDoubleInput(SSAEigenvaluesThreshold, "The threshold under which an SSA eigenvalue will be ignored, along with its eigenvector, for the reconstruction of the signal", "0.7");
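A hedged configuration sketch of the user-facing change in this class: the FrequencyResolution parameter is replaced by FFT_Window_Samples, which is forwarded to PeriodicityDetector.detectFrequency with wantedFreqError fixed at -1, so the spectral window is chosen from a sample count rather than a frequency precision.

// before (older configurations in this repository):
// config.setParam("FrequencyResolution", "0.01");
// after (value illustrative, taken from the temperature test configuration):
config.setParam("FFT_Window_Samples", "70");   // rounded to a 64-sample FFT window by PeriodicityDetector
// the SSA parameters are unchanged:
config.setParam("SSA_Window_in_Samples", "10");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");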

View File

@@ -0,0 +1,56 @@
package org.gcube.dataanalysis.ecoengine.utils;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.transform.BasicTransformerAdapter;
public class AliasToEntityOrderedMapResultTransformer extends BasicTransformerAdapter {
public static final AliasToEntityOrderedMapResultTransformer INSTANCE = new AliasToEntityOrderedMapResultTransformer();
/**
* Disallow instantiation of AliasToEntityOrderedMapResultTransformer .
*/
private AliasToEntityOrderedMapResultTransformer () {
super();
}
/**
* {@inheritDoc}
*/
public Object transformTuple(Object[] tuple, String[] aliases) {
//linkedhashmap to get table column name in order
Map result = new LinkedHashMap(tuple.length);
for ( int i=0; i<tuple.length; i++ ) {
String alias = aliases[i];
if ( alias!=null ) {
result.put( alias, tuple[i] );
}
}
return result;
}
/**
* {@inheritDoc}
*/
public boolean isTransformedValueATupleElement(String[] aliases, int tupleLength) {
return false;
}
/**
* Serialization hook for ensuring singleton uniqueing.
*
* @return The singleton instance : {@link #INSTANCE}
*/
private Object readResolve() {
return INSTANCE;
}
}