Almost complete SSA analysis

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngine@95014 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
Gianpaolo Coro 2014-04-18 16:14:22 +00:00
parent 68a3e28eea
commit afa3e4975f
10 changed files with 617 additions and 175 deletions

View File

@ -8,14 +8,8 @@ import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
public class PeriodicityDetector {
/*
static int defaultSamplingRate = 1000;// Hz
static float defaultSignalLengthTimeinSec = 5;// s
static float defaultHiddenFrequency = 100f;// Hz
static float defaultMinPossibleFreq = 0; // Hz
static float defaultMaxPossibleFreq = 200; // Hz
static float defaultSNratio = 2;
static float defaultFreqError = 1f;
*/
* static int defaultSamplingRate = 1000;// Hz static float defaultSignalLengthTimeinSec = 5;// s static float defaultHiddenFrequency = 100f;// Hz static float defaultMinPossibleFreq = 0; // Hz static float defaultMaxPossibleFreq = 200; // Hz static float defaultSNratio = 2; static float defaultFreqError = 1f;
*/
static int defaultSamplingRate = 8000;// Hz
static float defaultSignalLengthTimeinSec = 5;// s
static float defaultHiddenFrequency = 2f;// Hz
@ -23,62 +17,61 @@ public class PeriodicityDetector {
static float defaultMaxPossibleFreq = 1000; // Hz
static float defaultSNratio = 0;
static float defaultFreqError = 1f;
public int currentSamplingRate;
public int currentWindowShiftSamples;
public int currentWindowAnalysisSamples;
public double[][] currentspectrum;
public double meanF=0;
public double lowermeanF=0;
public double uppermeanF=0;
public double meanPeriod=0;
public double lowermeanPeriod=0;
public double uppermeanPeriod=0;
public double startPeriodTime=0;
public double endPeriodTime=0;
public double startPeriodSampleIndex=0;
public double endPeriodSampleIndex=0;
public double periodicityStrength=0;
public double meanF = 0;
public double lowermeanF = 0;
public double uppermeanF = 0;
public double meanPeriod = 0;
public double lowermeanPeriod = 0;
public double uppermeanPeriod = 0;
public double startPeriodTime = 0;
public double endPeriodTime = 0;
public double startPeriodSampleIndex = 0;
public double endPeriodSampleIndex = 0;
public double periodicityStrength = 0;
public double minFrequency;
public double maxFrequency;
public String getPeriodicityStregthInterpretation(){
if (periodicityStrength>0.6)
public String getPeriodicityStregthInterpretation() {
if (periodicityStrength > 0.6)
return "High";
if (periodicityStrength<0.6 && periodicityStrength>0.5)
if (periodicityStrength < 0.6 && periodicityStrength > 0.5)
return "Moderate";
if (periodicityStrength<0.5 && periodicityStrength>0.3)
if (periodicityStrength < 0.5 && periodicityStrength > 0.3)
return "Weak";
if (periodicityStrength<0.5 && periodicityStrength>0.3)
if (periodicityStrength < 0.5 && periodicityStrength > 0.3)
return "Very Low";
else
return "None";
}
public void demo()throws Exception{
double[] signal = produceNoisySignal(defaultSignalLengthTimeinSec, defaultSamplingRate, defaultHiddenFrequency, defaultSNratio);
AnalysisLogger.getLogger().debug("Signal samples: "+signal.length);
double F = detectFrequency(signal, defaultSamplingRate, defaultMinPossibleFreq, defaultMaxPossibleFreq,defaultFreqError,true);
AnalysisLogger.getLogger().debug("Detected F:"+F+" indecision ["+lowermeanF+" , "+uppermeanF+"]");
}
public static void main(String[] args) throws Exception{
PeriodicityDetector processor=new PeriodicityDetector();
public void demo() throws Exception {
double[] signal = produceNoisySignal(defaultSignalLengthTimeinSec, defaultSamplingRate, defaultHiddenFrequency, defaultSNratio);
AnalysisLogger.getLogger().debug("Signal samples: " + signal.length);
double F = detectFrequency(signal, defaultSamplingRate, defaultMinPossibleFreq, defaultMaxPossibleFreq, defaultFreqError, true);
AnalysisLogger.getLogger().debug("Detected F:" + F + " indecision [" + lowermeanF + " , " + uppermeanF + "]");
}
public static void main(String[] args) throws Exception {
PeriodicityDetector processor = new PeriodicityDetector();
processor.demo();
}
public double[] produceNoisySignal(float signalLengthTimeinSec, int samplingRate, float frequency, float SNratio) {
// generate a signal with the above period
double[] sin = SignalConverter.generateSinSignal((int) signalLengthTimeinSec * samplingRate, 1f/samplingRate, frequency);
double[] sin = SignalConverter.generateSinSignal((int) signalLengthTimeinSec * samplingRate, 1f / samplingRate, frequency);
// add noise
for (int i = 0; i < sin.length; i++) {
sin[i] = sin[i] + SNratio * Math.random();
@ -86,59 +79,55 @@ public class PeriodicityDetector {
return sin;
}
/**
 * Detects the dominant frequency of a signal assuming a 1 Hz sampling rate,
 * searching the [0, 1] Hz band with 1 Hz tolerance.
 *
 * @param signal  the input samples
 * @param display whether to render intermediate spectrograms
 * @return the detected frequency, or 0 when no stable periodic tract is found
 * @throws Exception if spectral analysis fails
 */
public double detectFrequency(double[] signal, boolean display) throws Exception {
	return detectFrequency(signal, 1, 0, 1, 1f, display);
}

/**
 * Convenience overload of {@link #detectFrequency(double[], boolean)} with
 * display disabled.
 */
public double detectFrequency(double[] signal) throws Exception {
	return detectFrequency(signal, false);
}
public double detectFrequency(double[] signal, int samplingRate, float minPossibleFreq, float maxPossibleFreq, float wantedFreqError, boolean display) throws Exception {
// estimate the best samples based on the error we want
long pow = Math.round(Math.log((float) samplingRate / wantedFreqError) / Math.log(2));
if (pow==0)
pow = Math.round(Math.log((float)signal.length/(float)(""+signal.length).length()) / Math.log(2));
if (pow <= 1)
pow = Math.round(Math.log((float) signal.length / (float) ("" + signal.length).length()) / Math.log(2));
int wLength = (int) Math.pow(2, pow);
AnalysisLogger.getLogger().debug("Suggested pow for window length=" + pow);
AnalysisLogger.getLogger().debug("Suggested windows length (samples)=" + wLength);
AnalysisLogger.getLogger().debug("Suggested windows length (s)=" + ((float) wLength / (float) samplingRate) + " s");
int windowAnalysisSamples = (int) Math.pow(2, 14);// (int)
windowAnalysisSamples = wLength;
int windowShiftSamples = (int) Math.round((float) windowAnalysisSamples / 2f);
float windowShiftTime = (float)SignalConverter.sample2Time(windowShiftSamples, samplingRate);
float windowShiftTime = (float) SignalConverter.sample2Time(windowShiftSamples, samplingRate);
float error = ((float) samplingRate / (float) windowAnalysisSamples);
AnalysisLogger.getLogger().debug("Error in the Measure will be: " + error + " Hz");
AnalysisLogger.getLogger().debug("A priori Min Freq: " + minPossibleFreq + " s");
AnalysisLogger.getLogger().debug("A priori Max Freq: " + maxPossibleFreq + " s");
if (maxPossibleFreq>=samplingRate)
maxPossibleFreq = (float)(samplingRate/2f)-(0.1f*samplingRate/2f);
if (minPossibleFreq==0)
if (maxPossibleFreq >= samplingRate)
maxPossibleFreq = (float) (samplingRate / 2f) - (0.1f * samplingRate / 2f);
if (minPossibleFreq == 0)
minPossibleFreq = 0.1f;
minFrequency=minPossibleFreq;
maxFrequency=maxPossibleFreq;
minFrequency = minPossibleFreq;
maxFrequency = maxPossibleFreq;
// display the signal
// if (display)
// SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
// if (display)
// SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
this.currentSamplingRate = samplingRate;
this.currentWindowShiftSamples = windowShiftSamples;
this.currentWindowAnalysisSamples = windowAnalysisSamples;
// trace spectrum
double[][] spectrum = SignalConverter.spectrogram("spectrogram", signal, samplingRate, windowShiftSamples, windowAnalysisSamples, false);
if (display)
@ -146,50 +135,65 @@ public class PeriodicityDetector {
// apply the bandpass filter
spectrum = SignalConverter.cutSpectrum(spectrum, minPossibleFreq, maxPossibleFreq, windowAnalysisSamples, samplingRate);
if (display)
// display cut spectrum
// display cut spectrum
SignalConverter.displaySpectrogram(spectrum, signal, "clean spectrogram", samplingRate, windowShiftSamples, windowAnalysisSamples);
// extract the maximum frequencies in each frame
SignalConverter signalMaximumAnalyzer = new SignalConverter();
double[] maxfrequencies = signalMaximumAnalyzer.takeMaxFrequenciesInSpectrogram(spectrum, samplingRate, windowAnalysisSamples, minPossibleFreq);
double[] powers = signalMaximumAnalyzer.averagepower;
currentspectrum=spectrum;
currentspectrum = spectrum;
// display the maximum freqs
AnalysisLogger.getLogger().debug("Number of frequency peaks " + maxfrequencies.length);
// take the longest stable sequence of frequencies
SignalConverter signalconverter = new SignalConverter();
maxfrequencies = signalconverter.takeLongestStableTract(maxfrequencies, 0.5);
if (maxfrequencies == null)
return 0;
this.startPeriodTime = SignalConverter.spectrogramTimeFromIndex(signalconverter.startStableTractIdx, windowShiftTime);
this.endPeriodTime = SignalConverter.spectrogramTimeFromIndex(signalconverter.endStableTractIdx, windowShiftTime);
this.startPeriodSampleIndex=SignalConverter.time2Sample(startPeriodTime, samplingRate);
this.endPeriodSampleIndex=Math.min(SignalConverter.time2Sample(endPeriodTime, samplingRate),signal.length-1);
this.startPeriodSampleIndex = SignalConverter.time2Sample(startPeriodTime, samplingRate);
this.endPeriodSampleIndex = Math.min(SignalConverter.time2Sample(endPeriodTime, samplingRate), signal.length - 1);
float power =0;
int counter=0;
//calculate the average spectrum relative amplitude in the most stable periodic tract
for (int i=signalconverter.startStableTractIdx;i<signalconverter.endStableTractIdx;i++){
power = MathFunctions.incrementPerc(power, (float)powers[i], counter);
float power = 0;
int counter = 0;
// calculate the average spectrum relative amplitude in the most stable periodic tract
for (int i = signalconverter.startStableTractIdx; i < signalconverter.endStableTractIdx; i++) {
power = MathFunctions.incrementPerc(power, (float) powers[i], counter);
counter++;
}
this.periodicityStrength=power;
if (this.periodicityStrength==-0.0)
this.periodicityStrength=0;
this.periodicityStrength = power;
if (this.periodicityStrength == -0.0)
this.periodicityStrength = 0;
// reconstruct the F
double meanF = MathFunctions.mean(maxfrequencies);
if ((meanF <= minPossibleFreq) || (meanF >= maxPossibleFreq)) {
meanF=0;
this.meanF = 0;
this.lowermeanF = 0;
this.uppermeanF = 0;
this.meanF = meanF;
this.lowermeanF = meanF - error;
this.uppermeanF = meanF + error;
this.meanPeriod = 1d/meanF;
this.lowermeanPeriod = 1d/lowermeanF;
this.uppermeanPeriod = 1d/uppermeanF;
this.meanPeriod = 0;
this.lowermeanPeriod = 0;
this.uppermeanPeriod = 0;
this.periodicityStrength = 0;
this.startPeriodTime = 0;
this.endPeriodTime = 0;
this.startPeriodSampleIndex = 0;
this.endPeriodSampleIndex = 0;
} else {
this.meanF = meanF;
this.lowermeanF = Math.max(meanF - error, minPossibleFreq);
this.uppermeanF = Math.min(meanF + error, maxFrequency);
this.meanPeriod = 1d / meanF;
this.lowermeanPeriod = 1d / lowermeanF;
this.uppermeanPeriod = 1d / uppermeanF;
}
return meanF;
}
}

View File

@ -309,17 +309,19 @@ public class SignalConverter {
double[] maxs = new double[spectrogram.length];
averagepower = new double[spectrogram.length];
int j = 0;
// SignalProcessing.displaySignalWithGenericTime(spectrogram[0], 0, 1, "spectrum");
for (double[] slice : spectrogram) {
int bestidx = 0;
double max = -Double.MAX_VALUE;
double min = Double.MAX_VALUE;
for (int k = 0; k < slice.length; k++) {
double ele = slice[k];
if (ele > max) {
if (ele > (max+(Math.abs(max)*0.1))) {
max = ele;
bestidx = k;
// AnalysisLogger.getLogger().debug(">max up:"+ele +">" +(max-max*0.1));
}
if (ele<min){
if (ele<(min-(Math.abs(min)*0.1))){
min = ele;
}
}
@ -342,7 +344,10 @@ public class SignalConverter {
else{
max=max-min;
mean = mean-min;
averagepower[j]=Math.abs((max-mean)/max);
if (max ==0)
averagepower[j]=0;
else
averagepower[j]=Math.abs((max-mean)/max);
}
AnalysisLogger.getLogger().debug("max power : "+max+" min power: "+min+" mean "+mean+" power "+averagepower[j]);

View File

@ -119,6 +119,10 @@ public class SignalProcessing {
}
public static Image renderSignalWithGenericTime(double[] signal, float t0, float timeshift, String name) {
if (signal.length>20000) {
AnalysisLogger.getLogger().debug("Too many points to display: "+signal.length);
return null;
}
org.jfree.data.xy.XYSeries xyseries = new org.jfree.data.xy.XYSeries(name);
float time = t0;
for (int i = 0; i < signal.length; i++) {
@ -132,6 +136,11 @@ public class SignalProcessing {
}
public static Image renderSignalWithGenericTime(double[] signal, double [] timeline, String name) {
if (signal.length>20000) {
AnalysisLogger.getLogger().debug("Too many points to display: "+signal.length);
return null;
}
org.jfree.data.xy.XYSeries xyseries = new org.jfree.data.xy.XYSeries(name);
for (int i = 0; i < signal.length; i++) {
@ -144,6 +153,30 @@ public class SignalProcessing {
return image;
}
public static Image renderSignalWithTime(double[] signal, Date[] dates, String name, String format) {
org.jfree.data.time.TimeSeries series = new org.jfree.data.time.TimeSeries(name);
if (signal.length>20000) {
AnalysisLogger.getLogger().debug("Too many points to display: "+signal.length);
return null;
}
for (int i = 0; i < signal.length; i++){
try{
FixedMillisecond ms = new FixedMillisecond(dates[i]);
series.add(ms, signal[i]);
}catch(Exception e){
AnalysisLogger.getLogger().debug("Skipping value yet present: "+dates[i]);
}
}
TimeSeriesCollection dataset = new TimeSeriesCollection();
dataset.addSeries(series);
JFreeChart chart = TimeSeriesGraph.createStaticChart(dataset, format);
Image image = ImageTools.toImage(chart.createBufferedImage(680, 420));
return image;
}
public static Image renderSignalSpectrogram(double[] signal, double [] timeline, int samplingRate, int frameslength, int windowshift) {
SpectrogramCustom spec = new SpectrogramCustom(signal, samplingRate, Window.get(Window.HAMMING, frameslength), windowshift, frameslength, 640, 480);
double[][] spectrum = spec.spectra.toArray(new double[spec.spectra.size()][]);
@ -178,7 +211,8 @@ public class SignalProcessing {
}
for (int i = 0; i < signal.length; i++){
try{
series.add(new FixedMillisecond(dates[i]), signal[i]);
FixedMillisecond ms = new FixedMillisecond(dates[i]);
series.add(ms, signal[i]);
}catch(Exception e){
AnalysisLogger.getLogger().debug("Skipping value yet present: "+dates[i]);
}

View File

@ -19,6 +19,8 @@ public class TimeSeries {
private Date[] unsortedtimes;
private String[] timeLabels;
private long minimumtimegap = -1;
private String timepattern;
AlgorithmConfiguration config;
public TimeSeries(int timeLength, AlgorithmConfiguration config) {
@ -60,6 +62,22 @@ public class TimeSeries {
public Date[] getTime() {
return times;
}
/**
 * Builds an extended copy of the time axis: the existing instants followed
 * by {@code furtherPointsinTime} extra instants, each spaced by the minimum
 * observed gap between samples.
 *
 * NOTE(review): {@code minimumtimegap} is initialized to -1; presumably it
 * is set during parsing/sorting before this is called — confirm, otherwise
 * the appended instants step backwards in time.
 *
 * @param furtherPointsinTime number of instants to append (0 returns a copy)
 * @return a new array of length {@code times.length + furtherPointsinTime};
 *         the original {@code times} array is not modified
 */
public Date[] extendTime(int furtherPointsinTime){
	Date[] time = new Date[times.length + furtherPointsinTime];
	// copy the existing timeline in bulk instead of element by element
	System.arraycopy(times, 0, time, 0, times.length);
	long lastDate = times[times.length - 1].getTime();
	// append equally spaced instants after the last known date
	for (int i = times.length; i < time.length; i++) {
		time[i] = new Date(lastDate + (i + 1 - times.length) * minimumtimegap);
	}
	return time;
}
public double[] getMillisecondsTimeline() {
double[] secondstimes = new double[times.length];
@ -140,6 +158,7 @@ public class TimeSeries {
if (counter == 0) {
timepattern = DateGuesser.getPattern(timel);
ts.setTimepattern(timepattern);
AnalysisLogger.getLogger().debug("Time pattern: " + timepattern);
sdf = new SimpleDateFormat(timepattern, Locale.ENGLISH);
}
@ -159,7 +178,9 @@ public class TimeSeries {
counter++;
}
ts.sort();
return ts;
}
@ -207,4 +228,12 @@ public class TimeSeries {
values[i] = values[i] / max;
}
}
/**
 * Returns the SimpleDateFormat-style pattern guessed for this series' time
 * labels (set via DateGuesser during parsing — see the populate logic above).
 */
public String getTimepattern() {
return timepattern;
}
/**
 * Sets the date pattern associated with this series' time labels.
 */
public void setTimepattern(String timepattern) {
this.timepattern = timepattern;
}
}

View File

@ -15,9 +15,12 @@ public class SSADataset {
private Matrix X []; //Basic Matrix singular decomposition
private Matrix groupX []; //the resulting matrix for each of the groups
private Matrix V []; //the main components of singular decomposition
private List<Double> reconstructionList; //recycled a number of
private List<Double> forecastList; //recycled a number of
private List<Double> reconstructionList;
private List<Double> forecastList;
private double[] reconstructedSignal;
private double[] forecastSignal;
private List <Double> SMA; //moving averages
private List <Double> cov; //averaging the diagonal covariance
private List <Double> eigenValueList;//eigenvalues
@ -236,5 +239,21 @@ public class SSADataset {
this.forecastList = forecastList;
}
/**
 * Returns the signal reconstructed by the SSA workflow, or null if the
 * workflow has not populated it yet.
 */
public double[] getReconstructedSignal() {
return reconstructedSignal;
}
/**
 * Stores the reconstructed signal produced by the SSA workflow.
 */
public void setReconstructedSignal(double[] reconstructedSignal) {
this.reconstructedSignal = reconstructedSignal;
}
/**
 * Returns the forecast signal, or null if forecasting has not run.
 */
public double[] getForecastSignal() {
return forecastSignal;
}
/**
 * Stores the forecast signal produced by the SSA workflow.
 */
public void setForecastSignal(double[] forecastSignal) {
this.forecastSignal = forecastSignal;
}
}

View File

@ -20,17 +20,21 @@ public class SSAWorkflow {
SingularSpectrumAnalysis.inclosure(data);
// apply SVD and get a number of eigenvectors equal to the rank of the
// embedding matrix
System.gc();
SingularSpectrumAnalysis.singularDecomposition(data);
// calculate averages for each frame of the time series
System.gc();
SingularSpectrumAnalysis.setMovingAverage(data);
// Diagonal averaging of the covariance matrix
System.gc();
SingularSpectrumAnalysis.averagedCovariance(data);
// store the logs and the sqrts of the eigenvalues
System.gc();
SingularSpectrumAnalysis.functionEigenValue(data);
//build groups of indices
List<SSAGroupList> groupsModel = new ArrayList<SSAGroupList>();
List<SSAUnselectList> groups = new ArrayList<SSAUnselectList>();
AnalysisLogger.getLogger().debug("Listing All the Eigenvalues");
for (int i = 0; i < data.getPercentList().size(); i++) {
double currentperc = data.getPercentList().get(i);
AnalysisLogger.getLogger().debug("Eigenvalue: Number: "+i+" Percentage: "+currentperc);
@ -51,9 +55,11 @@ public class SSAWorkflow {
double[] rsignal = new double[data.getForecastList().size()];
for(int i = 0; i < data.getForecastList().size(); i++) rsignal[i] = data.getForecastList().get(i);
//TODO: Report the weights of the components in a chart along with the cutoff
SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
SignalProcessing.displaySignalWithGenericTime(rsignal, 0, 1, "reconstructed signal");
data.setReconstructedSignal(rsignal);
data.setForecastSignal(rsignal);
// SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
// SignalProcessing.displaySignalWithGenericTime(rsignal, 0, 1, "reconstructed signal");
AnalysisLogger.getLogger().debug("SSA workflow DONE");
return data;

View File

@ -240,6 +240,7 @@ public class SingularSpectrumAnalysis {
* @return the resulting matrix
*/
private static double[][] transpositionMatrix(double matrix[][]) {
AnalysisLogger.getLogger().debug("SSA->Building a matrix with dimensions: "+matrix[0].length+" X "+matrix.length);
double transpMatrix[][] = new double[matrix[0].length][matrix.length];
for (int i = 0; i < matrix.length; i++) {
for (int j = 0; j < matrix[i].length; j++) {
@ -330,7 +331,10 @@ public class SingularSpectrumAnalysis {
* @param data
*/
public static void forecast(SSADataset data, int nPointsToForecast, boolean reconstructedSignal){
if (nPointsToForecast==0){
data.setForecastList(data.getReconstructionList());
return;
}
// List eigenvectors = data.getEigenVectors().subList(0, 11);
int nTotalEigenV = data.getPercentList().size();
int bestEigenVectors = nTotalEigenV;
@ -348,7 +352,7 @@ public class SingularSpectrumAnalysis {
int lastcoordinate = L-1;
AnalysisLogger.getLogger().debug("SSA: value for L: "+L);
int nEigenVectors = eigenvectors.size();
AnalysisLogger.getLogger().debug("Total number of Eigenvectors: "+nEigenVectors);
AnalysisLogger.getLogger().debug("Number of Selected Eigenvectors For Reconstruction: "+nEigenVectors);
double[] p = new double[nEigenVectors];
for (int i = 0;i<nEigenVectors;i++){
p[i] = (Double)((List)eigenvectors.get(i)).get(lastcoordinate);

View File

@ -0,0 +1,98 @@
package org.gcube.dataanalysis.ecoengine.test.signalprocessing;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.ecoengine.signals.ssa.SSAWorkflow;
import org.gcube.dataanalysis.ecoengine.utils.Transformations;
/**
 * Manual test driver for the Caterpillar-SSA workflow: runs the complete
 * workflow over four test series — a series read from file, a clean sinusoid,
 * a noisy sinusoid and a large aggregated CSV series.
 */
public class TestSSA {

	public static void main(String[] args) throws Exception {
		int windowLength = 20;
		float eigenvaluespercthr = 0.7f;
		int pointsToReconstruct = 100;
		SSAWorkflow.applyCompleteWorkflow(sawTimeSeries(), windowLength, eigenvaluespercthr, pointsToReconstruct, false);
		SSAWorkflow.applyCompleteWorkflow(sinTimeSeries(), windowLength, eigenvaluespercthr, pointsToReconstruct, false);
		SSAWorkflow.applyCompleteWorkflow(noisyTimeSeries(), windowLength, eigenvaluespercthr, pointsToReconstruct, false);
		SSAWorkflow.applyCompleteWorkflow(largeTimeSeries(), windowLength, eigenvaluespercthr, 10, false);
	}

	/**
	 * Loads a series from the local "timeseries" file, one numeric value per
	 * line. The reader is now closed in a finally block so it is not leaked
	 * when a line fails to parse.
	 *
	 * @throws Exception on I/O or number-format errors
	 */
	public static List<Double> sawTimeSeries() throws Exception {
		BufferedReader br = new BufferedReader(new FileReader(new File("timeseries")));
		List<Double> timeseries = new ArrayList<Double>();
		try {
			String line;
			while ((line = br.readLine()) != null) {
				timeseries.add(Double.parseDouble(line));
			}
		} finally {
			br.close();
		}
		return timeseries;
	}

	/** Builds a clean 0.1 Hz sinusoid: 120 s at 1 Hz sampling, no noise. */
	public static List<Double> sinTimeSeries() throws Exception {
		return signalToList(new PeriodicityDetector().produceNoisySignal(120, 1, 0.1f, 0));
	}

	/** Builds the same sinusoid perturbed with noise (SN ratio 1.2). */
	public static List<Double> noisyTimeSeries() throws Exception {
		return signalToList(new PeriodicityDetector().produceNoisySignal(120, 1, 0.1f, 1.2f));
	}

	/** Boxes a primitive signal into a List&lt;Double&gt; (shared by the generators above). */
	private static List<Double> signalToList(double[] signal) {
		List<Double> timeseries = new ArrayList<Double>(signal.length);
		for (double sample : signal) {
			timeseries.add(sample);
		}
		return timeseries;
	}

	/**
	 * Loads "LargeTS.csv", skipping the header row, summing the column-5
	 * values grouped by the column-3 key, preserving first-seen key order.
	 * The reader is closed in a finally block (previously leaked on parse
	 * failure).
	 *
	 * @throws Exception on I/O or number-format errors
	 */
	public static List<Double> largeTimeSeries() throws Exception {
		BufferedReader br = new BufferedReader(new FileReader(new File("LargeTS.csv")));
		LinkedHashMap<String, String> values = new LinkedHashMap<String, String>();
		try {
			String line = br.readLine(); // discard the CSV header row
			while ((line = br.readLine()) != null) {
				List<String> row = Transformations.parseCVSString(line, ",");
				String previous = values.get(row.get(3));
				if (previous == null)
					values.put(row.get(3), row.get(5));
				else {
					double val = Double.parseDouble(previous) + Double.parseDouble(row.get(5));
					values.put(row.get(3), "" + val);
				}
			}
		} finally {
			br.close();
		}
		List<Double> timeseries = new ArrayList<Double>();
		for (String val : values.values()) {
			timeseries.add(Double.parseDouble(val));
		}
		return timeseries;
	}
}

View File

@ -11,12 +11,17 @@ import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class TestSimpleSignal {
// static AlgorithmConfiguration[] configs = {periodicSignalConfig()};//russianSignalConfig(),simpleSignalConfig()};
static AlgorithmConfiguration[] configs = {periodicSignalConfig(), russianSignalConfig(),simpleSignalConfig(), occurrencePointsSignalConfig(),hugeSignalConfig()};
// static AlgorithmConfiguration[] configs = {russianSignalConfig()};
// static AlgorithmConfiguration[] configs = {hugeSignalConfig()};
// static AlgorithmConfiguration[] configs = {periodicSignalConfig()};
// static AlgorithmConfiguration[] configs = {periodicSignalConfig(), russianSignalConfig(),simpleSignalConfig(), occurrencePointsSignalConfig(),hugeSignalConfig()};
static AlgorithmConfiguration[] configs = {periodicSignalConfig(), russianSignalConfig(),simpleSignalConfig()};
// static AlgorithmConfiguration[] configs = {NAFOSignalConfig()};
// static AlgorithmConfiguration[] configs = {largeCustomSignalConfig()};
// static AlgorithmConfiguration[] configs = {sawSignalConfig()};
public static void main(String[] args) throws Exception {
int wLength = (int) Math.pow(2, 1);
System.out.println("L:"+wLength);
for (int i = 0; i < configs.length; i++) {
System.out.println("*****************TEST "+i+" *****************");
@ -44,7 +49,11 @@ public class TestSimpleSignal {
// vessels
config.setParam("TimeSeriesTable", "timeseries_id4dd368bf_63fb_4d19_8e31_20ced63a477d");
config.setParam("ValueColum", "quantity");
config.setParam("SSA_Window_in_Samples", "30");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
@ -66,6 +75,9 @@ public class TestSimpleSignal {
// vessels
config.setParam("TimeSeriesTable", "generic_ideb9efbe0_61ad_4eea_b0ee_95e64ce11b28");
config.setParam("ValueColum", "quantity");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
@ -90,6 +102,9 @@ public class TestSimpleSignal {
config.setParam("ValueColum", "speed");
config.setParam("TimeColum", "datetime");
config.setParam("AggregationFunction", "AVG");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
@ -113,7 +128,10 @@ public class TestSimpleSignal {
config.setParam("TimeSeriesTable", "signalcsv");
config.setParam("ValueColum", "signal");
config.setParam("FrequencyResolution", "1");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
@ -137,7 +155,10 @@ public class TestSimpleSignal {
config.setParam("TimeSeriesTable", "generic_id634a660c_4d1a_410c_aa45_eb6e4c5afdf9");
config.setParam("ValueColum", "quantity");
config.setParam("TimeColum", "years");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
@ -147,4 +168,88 @@ public class TestSimpleSignal {
}
/**
 * Builds the test configuration for the NAFO time series: DB connection,
 * source table/column, and the three SSA parameters (window, eigenvalue
 * threshold, points to forecast).
 */
public static AlgorithmConfiguration NAFOSignalConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "timeseries_id39c6c28f_2484_421c_8ffb_9c2cc2330c62");
config.setParam("ValueColum", "speed");
config.setParam("SSA_Window_in_Samples", "30");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
// NOTE(review): setConfigPath is called twice ("./cfg/" above, "./cfg"
// here) — the second call wins; confirm which form downstream code expects.
config.setConfigPath("./cfg");
return config;
}
/**
 * Builds the test configuration for the saw-tooth time series: DB
 * connection, source table/column, frequency resolution and SSA parameters.
 */
public static AlgorithmConfiguration sawSignalConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "timeseries_ide814eb07_c13b_41b3_a240_aa99446db831");
config.setParam("ValueColum", "quantity");
config.setParam("FrequencyResolution", "0.01");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
// NOTE(review): second setConfigPath call overrides the "./cfg/" above.
config.setConfigPath("./cfg");
return config;
}
/**
 * Builds the test configuration for the large custom time series: DB
 * connection, source table/column, frequency resolution and SSA parameters.
 */
public static AlgorithmConfiguration largeCustomSignalConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "timeseries_idd3dd174e_242c_4f8b_920a_faa79691ca43");
config.setParam("ValueColum", "quantity");
config.setParam("FrequencyResolution", "0.01");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
// NOTE(review): second setConfigPath call overrides the "./cfg/" above.
config.setConfigPath("./cfg");
return config;
}
}

View File

@ -1,130 +1,235 @@
package org.gcube.dataanalysis.ecoengine.transducers;
import java.awt.Image;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.ecoengine.signals.TimeSeries;
import org.gcube.dataanalysis.ecoengine.signals.ssa.SSADataset;
import org.gcube.dataanalysis.ecoengine.signals.ssa.SSAWorkflow;
import org.gcube.dataanalysis.ecoengine.utils.AggregationFunctions;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.Operations;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.hibernate.SessionFactory;
public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
private static String timeSeriesTable = "TimeSeriesTable";
private static String valuesColumn= "ValueColum";
private static String timeColumn= "TimeColum";
private static String frequencyResolution= "FrequencyResolution";
private static String aggregationFunction= "AggregationFunction";
private static String valuesColumn = "ValueColum";
private static String timeColumn = "TimeColum";
private static String frequencyResolution = "FrequencyResolution";
private static String aggregationFunction = "AggregationFunction";
private static String SSAAnalysisWindowSamples = "SSA_Window_in_Samples";
private static String SSAEigenvaluesThreshold = "SSA_EigenvaluesThreshold";
private static String SSAPointsToForecast = "SSA_Points_to_Forecast";
private Image signalImg = null;
private Image uniformSignalImg = null;
private Image uniformSignalSamplesImg = null;
private Image spectrogramImg = null;
private Image forecastsignalImg = null;
private Image eigenValuesImg=null;
private File outputfilename=null;
private static boolean display = false;
private static int maxpoints = 10000;
@Override
public void init() throws Exception {
	// No initialization needed: all parameters are read from 'config' inside process()
}
@Override
public String getDescription() {
	// Merged-diff fix: the superseded description was left in alongside this one;
	// only the newer, complete text (mentioning the maxpoints limit and SSA) is kept.
	return "An algorithms applying signal processing to a non uniform time series. A maximum of "+maxpoints+" distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram.";
}
@Override
protected void process() throws Exception {
SessionFactory dbconnection = null;
try{
status = 0;
try {
dbconnection = DatabaseUtils.initDBSession(config);
String tablename = config.getParam(timeSeriesTable);
String valuescolum = config.getParam(valuesColumn);
// String timecolumn = "datetime";//"time";
String timecolumn = config.getParam(timeColumn);
String aggregationFunc= config.getParam(aggregationFunction);
String frequencyRes= config.getParam(frequencyResolution);
String aggregationFunc = config.getParam(aggregationFunction);
String frequencyRes = config.getParam(frequencyResolution);
int windowLength = Integer.parseInt(config.getParam(SSAAnalysisWindowSamples));
float eigenvaluespercthr = Float.parseFloat(config.getParam(SSAEigenvaluesThreshold));
int pointsToReconstruct = Integer.parseInt(config.getParam(SSAPointsToForecast));
float frequencyResDouble = 1;
if (timecolumn==null)
if (timecolumn == null)
timecolumn = "time";
if (aggregationFunc==null)
aggregationFunc="SUM";
if (frequencyRes!=null)
{ try{
frequencyResDouble=Float.parseFloat(frequencyRes);
}catch(Exception e){
if (aggregationFunc == null)
aggregationFunc = "SUM";
if (frequencyRes != null) {
try {
frequencyResDouble = Float.parseFloat(frequencyRes);
} catch (Exception e) {
}
}
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Table Name: " + tablename);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Time Column: " + timecolumn);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Values Column: " + valuescolum);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Aggregation: " + aggregationFunc);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Frequency Resolution: " + frequencyRes);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->SSA Window Samples: " + windowLength);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->SSA Eigenvalues threshold: " + eigenvaluespercthr);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->SSA Points to Reconstruct: " + pointsToReconstruct);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Table Name: "+tablename);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Time Column: "+timecolumn);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Values Column: "+valuescolum);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Aggregation: "+aggregationFunc);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Frequency Resolution: "+frequencyRes);
// String query = "select * from (select "+valuescolum+","+timecolumn+" from "+tablename+") as a";
String query = "select * from (select "+aggregationFunc+"("+valuescolum+"),"+timecolumn+" from "+tablename+" group by "+timecolumn+") as a";
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Query to execute: "+query);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Extracting Points...");
String query = "select * from (select " + aggregationFunc + "(" + valuescolum + ")," + timecolumn + " from " + tablename + " group by " + timecolumn + ") as a";
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Query to execute: " + query);
List<Object> results = DatabaseFactory.executeSQLQuery(query, dbconnection);
if (results==null || results.size()==0)
status = 10;
if (results == null || results.size() == 0)
throw new Exception("Error in retrieving values from the table: no time series found");
//build signal
else if (results.size() > maxpoints)
throw new Exception("Too long Time Series: a maximum of distinct "+maxpoints+" in time is allowed");
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Points Extracted!");
// build signal
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Building signal");
List<Tuple<String>> signal = new ArrayList<Tuple<String>>();
int sizesignal = 0;
for (Object row:results){
for (Object row : results) {
Object[] srow = (Object[]) row;
String value =""+srow[0];
String time=""+srow[1];
signal.add(new Tuple<String>(time,value));
String value = "" + srow[0];
String time = "" + srow[1];
signal.add(new Tuple<String>(time, value));
sizesignal++;
}
status = 20;
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Signal built with success. Size: "+sizesignal);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Signal built with success. Size: " + sizesignal);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Building Time Series");
TimeSeries ts = TimeSeries.buildFromSignal(signal, config);
String timepattern = ts.getTimepattern();
String chartpattern = "HH:mm:ss MM-dd-yy";
if (timepattern.equals("s"))
chartpattern = "HH:mm:ss:SS";
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Uniformly sampling the signal");
SignalProcessing.displaySignalWithTime(ts.getValues(), ts.getTime(), "Time Series", "HH:mm:ss MM-dd-yy");
if (display)
SignalProcessing.displaySignalWithTime(ts.getValues(), ts.getTime(), "Time Series", chartpattern);
signalImg = SignalProcessing.renderSignalWithTime(ts.getValues(), ts.getTime(), "Original Time Series", chartpattern);
ts.convertToUniformSignal(0);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Uniform sampling finished");
//spectrum and signal processing
SignalProcessing.displaySignalWithTime(ts.getValues(), ts.getTime(), "Unif Samp Time Series", "HH:mm:ss MM-dd-yy");
status = 30;
// spectrum and signal processing
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Detecting periodicity");
PeriodicityDetector pd = new PeriodicityDetector();
double F = pd.detectFrequency(ts.getValues(),1,0.01f,0.5f,frequencyResDouble,true);
double F = pd.detectFrequency(ts.getValues(), 1, 0.01f, 0.5f, frequencyResDouble, display);
outputParameters.put("Detected Frequency (samples^-1)", ""+MathFunctions.roundDecimal(F,2));
outputParameters.put("Indecision on Frequency", "["+MathFunctions.roundDecimal(pd.lowermeanF,2)+" , "+MathFunctions.roundDecimal(pd.uppermeanF,2) + "]");
outputParameters.put("Average detected Period (samples)", ""+MathFunctions.roundDecimal(pd.meanPeriod,2));
outputParameters.put("Indecision on Average Period", "["+MathFunctions.roundDecimal(pd.lowermeanPeriod,2)+" , "+MathFunctions.roundDecimal(pd.uppermeanPeriod,2) + "]");
outputParameters.put("Samples range in which periodicity was detected", "from "+pd.startPeriodSampleIndex+" to "+pd.endPeriodSampleIndex);
outputParameters.put("Period Strength with interpretation", ""+MathFunctions.roundDecimal(pd.periodicityStrength,2)+" ("+pd.getPeriodicityStregthInterpretation()+")");
outputParameters.put("Range of frequencies (in samples^-1) represented in the Spectrogram:", "["+MathFunctions.roundDecimal(pd.minFrequency, 2)+" , "+MathFunctions.roundDecimal(pd.maxFrequency, 2) + "]");
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Periodicity Detected!");
status =60;
AnalysisLogger.getLogger().debug("Detected Frequency: "+F+" indecision ["+pd.lowermeanF+" , "+pd.uppermeanF+"]");
AnalysisLogger.getLogger().debug("Detected Period: "+pd.meanPeriod+" indecision ["+pd.lowermeanPeriod+" , "+pd.uppermeanPeriod+"]");
AnalysisLogger.getLogger().debug("Detected Periodicity Strength: "+pd.periodicityStrength+" ("+pd.getPeriodicityStregthInterpretation()+")");
AnalysisLogger.getLogger().debug("Periodicity inside this samples range: ["+pd.startPeriodTime+";"+pd.endPeriodTime+"]");
System.gc();
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Executing SSA analysis");
List<Double> values = new ArrayList<Double>();
for (double v : ts.getValues()) {
values.add(v);
}
SSADataset ssa = SSAWorkflow.applyCompleteWorkflow(values, windowLength, eigenvaluespercthr, pointsToReconstruct, false);
Date[] newtimes = ts.extendTime(pointsToReconstruct);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->SSA analysis completed");
status = 70;
AnalysisLogger.getLogger().debug("Maximum Frequency in the Spectrogram "+MathFunctions.roundDecimal(pd.maxFrequency,2));
AnalysisLogger.getLogger().debug("Minimum Frequency in the Spectrogram "+MathFunctions.roundDecimal(pd.minFrequency,2));
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Rendering Images");
uniformSignalImg = SignalProcessing.renderSignalWithTime(ts.getValues(), ts.getTime(), "Uniformly Sampled Time Series", chartpattern);
if (uniformSignalImg==null)
outputParameters.put("Note:", "The charts for uniformly sampled and forecasted signals contain too many points and will not be displayed. The values will be only reported in the output file.");
else
outputParameters.put("Note:", "Details about the values are reported in the output file.");
// Image spectrogramImage = SignalProcessing.renderSignalSpectrogram2(pd.currentspectrum);
}catch(Exception e){
uniformSignalSamplesImg = SignalProcessing.renderSignalWithGenericTime(ts.getValues(), 0,1, "Uniformly Sampled Time Series in Samples");
spectrogramImg = SignalProcessing.renderSignalSpectrogram2(pd.currentspectrum);
forecastsignalImg = SignalProcessing.renderSignalWithTime(ssa.getForecastSignal(), newtimes, "Forecasted Time Series", chartpattern);
double[] eigenValues = new double[ssa.getPercentList().size()];
for (int i=0;i<eigenValues.length;i++){
eigenValues[i] = ssa.getPercentList().get(i);
}
eigenValuesImg = SignalProcessing.renderSignalWithGenericTime(eigenValues, 0f, 1,"SSA Eigenvalues");
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Images Rendered");
System.gc();
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Producing Files");
outputfilename = new File(config.getPersistencePath(),valuescolum+"_SignalProcessing.csv");
BufferedWriter bw = new BufferedWriter(new FileWriter(outputfilename ));
bw.write("Uniformly Sampled Time Series,Time Line,Forecasted Time Series,SSA Eigenvalues\n");
int[] lengthsVector = {ts.getValues().length,newtimes.length,ssa.getForecastSignal().length,eigenValues.length};
int maxLen = Operations.getMax(lengthsVector);
for (int i=0;i<maxLen;i++){
if (i<ts.getValues().length)
bw.write(""+ts.getValues()[i]+",");
else
bw.write(",");
if (i<newtimes.length)
bw.write(""+newtimes[i]+",");
else
bw.write(",");
if (i<ssa.getForecastSignal().length)
bw.write(""+ssa.getForecastSignal()[i]+",");
else
bw.write(",");
if (i<eigenValues.length)
bw.write(""+eigenValues[i]+",");
else
bw.write(",");
bw.write("\n");
}
bw.close();
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Files Produced");
if (display){
SignalProcessing.displaySignalWithTime(ts.getValues(), ts.getTime(), "Uniformly Sampled Time Series", chartpattern);
SignalProcessing.displaySignalWithGenericTime(ts.getValues(), 0,1, "Uniformly Sampled Time Series in Samples");
SignalProcessing.displaySignalWithTime(ssa.getForecastSignal(), newtimes, "Forecasted Time Series", chartpattern);
SignalProcessing.displaySignalWithGenericTime(eigenValues, 0f, 1,"SSA Eigenvalues");
}
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->"+outputParameters);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Computation has finished");
} catch (Throwable e) {
e.printStackTrace();
throw e;
}finally{
if (dbconnection!=null)
throw new Exception(e.getLocalizedMessage());
} finally {
if (dbconnection != null)
dbconnection.close();
}
}
@Override
protected void setInputParameters() {
	// the time series table
	List<TableTemplates> templates = new ArrayList<TableTemplates>();
	templates.add(TableTemplates.TIMESERIES);
	InputTable p = new InputTable(templates, timeSeriesTable, "The table containing the time series", "timeseries");
	// NOTE(review): the next line is a leftover unified-diff hunk header; the
	// statements between it and 'inputs.add(p1)' (presumably adding 'p' and
	// declaring the value/time-column inputs 'p1') are missing from this view
	// of the file and must be restored from the repository.
	@ -133,11 +238,44 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
	inputs.add(p1);
	addDoubleInput(frequencyResolution, "The precision in detecting the period. The lower this number the less the number of points in the Spectrogram (higher number of samples used at each step). Reducing this, the spectrogram will be finer and sharper, but you should tune it. Too many samples will make the Spectrogram noisy.", "1");
	addEnumerateInput(AggregationFunctions.values(), aggregationFunction, "Function to apply to samples with the same time instant", AggregationFunctions.SUM.name());
	addIntegerInput(SSAAnalysisWindowSamples, "The number of samples in the produced uniformly sampled signal, to use in the SSA algorithm. This number should identify a portion of the signal long enough to make the system guess the nature of the trend", "20");
	addDoubleInput(SSAEigenvaluesThreshold, "The threshold under which an SSA eigenvalue will be ignored, along with its eigenvector, for the reconstruction of the signal", "0.7");
	addIntegerInput(SSAPointsToForecast, "The number of points to forecast over the original length of the time series", "10");
	// default database connection parameters
	DatabaseType.addDefaultDBPars(inputs);
}
@Override
public void shutdown() {
public StatisticalType getOutput() {
LinkedHashMap<String, StatisticalType> outMap = PrimitiveType.stringMap2StatisticalMap(outputParameters);
LinkedHashMap<String, Image> producedImages = new LinkedHashMap<String, Image>();
if (signalImg!=null)
producedImages.put("Original Time Series",signalImg);
if (uniformSignalImg!=null)
producedImages.put("Uniformly Sampled Time Series",uniformSignalImg);
if (uniformSignalSamplesImg!=null)
producedImages.put("Uniformly Sampled Time Series in Samples",uniformSignalSamplesImg);
if (forecastsignalImg!=null)
producedImages.put("Forecasted Time Series",forecastsignalImg);
if (spectrogramImg!=null)
producedImages.put("Spectrogram of the Uniformly Sampled Time Series",spectrogramImg);
if (eigenValuesImg!=null)
producedImages.put("SSA Eigenvalues",eigenValuesImg);
PrimitiveType images = new PrimitiveType(HashMap.class.getName(),producedImages, PrimitiveTypes.IMAGES, "Time Series Report","Charts reporting the Time Series Analysis");
outMap.put("Images", images);
if (outputfilename!=null){
PrimitiveType file = new PrimitiveType(File.class.getName(), outputfilename, PrimitiveTypes.FILE, "AnalysisReport", "AnalysisReport");
outMap.put("Analysis Report", file);
}
PrimitiveType p = new PrimitiveType(LinkedHashMap.class.getName(), outMap, PrimitiveTypes.MAP, "Output", "");
return p;
}
@Override
public void shutdown() {
	// Nothing to release: the DB session is closed in process()'s finally block
}
}