New Time Series Analysis

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngine@99082 82a268e6-3cf1-43bd-a215-b396298e98cf
Gianpaolo Coro 2014-07-30 10:04:42 +00:00
parent 2cec208eb1
commit c4a3d3ae9d
5 changed files with 531 additions and 130 deletions

View File

@@ -4,6 +4,7 @@ import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.utils.Operations;
import org.gcube.portlets.user.timeseries.charts.support.types.Point;
public class MathFunctions {
@@ -136,6 +137,33 @@ public class MathFunctions {
return d;
}
// returns a boolean array marking the spike indexes
public static boolean[] findMaxima(double[] derivative,double threshold) {
boolean[] d = new boolean[derivative.length];
if (d.length > 0) {
d[0] = false;
for (int i = 1; i < derivative.length - 1; i++) {
if ((derivative[i] / derivative[i + 1] < 0) && derivative[i]>0){
// double ratio = Math.abs((double) derivative[i]/ (double) derivative[i+1]);
// System.out.println("RATIO "+i+" "+Math.abs(derivative[i]));
// if ((threshold>0)&&(ratio<threshold))
if ((threshold>0)&&(Math.abs(derivative[i])>threshold))
d[i] = true;
}
else
d[i] = false;
}
double max = Operations.getMax(derivative);
if (max==derivative[derivative.length - 1])
d[derivative.length - 1] = true;
else
d[derivative.length - 1] = false;
}
return d;
}
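// Editor's sketch, not part of this commit: a minimal, hypothetical use of findMaxima, mirroring
// how SignalConverter.takePeaksInSpectrogramFrames (further below) calls it on a spectrum slice.
// The array values and the method name findMaximaDemo are illustrative only.
public static void findMaximaDemo() {
double[] slice = { 0.1, 0.4, 2.0, 0.3, 0.2, 1.5, 0.1 }; // a toy power-spectrum frame
double[] derivSlice = MathFunctions.derivative(slice); // first-order differences
// true where the derivative flips from positive to negative and its magnitude exceeds the threshold
boolean[] spikes = MathFunctions.findMaxima(derivSlice, 0.001);
for (int k = 0; k < spikes.length; k++)
if (spikes[k])
System.out.println("spike around sample " + k);
}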
// returns a boolean array marking the spike indexes
public static boolean[] findSpikes(double[] derivative,double threshold) {
boolean[] d = new boolean[derivative.length];

View File

@@ -1,9 +1,10 @@
package org.gcube.dataanalysis.ecoengine.signals;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.signals.SignalConverter;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
public class PeriodicityDetector {
@@ -43,16 +44,30 @@ public class PeriodicityDetector {
public String getPeriodicityStregthInterpretation() {
if (periodicityStrength > 0.6)
return "High";
if (periodicityStrength < 0.6 && periodicityStrength > 0.5)
if (periodicityStrength <= 0.6 && periodicityStrength > 0.5)
return "Moderate";
if (periodicityStrength < 0.5 && periodicityStrength > 0.3)
if (periodicityStrength <= 0.5 && periodicityStrength > 0.3)
return "Weak";
if (periodicityStrength < 0.5 && periodicityStrength > 0.3)
if (periodicityStrength >= 0.3)
return "Very Low";
else
return "None";
}
public String getPowerSpectrumStregthInterpretation(double powerStrength) {
if (powerStrength > 3)
return "High";
if (powerStrength <= 3 && powerStrength > 2.5)
return "Moderate";
if (powerStrength <= 2.5 && powerStrength > 2)
return "Weak";
if (powerStrength >= 1.4)
return "Very Low";
else
return "None";
}
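// Editor's sketch, not part of this commit: how the two interpretation scales above map values to labels.
// A periodicity strength of 0.55 falls in (0.5, 0.6] and reads "Moderate"; a power-spectrum strength
// of 2.7 falls in (2.5, 3] and also reads "Moderate". The method name interpretationDemo is illustrative.
public void interpretationDemo() {
this.periodicityStrength = 0.55;
System.out.println(getPeriodicityStregthInterpretation()); // prints "Moderate"
System.out.println(getPowerSpectrumStregthInterpretation(2.7)); // prints "Moderate"
}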
public void demo() throws Exception {
double[] signal = produceNoisySignal(defaultSignalLengthTimeinSec, defaultSamplingRate, defaultHiddenFrequency, defaultSNratio);
@@ -94,27 +109,27 @@ public class PeriodicityDetector {
// estimate the best samples based on the error we want
int wLength = 0;
long pow = 0;
if (wantedFreqError>-1){
if (wantedFreqError > -1) {
pow = Math.round(Math.log((float) samplingRate / wantedFreqError) / Math.log(2));
if (pow <= 1)
pow = Math.round(Math.log((float) signal.length / (float) ("" + signal.length).length()) / Math.log(2));
if (pow <= 1)
pow = Math.round(Math.log((float) signal.length / (float) ("" + signal.length).length()) / Math.log(2));
AnalysisLogger.getLogger().debug("Suggested pow for window length=" + pow);
AnalysisLogger.getLogger().debug("Suggested pow for window length=" + pow);
}
//adjust FFT Samples to be even
else{
if (FFTnsamples<2)
FFTnsamples=2;
else if (FFTnsamples>signal.length)
FFTnsamples=signal.length;
// adjust FFT Samples to be even
else {
if (FFTnsamples < 2)
FFTnsamples = 2;
else if (FFTnsamples > signal.length)
FFTnsamples = signal.length;
pow = Math.round(Math.log((float) FFTnsamples) / Math.log(2));
}
wLength = (int) Math.pow(2, pow);
AnalysisLogger.getLogger().debug("Suggested windows length (samples)=" + wLength);
AnalysisLogger.getLogger().debug("Suggested windows length (s)=" + ((float) wLength / (float) samplingRate) + " s");
int windowAnalysisSamples = (int) Math.pow(2, 14);// (int)
@@ -185,12 +200,17 @@ public class PeriodicityDetector {
// reconstruct the F
double meanF = MathFunctions.mean(maxfrequencies);
//we consider a complete cycle
double possibleperiod = 2d/meanF;
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Frequency "+meanF);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Periodicity "+possibleperiod);
if ((meanF <= minPossibleFreq) || (meanF >= maxPossibleFreq) || (possibleperiod==0) || (possibleperiod>(endPeriodTime-startPeriodTime))) {
meanF=0;
// we consider a complete cycle
double possibleperiod = 2d / meanF;
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Frequency " + meanF);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Periodicity " + possibleperiod);
double maxperiod = Math.min(signal.length, currentWindowAnalysisSamples);
if ((meanF <= minPossibleFreq) || (meanF >= maxPossibleFreq) || (possibleperiod == 0) || (possibleperiod > (maxperiod))) {
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Invalid periodicity " + (meanF <= minPossibleFreq) + " , " + (meanF >= maxPossibleFreq) + " , " + (possibleperiod == 0) + " , " + (possibleperiod > (maxperiod)));
meanF = 0;
this.meanF = 0;
this.lowermeanF = 0;
this.uppermeanF = 0;
@@ -203,16 +223,183 @@ public class PeriodicityDetector {
this.endPeriodTime = 0;
this.startPeriodSampleIndex = 0;
this.endPeriodSampleIndex = 0;
} else {
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->periodicity is valid " + possibleperiod);
this.meanF = meanF;
this.lowermeanF = Math.max(meanF - error, minPossibleFreq);
this.uppermeanF = Math.min(meanF + error, maxFrequency);
this.meanPeriod = 2d / meanF;
this.meanPeriod = possibleperiod;
this.lowermeanPeriod = 2d / lowermeanF;
this.uppermeanPeriod = 2d / uppermeanF;
}
return meanF;
}
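// Editor's note, not part of this commit: a worked example of the confidence bounds above, assuming
// error = samplingRate / windowAnalysisSamples with samplingRate = 1, windowAnalysisSamples = 128,
// minPossibleFreq = 0.01, maxFrequency = 0.5 and a detected meanF = 0.1 samples^-1:
//   error           = 1 / 128 ~ 0.0078
//   lowermeanF      = max(0.1 - 0.0078, 0.01) ~ 0.0922,  uppermeanF = min(0.1 + 0.0078, 0.5) ~ 0.1078
//   meanPeriod      = 2 / 0.1 = 20 samples (the "complete cycle" convention used for possibleperiod above)
//   lowermeanPeriod = 2 / 0.0922 ~ 21.7 samples, uppermeanPeriod = 2 / 0.1078 ~ 18.6 samples,
//   i.e. the lower frequency bound gives the longer period bound.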
public void adjustParameters(double[] signal, int samplingRate, float minPossibleFreq, float maxPossibleFreq, float wantedFreqError, int FFTnsamples) {
// estimate the best samples based on the error we want
int wLength = 0;
long pow = 0;
if (wantedFreqError > -1) {
pow = Math.round(Math.log((float) samplingRate / wantedFreqError) / Math.log(2));
if (pow <= 1)
pow = Math.round(Math.log((float) signal.length / (float) ("" + signal.length).length()) / Math.log(2));
AnalysisLogger.getLogger().debug("Suggested pow for window length=" + pow);
}
// adjust FFT Samples to be even
else {
if (FFTnsamples < 2)
FFTnsamples = 2;
else if (FFTnsamples > signal.length)
FFTnsamples = signal.length;
pow = Math.round(Math.log((float) FFTnsamples) / Math.log(2));
}
wLength = (int) Math.pow(2, pow);
AnalysisLogger.getLogger().debug("Suggested windows length (samples)=" + wLength);
AnalysisLogger.getLogger().debug("Suggested windows length (s)=" + ((float) wLength / (float) samplingRate) + " s");
int windowAnalysisSamples = (int) Math.pow(2, 14);// (int)
windowAnalysisSamples = wLength;
int windowShiftSamples = (int) Math.round((float) windowAnalysisSamples / 2f);
float windowShiftTime = (float) SignalConverter.sample2Time(windowShiftSamples, samplingRate);
float error = ((float) samplingRate / (float) windowAnalysisSamples);
AnalysisLogger.getLogger().debug("Error in the Measure will be: " + error + " Hz");
AnalysisLogger.getLogger().debug("A priori Min Freq: " + minPossibleFreq + " s");
AnalysisLogger.getLogger().debug("A priori Max Freq: " + maxPossibleFreq + " s");
if (maxPossibleFreq >= samplingRate)
maxPossibleFreq = (float) (samplingRate / 2f) - (0.1f * samplingRate / 2f);
if (minPossibleFreq == 0)
minPossibleFreq = 0.1f;
minFrequency = minPossibleFreq;
maxFrequency = maxPossibleFreq;
// display the signal
// if (display)
// SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
this.currentSamplingRate = samplingRate;
this.currentWindowShiftSamples = windowShiftSamples;
this.currentWindowAnalysisSamples = windowAnalysisSamples;
}
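// Editor's note, not part of this commit: a worked example of the window-length choice above, with the
// hypothetical values samplingRate = 1 and wantedFreqError = 0.01 (the first branch of the if):
//   pow     = round(log2(samplingRate / wantedFreqError)) = round(log2(100)) = round(6.64) = 7
//   wLength = 2^7 = 128 samples, so windowShiftSamples = 64
//   and the frequency error of the measure is samplingRate / wLength = 1/128 ~ 0.0078 Hz.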
public LinkedHashMap<String, String> detectAllFrequencies(double[] signal, int samplingRate, float minPossibleFreq, float maxPossibleFreq, float wantedFreqError, int FFTnsamples, float sensitivity, boolean display) throws Exception {
adjustParameters(signal, samplingRate, minPossibleFreq, maxPossibleFreq, wantedFreqError, FFTnsamples);
//evaluate the minimum frequency resolution
double frequencyRes = ((double)samplingRate/2d)/ (double)currentWindowAnalysisSamples;
AnalysisLogger.getLogger().debug("Frequency Resolution: "+frequencyRes);
// trace spectrum
double[][] spectrum = SignalConverter.spectrogram("spectrogram", signal, samplingRate, currentWindowShiftSamples, currentWindowAnalysisSamples, false);
if (display)
SignalConverter.displaySpectrogram(spectrum, signal, "complete spectrogram", samplingRate, currentWindowShiftSamples, currentWindowAnalysisSamples);
// apply the bandpass filter
spectrum = SignalConverter.cutSpectrum(spectrum, minPossibleFreq, maxPossibleFreq, currentWindowAnalysisSamples, samplingRate);
if (display)
// display cut spectrum
SignalConverter.displaySpectrogram(spectrum, signal, "clean spectrogram", samplingRate, currentWindowShiftSamples, currentWindowAnalysisSamples);
float windowShiftTime = (float) SignalConverter.sample2Time(this.currentWindowShiftSamples, samplingRate);
float windowLengthTime = (float) SignalConverter.sample2Time(this.currentWindowAnalysisSamples, samplingRate);
float signalTime = (float) SignalConverter.sample2Time(signal.length, samplingRate);
currentspectrum = spectrum;
// extract the maximum frequencies in each frame
SignalConverter signalMaximumAnalyzer = new SignalConverter();
ArrayList<Double>[] maxfrequencies = signalMaximumAnalyzer.takePeaksInSpectrogramFrames(spectrum, samplingRate, currentWindowAnalysisSamples, minPossibleFreq);
LinkedHashMap<String, String> peaks = new LinkedHashMap<String, String>();
double maxperiod = (double) Math.min(signal.length, currentWindowAnalysisSamples) * (double) samplingRate;
double error= 1.96*frequencyRes;// ((float) samplingRate / (float) currentWindowAnalysisSamples);
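// Editor's note, not part of this commit: 1.96 is presumably the two-sided 95% z-value, so 'error'
// acts as a ~95% half-width of one frequency-resolution bin when comparing adjacent peaks below.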
for (int i = 0; i < maxfrequencies.length; i++) {
double startTime = SignalConverter.spectrogramTimeFromIndex(i, windowShiftTime);
double endTime = Math.min(startTime+windowLengthTime,signalTime);
int counter = 0;
int freqCounter = 0;
Double previousFreq=0d;
Double previousPeriod=-100d;
String prefix = "";
if (maxfrequencies.length>1)
prefix = " (Section " + (i + 1)+")";
for (Double peakFreq : maxfrequencies[i]) {
double period = 1d / peakFreq;
double power = signalMaximumAnalyzer.currentSpikesPowerSpectra[i].get(freqCounter);
double periodResolution = sensitivity/samplingRate;
//the period distance has to be at least the period resolution (9 sampling periods at the default LOW sensitivity); the frequencies should not go below the resolution or beyond the band borders
//the period should be contained roughly twice in the analysis window
//the power of the spectrum should be high enough
if ((Math.abs(previousPeriod-period)>(periodResolution))
&& (peakFreq-previousFreq>error)
&& (peakFreq >= minPossibleFreq)
&& (peakFreq <= maxPossibleFreq)
&& (period > 0)
&& (period < maxperiod*0.55f)
&& (!getPowerSpectrumStregthInterpretation(power).equalsIgnoreCase("None")))
{
AnalysisLogger.getLogger().debug("DISCREPANCY WITH RESPECT TO THE PREVIOUS FREQ:"+(peakFreq-previousFreq));
AnalysisLogger.getLogger().debug("RATIO WITH RESPECT TO THE PREVIOUS FREQ:"+((peakFreq-previousFreq)/error));
if (counter == 0) {
AnalysisLogger.getLogger().debug("Section "+(i+1));
peaks.put("*StartTime_In_Spectrogram"+prefix, "" + startTime);
peaks.put("*EndTime_In_Spectrogram" + prefix, "" + endTime);
}
double lowermeanF = Math.max(peakFreq - error, minPossibleFreq);
double uppermeanF = Math.min(peakFreq + error, maxPossibleFreq);
double upperUncertPeriod = 0;
double lowerUncertPeriod = 0;
if (peakFreq-previousFreq>error){
upperUncertPeriod=MathFunctions.roundDecimal(1d / lowermeanF,2);
lowerUncertPeriod=MathFunctions.roundDecimal(1d / uppermeanF,2);
}
else
{
upperUncertPeriod=MathFunctions.roundDecimal(period+periodResolution/2,2);
lowerUncertPeriod=Math.max(1/samplingRate,MathFunctions.roundDecimal(period-periodResolution/2,2));
}
peaks.put("Period_"+(counter+1)+prefix, MathFunctions.roundDecimal(period,2)+" ~ "+"["+lowerUncertPeriod+";"+upperUncertPeriod+"]");
peaks.put("Frequency_"+(counter+1)+prefix, MathFunctions.roundDecimal(peakFreq,2)+" ~ "+"["+MathFunctions.roundDecimal(lowermeanF,2)+";"+MathFunctions.roundDecimal(uppermeanF,2)+"]");
peaks.put("Strength_of_Periodicity_"+(counter+1)+prefix, MathFunctions.roundDecimal(signalMaximumAnalyzer.currentSpikesPowerSpectra[i].get(freqCounter),2)+" ("+getPowerSpectrumStregthInterpretation(signalMaximumAnalyzer.currentSpikesPowerSpectra[i].get(freqCounter))+")");
int minFidx = SignalConverter.frequencyIndex(minPossibleFreq, currentWindowAnalysisSamples, samplingRate);
double spectrogramidx = SignalConverter.spectrumFreq2Idx(peakFreq.floatValue(), samplingRate, currentWindowAnalysisSamples)-minFidx;
AnalysisLogger.getLogger().debug("SpectorgramIdx_"+(counter+1)+":" + spectrogramidx);
AnalysisLogger.getLogger().debug("Strength_of_Periodicity_"+(counter+1)+":" + signalMaximumAnalyzer.currentSpikesPowerSpectra[i].get(freqCounter));
AnalysisLogger.getLogger().debug("Strength_of_Periodicity_Interpretation"+(counter+1)+":" + getPowerSpectrumStregthInterpretation(signalMaximumAnalyzer.currentSpikesPowerSpectra[i].get(freqCounter)));
AnalysisLogger.getLogger().debug("Frequency_"+(counter+1)+":" + peakFreq);
AnalysisLogger.getLogger().debug("UpperFrequencyConfidence_"+(counter+1)+":" + uppermeanF);
AnalysisLogger.getLogger().debug("LowerFrequencyConfidence_"+(counter+1)+":" + lowermeanF);
AnalysisLogger.getLogger().debug("Period"+":" + period);
AnalysisLogger.getLogger().debug("UpperFrequencyPeriod_"+(counter+1)+":" + (1d / lowermeanF));
AnalysisLogger.getLogger().debug("LowerFrequencyPeriod_"+(counter+1)+":"+ (1d / uppermeanF));
AnalysisLogger.getLogger().debug("");
counter++;
previousFreq=peakFreq;
previousPeriod=period;
}
freqCounter++;
}
if (counter==0)
peaks.put("Periodicity_"+(counter+1)+prefix, "No periodicities found");
}
return peaks;
}
}
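// Editor's sketch, not part of this commit: driving detectAllFrequencies as TimeSeriesAnalysis does below.
// The argument order mirrors that call site (sampling rate 1, band 0.01-0.5 samples^-1, no target frequency
// error, FFT-window hint, sensitivity 9 = LOW, no display); the signal, the 64-sample hint and the class
// name are illustrative, the class is assumed to live in the same package, and AnalysisLogger may need
// to be initialised first (see TestSimpleSignal below).
class PeriodicityDetectorSketch {
public static void main(String[] args) throws Exception {
double[] signal = new double[256];
for (int i = 0; i < signal.length; i++)
signal[i] = Math.sin(2 * Math.PI * i / 20d); // a clean 20-sample period
PeriodicityDetector pd = new PeriodicityDetector();
java.util.LinkedHashMap<String, String> peaks = pd.detectAllFrequencies(signal, 1, 0.01f, 0.5f, -1, 64, 9f, false);
for (String key : peaks.keySet()) // Period_x, Frequency_x, Strength_of_Periodicity_x, or "No periodicities found"
System.out.println(key + " = " + peaks.get(key));
}
}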

View File

@@ -11,8 +11,10 @@ import marytts.signalproc.display.SpectrogramCustom;
import marytts.signalproc.window.Window;
import org.gcube.contentmanagement.graphtools.data.BigSamplesTable;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.transducers.TimeSeriesAnalysis;
import org.gcube.dataanalysis.ecoengine.utils.Operations;
import com.rapidminer.example.Attribute;
import com.rapidminer.example.Example;
@@ -345,6 +347,71 @@ public class SignalConverter {
return maxs;
}
ArrayList<Double>[] currentSpikesPowerSpectra;
public ArrayList<Double>[] takePeaksInSpectrogramFrames(double[][] spectrogram, int samplingRate, int windowSamples, float minfreq) {
ArrayList<Double>[] maxs = new ArrayList[spectrogram.length];
ArrayList<Double>[] powers = new ArrayList[spectrogram.length];
if (TimeSeriesAnalysis.display)
SignalProcessing.displaySignalWithGenericTime(spectrogram[0], 0, 1, "spectrum");
int minFidx = SignalConverter.frequencyIndex(minfreq, windowSamples, samplingRate);
for (int j=0;j<spectrogram.length;j++) {
double[] slice = spectrogram[j];
double maxAmp = Operations.getMax(slice);
double minAmp = Operations.getMin(slice);
//old code: once we used the first element of the FFT as reference, but it is unreliable
double refAmplitude = 0;
if (maxAmp!=slice[0])
refAmplitude = (slice[0]-minAmp);//(maxAmp-minAmp)/2d;
else
refAmplitude = MathFunctions.mean(slice)-minAmp;
ArrayList<Double> maxFreqs = new ArrayList<Double>();
ArrayList<Double> localpowers = new ArrayList<Double>();
double [] derivSlice = MathFunctions.derivative(slice);
boolean [] spikes = MathFunctions.findMaxima(derivSlice,0.001);
for (int i=0;i<spikes.length;i++){
if (spikes[i]){
// AnalysisLogger.getLogger().debug("Spike at "+i);
maxFreqs.add((double)spectrumIdx2Frequency(minFidx + i, samplingRate, windowSamples));
//make the min correspond to y=0
//take a few samples around the spike and evaluate its amplitude with respect to the surrounding samples
int round =Math.max(slice.length/10,1);
//take samples to the left
double roundmean = 0;
for (int g=1;g<=round;g++){
if (i-g>=0){
roundmean = roundmean+slice[i-g]-minAmp;
}
}
//take samples to the right
for (int g=1;g<=round;g++){
if (i+g<slice.length){
roundmean = roundmean+slice[i+g]-minAmp;
}
}
//take mean value
roundmean = roundmean/(2d*(double)round);
//calculate the power as the ratio between the spike and the surrounding points
double power = (slice[i]-minAmp)/(roundmean);
localpowers.add(power);
}
}
powers[j]=localpowers;
maxs[j]=maxFreqs;
}
currentSpikesPowerSpectra=powers;
return maxs;
}
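// Editor's note, not part of this commit: a worked example of the spike "power" above with toy numbers.
// If a spike bin has amplitude 9, the slice minimum is 1 and the surrounding bins (slice.length/10 of
// them on each side) average 3 after subtracting that minimum, then power = (9 - 1) / 3 ~ 2.67, which
// getPowerSpectrumStregthInterpretation in PeriodicityDetector reads as "Moderate" (2.5 < power <= 3).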
public int startStableTractIdx = -1;
public int endStableTractIdx = -1;
@@ -395,6 +462,8 @@ public class SignalConverter {
int[] bestcouple = pairs.get(best);
// take the related slice of signal
if (bestcouple[1]==bestcouple[0])
bestcouple[1]=bestcouple[0]+1;
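// (editor note, not part of this commit) the range is widened here because a degenerate single-index
// stable tract would otherwise make the subsignal allocated below empty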
double[] subsignal = new double[bestcouple[1] - bestcouple[0]];
AnalysisLogger.getLogger().debug("Longest range: from " + bestcouple[0] + " to " + bestcouple[1]);
startStableTractIdx = bestcouple[0];

View File

@@ -8,6 +8,7 @@ import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.ecoengine.transducers.TimeSeriesAnalysis;
public class TestSimpleSignal {
@@ -17,16 +18,23 @@ public class TestSimpleSignal {
// static AlgorithmConfiguration[] configs = {NAFOSignalConfig()};
// static AlgorithmConfiguration[] configs = {largeCustomSignalConfig()};
// static AlgorithmConfiguration[] configs = {temperatureSignalConfig()};
// static AlgorithmConfiguration[] configs = {periodicSignalConfig()};
static AlgorithmConfiguration[] configs = {periodicSignalConfig()};
// static AlgorithmConfiguration[] configs = {simpleSignalConfig()};
// static AlgorithmConfiguration[] configs = {sawSignalConfig()};
static AlgorithmConfiguration[] configs = {temperatureSignalConfig()};
// static AlgorithmConfiguration[] configs = {temperatureSignalConfig()};
// static AlgorithmConfiguration[] configs = {temperatureBariSignalConfig()};
// static AlgorithmConfiguration[] configs = {russianSignalConfig()};
// static AlgorithmConfiguration[] configs = {largeCustomSignalConfig()};
// static AlgorithmConfiguration[] configs = {occurrencePointsSignalConfig()};
// static AlgorithmConfiguration[] configs = {hugeSignalConfig()};
// static AlgorithmConfiguration[] configs = {IOTCSSignalConfig()};
// static AlgorithmConfiguration[] configs = {temperatureShortSignalConfig()};
public static void main(String[] args) throws Exception {
TimeSeriesAnalysis.display=true;
int wLength = (int) Math.pow(2, 1);
System.out.println("L:"+wLength);
for (int i = 0; i < configs.length; i++) {
@@ -43,6 +51,37 @@ public class TestSimpleSignal {
}
}
public static AlgorithmConfiguration IOTCSSignalConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "timeseries_idb73029b9_226e_4d0f_b828_24854d0b7b44");
config.setParam("ValueColum", "cpue");
config.setParam("FFT_Window_Samples", "200");
config.setParam("AggregationFunction", "SUM");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
config.setConfigPath("./cfg");
return config;
}
public static AlgorithmConfiguration simpleSignalConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
@@ -56,7 +95,7 @@ public class TestSimpleSignal {
// vessels
config.setParam("TimeSeriesTable", "timeseries_id4dd368bf_63fb_4d19_8e31_20ced63a477d");
config.setParam("ValueColum", "quantity");
config.setParam("FFT_Window_Samples", "70");
config.setParam("SSA_Window_in_Samples", "30");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
@@ -82,6 +121,7 @@ public class TestSimpleSignal {
// vessels
config.setParam("TimeSeriesTable", "generic_ideb9efbe0_61ad_4eea_b0ee_95e64ce11b28");
config.setParam("ValueColum", "quantity");
config.setParam("FFT_Window_Samples", "70");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
@@ -109,6 +149,7 @@ public class TestSimpleSignal {
config.setParam("ValueColum", "speed");
config.setParam("TimeColum", "datetime");
config.setParam("AggregationFunction", "AVG");
config.setParam("FFT_Window_Samples", "200");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
@@ -132,7 +173,10 @@ public class TestSimpleSignal {
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("FFT_Window_Samples", "200");
config.setParam("TimeSeriesTable", "timeseries_ide814eb07_c13b_41b3_a240_aa99446db831");
config.setParam("Sensitivity", "HIGH");
config.setParam("ValueColum", "quantity");
config.setParam("FrequencyResolution", "0.01");
config.setParam("SSA_Window_in_Samples", "20");
@@ -190,6 +234,7 @@ public class TestSimpleSignal {
config.setParam("TimeSeriesTable", "timeseries_id39c6c28f_2484_421c_8ffb_9c2cc2330c62");
config.setParam("ValueColum", "speed");
config.setParam("FFT_Window_Samples", "50");
config.setParam("SSA_Window_in_Samples", "30");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
@@ -217,7 +262,7 @@ public class TestSimpleSignal {
config.setParam("ValueColum", "quantity");
config.setParam("FrequencyResolution", "0.01");
config.setParam("FFT_Window_Samples", "50");
config.setParam("SSA_Window_in_Samples", "20");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
@@ -304,7 +349,7 @@ public class TestSimpleSignal {
config.setParam("ValueColum", "fvalue");
config.setParam("FFT_Window_Samples", "70");
config.setParam("FFT_Window_Samples", "52");
config.setParam("SSA_Window_in_Samples", "10");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
@@ -317,5 +362,60 @@ public class TestSimpleSignal {
}
public static AlgorithmConfiguration temperatureBariSignalConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "timeseries_id5b39298a_0e32_4a9c_8e6c_f2e48e3f1b1a");
config.setParam("ValueColum", "fvalue");
config.setParam("FFT_Window_Samples", "500");
config.setParam("SSA_Window_in_Samples", "10");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
config.setConfigPath("./cfg");
return config;
}
public static AlgorithmConfiguration temperatureShortSignalConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName", "gcube");
config.setParam("DatabasePassword", "d4science2");
config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
// vessels
config.setParam("TimeSeriesTable", "testtextractiontemp");
config.setParam("ValueColum", "fvalue");
config.setParam("FFT_Window_Samples", "500");
config.setParam("SSA_Window_in_Samples", "10");
config.setParam("SSA_EigenvaluesThreshold", "0.7");
config.setParam("SSA_Points_to_Forecast", "10");
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
config.setGcubeScope("/gcube");
config.setConfigPath("./cfg");
return config;
}
}

View File

@@ -40,6 +40,7 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
private static String timeColumn = "TimeColum";
private static String fftwindowsamples = "FFT_Window_Samples";
private static String aggregationFunction = "AggregationFunction";
private static String sensitivityParam = "Sensitivity";
private static String SSAAnalysisWindowSamples = "SSA_Window_in_Samples";
private static String SSAEigenvaluesThreshold = "SSA_EigenvaluesThreshold";
private static String SSAPointsToForecast = "SSA_Points_to_Forecast";
@@ -48,10 +49,15 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
private Image uniformSignalSamplesImg = null;
private Image spectrogramImg = null;
private Image forecastsignalImg = null;
private Image eigenValuesImg=null;
private File outputfilename=null;
private Image eigenValuesImg = null;
private File outputfilename = null;
public static boolean display = false;
private static int maxpoints = 10000;
public enum Sensitivity {
LOW, NORMAL, HIGH
}
@Override
public void init() throws Exception {
@@ -59,11 +65,9 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
@Override
public String getDescription() {
return "An algorithms applying signal processing to a non uniform time series. A maximum of "+maxpoints+" distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram.";
return "An algorithms applying signal processing to a non uniform time series. A maximum of " + maxpoints + " distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram.";
}
@Override
protected void process() throws Exception {
@@ -79,8 +83,24 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
int windowLength = Integer.parseInt(config.getParam(SSAAnalysisWindowSamples));
float eigenvaluespercthr = Float.parseFloat(config.getParam(SSAEigenvaluesThreshold));
int pointsToReconstruct = Integer.parseInt(config.getParam(SSAPointsToForecast));
Sensitivity sensitivityP = Sensitivity.LOW;
try{sensitivityP = Sensitivity.valueOf(config.getParam(sensitivityParam));}catch(Exception e){}
int fftWindowSamplesDouble = 1;
float sensitivity = 9;
switch (sensitivityP) {
case LOW:
sensitivity = 9;
break;
case NORMAL:
sensitivity = 5;
break;
case HIGH:
sensitivity = 1;
break;
}
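// Editor's note, not part of this commit: in PeriodicityDetector.detectAllFrequencies this value becomes
// a period resolution of sensitivity/samplingRate; with the unit sampling rate used below, LOW keeps
// reported peaks at least 9 samples apart in period, NORMAL 5 samples and HIGH 1 sample.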
int fftWindowSamplesDouble = 1;
if (timecolumn == null)
timecolumn = "time";
if (aggregationFunc == null)
@@ -100,18 +120,18 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->SSA Window Samples: " + windowLength);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->SSA Eigenvalues threshold: " + eigenvaluespercthr);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->SSA Points to Reconstruct: " + pointsToReconstruct);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Extracting Points...");
String query = "select * from (select " + aggregationFunc + "(" + valuescolum + ")," + timecolumn + " from " + tablename + " group by " + timecolumn + ") as a";
String query = "select * from (select " + aggregationFunc + "( CAST ( " + valuescolum + " as real))," + timecolumn + " from " + tablename + " group by " + timecolumn + ") as a";
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Query to execute: " + query);
List<Object> results = DatabaseFactory.executeSQLQuery(query, dbconnection);
status = 10;
if (results == null || results.size() == 0)
throw new Exception("Error in retrieving values from the table: no time series found");
else if (results.size() > maxpoints)
throw new Exception("Too long Time Series: a maximum of distinct "+maxpoints+" in time is allowed");
throw new Exception("Too long Time Series: a maximum of distinct " + maxpoints + " in time is allowed");
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Points Extracted!");
// build signal
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Building signal");
@@ -125,23 +145,18 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
sizesignal++;
}
status = 20;
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Signal built with success. Size: " + sizesignal);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Building Time Series");
TimeSeries ts = TimeSeries.buildFromSignal(signal, config);
String timepattern = ts.getTimepattern();
String chartpattern = "MM-dd-yy";
if (timepattern.equals("s") ||
(DateGuesser.isJavaDateOrigin(ts.getTime()[0]) &&
DateGuesser.isJavaDateOrigin(ts.getTime()[ts.getTime().length-1]))
)
{
if (timepattern.equals("s") || (DateGuesser.isJavaDateOrigin(ts.getTime()[0]) && DateGuesser.isJavaDateOrigin(ts.getTime()[ts.getTime().length - 1]))) {
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Changing chart pattern to Seconds!");
chartpattern = "HH:mm:ss:SS";
}
else
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Chart pattern remains "+chartpattern);
} else
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Chart pattern remains " + chartpattern);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Uniformly sampling the signal");
if (display)
SignalProcessing.displaySignalWithTime(ts.getValues(), ts.getTime(), "Time Series", chartpattern);
@@ -150,27 +165,31 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
ts.convertToUniformSignal(0);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Uniform sampling finished");
status = 30;
// spectrum and signal processing
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Detecting periodicity");
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Detecting periodicities");
PeriodicityDetector pd = new PeriodicityDetector();
double F = pd.detectFrequency(ts.getValues(), 1, 0.01f, 0.5f, -1, fftWindowSamplesDouble, display);
outputParameters.put("Original Time Series Length", ""+originalSignalLength);
outputParameters.put("Uniformly Samples Time Series Length", ""+ts.getValues().length);
outputParameters.put("Spectral Analysis Window Length", ""+pd.currentWindowAnalysisSamples);
outputParameters.put("Spectral Analysis Window Shift", ""+pd.currentWindowShiftSamples);
outputParameters.put("Spectral Analysis Sampling Rate", ""+MathFunctions.roundDecimal(pd.currentSamplingRate,2));
outputParameters.put("Spectrogram Sections", ""+pd.currentspectrum.length);
outputParameters.put("Detected Frequency (samples^-1)", ""+MathFunctions.roundDecimal(F,2));
outputParameters.put("Indecision on Frequency", "["+MathFunctions.roundDecimal(pd.lowermeanF,2)+" , "+MathFunctions.roundDecimal(pd.uppermeanF,2) + "]");
outputParameters.put("Average detected Period (samples)", ""+MathFunctions.roundDecimal(pd.meanPeriod,2));
outputParameters.put("Indecision on Average Period", "["+MathFunctions.roundDecimal(pd.lowermeanPeriod,2)+" , "+MathFunctions.roundDecimal(pd.uppermeanPeriod,2) + "]");
outputParameters.put("Samples range in which periodicity was detected", "from "+pd.startPeriodSampleIndex+" to "+pd.endPeriodSampleIndex);
outputParameters.put("Period Strength with interpretation", ""+MathFunctions.roundDecimal(pd.periodicityStrength,2)+" ("+pd.getPeriodicityStregthInterpretation()+")");
outputParameters.put("Range of frequencies (in samples^-1) represented in the Spectrogram:", "["+MathFunctions.roundDecimal(pd.minFrequency, 2)+" , "+MathFunctions.roundDecimal(pd.maxFrequency, 2) + "]");
LinkedHashMap<String, String> frequencies = pd.detectAllFrequencies(ts.getValues(), 1, 0.01f, 0.5f, -1, fftWindowSamplesDouble, sensitivity, display);
outputParameters.put("Original Time Series Length", "" + originalSignalLength);
outputParameters.put("Uniformly Samples Time Series Length", "" + ts.getValues().length);
outputParameters.put("Spectral Analysis Window Length", "" + pd.currentWindowAnalysisSamples);
outputParameters.put("Spectral Analysis Window Shift", "" + pd.currentWindowShiftSamples);
outputParameters.put("Spectral Analysis Sampling Rate", "" + MathFunctions.roundDecimal(pd.currentSamplingRate, 2));
outputParameters.put("Spectrogram Sections", "" + pd.currentspectrum.length);
outputParameters.put("Range of frequencies (in samples^-1) represented in the Spectrogram:", "[" + MathFunctions.roundDecimal(pd.minFrequency, 2) + " ; " + MathFunctions.roundDecimal(pd.maxFrequency, 2) + "]");
outputParameters.put("Unit of Measure of Frequency", "samples^-1");
outputParameters.put("Unit of Measure of Time", "samples");
for (String freqPar : frequencies.keySet()) {
outputParameters.put(freqPar, frequencies.get(freqPar));
}
/*
* outputParameters.put("Detected Frequency (samples^-1)", ""+MathFunctions.roundDecimal(F,2)); outputParameters.put("Indecision on Frequency", "["+MathFunctions.roundDecimal(pd.lowermeanF,2)+" , "+MathFunctions.roundDecimal(pd.uppermeanF,2) + "]"); outputParameters.put("Average detected Period (samples)", ""+MathFunctions.roundDecimal(pd.meanPeriod,2)); outputParameters.put("Indecision on Average Period", "["+MathFunctions.roundDecimal(pd.lowermeanPeriod,2)+" , "+MathFunctions.roundDecimal(pd.uppermeanPeriod,2) + "]"); outputParameters.put("Samples range in which periodicity was detected", "from "+pd.startPeriodSampleIndex+" to "+pd.endPeriodSampleIndex); outputParameters.put("Period Strength with interpretation", ""+MathFunctions.roundDecimal(pd.periodicityStrength,2)+" ("+pd.getPeriodicityStregthInterpretation()+")");
*/
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Periodicity Detected!");
status =60;
status = 60;
System.gc();
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Executing SSA analysis");
List<Double> values = new ArrayList<Double>();
@@ -180,27 +199,27 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
Date[] newtimes = ts.extendTime(pointsToReconstruct);
SSADataset ssa = null;
if (windowLength<ts.getValues().length)
ssa = SSAWorkflow.applyCompleteWorkflow(values, windowLength, eigenvaluespercthr, pointsToReconstruct, false);
else{
if (windowLength < ts.getValues().length)
ssa = SSAWorkflow.applyCompleteWorkflow(values, windowLength, eigenvaluespercthr, pointsToReconstruct, false);
else {
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->SSA analysis impossible to complete");
outputParameters.put("SSA Note:", "The window length is higher than the signal length. Please reduce the value to less than the signal length.");
return;
}
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->SSA analysis completed");
status = 70;
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Rendering Images");
uniformSignalImg = SignalProcessing.renderSignalWithTime(ts.getValues(), ts.getTime(), "Uniformly Sampled Time Series", chartpattern);
if (uniformSignalImg==null)
if (uniformSignalImg == null)
outputParameters.put("Note:", "The charts for uniformly sampled and forecasted signals contain too many points and will not be displayed. The values will be only reported in the output file.");
else
outputParameters.put("Note:", "Details about the values are reported in the output file.");
uniformSignalSamplesImg = SignalProcessing.renderSignalWithGenericTime(ts.getValues(), 0,1, "Uniformly Sampled Time Series in Samples");
uniformSignalSamplesImg = SignalProcessing.renderSignalWithGenericTime(ts.getValues(), 0, 1, "Uniformly Sampled Time Series in Samples");
spectrogramImg = SignalProcessing.renderSignalSpectrogram2(pd.currentspectrum);
int timeseriesV = ts.getValues().length;
double[] forecastedpiece = Arrays.copyOfRange(ssa.getForecastSignal(), timeseriesV, timeseriesV+pointsToReconstruct);
double[] forecastedpiece = Arrays.copyOfRange(ssa.getForecastSignal(), timeseriesV, timeseriesV + pointsToReconstruct);
List<String> tsnames = new ArrayList<String>();
tsnames.add("Original Time Series");
tsnames.add("Forecasted Time Series");
@@ -208,50 +227,50 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
signals.add(ts.getValues());
signals.add(forecastedpiece);
forecastsignalImg = SignalProcessing.renderSignalsWithTime(signals, newtimes, tsnames, chartpattern);
if (display){
if (display) {
SignalProcessing.displaySignalsWithTime(signals, newtimes, tsnames, chartpattern);
}
double[] eigenValues = new double[ssa.getPercentList().size()];
for (int i=0;i<eigenValues.length;i++){
eigenValues[i] = ssa.getPercentList().get(i);
for (int i = 0; i < eigenValues.length; i++) {
eigenValues[i] = ssa.getPercentList().get(i);
}
eigenValuesImg = SignalProcessing.renderSignalWithGenericTime(eigenValues, 0f, 1,"SSA Eigenvalues");
eigenValuesImg = SignalProcessing.renderSignalWithGenericTime(eigenValues, 0f, 1, "SSA Eigenvalues");
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Images Rendered");
System.gc();
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Producing Files");
outputfilename = new File(config.getPersistencePath(),valuescolum+"_SignalProcessing.csv");
BufferedWriter bw = new BufferedWriter(new FileWriter(outputfilename ));
outputfilename = new File(config.getPersistencePath(), valuescolum + "_SignalProcessing.csv");
BufferedWriter bw = new BufferedWriter(new FileWriter(outputfilename));
bw.write("Uniformly Sampled Time Series,Time Line,Forecasted Time Series,SSA Eigenvalues\n");
int[] lengthsVector = {ts.getValues().length,newtimes.length,ssa.getForecastSignal().length,eigenValues.length};
int[] lengthsVector = { ts.getValues().length, newtimes.length, ssa.getForecastSignal().length, eigenValues.length };
int maxLen = Operations.getMax(lengthsVector);
for (int i=0;i<maxLen;i++){
if (i<ts.getValues().length)
bw.write(""+ts.getValues()[i]+",");
for (int i = 0; i < maxLen; i++) {
if (i < ts.getValues().length)
bw.write("" + ts.getValues()[i] + ",");
else
bw.write(",");
if (i<newtimes.length)
bw.write(""+newtimes[i]+",");
if (i < newtimes.length)
bw.write("" + newtimes[i] + ",");
else
bw.write(",");
if (i<ssa.getForecastSignal().length)
bw.write(""+ssa.getForecastSignal()[i]+",");
if (i < ssa.getForecastSignal().length)
bw.write("" + ssa.getForecastSignal()[i] + ",");
else
bw.write(",");
if (i<eigenValues.length)
bw.write(""+eigenValues[i]+",");
if (i < eigenValues.length)
bw.write("" + eigenValues[i] + ",");
else
bw.write(",");
bw.write("\n");
}
bw.close();
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Files Produced");
if (display){
if (display) {
SignalProcessing.displaySignalWithTime(ts.getValues(), ts.getTime(), "Uniformly Sampled Time Series", chartpattern);
SignalProcessing.displaySignalWithGenericTime(ts.getValues(), 0,1, "Uniformly Sampled Time Series in Samples");
SignalProcessing.displaySignalWithGenericTime(ts.getValues(), 0, 1, "Uniformly Sampled Time Series in Samples");
SignalProcessing.displaySignalWithTime(ssa.getForecastSignal(), newtimes, "Forecasted Time Series", chartpattern);
SignalProcessing.displaySignalWithGenericTime(eigenValues, 0f, 1,"SSA Eigenvalues");
SignalProcessing.displaySignalWithGenericTime(eigenValues, 0f, 1, "SSA Eigenvalues");
}
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->"+outputParameters);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->" + outputParameters);
AnalysisLogger.getLogger().debug("TimeSeriesAnalysis->Computation has finished");
} catch (Throwable e) {
e.printStackTrace();
@@ -272,9 +291,10 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
inputs.add(p);
ColumnType p1 = new ColumnType(timeSeriesTable, valuesColumn, "The column containing the values of the time series", "values", false);
inputs.add(p1);
// addDoubleInput(fftwindowsamples, "The number of samples precision in detecting the period. The lower this number the less the number of points in the Spectrogram (higher number of samples used at each step). Reducing this, the spectrogram will be finer and sharper, but you should tune it. Too many samples will make the Spectrogram noisy.", "1");
addDoubleInput(fftwindowsamples, "The number of samples N on which the Fourier Transform (FFT) will be extracted. The FFT will be calculated every N/2 samples, taking N samples each time. The spectrogram will display the FFT on the slices of N samples.", "12");
// addDoubleInput(fftwindowsamples, "The number of samples precision in detecting the period. The lower this number the less the number of points in the Spectrogram (higher number of samples used at each step). Reducing this, the spectrogram will be finer and sharper, but you should tune it. Too many samples will make the Spectrogram noisy.", "1");
addDoubleInput(fftwindowsamples, "The number of samples N on which the Fourier Transform (FFT) will be extracted. It should be a power of two and less than the signal length, otherwise it will be automatically recalculated. The FFT will be calculated every N/2 samples, taking N samples each time. The spectrogram will display the FFT on the slices of N samples.", "12");
addEnumerateInput(AggregationFunctions.values(), aggregationFunction, "Function to apply to samples with the same time instant", AggregationFunctions.SUM.name());
addEnumerateInput(Sensitivity.values(), sensitivityParam, "Sensitivity to the frequency components. High sensitivity will report all the frequency components, low sensitivity will report only the most distant ones.", Sensitivity.LOW.name());
addIntegerInput(SSAAnalysisWindowSamples, "The number of samples in the produced uniformly sampled signal, to use in the SSA algorithm. Must be strictly less than the Time Series length. This number should identify a portion of the signal long enough to make the system guess the nature of the trend", "20");
addDoubleInput(SSAEigenvaluesThreshold, "The threshold under which an SSA eigenvalue will be ignored, along with its eigenvector, for the reconstruction of the signal", "0.7");
addIntegerInput(SSAPointsToForecast, "The number of points to forecast over the original length of the time series", "10");
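// Editor's sketch, not part of this commit: how a caller would supply these inputs, mirroring the
// configurations in TestSimpleSignal above (the table and column names are illustrative):
//   AlgorithmConfiguration config = new AlgorithmConfiguration();
//   config.setAgent("TIME_SERIES_PROCESSING");
//   config.setParam("TimeSeriesTable", "timeseries_example");
//   config.setParam("ValueColum", "quantity");
//   config.setParam("FFT_Window_Samples", "12");
//   config.setParam("AggregationFunction", "SUM");
//   config.setParam("Sensitivity", "LOW");
//   config.setParam("SSA_Window_in_Samples", "20");
//   config.setParam("SSA_EigenvaluesThreshold", "0.7");
//   config.setParam("SSA_Points_to_Forecast", "10");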
@@ -283,39 +303,36 @@ public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {
@Override
public StatisticalType getOutput() {
LinkedHashMap<String, StatisticalType> outMap = PrimitiveType.stringMap2StatisticalMap(outputParameters);
LinkedHashMap<String, Image> producedImages = new LinkedHashMap<String, Image>();
if (signalImg!=null)
producedImages.put("Original Time Series",signalImg);
if (uniformSignalImg!=null)
producedImages.put("Uniformly Sampled Time Series",uniformSignalImg);
if (uniformSignalSamplesImg!=null)
producedImages.put("Uniformly Sampled Time Series in Samples",uniformSignalSamplesImg);
if (forecastsignalImg!=null)
producedImages.put("Forecasted Time Series",forecastsignalImg);
if (spectrogramImg!=null)
producedImages.put("Spectrogram of the Uniformly Sampled Time Series",spectrogramImg);
if (eigenValuesImg!=null)
producedImages.put("SSA Eigenvalues",eigenValuesImg);
PrimitiveType images = new PrimitiveType(HashMap.class.getName(),producedImages, PrimitiveTypes.IMAGES, "Time Series Report","Charts reporting the Time Series Analysis");
outMap.put("Images", images);
if (outputfilename!=null){
PrimitiveType file = new PrimitiveType(File.class.getName(), outputfilename, PrimitiveTypes.FILE, "AnalysisReport", "AnalysisReport");
outMap.put("Analysis Report", file);
}
PrimitiveType p = new PrimitiveType(LinkedHashMap.class.getName(), outMap, PrimitiveTypes.MAP, "Output", "");
LinkedHashMap<String, StatisticalType> outMap = PrimitiveType.stringMap2StatisticalMap(outputParameters);
LinkedHashMap<String, Image> producedImages = new LinkedHashMap<String, Image>();
if (signalImg != null)
producedImages.put("Original Time Series", signalImg);
if (uniformSignalImg != null)
producedImages.put("Uniformly Sampled Time Series", uniformSignalImg);
if (uniformSignalSamplesImg != null)
producedImages.put("Uniformly Sampled Time Series in Samples", uniformSignalSamplesImg);
if (forecastsignalImg != null)
producedImages.put("Forecasted Time Series", forecastsignalImg);
if (spectrogramImg != null)
producedImages.put("Spectrogram of the Uniformly Sampled Time Series", spectrogramImg);
if (eigenValuesImg != null)
producedImages.put("SSA Eigenvalues", eigenValuesImg);
PrimitiveType images = new PrimitiveType(HashMap.class.getName(), producedImages, PrimitiveTypes.IMAGES, "Time Series Report", "Charts reporting the Time Series Analysis");
outMap.put("Images", images);
if (outputfilename != null) {
PrimitiveType file = new PrimitiveType(File.class.getName(), outputfilename, PrimitiveTypes.FILE, "AnalysisReport", "AnalysisReport");
outMap.put("Analysis Report", file);
}
PrimitiveType p = new PrimitiveType(LinkedHashMap.class.getName(), outMap, PrimitiveTypes.MAP, "Output", "");
return p;
}
@Override
public void shutdown() {
}
}