Enhancements for the new version of StatMan

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngine@115635 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
Gianpaolo Coro 2015-06-30 09:56:42 +00:00
parent 9ac4b270ad
commit 8ea5625378
5 changed files with 179 additions and 9 deletions

View File

@ -156,7 +156,6 @@ public class ProcessorsFactory {
if (algclass instanceof Generator) {
Generator g = (Generator) algclass;
g.setConfiguration(config);
g.setConfiguration(config);
return g;
} else if (algclass instanceof Modeler) {
Modeler m = (Modeler) algclass;
@ -195,7 +194,6 @@ public class ProcessorsFactory {
map.put("EVALUATORS", EvaluatorsFactory.getAllEvaluators(config.getConfigPath()));
map.put("CLUSTERERS", ClusterersFactory.getAllClusterers(config.getConfigPath()));
map.put("TRANSDUCERS", TransducerersFactory.getAllTransducerers(config));
map.put("TEMPORAL_ANALYSIS", new ArrayList<String>());
return map;
}

View File

@ -291,8 +291,11 @@ public class SignalConverter {
double[] maxs = new double[spectrogram.length];
averagepower = new double[spectrogram.length];
int j = 0;
if (TimeSeriesAnalysis.display)
SignalProcessing.displaySignalWithGenericTime(spectrogram[0], 0, 1, "spectrum");
if (TimeSeriesAnalysis.display){
for (int g=0;g<spectrogram.length;g++){
SignalProcessing.displaySignalWithGenericTime(spectrogram[g], 0, 1, "spectrum "+(g+1));
}
}
double tolerance = 0.05;
for (double[] slice : spectrogram) {
int bestidx = 0;
@ -353,9 +356,11 @@ public class SignalConverter {
ArrayList<Double>[] maxs = new ArrayList[spectrogram.length];
ArrayList<Double>[] powers = new ArrayList[spectrogram.length];
if (TimeSeriesAnalysis.display)
SignalProcessing.displaySignalWithGenericTime(spectrogram[0], 0, 1, "spectrum");
if (TimeSeriesAnalysis.display){
for (int g=0;g<spectrogram.length;g++){
SignalProcessing.displaySignalWithGenericTime(spectrogram[g], 0, 1, "spectrum "+(g+1));
}
}
int minFidx = SignalConverter.frequencyIndex(minfreq, windowSamples, samplingRate);
for (int j=0;j<spectrogram.length;j++) {
@ -479,11 +484,11 @@ public class SignalConverter {
}
/**
 * Converts a spectrogram bin index back to a frequency in Hz.
 * Inverse of {@link #spectrumFreq2Idx(float, int, int)}.
 * NOTE: the diff residue left two return statements (old 2f factor and new 1f factor);
 * the post-commit version uses the 1f factor, i.e. bin width = samplingRate / (windowsSizeSamples - 1).
 *
 * @param idx                spectrum bin index (0-based)
 * @param samplingRate       sampling rate of the signal, in Hz
 * @param windowsSizeSamples FFT window size in samples; must be > 1
 * @return the frequency (Hz) corresponding to {@code idx}
 */
public static float spectrumIdx2Frequency(int idx, int samplingRate, int windowsSizeSamples) {
	return ((float) idx * samplingRate) / (1f * (float) (windowsSizeSamples - 1));
}
/**
 * Converts a frequency in Hz to the nearest spectrogram bin index.
 * Inverse of {@link #spectrumIdx2Frequency(int, int, int)}.
 * NOTE: the diff residue left two return statements (old 2f factor and new 1f factor);
 * the post-commit version uses the 1f factor, consistent with spectrumIdx2Frequency.
 *
 * @param freq               frequency in Hz
 * @param samplingRate       sampling rate of the signal, in Hz
 * @param windowsSizeSamples FFT window size in samples; must be > 1
 * @return the nearest bin index for {@code freq}
 */
public static int spectrumFreq2Idx(float freq, int samplingRate, int windowsSizeSamples) {
	return Math.round((windowsSizeSamples - 1) * 1f * freq / samplingRate);
}
}

View File

@ -0,0 +1,55 @@
package org.gcube.dataanalysis.ecoengine.test.signalprocessing;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.ecoengine.transducers.TimeSeriesAnalysis;
/**
 * Manual smoke test that runs the TIME_SERIES_PROCESSING transducer against an
 * IOTC time-series table hosted on the d4science statistical-manager database.
 * Not a unit test: it needs network access, the ./cfg/ directory, and the remote
 * PostgreSQL instance to be reachable.
 */
public class IOTCAnalyse {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
System.out.println("*******START TEST******");
// Build the configured agent chain; only the first transducer is exercised.
List<ComputationalAgent> agent = TransducerersFactory.getTransducerers(configAlgorithm());
agent.get(0).init();
// Regressor.process drives the agent to completion (blocking call).
Regressor.process(agent.get(0));
StatisticalType st = agent.get(0).getOutput();
System.out.println("Output:"+st);
agent = null;
System.out.println("*******END TEST******");
}
// Assembles the full algorithm configuration: DB connection, input table/column,
// and the FFT/SSA analysis parameters consumed by TimeSeriesAnalysis.
public static AlgorithmConfiguration configAlgorithm(){
AlgorithmConfiguration config = new AlgorithmConfiguration();
AnalysisLogger.setLogger("./cfg/"+AlgorithmConfiguration.defaultLoggerFile);
config.setAgent("TIME_SERIES_PROCESSING");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
// NOTE(review): credentials are hard-coded test-infrastructure values; do not reuse elsewhere.
config.setParam("DatabaseUserName", "utente");
config.setParam("DatabasePassword", "d4science");
config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
config.setParam("TimeSeriesTable", "timeseries_id9ac52133_3d3b_418e_8d70_c61844623e81");
// NOTE(review): "ValueColum" (sic) is presumably the exact key the algorithm looks up —
// confirm against the TimeSeriesAnalysis implementation before "fixing" the spelling.
config.setParam("ValueColum", "Effort");
// Spectral-analysis tuning: FFT window, SSA window/eigenvalue cutoff, forecast horizon.
config.setParam("FFT_Window_Samples", "128");
config.setParam("SSA_Window_in_Samples", "36");
config.setParam("SSA_EigenvaluesThreshold", "0.07");
config.setParam("SSA_Points_to_Forecast", "12");
config.setParam("AggregationFunction", "SUM");
config.setParam("Sensitivity", "LOW");
config.setGcubeScope("/gcube");
// Enables the on-screen spectrum plots produced during the analysis.
TimeSeriesAnalysis.display=true;
return config;
}
}

View File

@ -1,5 +1,6 @@
package org.gcube.dataanalysis.ecoengine.utils;
import java.io.File;
import java.util.LinkedHashMap;
import java.util.List;
@ -44,6 +45,10 @@ public class IOHelper {
inputs.add(new PrimitiveType(Double.class.getName(), null, PrimitiveTypes.NUMBER, name,description,defaultvalue));
}
/**
 * Appends a file-typed input parameter to the given list of statistical inputs.
 *
 * @param inputs       the parameter list to extend (modified in place)
 * @param name         parameter name shown to the caller
 * @param description  human-readable description of the parameter
 * @param defaultvalue default file value (semantics defined by PrimitiveTypes.FILE)
 */
public static void addFileInput(List<StatisticalType> inputs,String name, String description, String defaultvalue) {
inputs.add(new PrimitiveType(File.class.getName(), null, PrimitiveTypes.FILE, name,description, defaultvalue));
}
/**
 * Appends a service-generated random-string input parameter (e.g. for unique
 * table names) to the given list of statistical inputs.
 *
 * @param inputs        the parameter list to extend (modified in place)
 * @param name          parameter name shown to the caller
 * @param description   human-readable description of the parameter
 * @param defaultPrefix prefix the service prepends to the generated random string
 */
public static void addRandomStringInput(List<StatisticalType> inputs,String name,String description, String defaultPrefix) {
inputs.add(new ServiceType(ServiceParameters.RANDOMSTRING, name,description,defaultPrefix));
}

View File

@ -0,0 +1,107 @@
package org.gcube.dataanalysis.ecoengine.utils;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
/**
 * Utility methods to zip a folder tree and to extract a zip archive.
 *
 * Fixes over the previous version:
 * - all streams are closed via try-with-resources (the old addFileToZip leaked a
 *   FileInputStream per file, and unZip leaked its streams on any exception);
 * - I/O failures during extraction now propagate instead of being swallowed by
 *   printStackTrace (the method already declared {@code throws Exception});
 * - entry names are validated against "Zip Slip" path traversal (e.g. "../../x");
 * - directory entries and non-listable source folders are handled explicitly.
 */
public class ZipTools {

	/**
	 * Extracts every entry of {@code zipFile} under {@code outputFolder},
	 * creating the folder (and any parent directories) if needed.
	 *
	 * @param zipFile      path of the archive to read
	 * @param outputFolder destination directory
	 * @throws Exception if the archive cannot be read, an entry escapes the
	 *                   destination directory, or a file cannot be written
	 */
	public static void unZip(String zipFile, String outputFolder) throws Exception {
		byte[] buffer = new byte[1024];
		File folder = new File(outputFolder);
		if (!folder.exists()) {
			// mkdirs (not mkdir): also create missing parent directories
			folder.mkdirs();
		}
		String canonicalDest = folder.getCanonicalPath();
		try (ZipInputStream zis = new ZipInputStream(new FileInputStream(zipFile))) {
			ZipEntry ze;
			while ((ze = zis.getNextEntry()) != null) {
				String fileName = ze.getName();
				File newFile = new File(outputFolder + File.separator + fileName);
				// "Zip Slip" guard: a crafted entry name like "../../evil" must not
				// be allowed to write outside the destination directory
				String canonicalEntry = newFile.getCanonicalPath();
				if (!canonicalEntry.equals(canonicalDest)
						&& !canonicalEntry.startsWith(canonicalDest + File.separator)) {
					throw new IOException("Zip entry escapes output folder: " + fileName);
				}
				AnalysisLogger.getLogger().debug("Unzipping : " + newFile.getAbsoluteFile());
				if (ze.isDirectory()) {
					// directory entry: just materialize the path
					newFile.mkdirs();
				} else {
					// create all intermediate folders, else FileOutputStream fails
					// for files inside compressed folders
					new File(newFile.getParent()).mkdirs();
					try (FileOutputStream fos = new FileOutputStream(newFile)) {
						int len;
						while ((len = zis.read(buffer)) > 0) {
							fos.write(buffer, 0, len);
						}
					}
				}
				zis.closeEntry();
			}
		}
		AnalysisLogger.getLogger().debug("Unzip done");
	}

	/**
	 * Compresses the whole tree rooted at {@code srcFolder} into
	 * {@code destZipFile}; entry paths start at the folder's own name.
	 *
	 * @param srcFolder   directory to compress
	 * @param destZipFile path of the archive to create (overwritten if present)
	 * @throws Exception if the source cannot be read or the archive written
	 */
	static public void zipFolder(String srcFolder, String destZipFile) throws Exception {
		// try-with-resources closes (and thus flushes/finishes) the zip stream
		// even if a nested addFileToZip call fails
		try (FileOutputStream fileWriter = new FileOutputStream(destZipFile);
				ZipOutputStream zip = new ZipOutputStream(fileWriter)) {
			addFolderToZip("", srcFolder, zip);
			zip.flush();
		}
	}

	// Adds a single file (or, if it is a directory, its whole subtree) to the
	// archive under the given entry path prefix.
	static private void addFileToZip(String path, String srcFile, ZipOutputStream zip)
			throws Exception {
		File file = new File(srcFile);
		if (file.isDirectory()) {
			addFolderToZip(path, srcFile, zip);
		} else {
			byte[] buf = new byte[1024];
			int len;
			// close the input stream in all cases (the old code leaked one
			// file descriptor per archived file)
			try (FileInputStream in = new FileInputStream(srcFile)) {
				zip.putNextEntry(new ZipEntry(path + "/" + file.getName()));
				while ((len = in.read(buf)) > 0) {
					zip.write(buf, 0, len);
				}
				zip.closeEntry();
			}
		}
	}

	// Recursively adds all children of srcFolder; an empty path means the
	// folder itself is the archive root.
	static private void addFolderToZip(String path, String srcFolder, ZipOutputStream zip)
			throws Exception {
		File folder = new File(srcFolder);
		String[] children = folder.list();
		if (children == null) {
			// not a directory or unreadable: nothing to add
			return;
		}
		for (String fileName : children) {
			if (path.equals("")) {
				addFileToZip(folder.getName(), srcFolder + "/" + fileName, zip);
			} else {
				addFileToZip(path + "/" + folder.getName(), srcFolder + "/" + fileName, zip);
			}
		}
	}
}