diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ProcessorsFactory.java b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ProcessorsFactory.java
index e5c856f..8b78d29 100644
--- a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ProcessorsFactory.java
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ProcessorsFactory.java
@@ -156,7 +156,6 @@ public class ProcessorsFactory {
 		if (algclass instanceof Generator) {
 			Generator g = (Generator) algclass;
 			g.setConfiguration(config);
-			g.setConfiguration(config);
 			return g;
 		} else if (algclass instanceof Modeler) {
 			Modeler m = (Modeler) algclass;
@@ -195,7 +194,6 @@ public class ProcessorsFactory {
 		map.put("EVALUATORS", EvaluatorsFactory.getAllEvaluators(config.getConfigPath()));
 		map.put("CLUSTERERS", ClusterersFactory.getAllClusterers(config.getConfigPath()));
 		map.put("TRANSDUCERS", TransducerersFactory.getAllTransducerers(config));
-		map.put("TEMPORAL_ANALYSIS", new ArrayList());
 		return map;
 	}
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/signals/SignalConverter.java b/src/main/java/org/gcube/dataanalysis/ecoengine/signals/SignalConverter.java
index d715ea2..332082a 100644
--- a/src/main/java/org/gcube/dataanalysis/ecoengine/signals/SignalConverter.java
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/signals/SignalConverter.java
@@ -291,8 +291,11 @@ public class SignalConverter {
 		double[] maxs = new double[spectrogram.length];
 		averagepower = new double[spectrogram.length];
 		int j = 0;
-		if (TimeSeriesAnalysis.display)
-			SignalProcessing.displaySignalWithGenericTime(spectrogram[0], 0, 1, "spectrum");
+		if (TimeSeriesAnalysis.display){
+			for (int g=0;g
@@ ... @@
 		ArrayList[] maxs = new ArrayList[spectrogram.length];
 		ArrayList[] powers = new ArrayList[spectrogram.length];
-		if (TimeSeriesAnalysis.display)
-			SignalProcessing.displaySignalWithGenericTime(spectrogram[0], 0, 1, "spectrum");
-
+		if (TimeSeriesAnalysis.display){
+			for (int g=0;g
+		List<ComputationalAgent> agent = TransducerersFactory.getTransducerers(configAlgorithm());
+		agent.get(0).init();
+		Regressor.process(agent.get(0));
+		StatisticalType st = agent.get(0).getOutput();
+		System.out.println("Output:"+st);
+		agent = null;
+		System.out.println("*******END TEST******");
+	}
+
+
+	public static AlgorithmConfiguration configAlgorithm(){
+		AlgorithmConfiguration config = new AlgorithmConfiguration();
+		AnalysisLogger.setLogger("./cfg/"+AlgorithmConfiguration.defaultLoggerFile);
+		config.setAgent("TIME_SERIES_PROCESSING");
+		config.setConfigPath("./cfg/");
+		config.setPersistencePath("./");
+		config.setParam("DatabaseUserName", "utente");
+		config.setParam("DatabasePassword", "d4science");
+		config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
+		config.setParam("DatabaseDriver", "org.postgresql.Driver");
+
+		config.setParam("TimeSeriesTable", "timeseries_id9ac52133_3d3b_418e_8d70_c61844623e81");
+		config.setParam("ValueColum", "Effort");
+		config.setParam("FFT_Window_Samples", "128");
+		config.setParam("SSA_Window_in_Samples", "36");
+		config.setParam("SSA_EigenvaluesThreshold", "0.07");
+		config.setParam("SSA_Points_to_Forecast", "12");
+		config.setParam("AggregationFunction", "SUM");
+		config.setParam("Sensitivity", "LOW");
+
+		config.setGcubeScope("/gcube");
+
+		TimeSeriesAnalysis.display=true;
+		return config;
+	}
+}
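Both SignalConverter hunks above are truncated in this patch text: the added lines break off right after "for (int g=0;g", and the diff header, package declaration and imports of the new test class that follows (the "+" lines from the TransducerersFactory call down to the closing brace) are missing as well; the List<ComputationalAgent> declaration is restored only by inference from the calls below it. A minimal sketch of what the truncated display block presumably becomes, assuming it simply iterates over every spectrogram row; the loop bound, the per-row label and the brace placement are assumptions, not recovered from the patch:

	// hedged reconstruction of the truncated '+' lines:
	// display every spectrum row instead of only spectrogram[0]
	if (TimeSeriesAnalysis.display) {
		for (int g = 0; g < spectrogram.length; g++) {
			// the label string is an assumption; the original text is not recoverable
			SignalProcessing.displaySignalWithGenericTime(spectrogram[g], 0, 1, "spectrum " + g);
		}
	}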
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/utils/IOHelper.java b/src/main/java/org/gcube/dataanalysis/ecoengine/utils/IOHelper.java
index a9b6e09..46e21fc 100644
--- a/src/main/java/org/gcube/dataanalysis/ecoengine/utils/IOHelper.java
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/utils/IOHelper.java
@@ -1,5 +1,6 @@
 package org.gcube.dataanalysis.ecoengine.utils;
 
+import java.io.File;
 import java.util.LinkedHashMap;
 import java.util.List;
 
@@ -44,6 +45,10 @@ public class IOHelper {
 		inputs.add(new PrimitiveType(Double.class.getName(), null, PrimitiveTypes.NUMBER, name,description,defaultvalue));
 	}
 
+	public static void addFileInput(List<StatisticalType> inputs,String name, String description, String defaultvalue) {
+		inputs.add(new PrimitiveType(File.class.getName(), null, PrimitiveTypes.FILE, name,description, defaultvalue));
+	}
+
 	public static void addRandomStringInput(List<StatisticalType> inputs,String name,String description, String defaultPrefix) {
 		inputs.add(new ServiceType(ServiceParameters.RANDOMSTRING, name,description,defaultPrefix));
 	}
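For context, a minimal sketch of how the new IOHelper.addFileInput helper could sit next to the existing helpers when an algorithm declares its inputs; the parameter names, descriptions and default values are illustrative assumptions, not taken from this patch:

	// hypothetical input declaration for an algorithm (names and defaults are illustrative only)
	List<StatisticalType> inputs = new ArrayList<StatisticalType>();
	IOHelper.addFileInput(inputs, "ZippedInput", "A zip archive containing the input data", "input.zip");
	IOHelper.addRandomStringInput(inputs, "OutputTable", "Name of the table that will host the results", "out_");
	return inputs;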
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/utils/ZipTools.java b/src/main/java/org/gcube/dataanalysis/ecoengine/utils/ZipTools.java
new file mode 100644
index 0000000..a85d829
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/utils/ZipTools.java
@@ -0,0 +1,107 @@
+package org.gcube.dataanalysis.ecoengine.utils;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+import java.util.zip.ZipOutputStream;
+
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+
+public class ZipTools {
+
+	public static void unZip(String zipFile, String outputFolder) throws Exception {
+
+		byte[] buffer = new byte[1024];
+
+		try {
+
+			// create the output directory if it does not exist
+			File folder = new File(outputFolder);
+			if (!folder.exists()) {
+				folder.mkdir();
+			}
+
+			// get the zip file content
+			ZipInputStream zis = new ZipInputStream(new FileInputStream(zipFile));
+			// get the zipped file list entry
+			ZipEntry ze = zis.getNextEntry();
+
+			while (ze != null) {
+
+				String fileName = ze.getName();
+				File newFile = new File(outputFolder + File.separator + fileName);
+
+				AnalysisLogger.getLogger().debug("Unzipping : " + newFile.getAbsoluteFile());
+
+				// create all missing parent folders,
+				// otherwise a FileNotFoundException is thrown for compressed folders
+				new File(newFile.getParent()).mkdirs();
+
+				FileOutputStream fos = new FileOutputStream(newFile);
+
+				int len;
+				while ((len = zis.read(buffer)) > 0) {
+					fos.write(buffer, 0, len);
+				}
+
+				fos.close();
+				ze = zis.getNextEntry();
+			}
+
+			zis.closeEntry();
+			zis.close();
+
+			AnalysisLogger.getLogger().debug("Unzip done");
+
+		} catch (IOException ex) {
+			ex.printStackTrace();
+		}
+	}
+
+
+	static public void zipFolder(String srcFolder, String destZipFile) throws Exception {
+		ZipOutputStream zip = null;
+		FileOutputStream fileWriter = null;
+
+		fileWriter = new FileOutputStream(destZipFile);
+		zip = new ZipOutputStream(fileWriter);
+
+		addFolderToZip("", srcFolder, zip);
+		zip.flush();
+		zip.close();
+	}
+
+	static private void addFileToZip(String path, String srcFile, ZipOutputStream zip)
+			throws Exception {
+
+		File folder = new File(srcFile);
+		if (folder.isDirectory()) {
+			addFolderToZip(path, srcFile, zip);
+		} else {
+			byte[] buf = new byte[1024];
+			int len;
+			FileInputStream in = new FileInputStream(srcFile);
+			zip.putNextEntry(new ZipEntry(path + "/" + folder.getName()));
+			while ((len = in.read(buf)) > 0) {
+				zip.write(buf, 0, len);
+			}
+		}
+	}
+
+	static private void addFolderToZip(String path, String srcFolder, ZipOutputStream zip)
+			throws Exception {
+		File folder = new File(srcFolder);
+
+		for (String fileName : folder.list()) {
+			if (path.equals("")) {
+				addFileToZip(folder.getName(), srcFolder + "/" + fileName, zip);
+			} else {
+				addFileToZip(path + "/" + folder.getName(), srcFolder + "/" + fileName, zip);
+			}
+		}
+	}
+
+}
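A short round-trip sketch of the new ZipTools utility; the class name and paths are illustrative only, and it assumes the library is on the classpath and the AnalysisLogger has been configured:

	// hypothetical usage: compress a folder, then unpack the archive elsewhere
	public class ZipToolsExample {
		public static void main(String[] args) throws Exception {
			ZipTools.zipFolder("./data", "./data.zip");   // recursively zips ./data into ./data.zip
			ZipTools.unZip("./data.zip", "./unpacked");   // extracts the archive into ./unpacked
		}
	}

Note that addFileToZip opens a FileInputStream it never closes, so zipping folders with many files will accumulate open file handles.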