diff --git a/.classpath b/.classpath
index f17e828..d90b917 100644
--- a/.classpath
+++ b/.classpath
@@ -1,31 +1,9 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
diff --git a/.settings/org.eclipse.core.resources.prefs b/.settings/org.eclipse.core.resources.prefs
index cf6931b..a82ca26 100644
--- a/.settings/org.eclipse.core.resources.prefs
+++ b/.settings/org.eclipse.core.resources.prefs
@@ -1,4 +1,6 @@
+#Wed May 06 16:54:55 CEST 2015
eclipse.preferences.version=1
encoding//src/main/java=UTF-8
encoding//src/main/resources=UTF-8
+encoding//src/test/java=UTF-8
encoding/=UTF-8
diff --git a/PARALLEL_PROCESSING/ecological-engine-1.8.1-SNAPSHOT.jar b/PARALLEL_PROCESSING/ecological-engine-1.8.6-SNAPSHOT.jar
similarity index 78%
rename from PARALLEL_PROCESSING/ecological-engine-1.8.1-SNAPSHOT.jar
rename to PARALLEL_PROCESSING/ecological-engine-1.8.6-SNAPSHOT.jar
index 1d8beea..497ced6 100644
Binary files a/PARALLEL_PROCESSING/ecological-engine-1.8.1-SNAPSHOT.jar and b/PARALLEL_PROCESSING/ecological-engine-1.8.6-SNAPSHOT.jar differ
diff --git a/PARALLEL_PROCESSING/EcologicalEngineExecutor-1.6.4-SNAPSHOT.jar b/PARALLEL_PROCESSING/ecological-engine-smart-executor-1.1.0-SNAPSHOT.jar
similarity index 57%
rename from PARALLEL_PROCESSING/EcologicalEngineExecutor-1.6.4-SNAPSHOT.jar
rename to PARALLEL_PROCESSING/ecological-engine-smart-executor-1.1.0-SNAPSHOT.jar
index 6d416dd..be865e9 100644
Binary files a/PARALLEL_PROCESSING/EcologicalEngineExecutor-1.6.4-SNAPSHOT.jar and b/PARALLEL_PROCESSING/ecological-engine-smart-executor-1.1.0-SNAPSHOT.jar differ
diff --git a/PARALLEL_PROCESSING/script_LWR.sh b/PARALLEL_PROCESSING/script_LWR.sh
deleted file mode 100644
index 766a501..0000000
--- a/PARALLEL_PROCESSING/script_LWR.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-# LWR
-cd $1
-
-java -Xmx1024M -classpath ./:./c3p0-0.9.1.2.jar:./common-configuration-scanner-1.0.1-SNAPSHOT.jar:./common-encryption-1.0.1-3.5.0.jar:./common-gcore-resources-1.2.0-3.5.0.jar:./common-gcore-stubs-1.2.0-3.5.0.jar:./common-scope-1.2.1-SNAPSHOT.jar:./common-scope-maps-1.0.2-3.5.0.jar:./commons-collections-3.1.jar:./commons-io-1.2.jar:./discovery-client-1.0.1-3.5.0.jar:./dom4j-1.6.1.jar:./ecological-engine-1.8.1-SNAPSHOT.jar:./EcologicalEngineExecutor-1.6.4-SNAPSHOT.jar:./hibernate3.jar:./ic-client-1.0.1-3.5.0.jar:./jaxen-1.1.2.jar:./jta-1.1.jar:./log4j-1.2.16.jar:./mongo-java-driver-2.12.4.jar:./postgresql-8.4-702.jdbc4.jar:./slf4j-api-1.6.0.jar:./slf4j-log4j12-1.6.0.jar:./storage-manager-core-2.1.3-3.6.0.jar:./storage-manager-wrapper-2.1.0-3.5.0.jar:./xalan-2.6.0.jar:./xpp3_min-1.1.4c.jar:./xstream-1.3.1.jar:./YASMEEN-matcher-1.2.0.1.jar:./YASMEEN-parser-1.2.0.jar org.gcube.dataanalysis.executor.nodes.algorithms.LWR $2 execution.output
diff --git a/PARALLEL_PROCESSING/storage-manager-core-2.1.3-3.6.0.jar b/PARALLEL_PROCESSING/storage-manager-core-2.1.3-3.6.0.jar
deleted file mode 100644
index 9d4a9b6..0000000
Binary files a/PARALLEL_PROCESSING/storage-manager-core-2.1.3-3.6.0.jar and /dev/null differ
diff --git a/PARALLEL_PROCESSING/storage-manager-core-2.2.0-3.7.0.jar b/PARALLEL_PROCESSING/storage-manager-core-2.2.0-3.7.0.jar
new file mode 100644
index 0000000..d38e338
Binary files /dev/null and b/PARALLEL_PROCESSING/storage-manager-core-2.2.0-3.7.0.jar differ
diff --git a/PARALLEL_PROCESSING/storage-manager-wrapper-2.1.0-3.5.0.jar b/PARALLEL_PROCESSING/storage-manager-wrapper-2.1.0-3.5.0.jar
deleted file mode 100644
index 24eec7a..0000000
Binary files a/PARALLEL_PROCESSING/storage-manager-wrapper-2.1.0-3.5.0.jar and /dev/null differ
diff --git a/PARALLEL_PROCESSING/storage-manager-wrapper-2.2.0-3.7.0.jar b/PARALLEL_PROCESSING/storage-manager-wrapper-2.2.0-3.7.0.jar
new file mode 100644
index 0000000..bc38dfb
Binary files /dev/null and b/PARALLEL_PROCESSING/storage-manager-wrapper-2.2.0-3.7.0.jar differ
diff --git a/aquamapsjarcreator.jardesc b/aquamapsjarcreator.jardesc
deleted file mode 100644
index e2111ef..0000000
--- a/aquamapsjarcreator.jardesc
+++ /dev/null
@@ -1,17 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/cfg/algorithms.properties b/cfg/algorithms.properties
index 29d4187..edfd708 100644
--- a/cfg/algorithms.properties
+++ b/cfg/algorithms.properties
@@ -7,4 +7,5 @@ AQUAMAPS_SUITABLE_NEURALNETWORK=org.gcube.dataanalysis.ecoengine.spatialdistribu
FEED_FORWARD_A_N_N_DISTRIBUTION=org.gcube.dataanalysis.ecoengine.spatialdistributions.FeedForwardNeuralNetworkDistribution
LWR=org.gcube.dataanalysis.executor.nodes.algorithms.LWR
CMSY=org.gcube.dataanalysis.executor.nodes.algorithms.CMSY
-FAOMSY=org.gcube.dataanalysis.executor.nodes.algorithms.FAOMSY
\ No newline at end of file
+FAOMSY=org.gcube.dataanalysis.executor.nodes.algorithms.FAOMSY
+ICCAT_VPA=org.gcube.dataanalysis.executor.nodes.algorithms.ICCATVPA
\ No newline at end of file
diff --git a/cfg/nodealgorithms.properties b/cfg/nodealgorithms.properties
index 7b60bda..742cbee 100644
--- a/cfg/nodealgorithms.properties
+++ b/cfg/nodealgorithms.properties
@@ -9,4 +9,5 @@ LWR=org.gcube.dataanalysis.executor.nodes.algorithms.LWR
BIONYM=org.gcube.dataanalysis.executor.nodes.transducers.bionym.BionymFlexibleWorkflowTransducer
BIONYM_BIODIV=org.gcube.dataanalysis.executor.nodes.transducers.bionym.BionymBiodiv
CMSY=org.gcube.dataanalysis.executor.nodes.algorithms.CMSY
-FAOMSY=org.gcube.dataanalysis.executor.nodes.algorithms.FAOMSY
\ No newline at end of file
+FAOMSY=org.gcube.dataanalysis.executor.nodes.algorithms.FAOMSY
+ICCAT_VPA=org.gcube.dataanalysis.executor.nodes.algorithms.ICCATVPA
\ No newline at end of file
diff --git a/createscript.jardesc b/createscript.jardesc
deleted file mode 100644
index b01ebc3..0000000
--- a/createscript.jardesc
+++ /dev/null
@@ -1,18 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/deployDesktop.jardesc b/deployDesktop.jardesc
deleted file mode 100644
index 7595d24..0000000
--- a/deployDesktop.jardesc
+++ /dev/null
@@ -1,16 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/deployParallelProcessing.jardesc b/deployParallelProcessing.jardesc
deleted file mode 100644
index f9020ad..0000000
--- a/deployParallelProcessing.jardesc
+++ /dev/null
@@ -1,16 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/pom.xml b/pom.xml
index b68bf67..fdaffcf 100644
--- a/pom.xml
+++ b/pom.xml
@@ -9,7 +9,7 @@
org.gcube.dataanalysis
ecological-engine-smart-executor
- 1.0.1-SNAPSHOT
+ 1.1.0-SNAPSHOT
Smart Ecological Engine Executor
Smart Ecological Engine Executor Description
@@ -19,17 +19,17 @@
org.gcube.vremanagement
smart-executor-client
- [1.0.0,3.0.0)
+ [1.0.0-SNAPSHOT,3.0.0-SNAPSHOT)
org.gcube.contentmanagement
storage-manager-core
- [2.0.2,3.0.0)
+ [2.0.2-SNAPSHOT,3.0.0-SNAPSHOT)
org.gcube.contentmanagement
storage-manager-wrapper
- [2.0.2,3.0.0)
+ [2.0.2-SNAPSHOT,3.0.0-SNAPSHOT)
org.apache.activemq
@@ -39,7 +39,7 @@
org.gcube.dataanalysis
ecological-engine
- [1.8.0,2.0.0)
+ [1.8.0-SNAPSHOT,2.0.0-SNAPSHOT)
diff --git a/src/main/java/org/gcube/dataanalysis/executor/nodes/algorithms/CMSY.java b/src/main/java/org/gcube/dataanalysis/executor/nodes/algorithms/CMSY.java
index 28c9024..b7a2e5b 100644
--- a/src/main/java/org/gcube/dataanalysis/executor/nodes/algorithms/CMSY.java
+++ b/src/main/java/org/gcube/dataanalysis/executor/nodes/algorithms/CMSY.java
@@ -98,7 +98,7 @@ public class CMSY extends ActorNode {
HashMap codeinj = new HashMap();
codeinj.put("HLH_M07",config.getParam(stock));
config.setConfigPath("./");
- scriptmanager.executeRScript(config, scriptName, "", new HashMap(), "", "outputfile.txt", codeinj, true,false,false);
+ scriptmanager.executeRScript(config, scriptName, "", new HashMap(), "", "outputfile.txt", codeinj, true,false,false,sandboxFolder);
outputFileName = scriptmanager.getCurrentOutputFileName();
String outputFilePath = new File(sandboxFolder,outputFile).getAbsolutePath();
diff --git a/src/main/java/org/gcube/dataanalysis/executor/nodes/algorithms/FAOMSY.java b/src/main/java/org/gcube/dataanalysis/executor/nodes/algorithms/FAOMSY.java
index a3fc61d..01d8e3e 100644
--- a/src/main/java/org/gcube/dataanalysis/executor/nodes/algorithms/FAOMSY.java
+++ b/src/main/java/org/gcube/dataanalysis/executor/nodes/algorithms/FAOMSY.java
@@ -121,7 +121,7 @@ public class FAOMSY extends ActorNode {
HashMap codeinj = new HashMap();
config.setConfigPath("./");
- scriptmanager.executeRScript(config, scriptName, "", new HashMap(), "", "CatchMSY_Output.csv", codeinj, false,false,false);
+ scriptmanager.executeRScript(config, scriptName, "", new HashMap(), "", "CatchMSY_Output.csv", codeinj, false,false,false,sandboxFolder);
AnalysisLogger.getLogger().info("FAOMSY The script has finished");
String outputFileName = "";
//manage the fact that the outputfile could even not exist
diff --git a/src/main/java/org/gcube/dataanalysis/executor/nodes/algorithms/ICCATVPA.java b/src/main/java/org/gcube/dataanalysis/executor/nodes/algorithms/ICCATVPA.java
new file mode 100644
index 0000000..a3c8216
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/executor/nodes/algorithms/ICCATVPA.java
@@ -0,0 +1,284 @@
+package org.gcube.dataanalysis.executor.nodes.algorithms;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.UUID;
+
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
+import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
+import org.gcube.dataanalysis.ecoengine.interfaces.ActorNode;
+import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
+import org.gcube.dataanalysis.ecoengine.utils.Transformations;
+import org.gcube.dataanalysis.ecoengine.utils.ZipTools;
+import org.gcube.dataanalysis.executor.scripts.OSCommand;
+import org.gcube.dataanalysis.executor.util.RScriptsManager;
+import org.gcube.dataanalysis.executor.util.StorageUtils;
+
+public class ICCATVPA extends ActorNode {
+
+
+
+ @Override
+ public ALG_PROPS[] getProperties() {
+ ALG_PROPS[] p = { ALG_PROPS.PHENOMENON_VS_PARALLEL_PHENOMENON };
+ return p;
+ }
+
+ @Override
+ public String getName() {
+ return "ICCAT_VPA";
+ }
+
+ @Override
+ public String getDescription() {
+ return "An algorithm for stock assessment based on catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). " +
+ "It produces summary statistics about a stock, including estimates of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. " +
+ "Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr.";
+ }
+
+ protected static String YearStartInp = "StartYear";
+ protected static String YearEndInp = "EndYear";
+ protected static String CAAInp = "CAAFile";
+ protected static String PCAAInp = "PCAAFile";
+ protected static String CPUEInp = "CPUEFile";
+ protected static String PwaaInp = "PwaaFile";
+ protected static String waaInp = "waaFile";
+
+ protected String CAAInpURL ;
+ protected String PCAAInpURL;
+ protected String CPUEInpURL;
+ protected String PwaaInpURL;
+ protected String waaInpURL;
+
+
+ protected static String effectInp = "shortComment";
+ protected static String nCPUEInp = "nCPUE";
+ protected static String CPUEcutInp = "CPUE_cut";
+ protected static String nRemoveYearInp = "n_remove_year";
+ protected static String agePlusGroupInp = "age_plus_group";
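+ // name of the R driver script and URL of the zipped VPA package (R code plus the vpa-2box.out executable) downloaded into the sandbox at run time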
+ protected static String scriptName = "run_vpa.R";
+ protected static String packageURL = "http://goo.gl/EqFjNZ";
+
+ protected static String processOutputParam= "ProcessOutputParam";
+ protected String processOutput= "ProcessOutput";
+
+ protected AlgorithmConfiguration config;
+ public float status = 0;
+
+ @Override
+ public List<StatisticalType> getInputParameters() {
+ List<StatisticalType> parameters = new ArrayList<StatisticalType>();
+ IOHelper.addIntegerInput(parameters, YearStartInp, "First year of the dataset temporal extent", "1950");
+ IOHelper.addIntegerInput(parameters, YearEndInp, "Last year of the dataset temporal extent", "2013");
+
+ IOHelper.addFileInput(parameters, CAAInp, "Catch at Age Matrix (Number of Fish caught by year and for each age)", "CAA_Age1_25.csv");
+ IOHelper.addFileInput(parameters, PCAAInp, "Partial Catch at Age Matrix (Number of Fish caught by gear and year and for each age)", "PCAA_Age1_25.csv");
+ IOHelper.addFileInput(parameters, CPUEInp, "Table of Catch Per Unit of Effort used in the stock assessment", "CPUE.csv");
+ IOHelper.addFileInput(parameters, PwaaInp, "Partial weight at age (Weight of Fish caught by gear and year and for each age)", "waa.csv");
+ IOHelper.addFileInput(parameters, waaInp, "Fecundity at age (Fecundity of Fish caught by year and for each age)", "fecaa.csv");
+
+ IOHelper.addStringInput(parameters, effectInp, "Free text for users to describe the current simulation", " ");
+ IOHelper.addIntegerInput(parameters, nCPUEInp, "Number of Catch Per Unit of Effort Time series to use", "7");
+ IOHelper.addIntegerInput(parameters, CPUEcutInp, "Identifier of the Catch Per Unit of Effort Time Series to be shrunk", "1");
+ IOHelper.addIntegerInput(parameters, nRemoveYearInp, "Number of the (last) years to be removed", "1");
+ IOHelper.addIntegerInput(parameters, agePlusGroupInp, "Maximal age class of catches to be taken into account", "10");
+
+ return parameters;
+ }
+
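+ // setup uploads the user-provided CSV inputs to the gCube storage manager and replaces the local paths in the configuration with the resulting remote URLs, so that worker nodes can retrieve them in executeNode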
+ @Override
+ public void setup(AlgorithmConfiguration config) throws Exception {
+ this.config = config;
+
+ AnalysisLogger.getLogger().info("ICCAT-VPA process is initialized in scope "+config.getGcubeScope()+" for user "+config.getParam("ServiceUserName"));
+ String uuid = (UUID.randomUUID()+"").replace("-", "");
+ processOutput = "ICCAT-VPA_"+"output_"+uuid+".zip";
+
+ config.setParam(processOutputParam, processOutput);
+ AnalysisLogger.getLogger().debug("ICCAT-VPA Uploading input files: "+config.getGeneralProperties());
+ //upload files on the storage manager
+ CAAInpURL = StorageUtils.uploadFilesOnStorage(config.getGcubeScope(), config.getParam("ServiceUserName"), new File(config.getParam(CAAInp)).getParent(), new File(config.getParam(CAAInp)).getName());
+ AnalysisLogger.getLogger().debug("ICCAT-VPA: CAA DONE! "+CAAInpURL);
+ PCAAInpURL = StorageUtils.uploadFilesOnStorage(config.getGcubeScope(), config.getParam("ServiceUserName"), new File(config.getParam(PCAAInp)).getParent(), new File(config.getParam(PCAAInp)).getName());
+ AnalysisLogger.getLogger().debug("ICCAT-VPA: PCAA DONE! "+PCAAInpURL);
+ CPUEInpURL = StorageUtils.uploadFilesOnStorage(config.getGcubeScope(), config.getParam("ServiceUserName"), new File(config.getParam(CPUEInp)).getParent(), new File(config.getParam(CPUEInp)).getName());
+ AnalysisLogger.getLogger().debug("ICCAT-VPA: CPUE DONE! "+CPUEInpURL);
+ PwaaInpURL = StorageUtils.uploadFilesOnStorage(config.getGcubeScope(), config.getParam("ServiceUserName"), new File(config.getParam(PwaaInp)).getParent(), new File(config.getParam(PwaaInp)).getName());
+ AnalysisLogger.getLogger().debug("ICCAT-VPA: Pwaa DONE! "+PwaaInpURL);
+ waaInpURL = StorageUtils.uploadFilesOnStorage(config.getGcubeScope(), config.getParam("ServiceUserName"), new File(config.getParam(waaInp)).getParent(), new File(config.getParam(waaInp)).getName());
+ AnalysisLogger.getLogger().debug("ICCAT-VPA: waa DONE! "+waaInpURL);
+ AnalysisLogger.getLogger().debug("ICCAT-VPA Input files uploaded!");
+
+ AnalysisLogger.getLogger().debug("ICCAT-VPA Setting input URLs: "+config.getGeneralProperties());
+
+ config.setParam(CAAInp, CAAInpURL);
+ config.setParam(PCAAInp, PCAAInpURL);
+ config.setParam(CPUEInp, CPUEInpURL);
+ config.setParam(PwaaInp, PwaaInpURL);
+ config.setParam(waaInp, waaInpURL);
+
+ }
+
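+ // wraps the zipped output (retrieved by postProcess) into a FILE PrimitiveType inside a MAP, which is the structure returned to the caller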
+ @Override
+ public StatisticalType getOutput() {
+
+ File outfile = new File(processOutput);
+ LinkedHashMap<String, StatisticalType> outputmap = new LinkedHashMap<String, StatisticalType>();
+
+ if (outfile.exists())
+ AnalysisLogger.getLogger().debug("ICCAT-VPA Output: "+outfile.getAbsolutePath()+" : "+outfile.exists());
+ else
+ AnalysisLogger.getLogger().debug("ICCAT-VPA Output file does not exist - returning a proxy");
+
+ PrimitiveType o = new PrimitiveType(File.class.getName(), outfile, PrimitiveTypes.FILE, "ProcessedSpecies", "Output file with processed species");
+ outputmap.put("Zip file containing the process output", o);
+
+ PrimitiveType output = new PrimitiveType(HashMap.class.getName(), outputmap, PrimitiveTypes.MAP, "Results File", "Results File");
+ AnalysisLogger.getLogger().debug("ICCAT-VPA Output Managed");
+ return output;
+ }
+
+ @Override
+ public void initSingleNode(AlgorithmConfiguration config) {}
+
+ @Override
+ public float getInternalStatus() {return status;}
+
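+ // executeNode runs on a worker node: it restores the configuration, downloads the VPA package and the input files, writes run_spec.csv, invokes the R script and uploads the zipped output folder to the storage manager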
+ @Override
+ public int executeNode(int leftStartIndex, int numberOfLeftElementsToProcess, int rightStartIndex, int numberOfRightElementsToProcess, boolean duplicate, String sandboxFolder, String nodeConfigurationFileObject, String logfileNameToProduce) {
+ try {
+ status = 0;
+ config = Transformations.restoreConfig(nodeConfigurationFileObject);
+
+ String outputFile = config.getParam(processOutputParam);
+ String localzipFile = "iccat_zip.zip";
+
+ AnalysisLogger.getLogger().info("ICCAT-VPA ranges: "+" Li:"+leftStartIndex+" NLi:"+numberOfLeftElementsToProcess+" Ri:"+rightStartIndex+" NRi:"+numberOfRightElementsToProcess);
+ AnalysisLogger.getLogger().info("ICCAT-VPA expected output "+outputFile);
+
+ //download the package
+ AnalysisLogger.getLogger().info("ICCAT-VPA : downloading package URL: "+packageURL);
+ StorageUtils.downloadInputFile(packageURL, localzipFile);
+
+ //unzip the package
+ AnalysisLogger.getLogger().info("ICCAT-VPA : Unzipping file: "+localzipFile+" having size "+new File(localzipFile).length());
+ ZipTools.unZip(localzipFile, sandboxFolder);
+
+ //download input files
+ AnalysisLogger.getLogger().info("ICCAT-VPA : Downloading remote input files "+config.getGeneralProperties());
+ AnalysisLogger.getLogger().info("ICCAT-VPA : Downloading CAA");
+ StorageUtils.downloadInputFile(config.getParam(CAAInp), "CAA_Age1_25.csv");
+ AnalysisLogger.getLogger().info("ICCAT-VPA : Downloading PCAA");
+ StorageUtils.downloadInputFile(config.getParam(PCAAInp), "PCAA_Age1_25_Run3.csv");
+ AnalysisLogger.getLogger().info("ICCAT-VPA : Downloading CPUE");
+ StorageUtils.downloadInputFile(config.getParam(CPUEInp), "CPUE_Run3.csv");
+ AnalysisLogger.getLogger().info("ICCAT-VPA : Downloading Pwaa");
+ StorageUtils.downloadInputFile(config.getParam(PwaaInp), "waa.csv");
+ AnalysisLogger.getLogger().info("ICCAT-VPA : Downloading waa");
+ StorageUtils.downloadInputFile(config.getParam(waaInp), "fecaa.csv");
+
+ AnalysisLogger.getLogger().info("ICCAT-VPA : all files downloaded: ");
+ AnalysisLogger.getLogger().info("ICCAT-VPA : CAA size: "+new File("CAA_Age1_25.csv").length());
+ AnalysisLogger.getLogger().info("ICCAT-VPA : PCAA size: "+new File("PCAA_Age1_25_Run3.csv").length());
+ AnalysisLogger.getLogger().info("ICCAT-VPA : CPUE size: "+new File("CPUE_Run3.csv").length());
+ AnalysisLogger.getLogger().info("ICCAT-VPA : Pwaa size: "+new File("waa.csv").length());
+ AnalysisLogger.getLogger().info("ICCAT-VPA : waa size: "+new File("fecaa.csv").length());
+
+ String run = "Run_"+rightStartIndex;
+ //create the input file: e.g.
+ //Run_7 1950-2013 CAA_Age1_25.csv PCAA_Age1_25_Run3.csv CPUE_Run3.csv waa.csv fecaa.csv Run_2 split JP_LL NEAST and without last 1 year in ESPMARTrap 8 1 1 10
+ String header = "Run,Year,CAA,PCAA,CPUE,Pwaa,waa,compare to,effect,nCPUE,CPUE_cut,n_remove_year,age_plus_group";
+ String input = run+","+config.getParam(YearStartInp)+"-"+config.getParam(YearEndInp)+","+"CAA_Age1_25.csv"+","+"PCAA_Age1_25_Run3.csv"+","+"CPUE_Run3.csv"+","+"waa.csv"+","+"fecaa.csv"+",,"+config.getParam(effectInp)+","+config.getParam(nCPUEInp)+","+config.getParam(CPUEcutInp)+","+config.getParam(nRemoveYearInp)+","+config.getParam(agePlusGroupInp);
+ FileWriter fw = new FileWriter(new File(sandboxFolder,"run_spec.csv"));
+ fw.write(header+"\n");
+ fw.write(input+"\n");
+ fw.close();
+
+ //run the code as-is after substituting the Run string
+ HashMap<String, String> codeinj = new HashMap<String, String>();
+ codeinj.put("Run_0", run);
+ String pathSand = new File(sandboxFolder).getAbsolutePath();
+ if (!pathSand.endsWith("/"))
+ pathSand=pathSand+"/";
+
+ AnalysisLogger.getLogger().debug("ICCAT-VPA : substituting path to sandbox folder "+pathSand);
+ codeinj.put("/home/gcube/irdstockassessment/allinone/", pathSand);
+
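+ // make the vpa-2box.out executable from the unzipped package runnable before invoking the R script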
+ AnalysisLogger.getLogger().info("ICCAT-VPA : changing rights to the fortran file");
+ OSCommand.ExecuteGetLine("chmod 777 vpa-2box.out", null);
+
+ AnalysisLogger.getLogger().info("ICCAT-VPA : running the script: "+scriptName);
+ RScriptsManager scriptmanager = new RScriptsManager();
+ scriptmanager.executeRScript(config, scriptName, "", new HashMap(), "", "output.csv", codeinj, false,false,false, sandboxFolder);
+
+ //zip the output
+ File outputFolder = new File(sandboxFolder,run);
+ AnalysisLogger.getLogger().info("ICCAT-VPA : checking the output folder: "+outputFolder.getAbsolutePath()+" exists: "+outputFolder.exists());
+ if (!outputFolder.exists())
+ throw new Exception("ICCAT-VPA: output was not produced!");
+
+ AnalysisLogger.getLogger().info("ICCAT-VPA : producing zip output: "+outputFile);
+ ZipTools.zipFolder(new File(sandboxFolder,run).getAbsolutePath(), outputFile);
+
+ AnalysisLogger.getLogger().info("ICCAT-VPA : zip output exists: "+new File(outputFile).exists());
+
+ //upload the output
+ AnalysisLogger.getLogger().info("ICCAT-VPA : uploading on storage");
+ StorageUtils.uploadFilesOnStorage(config.getGcubeScope(), config.getParam("ServiceUserName"), sandboxFolder,outputFile);
+
+ AnalysisLogger.getLogger().info("ICCAT-VPA : Finished");
+ }catch(Exception e){
+ e.printStackTrace();
+ }
+
+ return 0;
+ }
+
+
+
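+ // the computation is modelled as a 1x1 task matrix, so a single worker execution covers the whole assessment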
+ int nExperiments=1;
+ @Override
+ public int getNumberOfRightElements() {
+ return 1;
+ }
+
+ @Override
+ public int getNumberOfLeftElements() {
+ return 1;
+ }
+
+ @Override
+ public void stop() {
+ AnalysisLogger.getLogger().info("ICCAT-VPA process stopped");
+ }
+
+ boolean haspostprocessed = false;
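+ // postProcess downloads the zipped output from the storage manager into the persistence path once the node computation has completed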
+ @Override
+ public void postProcess(boolean manageDuplicates, boolean manageFault) {
+ try {
+
+ AnalysisLogger.getLogger().debug("ICCAT-VPA - Downloading file "+processOutput);
+ StorageUtils.downloadFilefromStorage(config.getGcubeScope(), config.getParam("ServiceUserName"), config.getPersistencePath(), processOutput);
+ AnalysisLogger.getLogger().debug("ICCAT-VPA - Postprocess complete: output ready in "+processOutput);
+
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/src/main/java/org/gcube/dataanalysis/executor/rscripts/SGVMS_Interpolation.java b/src/main/java/org/gcube/dataanalysis/executor/rscripts/SGVMS_Interpolation.java
index 37e4352..93cfb36 100644
--- a/src/main/java/org/gcube/dataanalysis/executor/rscripts/SGVMS_Interpolation.java
+++ b/src/main/java/org/gcube/dataanalysis/executor/rscripts/SGVMS_Interpolation.java
@@ -77,7 +77,7 @@ public class SGVMS_Interpolation extends StandardLocalExternalAlgorithm {
AnalysisLogger.getLogger().debug("SGVM Interpolation-> Executing the script ");
status = 10;
//execute the script in multi-user mode
- scriptmanager.executeRScript(config, scriptName, inputFile, inputParameters, defaultInputFileInTheScript, defaultOutputFileInTheScript, codeInjection, scriptMustReturnAFile,uploadScriptOnTheInfrastructureWorkspace);
+ scriptmanager.executeRScript(config, scriptName, inputFile, inputParameters, defaultInputFileInTheScript, defaultOutputFileInTheScript, codeInjection, scriptMustReturnAFile,uploadScriptOnTheInfrastructureWorkspace, config.getConfigPath());
//assign the file path to an output variable for the SM
outputFile = scriptmanager.currentOutputFileName;
AnalysisLogger.getLogger().debug("SGVM Interpolation-> Output File is "+outputFile);
diff --git a/src/main/java/org/gcube/dataanalysis/executor/tests/RegressionTestICCATVPA.java b/src/main/java/org/gcube/dataanalysis/executor/tests/RegressionTestICCATVPA.java
new file mode 100644
index 0000000..fe09927
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/executor/tests/RegressionTestICCATVPA.java
@@ -0,0 +1,70 @@
+package org.gcube.dataanalysis.executor.tests;
+
+import java.util.List;
+
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
+import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
+import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
+import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
+
+public class RegressionTestICCATVPA {
+ /**
+ * Example of parallel processing on a single machine: runs the ICCAT_VPA stock assessment algorithm through the generators factory.
+ *
+ */
+
+public static AlgorithmConfiguration getConfig() {
+
+ AlgorithmConfiguration config = new AlgorithmConfiguration();
+
+ config.setConfigPath("./cfg/");
+ config.setPersistencePath("./");
+ config.setParam("DatabaseUserName","utente");
+ config.setParam("DatabasePassword","d4science");
+ config.setParam("DatabaseURL","jdbc:postgresql://dbtest.research-infrastructures.eu/testdb");
+ config.setParam("DatabaseDriver","org.postgresql.Driver");
+ AnalysisLogger.setLogger(config.getConfigPath()+AlgorithmConfiguration.defaultLoggerFile);
+ return config;
+ }
+
+ public static void main(String[] args) throws Exception {
+
+ System.out.println("TEST 1");
+
+ List<ComputationalAgent> generators = GeneratorsFactory.getGenerators(testICCATVPA());
+ generators.get(0).init();
+ CustomRegressor.process(generators.get(0));
+ generators = null;
+
+ }
+
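+ // builds a configuration whose parameters mirror the inputs declared by ICCATVPA.getInputParameters()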
+ private static AlgorithmConfiguration testICCATVPA() {
+
+ AlgorithmConfiguration config = getConfig();
+ config.setNumberOfResources(5);
+ config.setModel("ICCAT_VPA");
+
+ config.setParam("UserName", "gianpaolo.coro");
+ config.setGcubeScope("/gcube/devsec");
+ config.setParam("ServiceUserName", "gianpaolo.coro");
+
+ config.setParam("StartYear","1950");
+ config.setParam("EndYear","2013");
+ config.setParam("CAAFile","CAA_Age1_25.csv");
+ config.setParam("PCAAFile","PCAA_Age1_25_Run3.csv");
+ config.setParam("CPUEFile","CPUE_Run3.csv");
+ config.setParam("PwaaFile","waa.csv");
+ config.setParam("waaFile","fecaa.csv");
+
+ config.setParam("shortComment","split JP_LL NEAST and without last 1 year in ESPMARTrap");
+ config.setParam("nCPUE","8");
+ config.setParam("CPUE_cut","1");
+ config.setParam("n_remove_year","1");
+ config.setParam("age_plus_group","10");
+
+ return config;
+ }
+}
diff --git a/src/main/java/org/gcube/dataanalysis/executor/util/RScriptsManager.java b/src/main/java/org/gcube/dataanalysis/executor/util/RScriptsManager.java
index 0257150..fb499b9 100644
--- a/src/main/java/org/gcube/dataanalysis/executor/util/RScriptsManager.java
+++ b/src/main/java/org/gcube/dataanalysis/executor/util/RScriptsManager.java
@@ -95,15 +95,15 @@ public class RScriptsManager {
}
}
- public void executeRScript(AlgorithmConfiguration config, String scriptName, String inputFileURL, HashMap inputParameters, String defaultInputFile, String defaultOutputFile, HashMap codeInjections, boolean mustReturnAFile, boolean uploadOutputOnStorage) throws Exception {
- executeRScript(config, scriptName, inputFileURL, inputParameters, defaultInputFile, defaultOutputFile, codeInjections, mustReturnAFile, uploadOutputOnStorage, true);
+ public void executeRScript(AlgorithmConfiguration config, String scriptName, String inputFileURL, HashMap inputParameters, String defaultInputFile, String defaultOutputFile, HashMap codeInjections, boolean mustReturnAFile, boolean uploadOutputOnStorage, String sandboxFolder) throws Exception {
+ executeRScript(config, scriptName, inputFileURL, inputParameters, defaultInputFile, defaultOutputFile, codeInjections, mustReturnAFile, uploadOutputOnStorage, true, sandboxFolder);
}
- public void executeRScript(AlgorithmConfiguration config, String scriptName, String inputFileURL, HashMap inputParameters, String defaultInputFile, String defaultOutputFile, HashMap codeInjections, boolean mustReturnAFile, boolean uploadOutputOnStorage, boolean deletefiles) throws Exception {
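+ //the sandbox folder (the script working directory) is now passed in by the caller instead of being derived from config.getConfigPath()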
+ public void executeRScript(AlgorithmConfiguration config, String scriptName, String inputFileURL, HashMap inputParameters, String defaultInputFile, String defaultOutputFile, HashMap codeInjections, boolean mustReturnAFile, boolean uploadOutputOnStorage, boolean deletefiles, String sandboxFolder) throws Exception {
List tempfiles = new ArrayList();
try {
status = 0;
- String sandboxFolder = config.getConfigPath();
String scriptPath = new File(sandboxFolder, scriptName).getAbsolutePath();
// String originalScriptPath = scriptPath;
String preparedScriptPath = null;
@@ -283,7 +282,7 @@ public class RScriptsManager {
inputParameters.put("npoints", "10");
inputParameters.put("equalDist", "TRUE");
- scriptmanager.executeRScript(config, scriptName, inputFileURL, inputParameters, defaultInputFile, defaultOutputFile, null, true, true);
+ scriptmanager.executeRScript(config, scriptName, inputFileURL, inputParameters, defaultInputFile, defaultOutputFile, null, true, true,"./");
}
}
diff --git a/target/ecological-engine-smart-executor-1.0.0-SNAPSHOT.jar b/target/ecological-engine-smart-executor-1.0.0-SNAPSHOT.jar
index a54b5be..d1c8e13 100644
Binary files a/target/ecological-engine-smart-executor-1.0.0-SNAPSHOT.jar and b/target/ecological-engine-smart-executor-1.0.0-SNAPSHOT.jar differ
diff --git a/target/maven-archiver/pom.properties b/target/maven-archiver/pom.properties
index 4d03d39..80368a5 100644
--- a/target/maven-archiver/pom.properties
+++ b/target/maven-archiver/pom.properties
@@ -1,5 +1,5 @@
#Generated by Maven
-#Thu Apr 16 18:26:30 CEST 2015
-version=1.0.0-SNAPSHOT
+#Wed Jun 24 16:37:14 CEST 2015
+version=1.1.0-SNAPSHOT
groupId=org.gcube.dataanalysis
artifactId=ecological-engine-smart-executor
diff --git a/target/profile.xml b/target/profile.xml
index 495f836..70a5f7d 100644
--- a/target/profile.xml
+++ b/target/profile.xml
@@ -10,14 +10,14 @@
ecological-engine-smart-executor
- 1.0.0-SNAPSHOT
+ 1.1.0-SNAPSHOT
org.gcube.dataanalysis
ecological-engine-smart-executor
- 1.0.0-SNAPSHOT
+ 1.1.0-SNAPSHOT
- ecological-engine-smart-executor-1.0.0-SNAPSHOT.jar
+ ecological-engine-smart-executor-1.1.0-SNAPSHOT.jar