bug fixes on FAOMSY: return outputs as a results map, reject https input links with a clear error, and resolve merged files against the local folder
git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineSmartExecutor@114220 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
parent 67d403919f
commit d6d450a708
@@ -3,6 +3,7 @@ package org.gcube.dataanalysis.executor.nodes.algorithms;
 import java.io.File;
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.UUID;
 
@@ -39,14 +40,20 @@ public class FAOMSY extends ActorNode {
 
 	@Override
 	public String getDescription() {
-		return "An algorithm to estimate the Maximum Sustainable Yield from a catch statistic by FAO.";
+		return "An algorithm to be used by Fisheries managers for stock assessment. " +
+				"Estimates the Maximum Sustainable Yield (MSY) of a stock, based on a catch trend. " +
+				"The algorithm has been developed by the Resource Use and Conservation Division of the FAO Fisheries and Aquaculture Department (contact: Yimin Ye, yimin.ye@fao.org). " +
+				"It is applicable to a CSV file containing metadata and catch statistics for a set of marine species and produces MSY estimates for each species. " +
+				"The CSV must follow a FAO-defined format (e.g. http://goo.gl/g6YtVx). " +
+				"The output is made up of two (optional) files: one for successfully processed species and another one for species that could not be processed because data were not sufficient to estimate MSY.";
 	}
 
 	static String stocksFile = "StocksFile";
 	static String processOutput= "ProcessOutput";
 	static String nonProcessedOutput= "NonProcessedOutput";
 	static String scriptName = "CatchMSY_Dec2014.R";
 
+	String processedSpOutputFile="";
+	String nonProcessedSpOutputFile="";
 	@Override
 	public List<StatisticalType> getInputParameters() {
 
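The estimation itself is delegated to the CatchMSY_Dec2014.R script named above, which is not part of this diff. For orientation only: Catch-MSY-style estimators build on the Schaefer surplus-production model, and the Java sketch below illustrates that recursion. Everything in it (class name, r, k, b0, the example numbers) is a textbook illustration, not code from this repository.

// Illustrative sketch of the Schaefer surplus-production model behind
// Catch-MSY-style estimators; none of this comes from the patched sources.
public class SchaeferSketch {

	// One step of the biomass dynamics: B[t+1] = B[t] + r*B[t]*(1 - B[t]/k) - C[t]
	static double nextBiomass(double b, double r, double k, double catchT) {
		return b + r * b * (1.0 - b / k) - catchT;
	}

	// A candidate (r, k) pair is plausible when the biomass trajectory implied
	// by the observed catch series never collapses; for the Schaefer model the
	// corresponding MSY is r*k/4.
	static boolean isFeasible(double r, double k, double b0, double[] catches) {
		double b = b0;
		for (double c : catches) {
			b = nextBiomass(b, r, k, c);
			if (b <= 0)
				return false;
		}
		return true;
	}

	public static void main(String[] args) {
		double r = 0.6, k = 1000, b0 = 800;
		double[] catches = { 50, 60, 70, 80, 90 };
		System.out.println("feasible: " + isFeasible(r, k, b0, catches));
		System.out.println("MSY = r*k/4 = " + (r * k / 4));
	}
}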
@@ -57,20 +64,26 @@ public class FAOMSY extends ActorNode {
 
 	@Override
 	public StatisticalType getOutput() {
-		File outfile = new File(config.getPersistencePath(),config.getParam(processOutput));
-		File outfile2 = new File(config.getPersistencePath(),config.getParam(nonProcessedOutput));
-		PrimitiveTypesList list = new PrimitiveTypesList(File.class.getName(), PrimitiveTypes.FILE, "OutputFiles", "Textual output files - processed and non-processed species",false);
+		File outfile = new File(processedSpOutputFile);
+		File outfile2 = new File(nonProcessedSpOutputFile);
+		AnalysisLogger.getLogger().debug("FAOMSY Output 1: "+outfile.getAbsolutePath()+" : "+outfile.exists());
+		AnalysisLogger.getLogger().debug("FAOMSY Output 2: "+outfile2.getAbsolutePath()+" : "+outfile2.exists());
+
+		LinkedHashMap<String, StatisticalType> outputmap = new LinkedHashMap<String, StatisticalType>();
+
 		if (outfile.exists()){
-			PrimitiveType o = new PrimitiveType(File.class.getName(), outfile, PrimitiveTypes.FILE, "ProcessedSpecies", "Output file");
-			list.add(o);
+			PrimitiveType o = new PrimitiveType(File.class.getName(), outfile, PrimitiveTypes.FILE, "ProcessedSpecies", "Output file with processed species");
+			outputmap.put("File containing Processed Species", o);
 		}
 		if (outfile2.exists()){
-			PrimitiveType o2 = new PrimitiveType(File.class.getName(), outfile2, PrimitiveTypes.FILE, "NonProcessedSpecies", "Output file");
-			list.add(o2);
+			PrimitiveType o2 = new PrimitiveType(File.class.getName(), outfile2, PrimitiveTypes.FILE, "NonProcessedSpecies", "Output file with non processed species");
+			outputmap.put("File containing Non Processed Species", o2);
 		}
+
+		PrimitiveType output = new PrimitiveType(HashMap.class.getName(), outputmap, PrimitiveTypes.MAP, "ResultsMap", "Results Map");
 		AnalysisLogger.getLogger().debug("FAOMSY Output Managed");
 
-		return list;
+		return output;
 	}
 
 	@Override
 
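The rewritten getOutput() returns a MAP-typed PrimitiveType instead of the old flat PrimitiveTypesList, and registers each output file only if it actually exists, so consumers no longer receive entries for files that were never produced. Stripped of the gCube StatisticalType wrappers, the pattern reduces to the following plain-JDK sketch (class and method names are illustrative):

import java.io.File;
import java.util.LinkedHashMap;
import java.util.Map;

public class OptionalOutputsSketch {

	// Collect only the outputs that actually exist, in a stable order,
	// keyed by a human-readable label - the shape the fix above adopts.
	static Map<String, File> collectOutputs(File processed, File nonProcessed) {
		Map<String, File> outputs = new LinkedHashMap<>();
		if (processed.exists())
			outputs.put("File containing Processed Species", processed);
		if (nonProcessed.exists())
			outputs.put("File containing Non Processed Species", nonProcessed);
		return outputs;
	}
}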
@@ -152,7 +165,13 @@ public class FAOMSY extends ActorNode {
 		config.setParam(processOutput, "FAOMSY_"+"output_"+uuid);
 		config.setParam(nonProcessedOutput, "FAOMSY_nonprocessed_"+"output_"+uuid);
 		File tempfile = new File(config.getPersistencePath(),"FAOMSY_input_"+(UUID.randomUUID()+".csv").replace("-", ""));
+		try{
+			if (config.getParam(stocksFile).toLowerCase().startsWith("https:"))
+				throw new Exception("Error in FAOMSY: https link not supported!");
 		StorageUtils.downloadInputFile(config.getParam(stocksFile), tempfile.getAbsolutePath());
+		}catch(Exception e){
+			throw new Exception("Error in FAOMSY: error in accessing file (alert: https not supported)");
+		}
 		nstocks = StorageUtils.calcFileRows(tempfile, true);
 		AnalysisLogger.getLogger().info("FAOMSY Found "+nstocks+" stocks!");
 		if (nstocks==0)
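The added guard rejects https links before the URL reaches StorageUtils.downloadInputFile, whose body is not shown in this diff. As a rough, self-contained approximation of what a plain-http download of the stocks CSV amounts to (hypothetical helper, JDK-only; not the actual StorageUtils code):

import java.io.InputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

public class DownloadSketch {

	// Fetch a remote file over plain http and store it at localPath,
	// mirroring the https rejection added in the hunk above.
	static void download(String link, String localPath) throws Exception {
		if (link.toLowerCase().startsWith("https:"))
			throw new Exception("https link not supported!");
		try (InputStream in = new URL(link).openStream()) {
			Files.copy(in, Paths.get(localPath), StandardCopyOption.REPLACE_EXISTING);
		}
	}
}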
@@ -212,6 +231,8 @@ public class FAOMSY extends ActorNode {
 		try {
 			String mainOutputfilename = config.getParam(processOutput);
 			String optionalOutputfilename = config.getParam(nonProcessedOutput);
+			processedSpOutputFile = new File(config.getPersistencePath(),mainOutputfilename).getAbsolutePath();
+			nonProcessedSpOutputFile = new File(config.getPersistencePath(),optionalOutputfilename).getAbsolutePath();
 			assembleFiles(mainOutputfilename);
 			assembleFiles(optionalOutputfilename);
 			AnalysisLogger.getLogger().debug("FAOMSY - Postprocess complete");
@@ -131,9 +131,9 @@ public class StorageUtils {
 		try {
 
 			int nfiles = filenames.size();
-			BufferedWriter bw = new BufferedWriter(new FileWriter(outputfile));
+			BufferedWriter bw = new BufferedWriter(new FileWriter(new File(localFolder,outputfile)));
 			for (int i=0;i<nfiles;i++){
-				BufferedReader br = new BufferedReader(new FileReader(filenames.get(i)));
+				BufferedReader br = new BufferedReader(new FileReader(new File(localFolder,filenames.get(i))));
 				String header = null;
 				if (hasheader && i==0){
 					header = br.readLine();
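The StorageUtils hunk is cut off above. A hypothetical completion of the patched merge routine, showing the fix in context: both the output file and every input file are now resolved against localFolder instead of the process working directory. The signature and surrounding structure are assumed, not taken from the source.

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.util.List;

public class MergeSketch {

	// Concatenate the named files under localFolder into outputfile,
	// keeping the CSV header only from the first file.
	static void mergeFiles(String localFolder, List<String> filenames,
			String outputfile, boolean hasheader) throws Exception {
		try (BufferedWriter bw = new BufferedWriter(
				new FileWriter(new File(localFolder, outputfile)))) {
			for (int i = 0; i < filenames.size(); i++) {
				try (BufferedReader br = new BufferedReader(
						new FileReader(new File(localFolder, filenames.get(i))))) {
					if (hasheader && i > 0)
						br.readLine(); // skip the duplicate header
					String line;
					while ((line = br.readLine()) != null) {
						bw.write(line);
						bw.newLine();
					}
				}
			}
		}
	}
}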