Aligned Trunk to Branch

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/DataMiner@174213 82a268e6-3cf1-43bd-a215-b396298e98cf
Giancarlo Panichi 2018-11-19 09:03:18 +00:00
parent 60ccac1784
commit 34d131b900
5 changed files with 270 additions and 210 deletions

View File

@ -388,7 +388,9 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
time("Ecological Engine Algorithm selection");
// adding service parameters to the configuration
LOGGER.info("5 - Adding Service parameters to the configuration");
inputsManager.addInputServiceParameters(getInputParameters(algorithm), infrastructureDialoguer);
List<StatisticalType> dataminerInputParameters = getInputParameters(algorithm);
LOGGER.debug("Dataminer Algo Default InputParameters: "+dataminerInputParameters);
inputsManager.addInputServiceParameters(dataminerInputParameters, infrastructureDialoguer);
time("Service parameters added to the algorithm");
// merging wps with ecological engine parameters - modifies the
// config
@ -396,7 +398,7 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
LOGGER.debug("Operator class is " + this.getClass().getCanonicalName());
// build computation Data
currentComputation = new ComputationData(config.getTaskID(), config.getAgent(), "", "", startTime, "-", "0", config.getTaskID(), configManager.getUsername(), config.getGcubeScope(), this.getClass().getCanonicalName());
inputsManager.mergeWpsAndEcologicalInputs(supportDatabaseInfo);
inputsManager.mergeWpsAndEcologicalInputs(supportDatabaseInfo, dataminerInputParameters);
generatedInputTables = inputsManager.getGeneratedTables();
generatedFiles = inputsManager.getGeneratedInputFiles();
time("Setup and download of input parameters with tables creation");
@ -447,13 +449,13 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
time("Output preparation for WPS document (no storage manager)");
outputmanager.shutdown();
// delete all temporary tables
LOGGER.info("12 - Deleting possible generated temporary tables");
LOGGER.debug("Final Computation Output: " + outputs);
LOGGER.debug("12 - Final Computation Output");
LOGGER.debug("Outputs: "+ outputs);
endTime = new java.text.SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(System.currentTimeMillis());
if (!isCancelled()) {
LOGGER.debug("Save Computation Data");
saveComputationOnWS(inputsManager.getProvenanceData(), outputmanager.getProvenanceData(), agent, generatedFiles);
} else {
LOGGER.debug("Computation interrupted - no update");
@ -542,8 +544,13 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
}
private void saveComputationOnWS(List<StoredData> inputData, List<StoredData> outputData, ComputationalAgent agent, List<File> generatedFiles) {
LOGGER.debug("Save Computation On WS");
LOGGER.debug("InputData: "+inputData);
LOGGER.debug("OutputData: "+outputData);
LOGGER.debug("Agent: "+agent);
LOGGER.debug("Generated files: "+generatedFiles);
LOGGER.debug("Provenance manager started for operator " + this.getClass().getCanonicalName());
ComputationData computation = new ComputationData(config.getTaskID(), config.getAgent(), agent.getDescription(), agent.getInfrastructure().name(), startTime, endTime, "100", config.getTaskID(), config.getParam(ConfigurationManager.serviceUserNameParameterVariable), config.getGcubeScope(), this.getClass().getCanonicalName());
// post on WS
DataspaceManager manager = new DataspaceManager(config, computation, inputData, outputData, generatedFiles);

View File

@ -41,7 +41,7 @@ import org.slf4j.LoggerFactory;
public class InputsManager {
private static final Logger LOGGER = LoggerFactory.getLogger(InputsManager.class);
LinkedHashMap<String, Object> inputs;
List<String> generatedTables;
@ -51,11 +51,11 @@ public class InputsManager {
String computationId;
List<StoredData> provenanceData = new ArrayList<StoredData>();
public List<StoredData> getProvenanceData() {
return provenanceData;
}
public static String inputsSeparator = "\\|";
public AlgorithmConfiguration getConfig() {
@ -92,30 +92,35 @@ public class InputsManager {
config.setParam("DatabaseURL", supportDatabaseInfo.url);
}
public void mergeWpsAndEcologicalInputs(DatabaseInfo supportDatabaseInfo) throws Exception {
public void mergeWpsAndEcologicalInputs(DatabaseInfo supportDatabaseInfo,
List<StatisticalType> dataminerInputParameters) throws Exception {
LOGGER.debug("Merge WPS And Ecological Inputs");
// browse input parameters from WPS
for (String inputName : inputs.keySet()) {
Object input = inputs.get(inputName);
LOGGER.debug("Managing Input Parameter with Name "+ inputName);
LOGGER.debug("Managing Input Parameter with Name " + inputName);
// case of simple input
if (input instanceof String) {
LOGGER.debug("Simple Input: "+ input);
LOGGER.debug("Simple Input: " + input);
// manage lists
String inputAlgoOrig = ((String) input).trim();
String inputAlgo = ((String) input).trim().replaceAll(inputsSeparator, AlgorithmConfiguration.listSeparator);
String inputAlgo = ((String) input).trim().replaceAll(inputsSeparator,
AlgorithmConfiguration.listSeparator);
LOGGER.debug("Simple Input Transformed: " + inputAlgo);
config.setParam(inputName, inputAlgo);
saveInputData(inputName,inputName,inputAlgoOrig);
saveInputData(inputName, inputName, inputAlgoOrig);
}
// case of Complex Input
else if (input instanceof GenericFileData) {
LOGGER.debug("Complex Input: " + input);
LOGGER.debug("Complex Input");
// retrieve payload
GenericFileData files = ((GenericFileData) input);
LOGGER.debug("GenericFileData: [fileExtension=" + files.getFileExtension() + ", mimeType="
+ files.getMimeType() + "]");
List<File> localfiles = getLocalFiles(files,inputName);
List<File> localfiles = getLocalFiles(files, inputName, dataminerInputParameters);
String inputtables = "";
int nfiles = localfiles.size();
StringBuffer sb = new StringBuffer();
@ -128,10 +133,11 @@ public class InputsManager {
if (inputTableTemplates.get(inputName) != null) {
LOGGER.debug("Creating table: " + tableName);
createTable(tableName, tableFile, config, supportDatabaseInfo, inputTableTemplates.get(inputName));
createTable(tableName, tableFile, config, supportDatabaseInfo,
inputTableTemplates.get(inputName));
generatedTables.add(tableName);
}
//case of non-table input file, e.g. FFANN
// case of non-table input file, e.g. FFANN
else
tableName = tableFile.getAbsolutePath();
if (i > 0)
@ -140,13 +146,13 @@ public class InputsManager {
inputtables += tableName;
saveInputData(tableFile.getName(), inputName, tableFile.getAbsolutePath());
if (i>0)
if (i > 0)
sb.append("|");
sb.append(tableFile.getName());
}
sb.append("|");
if (nfiles>0)
if (nfiles > 0)
saveInputData(inputName, inputName, sb.toString());
// the only possible complex input is a table - check the WPS
@ -157,15 +163,15 @@ public class InputsManager {
}
public boolean isXML(String fileContent){
public boolean isXML(String fileContent) {
if (fileContent.startsWith("<"))
return true;
else
return false;
}
public String readOneLine(String filename){
public String readOneLine(String filename) {
try {
BufferedReader in = new BufferedReader(new FileReader(new File(filename)));
@ -173,7 +179,7 @@ public class InputsManager {
String vud = "";
while ((line = in.readLine()) != null) {
if (line.trim().length()>0){
if (line.trim().length() > 0) {
vud = line.trim();
break;
}
@ -183,47 +189,54 @@ public class InputsManager {
} catch (Exception e) {
e.printStackTrace();
return null;
}
}
}
public String inputNameFromHttpHeader(String url) throws Exception{
public String inputNameFromHttpHeader(String url) throws Exception {
LOGGER.debug("Search filename in http header from: " + url);
URL obj = new URL(url);
URLConnection conn = obj.openConnection();
String filename=null;
String filename = null;
// get all headers
Map<String, List<String>> map = conn.getHeaderFields();
LOGGER.debug("Getting file name from http header");
for (Map.Entry<String, List<String>> entry : map.entrySet()) {
String value = entry.getValue().toString();
if (value.toLowerCase().contains("filename=")){
LOGGER.debug("Searching in http header: found file name in header value {}",value);
filename=value.substring(value.indexOf("=")+1);
filename=filename.replace("\"", "").replace("]", "");
LOGGER.debug("Searching in http header: retrieved file name {}",filename);
LOGGER.debug("Header value: " + value);
if (value.toLowerCase().contains("filename")) {
LOGGER.debug("Searching in http header: found file name in header value {}", value);
filename = value.substring(value.indexOf("=") + 1);
filename = filename.replace("\"", "").replace("]", "");
LOGGER.debug("Searching in http header: retrieved file name {}", filename);
break;
}
}
LOGGER.debug("Filename retrieved from http header: " + filename);
return filename;
}
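For clarity, a hedged trace of the filename extraction above; the header value below is an assumed example of what conn.getHeaderFields() typically renders for a Content-Disposition entry, not output captured from the service:
public class FilenameFromHeaderExample {
	public static void main(String[] args) {
		// Assumed rendering of a Content-Disposition entry returned by getHeaderFields()
		String value = "[attachment; filename=\"mydata.csv\"]";
		String filename = value.substring(value.indexOf("=") + 1); // -> "\"mydata.csv\"]"
		filename = filename.replace("\"", "").replace("]", "");    // -> "mydata.csv"
		System.out.println(filename); // prints: mydata.csv
	}
}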
public List<File> getLocalFiles(GenericFileData files,String inputName) throws Exception {
public List<File> getLocalFiles(GenericFileData files, String inputName,
List<StatisticalType> dataminerInputParameters) throws Exception {
LOGGER.debug("GetLocalFiles: [files: " + files + ", inputName: " + inputName + "]");
// download input
List<File> filesList = new ArrayList<File>();
File f = files.getBaseFile(false);
LOGGER.debug("Retrieving file content as a URL link: " + f.getAbsolutePath());
//TODO DO NOT READ FILE INTO MEMORY
LOGGER.debug("Retrieving local files: " + f.getAbsolutePath());
// TODO DO NOT READ FILE INTO MEMORY
// read file content
String fileLink = readOneLine(f.getAbsolutePath());
LOGGER.debug("File link: {} ...",fileLink.substring(0,Math.min(fileLink.length(),10)));
LOGGER.debug("Check File is link: {} ...", fileLink.substring(0, Math.min(fileLink.length(), 10)));
String fileName = "";
// case of a http link
if (fileLink!=null && (fileLink.toLowerCase().startsWith("http:") || fileLink.toLowerCase().startsWith("https:"))) {
if (fileLink != null
&& (fileLink.toLowerCase().startsWith("http:") || fileLink.toLowerCase().startsWith("https:"))) {
// manage the case of multiple files
LOGGER.debug("Complex Input payload is link");
LOGGER.debug("Retrieving files from url: " + fileLink);
String[] remotefiles = fileLink.split(inputsSeparator);
for (String subfilelink : remotefiles) {
subfilelink = subfilelink.trim();
LOGGER.debug("Managing link: {}",subfilelink);
LOGGER.debug("Managing link: {}", subfilelink);
if (subfilelink.length() == 0)
continue;
InputStream is = null;
@ -234,15 +247,18 @@ public class InputsManager {
// retrieve payload: for test purpose only
String fileNameTemp = inputNameFromHttpHeader(subfilelink);
LOGGER.debug("the fileNameTemp is {}",fileNameTemp);
if (fileNameTemp==null)
fileName = String.format("%s_(%s).%s",inputName, computationId, FilenameUtils.getExtension(inputName));
else
fileName = String.format("%s_(%s).%s",fileNameTemp, computationId, FilenameUtils.getExtension(fileNameTemp));
LOGGER.debug("the name of the generated file is {}",fileName);
LOGGER.debug("the fileNameTemp is {}", fileNameTemp);
if (fileNameTemp != null && !fileNameTemp.isEmpty()) {
fileName = String.format("%s_(%s).%s", inputName, computationId,
FilenameUtils.getExtension(fileNameTemp));
} else {
fileName = String.format("%s_(%s).%s", inputName, computationId,
FilenameUtils.getExtension(inputName));
}
LOGGER.debug("the name of the generated file is {}", fileName);
File of = new File(config.getPersistencePath(), fileName);
FileOutputStream fos = new FileOutputStream(of);
IOUtils.copy(is, fos);
@ -254,35 +270,54 @@ public class InputsManager {
LOGGER.debug("Created local file: {}", of.getAbsolutePath());
}
} else {
LOGGER.debug("Complex Input payload is the filelink");
LOGGER.debug("Complex Input payload is file");
fileName = f.getName();
LOGGER.debug("Retriving local input from file: {}", fileName);
String fileExt=null;
if (isXML(fileLink))
{
LOGGER.debug("Retrieving local input from file: {}", fileName);
String fileExt = null;
if (isXML(fileLink)) {
String xmlFile = f.getAbsolutePath();
String csvFile = xmlFile+".csv";
LOGGER.debug("Transforming XML file into a csv: {} ",csvFile);
String csvFile = xmlFile + ".csv";
LOGGER.debug("Transforming XML file into a csv: {} ", csvFile);
GML2CSV.parseGML(xmlFile, csvFile);
LOGGER.debug("GML Parsed: {} [..]",readOneLine(csvFile));
LOGGER.debug("GML Parsed: {} [..]", readOneLine(csvFile));
f = new File(csvFile);
fileExt = "csv";
}
else{
LOGGER.debug("The file is a csv: {}",f.getAbsolutePath());
} else {
LOGGER.debug("The file is a csv: {}", f.getAbsolutePath());
fileExt = FilenameUtils.getExtension(fileName);
}
String absFile = new File(f.getParent(), String.format("%s_(%s).%s",inputName,computationId, fileExt)).getAbsolutePath();
LOGGER.debug("Renaming to: "+absFile);
LOGGER.debug("Retrieve default extension");
String fileDefaultValue = null;
for (StatisticalType defaultInputParameter : dataminerInputParameters) {
if (defaultInputParameter.getName().compareTo(inputName) == 0) {
fileDefaultValue = defaultInputParameter.getDefaultValue();
break;
}
}
LOGGER.debug("Parameter default value retrieved: " + fileDefaultValue);
if (fileDefaultValue != null && !fileDefaultValue.isEmpty()) {
int lastPointIndex = fileDefaultValue.lastIndexOf(".");
if (lastPointIndex > -1 && lastPointIndex < (fileDefaultValue.length() - 1)) {
fileExt = fileDefaultValue.substring(lastPointIndex + 1);
LOGGER.debug("Default Extension retrieved: " + fileExt);
}
}
LOGGER.debug("Use extension: " + fileExt);
String absFile = new File(f.getParent(), String.format("%s_(%s).%s", inputName, computationId, fileExt))
.getAbsolutePath();
LOGGER.debug("Renaming to: " + absFile);
System.gc();
boolean renamed = f.renameTo(new File(absFile));
if (renamed)
f = new File(absFile);
LOGGER.debug("The file has been renamed as : {} - {}",f.getAbsolutePath(),renamed);
LOGGER.debug("The file has been renamed as : {} - {}", f.getAbsolutePath(), renamed);
filesList.add(f);
}
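The block above prefers the extension declared in the algorithm's default value for this input over the one guessed from the uploaded file name. A minimal standalone sketch of that rule; resolveExtension and the sample values are illustrative, not part of the codebase:
public class ExtensionResolutionExample {
	// Returns the extension taken from the parameter's default value when present,
	// otherwise keeps the fallback guessed from the actual file name.
	static String resolveExtension(String fileDefaultValue, String fallbackExtension) {
		String fileExt = fallbackExtension;
		if (fileDefaultValue != null && !fileDefaultValue.isEmpty()) {
			int lastPointIndex = fileDefaultValue.lastIndexOf(".");
			if (lastPointIndex > -1 && lastPointIndex < (fileDefaultValue.length() - 1)) {
				fileExt = fileDefaultValue.substring(lastPointIndex + 1);
			}
		}
		return fileExt;
	}

	public static void main(String[] args) {
		System.out.println(resolveExtension("template.csv", "dat")); // csv
		System.out.println(resolveExtension(null, "dat"));           // dat
	}
}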
@ -290,7 +325,8 @@ public class InputsManager {
return filesList;
}
public void createTable(String tableName, File tableFile, AlgorithmConfiguration config, DatabaseInfo supportDatabaseInfo, String inputTableTemplate) throws Exception {
public void createTable(String tableName, File tableFile, AlgorithmConfiguration config,
DatabaseInfo supportDatabaseInfo, String inputTableTemplate) throws Exception {
// creating table
LOGGER.debug("Complex Input size after download: " + tableFile.length());
@ -316,11 +352,13 @@ public class InputsManager {
TableTemplatesMapper mapper = new TableTemplatesMapper();
String createstatement = mapper.generateCreateStatement(tableName, templatename, tableStructure);
LOGGER.debug("Creating table: " + tableName);
DatabaseUtils.createBigTable(true, tableName, supportDatabaseInfo.driver, supportDatabaseInfo.username, supportDatabaseInfo.password, supportDatabaseInfo.url, createstatement, dbConnection);
DatabaseUtils.createRemoteTableFromFile(tableFile.getAbsolutePath(), tableName, ",", true, supportDatabaseInfo.username, supportDatabaseInfo.password, supportDatabaseInfo.url);
DatabaseUtils.createBigTable(true, tableName, supportDatabaseInfo.driver, supportDatabaseInfo.username,
supportDatabaseInfo.password, supportDatabaseInfo.url, createstatement, dbConnection);
DatabaseUtils.createRemoteTableFromFile(tableFile.getAbsolutePath(), tableName, ",", true,
supportDatabaseInfo.username, supportDatabaseInfo.password, supportDatabaseInfo.url);
} catch (Exception e) {
LOGGER.error("Error in database transaction " ,e);
LOGGER.error("Error in database transaction ", e);
throw new Exception("Error in creating the table for " + tableName + ": " + e.getLocalizedMessage());
} finally {
DatabaseUtils.closeDBConnection(dbConnection);
@ -376,7 +414,8 @@ public class InputsManager {
return structure.toString();
}
public void addInputServiceParameters(List<StatisticalType> agentInputs, InfrastructureDialoguer infrastructureDialoguer) throws Exception {
public void addInputServiceParameters(List<StatisticalType> agentInputs,
InfrastructureDialoguer infrastructureDialoguer) throws Exception {
// check and fullfil additional parameters
DatabaseInfo dbinfo = null;
@ -384,10 +423,10 @@ public class InputsManager {
for (StatisticalType type : agentInputs) {
if (type instanceof PrimitiveType) {
if (((PrimitiveType) type).getType()==PrimitiveTypes.CONSTANT){
String constant = ""+((PrimitiveType) type).getDefaultValue();
if (((PrimitiveType) type).getType() == PrimitiveTypes.CONSTANT) {
String constant = "" + ((PrimitiveType) type).getDefaultValue();
config.setParam(type.getName(), constant);
LOGGER.debug("Constant parameter: "+constant);
LOGGER.debug("Constant parameter: " + constant);
}
}
if (type instanceof ServiceType) {
@ -400,10 +439,10 @@ public class InputsManager {
String value = "";
if (sp == ServiceParameters.RANDOMSTRING)
value = "stat" + UUID.randomUUID().toString().replace("-", "");
else if (sp == ServiceParameters.USERNAME){
else if (sp == ServiceParameters.USERNAME) {
value = (String) inputs.get(ConfigurationManager.usernameParameter);
LOGGER.debug("User name used by the client: "+value);
LOGGER.debug("User name used by the client: " + value);
}
LOGGER.debug("ServiceType Adding: (" + name + "," + value + ")");
config.setParam(name, value);
@ -444,8 +483,7 @@ public class InputsManager {
}
private void saveInputData(String name, String description, String payload){
private void saveInputData(String name, String description, String payload) {
String id = name;
DataProvenance provenance = DataProvenance.IMPORTED;
String creationDate = new java.text.SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(System.currentTimeMillis());
@ -453,18 +491,17 @@ public class InputsManager {
String type = "text/plain";
if (payload != null && (new File (payload).exists())) {
if (payload != null && (new File(payload).exists())) {
if (payload.toLowerCase().endsWith(".csv") || payload.toLowerCase().endsWith(".txt")) {
type = "text/csv";
} else
type = "application/d4science";
}
StoredData data = new StoredData(name, description, id, provenance, creationDate, operator, computationId, type,payload,config.getGcubeScope());
StoredData data = new StoredData(name, description, id, provenance, creationDate, operator, computationId, type,
payload, config.getGcubeScope());
provenanceData.add(data);
}
}
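For reference, a hedged condensation of the MIME-type decision made by saveInputData above; the class and method names are illustrative only:
public class InputMimeTypeExample {
	// Mirrors the branch above: csv/txt files -> text/csv, other existing files -> application/d4science,
	// anything that is not a local file (e.g. a plain value) -> text/plain.
	static String mimeTypeFor(String payload) {
		String type = "text/plain";
		if (payload != null && new java.io.File(payload).exists()) {
			if (payload.toLowerCase().endsWith(".csv") || payload.toLowerCase().endsWith(".txt"))
				type = "text/csv";
			else
				type = "application/d4science";
		}
		return type;
	}

	public static void main(String[] args) {
		System.out.println(mimeTypeFor("not_a_file_just_a_value")); // text/plain
	}
}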

View File

@ -140,7 +140,7 @@ public class OutputsManager {
outputs.put(pkey, "");
}
}
LOGGER.debug("OutputsManager outputs "+outputs);
return outputs;
}
@ -256,10 +256,7 @@ public class OutputsManager {
String mimetype = info.getMimetype();
String abstractStr = info.getAbstractStr();
LOGGER.debug("IOWPS Information: " + "name "+info.getName()+","
+"abstr "+info.getAbstractStr()+","
+"content "+info.getContent()+","
+"def "+info.getDefaultVal()+",");
LOGGER.debug("IOWPS Information [name="+info.getName()+", abstr="+info.getAbstractStr()+", content="+info.getContent()+", def="+info.getDefaultVal()+"]");
if ((abstractStr==null || abstractStr.trim().length()==0) && (payload!= null && payload.trim().length()>0))
abstractStr = info.getName();

View File

@ -62,12 +62,15 @@ public class DataspaceManager implements Runnable {
public static String operator = "operator_name";
public static String payload = "payload";
public DataspaceManager(AlgorithmConfiguration config, ComputationData computation, List<StoredData> inputData, List<StoredData> outputData, List<File> generatedFiles) {
public DataspaceManager(AlgorithmConfiguration config, ComputationData computation, List<StoredData> inputData,
List<StoredData> outputData, List<File> generatedFiles) {
this.config = config;
this.computation = computation;
this.inputData = inputData;
this.outputData = outputData;
this.generatedFiles = generatedFiles;
LOGGER.debug("DataspaceManager [config=" + config + ", computation=" + computation + ", inputData=" + inputData
+ ", outputData=" + outputData + ", generatedFiles=" + generatedFiles + "]");
}
public void run() {
@ -92,20 +95,23 @@ public class DataspaceManager implements Runnable {
StorageHubClient shc = new StorageHubClient();
FolderContainer root =shc.getWSRoot();
FolderContainer root = shc.getWSRoot();
List<ItemContainer<? extends Item>> dataminerItems = root.findByName(dataminerFolder).getContainers();
FolderContainer dataminerFolderContainer;
// manage folders: create the folders network
if (dataminerItems.isEmpty()) {
LOGGER.debug("Dataspace->Creating DataMiner main folder");
dataminerFolderContainer = root.newFolder(dataminerFolder, "A folder collecting DataMiner experiments data and computation information");
//((WorkspaceFolder) root.find(dataminerFolder)).setSystemFolder(true);
} else if (dataminerItems.size()>1) throw new Exception("found more than one dataminer folder (impossible!!!)");
else dataminerFolderContainer = (FolderContainer) dataminerItems.get(0);
dataminerFolderContainer = root.newFolder(dataminerFolder,
"A folder collecting DataMiner experiments data and computation information");
// ((WorkspaceFolder)
// root.find(dataminerFolder)).setSystemFolder(true);
} else if (dataminerItems.size() > 1)
throw new Exception("found more than one dataminer folder (impossible!!!)");
else
dataminerFolderContainer = (FolderContainer) dataminerItems.get(0);
if (dataminerFolderContainer.findByName(importedDataFolder).getContainers().isEmpty()) {
LOGGER.debug("Dataspace->Creating DataMiner imported data folder");
@ -118,37 +124,41 @@ public class DataspaceManager implements Runnable {
}
if (dataminerFolderContainer.findByName(computationsFolder).getContainers().isEmpty()) {
LOGGER.debug("Dataspace->Creating DataMiner computations folder");
dataminerFolderContainer.newFolder(computationsFolder, "A folder collecting DataMiner computations information");
dataminerFolderContainer.newFolder(computationsFolder,
"A folder collecting DataMiner computations information");
}
return dataminerFolderContainer;
}
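The folder checks above all follow a create-if-missing pattern; a hedged condensation is sketched below, assuming the storagehub-client types (FolderContainer, ItemContainer, Item) already imported by DataspaceManager. ensureChildFolder is an illustrative name, not an existing method:
	// Illustrative helper, not part of the commit: create a child folder only when it is absent.
	private static FolderContainer ensureChildFolder(FolderContainer parent, String name, String description)
			throws Exception {
		List<ItemContainer<? extends Item>> items = parent.findByName(name).getContainers();
		if (items.isEmpty())
			return parent.newFolder(name, description);
		return (FolderContainer) items.get(0);
	}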
public String uploadData(StoredData data, FolderContainer destinationFolder) throws Exception {
return uploadData(data, destinationFolder, true);
}
public String uploadData(StoredData data, FolderContainer destinationFolder, boolean changename) throws Exception {
LOGGER.debug("Dataspace->Analysing " + data);
// String filenameonwsString = WorkspaceUtil.getUniqueName(data.name, wsFolder);
String filenameonwsString = data.name ;
// String filenameonwsString = WorkspaceUtil.getUniqueName(data.name,
// wsFolder);
String filenameonwsString = data.name;
if (changename)
filenameonwsString = String.format("%s_(%s)%s", data.name, data.computationId, getExtension(data.payload, data.type));
filenameonwsString = String.format("%s_(%s)%s", data.name, data.computationId,
getExtension(data.payload));
InputStream in = null;
String url = "";
try {
long size = 0;
if (data.type.equals("text/csv")||data.type.equals("application/d4science")||data.type.equals("image/png")) {
//long size = 0;
if (data.type.equals("text/csv") || data.type.equals("application/d4science")
|| data.type.equals("image/png")) {
if (new File(data.payload).exists() || !data.payload.startsWith("http")) {
LOGGER.debug("Dataspace->Uploading file {}", data.payload);
in = new FileInputStream(new File(data.payload));
size = new File(data.payload).length();
//size = new File(data.payload).length();
} else {
LOGGER.debug("Dataspace->Uploading via URL {}", data.payload);
int tries = 10;
for (int i=0;i<tries;i++){
for (int i = 0; i < tries; i++) {
try {
URL urlc = new URL(data.payload);
url = urlc.toString();
@ -156,21 +166,22 @@ public class DataspaceManager implements Runnable {
urlConnection.setConnectTimeout(10000);
urlConnection.setReadTimeout(10000);
in = new BufferedInputStream(urlConnection.getInputStream());
}catch(Exception ee){
LOGGER.warn("Dataspace->Retrying connection to {} number {} ",data.payload,(i+1),ee);
in =null;
} catch (Exception ee) {
LOGGER.warn("Dataspace->Retrying connection to {} number {} ", data.payload, (i + 1), ee);
in = null;
}
if (in!=null)
if (in != null)
break;
else
Thread.sleep(10000);
}
}
if (in==null)
throw new Exception("Impossible to open stream from "+data.payload);
if (in == null)
throw new Exception("Impossible to open stream from " + data.payload);
// LOGGER.debug("Dataspace->final file name on ws " + data.name+" description "+data.description);
// LOGGER.debug("Dataspace->final file name on ws " +
// data.name+" description "+data.description);
LOGGER.debug("Dataspace->WS OP saving the following file on the WS " + filenameonwsString);
Map<String, Object> properties = new LinkedHashMap<String, Object>();
@ -198,7 +209,7 @@ public class DataspaceManager implements Runnable {
in.close();
} catch (Exception e) {
LOGGER.debug("Dataspace->Error creating file {}", e.getMessage());
//LOGGER.debug(e);
// LOGGER.debug(e);
}
LOGGER.debug("Dataspace->File created {}", filenameonwsString);
} else {
@ -206,8 +217,8 @@ public class DataspaceManager implements Runnable {
url = data.payload;
}
} catch (Throwable e) {
LOGGER.error("Dataspace->Could not retrieve input payload {} ",data.payload,e);
//LOGGER.debug(e);
LOGGER.error("Dataspace->Could not retrieve input payload {} ", data.payload, e);
// LOGGER.debug(e);
url = "payload was not made available for this dataset";
data.payload = url;
}
@ -216,24 +227,25 @@ public class DataspaceManager implements Runnable {
public List<String> uploadInputData(List<StoredData> inputData, FolderContainer dataminerFolder) throws Exception {
LOGGER.debug("Dataspace->uploading input data; Number of data: {}", inputData.size());
FolderContainer destinationFolder = (FolderContainer) dataminerFolder.findByName(importedDataFolder).getContainers().get(0);
FolderContainer destinationFolder = (FolderContainer) dataminerFolder.findByName(importedDataFolder)
.getContainers().get(0);
List<String> urls = new ArrayList<String>();
for (StoredData input : inputData) {
List<ItemContainer<? extends Item>> items = null;
if (input.type.equals("text/csv")||input.type.equals("application/d4science")||input.type.equals("image/png"))
if (input.type.equals("text/csv") || input.type.equals("application/d4science")
|| input.type.equals("image/png"))
items = destinationFolder.findByName(input.name).getContainers();
if (items==null || items.isEmpty()){
String url = uploadData(input, destinationFolder,false);
LOGGER.debug("Dataspace->returning property {}",url);
if (items == null || items.isEmpty()) {
String url = uploadData(input, destinationFolder, false);
LOGGER.debug("Dataspace->returning property {}", url);
urls.add(url);
}
else{
} else {
FileContainer item = (FileContainer) items.get(0);
LOGGER.debug("Dataspace->Input item {} is already available in the input folder",input.name);
LOGGER.debug("Dataspace->Input item {} is already available in the input folder", input.name);
String url = item.getPublicLink().toString();
LOGGER.debug("Dataspace->returning WS url {}",url);
LOGGER.debug("Dataspace->returning WS url {}", url);
urls.add(url);
}
}
@ -242,9 +254,11 @@ public class DataspaceManager implements Runnable {
return urls;
}
public List<String> uploadOutputData(List<StoredData> outputData,FolderContainer dataminerFolder) throws Exception {
public List<String> uploadOutputData(List<StoredData> outputData, FolderContainer dataminerFolder)
throws Exception {
LOGGER.debug("Dataspace->uploading output data; Number of data: " + outputData.size());
FolderContainer destinationFolder = (FolderContainer)dataminerFolder.findByName(computedDataFolder).getContainers().get(0);
FolderContainer destinationFolder = (FolderContainer) dataminerFolder.findByName(computedDataFolder)
.getContainers().get(0);
List<String> urls = new ArrayList<String>();
for (StoredData output : outputData) {
String url = uploadData(output, destinationFolder);
@ -254,21 +268,23 @@ public class DataspaceManager implements Runnable {
return urls;
}
public void uploadComputationData(ComputationData computation, List<StoredData> inputData, List<StoredData> outputData, FolderContainer dataminerFolder) throws Exception {
public void uploadComputationData(ComputationData computation, List<StoredData> inputData,
List<StoredData> outputData, FolderContainer dataminerFolder) throws Exception {
LOGGER.debug("Dataspace->uploading computation data");
FolderContainer computationContainer = (FolderContainer) dataminerFolder.findByName(computationsFolder).getContainers().get(0);
FolderContainer computationContainer = (FolderContainer) dataminerFolder.findByName(computationsFolder)
.getContainers().get(0);
// create a folder in here
LOGGER.debug("Dataspace->Creating computation folder " + computation.id);
String cfoldername = computation.id;
FolderContainer newcomputationFolder = null;
try{
try {
newcomputationFolder = computationContainer.newFolder(cfoldername, computation.operatorDescription);
}catch(java.lang.ClassCastException e){
} catch (java.lang.ClassCastException e) {
LOGGER.debug("Dataspace->concurrency exception - deleting remaining item");
deleteRunningComputationData();
newcomputationFolder = computationContainer.newFolder(cfoldername, computation.operatorDescription);
}
//String itemType = "COMPUTATION";
// String itemType = "COMPUTATION";
// create IO folders
LOGGER.debug("Dataspace->creating IO folders under " + cfoldername);
@ -313,86 +329,88 @@ public class DataspaceManager implements Runnable {
LOGGER.debug("Dataspace->Adding input properties for " + ninput + " inputs");
for (int i = 1; i <= ninput; i++) {
StoredData input = inputData.get(i - 1);
if (input.payload.contains("|")){
String payload = input .payload;
LOGGER.debug("Dataspace->Managing complex input {} : {}",input.name, payload);
//delete the names that are not useful
if (input.payload.contains("|")) {
String payload = input.payload;
LOGGER.debug("Dataspace->Managing complex input {} : {}", input.name, payload);
// delete the names that are not useful
for (StoredData subinput:inputData){
if (input.description.equals(subinput.description)){
payload = payload.replace(subinput.name,subinput.payload);
subinput.name=null;
for (StoredData subinput : inputData) {
if (input.description.equals(subinput.description)) {
payload = payload.replace(subinput.name, subinput.payload);
subinput.name = null;
}
}
input.name = null;
//delete last pipe character
// delete last pipe character
if (payload.endsWith("|"))
payload = payload.substring(0,payload.length()-1);
LOGGER.debug("Dataspace->Complex input after processing "+payload);
payload = payload.substring(0, payload.length() - 1);
LOGGER.debug("Dataspace->Complex input after processing " + payload);
properties.put("input" + i + "_" + input.description, payload);
input.payload=payload;
input.payload = payload;
}
}
for (int i = 1; i <= ninput; i++) {
StoredData input = inputData.get(i - 1);
if (input.name!=null){
properties.put(String.format("input%d_%s",i, input.name), inputurls.get(i - 1));
if (input.name != null) {
properties.put(String.format("input%d_%s", i, input.name), inputurls.get(i - 1));
}
}
LOGGER.debug("Dataspace->Adding output properties for " + noutput + " outputs");
for (int i = 1; i <= noutput; i++) {
properties.put(String.format("output%d_%s",i,outputData.get(i - 1).name), outputurls.get(i - 1));
properties.put(String.format("output%d_%s", i, outputData.get(i - 1).name), outputurls.get(i - 1));
}
LOGGER.debug("Dataspace->Properties of the folder: {} ", properties );
LOGGER.debug("Dataspace->Properties of the folder: {} ", properties);
LOGGER.debug("Dataspace->Saving properties to ProvO XML file {} outputs",noutput);
LOGGER.debug("Dataspace->Saving properties to ProvO XML file {} outputs", noutput);
/*
* XStream xstream = new XStream(); String xmlproperties = xstream.toXML(properties);
* XStream xstream = new XStream(); String xmlproperties =
* xstream.toXML(properties);
*/
try {
String xmlproperties = ProvOGenerator.toProvO(computation, inputData, outputData);
File xmltosave = new File(config.getPersistencePath(), "prov_o_" + UUID.randomUUID());
FileTools.saveString(xmltosave.getAbsolutePath(), xmlproperties, true, "UTF-8");
try(InputStream sis = new FileInputStream(xmltosave)){
newcomputationFolder.uploadFile(sis,computation.id + ".xml", computation.operatorDescription);
try (InputStream sis = new FileInputStream(xmltosave)) {
newcomputationFolder.uploadFile(sis, computation.id + ".xml", computation.operatorDescription);
}
xmltosave.delete();
} catch (Exception e) {
LOGGER.error("Dataspace->Failed creating ProvO XML file ",e);
LOGGER.error("Dataspace->Failed creating ProvO XML file ", e);
}
/*
List<String> scopes = new ArrayList<String>();
scopes.add(config.getGcubeScope());
ws.createGcubeItem(computation.id, computation.operatorDescription, scopes, computation.user, itemType, properties, newcomputationFolder.getId());
* List<String> scopes = new ArrayList<String>();
* scopes.add(config.getGcubeScope());
* ws.createGcubeItem(computation.id, computation.operatorDescription,
* scopes, computation.user, itemType, properties,
* newcomputationFolder.getId());
*/
newcomputationFolder.setMetadata(new Metadata(properties));
LOGGER.debug("Dataspace->finished uploading computation data");
}
public String buildCompositePayload(List<StoredData> inputData,String payload, String inputName){
public String buildCompositePayload(List<StoredData> inputData, String payload, String inputName) {
for (StoredData input:inputData){
if (inputName.equals(input.description)){
payload = payload.replace(input.name,input.payload);
for (StoredData input : inputData) {
if (inputName.equals(input.description)) {
payload = payload.replace(input.name, input.payload);
}
}
return payload;
}
public void writeProvenance(ComputationData computation, List<StoredData> inputData, List<StoredData> outputData) throws Exception {
public void writeProvenance(ComputationData computation, List<StoredData> inputData, List<StoredData> outputData)
throws Exception {
LOGGER.debug("Dataspace->connecting to Workspace");
LOGGER.debug("Dataspace->create folders network");
FolderContainer dataminerFolder = createFoldersNetwork();
@ -412,19 +430,18 @@ public class DataspaceManager implements Runnable {
try {
deleteRunningComputationData();
} catch (Exception e) {
LOGGER.debug("Dataspace->impossible to delete running computation : {} ",e.getMessage());
LOGGER.debug("Dataspace->impossible to delete running computation : {} ", e.getMessage());
}
StorageHubClient shc = new StorageHubClient();
// LOGGER.debug("Dataspace->create folders network");
FolderContainer folderContainer = createFoldersNetwork();
FolderContainer computationsContainer = (FolderContainer) folderContainer.findByName(computationsFolder).getContainers().get(0);
// LOGGER.debug("Dataspace->Creating computation item " + computation.id+" with status"+computation.status);
FolderContainer computationsContainer = (FolderContainer) folderContainer.findByName(computationsFolder)
.getContainers().get(0);
// LOGGER.debug("Dataspace->Creating computation item " +
// computation.id+" with status"+computation.status);
String itemType = "COMPUTATION";
// write a computation item for the computation
Map<String, Object> properties = new LinkedHashMap<String, Object>();
properties.put(computation_id, computation.id);
@ -452,23 +469,22 @@ public class DataspaceManager implements Runnable {
computationsContainer.newGcubeItem(gcubeItem);
LOGGER.debug("Dataspace->finished uploading computation data");
}
public String getStatus(String status){
public String getStatus(String status) {
double statusD = 0;
try{
try {
statusD = Double.parseDouble(status);
}catch(Exception e){
} catch (Exception e) {
return status;
}
if (statusD==100)
if (statusD == 100)
return "completed";
else if (statusD==-2)
else if (statusD == -2)
return "error";
else if (statusD==-1)
else if (statusD == -1)
return "cancelled";
else
return status;
@ -479,65 +495,69 @@ public class DataspaceManager implements Runnable {
LOGGER.debug("Dataspace->deleting computation item");
LOGGER.debug("Dataspace->connecting to Workspace");
StorageHubClient shc = new StorageHubClient();
FolderContainer dataminerContainer = (FolderContainer) shc.getWSRoot().findByName(dataminerFolder).getContainers().get(0);
FolderContainer computationContainer = (FolderContainer) dataminerContainer.findByName(computationsFolder).getContainers().get(0);
FolderContainer dataminerContainer = (FolderContainer) shc.getWSRoot().findByName(dataminerFolder)
.getContainers().get(0);
FolderContainer computationContainer = (FolderContainer) dataminerContainer.findByName(computationsFolder)
.getContainers().get(0);
LOGGER.debug("Dataspace->removing computation data");
List<ItemContainer<? extends Item>> wi = computationContainer.findByName(computation.id).getContainers();
if (wi.isEmpty()){
for (ItemContainer<? extends Item> container : wi)
if (wi.isEmpty()) {
for (ItemContainer<? extends Item> container : wi)
container.delete();
}
else
LOGGER.debug("Dataspace->Warning Could not find {} under {}",computation.id, computationContainer.get().getName());
} else
LOGGER.debug("Dataspace->Warning Could not find {} under {}", computation.id,
computationContainer.get().getName());
List<ItemContainer<? extends Item>> fileComputations = computationContainer.findByName(computation.id)
.getContainers();
List<ItemContainer<? extends Item>> fileComputations = computationContainer.findByName(computation.id).getContainers();
if (fileComputations.size()>0)
if (fileComputations.size() > 0)
fileComputations.get(0).delete();
/*TODO: ASK GIANPAOLO
int maxtries = 3;
int i =1;
while (ws.exists(computation.id,computationsFolderWs.getId()) && i<maxtries){
LOGGER.debug("Dataspace->computation data still exist... retrying "+i);
Thread.sleep(1000);
computationsFolderWs.find(computation.id).remove();
i++;
}*/
/*
* TODO: ASK GIANPAOLO int maxtries = 3; int i =1; while
* (ws.exists(computation.id,computationsFolderWs.getId()) &&
* i<maxtries){
* LOGGER.debug("Dataspace->computation data still exist... retrying "+i
* ); Thread.sleep(1000);
* computationsFolderWs.find(computation.id).remove(); i++; }
*/
LOGGER.debug("Dataspace->finished removing computation data ");
}
public static String getExtension(String payload, String type){
String extension = "";
if (type.toLowerCase().equals("text/plain")){}
else if (payload.toLowerCase().startsWith("http")){
// TODO
public static String getExtension(String payload) {
LOGGER.debug("DataSpace->Get Extension from: " + payload);
String extension="";
if (payload.toLowerCase().startsWith("http")) {
try {
URL obj= new URL(payload);
URL obj = new URL(payload);
URLConnection conn = obj.openConnection();
// get all headers
Map<String, List<String>> map = conn.getHeaderFields();
for (Map.Entry<String, List<String>> entry : map.entrySet()) {
String value = entry.getValue().toString();
if (value.toLowerCase().contains("filename=")){
LOGGER.debug("DataSpace->Searching in http header: found "+value);
extension = value.substring(value.lastIndexOf("."),value.lastIndexOf("\""));
LOGGER.debug("Header value: " + value);
if (value.toLowerCase().contains("filename")) {
LOGGER.debug("DataSpace->Searching in http header: found " + value);
extension = value.substring(value.lastIndexOf("."), value.lastIndexOf("\""));
break;
}
}
conn.getInputStream().close();
} catch (Exception e) {
LOGGER.warn("DataSpace->Error in the payload http link ",e);
LOGGER.warn("DataSpace->Error in the payload http link ", e);
}
}
else {
} else {
File paylFile = new File(payload);
if (paylFile.exists()){
if (paylFile.exists()) {
String paylname = paylFile.getName();
extension = paylname.substring(paylname.lastIndexOf("."));
}
}
LOGGER.debug("DataSpace->Extension retrieved: " + extension);
return extension;
}
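A hedged usage note for the reworked getExtension(String payload); the sketch below is illustrative, assumes DataspaceManager is available on the classpath, and the sample paths are made up:
public class GetExtensionExample {
	public static void main(String[] args) {
		// A local payload keeps the leading dot when the file exists on disk,
		// e.g. an existing "/tmp/table_(abc).csv" -> ".csv"; a missing file yields "".
		System.out.println(DataspaceManager.getExtension("/tmp/table_(abc).csv"));
		// A payload that is neither an existing file nor an http(s) link resolves to "".
		System.out.println(DataspaceManager.getExtension("plain text payload"));
		// An http(s) payload is resolved from the response headers (filename entry), when present.
	}
}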

View File

@ -2,7 +2,6 @@ package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.datasp
import java.io.StringReader;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;