git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineSmartExecutor@149027 82a268e6-3cf1-43bd-a215-b396298e98cf
parent 8a05e571d4
commit 6d617d97d6
.classpath
@@ -17,12 +17,12 @@
 			<attribute name="maven.pomderived" value="true"/>
 		</attributes>
 	</classpathentry>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7">
+	<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
 		<attributes>
 			<attribute name="maven.pomderived" value="true"/>
 		</attributes>
 	</classpathentry>
-	<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7">
 		<attributes>
 			<attribute name="maven.pomderived" value="true"/>
 		</attributes>
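(This .classpath hunk only swaps the order of the JRE and Maven classpath container entries; the set of entries, and hence the Eclipse build path, is unchanged.)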
pom.xml
@@ -54,7 +54,7 @@
 		<dependency>
 			<groupId>org.gcube.dataanalysis</groupId>
 			<artifactId>ecological-engine</artifactId>
-			<version>[1.8.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
+			<version>[1.11.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
 		</dependency>
 		<dependency>
 			<groupId>org.gcube.contentmanagement</groupId>
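The floor of the ecological-engine range moves from 1.8.0 to 1.11.0. In Maven version-range notation `[` is an inclusive and `)` an exclusive bound, so `[1.11.0-SNAPSHOT,2.0.0-SNAPSHOT)` matches any version from 1.11.0-SNAPSHOT up to, but excluding, 2.0.0-SNAPSHOT. A minimal sketch of how such a spec resolves, using Maven's own maven-artifact library (the standalone class and the dependency on maven-artifact are illustrative, not part of this project):

    import org.apache.maven.artifact.versioning.DefaultArtifactVersion;
    import org.apache.maven.artifact.versioning.VersionRange;

    public class VersionRangeCheck {
        public static void main(String[] args) throws Exception {
            // Same spec as in the pom: inclusive floor, exclusive ceiling.
            VersionRange range = VersionRange.createFromVersionSpec("[1.11.0-SNAPSHOT,2.0.0-SNAPSHOT)");
            System.out.println(range.containsVersion(new DefaultArtifactVersion("1.11.0-SNAPSHOT"))); // true
            System.out.println(range.containsVersion(new DefaultArtifactVersion("1.8.0-SNAPSHOT")));  // false - below the new floor
            System.out.println(range.containsVersion(new DefaultArtifactVersion("2.0.0-SNAPSHOT")));  // false - ceiling excluded
        }
    }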
@@ -75,19 +75,15 @@
 			<groupId>org.gcube.core</groupId>
 			<artifactId>common-scope</artifactId>
 		</dependency>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.11</version>
+			<scope>test</scope>
+		</dependency>
 	</dependencies>
 	<build>
 		<plugins>
-
-
-			<plugin>
-				<artifactId>maven-compiler-plugin</artifactId>
-				<version>3.1</version>
-				<configuration>
-					<source>1.7</source>
-					<target>1.7</target>
-				</configuration>
-			</plugin>
 			<plugin>
 				<groupId>org.apache.maven.plugins</groupId>
 				<artifactId>maven-surefire-plugin</artifactId>
@@ -97,8 +93,6 @@
 				</configuration>
 			</plugin>
-
-
 
 			<plugin>
 				<groupId>org.apache.maven.plugins</groupId>
 				<artifactId>maven-assembly-plugin</artifactId>
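junit 4.11 enters with test scope, so it lands on the test classpath only and does not leak into the service's runtime dependencies, while the maven-compiler-plugin block drops out of this pom. A minimal sketch of the kind of test the new dependency enables; the class and the assertion are illustrative:

    import static org.junit.Assert.assertEquals;

    import org.junit.Test;

    public class PathHandlingTest {
        @Test
        public void subpathIsTruncatedAtLastSlash() {
            // Mirrors the upload-path logic in WebApplicationPublisher below.
            String subpath = "site/css/style.css";
            assertEquals("site/css", subpath.substring(0, subpath.lastIndexOf("/")));
        }
    }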
DistributedProcessingAgentWPS.java
@@ -11,7 +11,7 @@ import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
 import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
 import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
 import org.gcube.dataanalysis.ecoengine.utils.Operations;
-import org.slf4j.Logger;
+import org.apache.log4j.Logger;
 
 public class DistributedProcessingAgentWPS {
 
GenericWorker.java
@@ -17,7 +17,7 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.contentmanager.storageclient.model.protocol.smp.Handler;
 import org.gcube.dataanalysis.ecoengine.interfaces.ActorNode;
 import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalInfraAlgorithm;
-import org.slf4j.Logger;
+import org.apache.log4j.Logger;
 
 public class GenericWorker extends StandardLocalInfraAlgorithm{
 
WPSJobManager.java
@@ -171,7 +171,7 @@ public class WPSJobManager {
 
 				} catch (Exception e) {
 					e.printStackTrace();
-					AnalysisLogger.getLogger().error("WPSJobManager->Task Number {} - Process exception ", taskNumber,e);
+					AnalysisLogger.getLogger().error("WPSJobManager->Task Number "+taskNumber+" - Process exception ", e);
 					exitstatus = GenericWorker.TASK_FAILURE;
 
 				}finally{
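The parameterized call is rolled back here (and in GenericRScript below) because AnalysisLogger hands out a log4j 1.x Logger, as the import swaps above indicate, and log4j 1.x has no SLF4J-style `{}` substitution or `error(String, Object, Object)` overload. A minimal side-by-side sketch of the two styles (class name and values are illustrative):

    import org.slf4j.LoggerFactory;

    public class LoggingStyles {
        public static void main(String[] args) {
            int taskNumber = 7;
            Exception e = new RuntimeException("boom");

            // log4j 1.x: the message is built by hand; the throwable is the second argument.
            org.apache.log4j.Logger log4j = org.apache.log4j.Logger.getLogger(LoggingStyles.class);
            log4j.error("Task Number " + taskNumber + " - Process exception ", e);

            // SLF4J: {} is substituted lazily; a trailing Throwable still gets its stack trace printed.
            org.slf4j.Logger slf4j = LoggerFactory.getLogger(LoggingStyles.class);
            slf4j.error("Task Number {} - Process exception ", taskNumber, e);
        }
    }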
WebApplicationPublisher.java
@@ -8,7 +8,6 @@ import java.util.UUID;
 
 import org.apache.commons.io.FileUtils;
-import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
 import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
 import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
 import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
@@ -17,6 +16,8 @@ import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalInfraAlgorithm;
 import org.gcube.dataanalysis.ecoengine.utils.ZipTools;
 import org.gcube.dataanalysis.executor.util.DataTransferer;
 import org.gcube.dataanalysis.executor.util.InfraRetrieval;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 public class WebApplicationPublisher extends StandardLocalInfraAlgorithm{
@@ -25,6 +26,8 @@ public class WebApplicationPublisher extends StandardLocalInfraAlgorithm{
 	private String transferServiceAddress = "";
 	private int transferServicePort = 0;
 
+	private Logger LOGGER = LoggerFactory.getLogger(WebApplicationPublisher.class);
+
 	@Override
 	public String getDescription() {
 		return "This algorithm publishes a zip file containing a Web site, based on html and javascript in the e-Infrastructure. It generates a public URL to the application that can be shared.";
@@ -46,10 +49,10 @@ public class WebApplicationPublisher extends StandardLocalInfraAlgorithm{
 			String fileAbsolutePath = config.getParam(FileParam);
 			// String mainPage = config.getParam(MainPageParam);
 
-			AnalysisLogger.getLogger().debug("scope: "+scope);
-			AnalysisLogger.getLogger().debug("username: "+username);
-			AnalysisLogger.getLogger().debug("fileAbsolutePath: "+fileAbsolutePath);
-			// AnalysisLogger.getLogger().debug("layerTitle: "+mainPage);
+			LOGGER.debug("scope: {}",scope);
+			LOGGER.debug("username: {}",username);
+			LOGGER.debug("fileAbsolutePath: {}",fileAbsolutePath);
+			// LOGGER.debug("layerTitle: "+mainPage);
 
 			if (scope==null || username==null)
 				throw new Exception ("Service parameters are not set - please contact the Administrators");
@@ -62,14 +65,14 @@ public class WebApplicationPublisher extends StandardLocalInfraAlgorithm{
 			String fileName = f.getName();
 
 			//unzip the file
-			AnalysisLogger.getLogger().debug("Package is in file "+fileName);
+			LOGGER.debug("Package is in file "+fileName);
 
 			boolean mkdir = folder.mkdir();
-			AnalysisLogger.getLogger().debug("Sandbox " + folder.getAbsolutePath() + " generated: " + mkdir);
-			AnalysisLogger.getLogger().debug("Unzipping package into " + folder.getAbsolutePath());
+			LOGGER.debug("Sandbox {} generated: {} ",folder.getAbsolutePath(), mkdir);
+			LOGGER.debug("Unzipping package into {}",folder.getAbsolutePath());
 			ZipTools.unZip(f.getAbsolutePath(), folder.getAbsolutePath());
 			// f.delete();
-			AnalysisLogger.getLogger().debug("Package unzipped and original file deleted");
+			LOGGER.debug("Package unzipped and original file deleted");
 
 			File[] webappfiles = folder.listFiles();
 			//get all files for the upload
@@ -90,9 +93,9 @@ public class WebApplicationPublisher extends StandardLocalInfraAlgorithm{
 				String subpath = allfiles.get(key);
 				subpath = subpath.substring(0,subpath.lastIndexOf("/"));
 				String remotePath = remoteFolder+subpath+"/";
-				AnalysisLogger.getLogger().debug("Uploading "+key+" -> "+remotePath);
+				LOGGER.debug("Uploading {} -> {}",key,remotePath);
 
-				long transferout = DataTransferer.transferFileToService(scope, username, transferServiceAddress, transferServicePort, key, remotePath);
+				long transferout = DataTransferer.transferFileToService(scope, username, transferServiceAddress, transferServicePort, key, remotePath).getTransferedBytes();
 				if (transferout<=0){
 					throw new Exception("Error transferring files to the infrastructure ");
 				}
@@ -102,22 +105,22 @@ public class WebApplicationPublisher extends StandardLocalInfraAlgorithm{
 			if (webappfiles.length==1 && webappfiles[0].isDirectory()){
 				producedPage = producedPage + webappfiles[0].getName()+"/";
 			}
-			AnalysisLogger.getLogger().debug("Entry point of the page "+producedPage);
+			LOGGER.debug("Entry point of the page "+producedPage);
 
 			//get URL
 			addOutputString("Generated Website - Main URL", producedPage);
 		}catch(Exception e){
-			AnalysisLogger.getLogger().error("an error occurred!",e);
+			LOGGER.error("an error occurred!",e);
 			throw e;
 		}finally{
 			//clean everything
 			if (folder.exists()){
-				AnalysisLogger.getLogger().debug("Cleaning folder "+folder);
+				LOGGER.debug("Cleaning folder "+folder);
 				FileUtils.cleanDirectory(folder);
 				FileUtils.deleteDirectory(folder);
 			}
 		}
-		AnalysisLogger.getLogger().debug("Process finished");
+		LOGGER.debug("Process finished");
 		status = 100;
 	}
 
@@ -143,7 +146,7 @@ public class WebApplicationPublisher extends StandardLocalInfraAlgorithm{
 
 	@Override
 	public void shutdown() {
-		AnalysisLogger.getLogger().debug("WebApplicationPublisher - shutdown");
+		LOGGER.debug("WebApplicationPublisher - shutdown");
 	}
 
 
@@ -157,23 +160,24 @@ public class WebApplicationPublisher extends StandardLocalInfraAlgorithm{
 		String apacheServiceAddress = apacheAddress.get(0);
 		apacheServiceAddress = apacheServiceAddress.substring(apacheServiceAddress.indexOf("http://")+7);
 
-		AnalysisLogger.getLogger().debug("Found "+apacheAddress.size()+" services");
-		AnalysisLogger.getLogger().debug("Apache address: "+apacheServiceAddress);
+		LOGGER.debug("Found {} services",apacheAddress.size());
+		LOGGER.debug("Apache address: {}",apacheServiceAddress);
+
 		List<String> dataTransferAddress = InfraRetrieval.retrieveService("agent-service", scope);
 
 		if (dataTransferAddress.size()==0)
 			throw new Exception("Data Transfer services are not available in scope "+scope);
 
-		AnalysisLogger.getLogger().debug("Found "+dataTransferAddress.size()+" transfer services");
+		LOGGER.debug("Found {} transfer services",dataTransferAddress.size());
 
 		int apacheDTPort = 9090;
 		boolean found = false;
 		for (String datatransferservice:dataTransferAddress){
-			AnalysisLogger.getLogger().debug("Transfer service found");
+			LOGGER.debug("Transfer service found");
 			datatransferservice = datatransferservice.substring(datatransferservice.indexOf("http://")+7);
 			String servicehost = datatransferservice.substring(0,datatransferservice.indexOf(":"));
 			String serviceport = datatransferservice.substring(datatransferservice.indexOf(":")+1,datatransferservice.indexOf("/"));
-			AnalysisLogger.getLogger().debug("Transfer service: "+servicehost+":"+serviceport);
+			LOGGER.debug("Transfer service: {} , {}",servicehost,serviceport);
 			if (apacheServiceAddress.equals(servicehost)){
 				apacheDTPort = Integer.parseInt(serviceport);
 				found = true;
@@ -184,7 +188,7 @@ public class WebApplicationPublisher extends StandardLocalInfraAlgorithm{
 		if (!found)
 			throw new Exception("Apache data transfer has not been found in the same scope of the catalog: "+scope);
 		else
-			AnalysisLogger.getLogger().debug("Transfer service found at address "+apacheServiceAddress+":"+apacheDTPort);
+			LOGGER.debug("Transfer service found at address {} : {}",apacheServiceAddress,apacheDTPort);
 
 		transferServiceAddress = apacheServiceAddress;
 		transferServicePort = apacheDTPort;
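One observation on the hunks above: host and port are still carved out of the endpoint string by hand (`indexOf("http://")+7`, then `substring` up to `:` and `/`). The same extraction via java.net.URI also tolerates https and a missing port; a sketch of that alternative, not what this commit does, with a made-up endpoint value:

    import java.net.URI;

    public class EndpointParse {
        public static void main(String[] args) {
            // Hypothetical endpoint in the shape the registry returns.
            URI endpoint = URI.create("http://dt-host.example.org:9090/data-transfer-service");
            String servicehost = endpoint.getHost();
            // getPort() is -1 when absent; fall back to the default used in the code above.
            int serviceport = endpoint.getPort() >= 0 ? endpoint.getPort() : 9090;
            System.out.println(servicehost + ":" + serviceport);
        }
    }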
GenericRScript.java
@@ -153,7 +153,7 @@ public abstract class GenericRScript extends StandardLocalInfraAlgorithm {
 			}
 
 		} catch (Exception e) {
-			AnalysisLogger.getLogger().warn("Could not delete sandbox folder {} ",folder.getAbsolutePath(),e);
+			AnalysisLogger.getLogger().warn("Could not delete sandbox folder "+folder.getAbsolutePath(),e);
 		}
 
 		if (Rlog != null) {
@@ -235,7 +235,7 @@ public abstract class GenericRScript extends StandardLocalInfraAlgorithm {
 					org.gcube.dataanalysis.executor.rscripts.generic.FileUtils.moveFileToDirectory(rowFile, new File(config.getConfigPath()), false);
 					files.add(preparedFile);
 				}catch(Exception e){
-					AnalysisLogger.getLogger().error("error in moving file {} to {}",rowFile.getAbsolutePath(),preparedFile,e);
+					AnalysisLogger.getLogger().error("error in moving file "+rowFile.getAbsolutePath()+" to "+preparedFile,e);
 					throw e;
 				}
 				break;
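The two GenericRScript hunks are the same placeholder revert as in WPSJobManager above: AnalysisLogger's log4j backend cannot expand `{}`, so the messages go back to plain concatenation (see the logging sketch after the WPSJobManager diff).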
DataTransferer.java
@@ -1,14 +1,10 @@
 package org.gcube.dataanalysis.executor.util;
 
 import java.io.File;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
 
 import org.gcube.data.transfer.library.DataTransferClient;
 import org.gcube.data.transfer.library.TransferResult;
 import org.gcube.data.transfer.model.Destination;
-import org.gcube.data.transfer.model.PluginInvocation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -17,8 +13,12 @@ public class DataTransferer {
 
 	private static final Logger logger = LoggerFactory.getLogger(DataTransferer.class);
 
+	public static TransferResult transferFileToService(String scope, String username, String host, int port, String fileAbsolutePath, String remoteFolder) throws Exception {
+		return transferFileToService(scope, username, host, port, fileAbsolutePath, remoteFolder, null);
+	}
+
-	// returns the number of transferred bytes
-	public static long transferFileToService(String scope, String username, String host, int port, String fileAbsolutePath, String remoteFolder) throws Exception {
+	public static TransferResult transferFileToService(String scope, String username, String host, int port, String fileAbsolutePath, String remoteFolder, String destinationId) throws Exception {
 		logger.debug("Transferring file {} to {}:{} " , fileAbsolutePath, host, port );
 
 		DataTransferClient client=DataTransferClient.getInstanceByEndpoint("http://"+host+":"+port);
@@ -29,21 +29,18 @@ public class DataTransferer {
 
 		//String localfolder = localFile.getParent();
 		String file = localFile.getName();
 
-		Map<String,String> params=new HashMap<>();
-		params.put("DESTINATION", remoteFolder);
-		params.put("SOURCE_PARAMETER", PluginInvocation.DESTINATION_FILE_PATH);
-
 		Destination dest=new Destination(file);
 		dest.setSubFolder(remoteFolder);
 
-		TransferResult tranferResult = client.localFile(localFile,dest,Collections.singleton(new PluginInvocation("DECOMPRESS",params)));
+		if(destinationId!=null)
+			dest.setPersistenceId(destinationId);
+
+		TransferResult transferResult = client.localFile(localFile,dest);
 
 		logger.debug("Transferring...");
 
-		return tranferResult.getTransferedBytes();
+		return transferResult;
 	}
 
 }
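transferFileToService now returns the whole TransferResult instead of a bare byte count, and the new overload threads an optional destinationId through to Destination.setPersistenceId, with null preserving the old behaviour; callers that only want the count chain .getTransferedBytes(), as WebApplicationPublisher does above. A minimal sketch of a caller against the new signature (scope, user, host, port and paths are placeholders):

    import org.gcube.data.transfer.library.TransferResult;
    import org.gcube.dataanalysis.executor.util.DataTransferer;

    public class TransferExample {
        public static void main(String[] args) throws Exception {
            // Placeholders: use a real scope, user and data-transfer endpoint.
            TransferResult result = DataTransferer.transferFileToService(
                    "/gcube/devsec", "some.user", "dt-host.example.org", 9090,
                    "/tmp/site.zip", "webapps/site/", null); // null destinationId keeps the old behaviour

            if (result.getTransferedBytes() <= 0)
                throw new Exception("Error transferring files to the infrastructure");
        }
    }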