Ciro Formisano 2017-11-29 15:29:01 +00:00
parent 23a9d3ab45
commit 0a6a83698f
25 changed files with 1087 additions and 1002 deletions

View File

@@ -12,7 +12,7 @@
<groupId>org.gcube.dataAnalysis</groupId>
<artifactId>dataminer-pool-manager</artifactId>
<packaging>war</packaging>
<version>2.2.0-SNAPSHOT</version>
<version>2.3.0-SNAPSHOT</version>
<name>dataminer-pool-manager</name>
<description>

View File

@@ -1,41 +1,16 @@
package org.gcube.dataanalysis.dataminer.poolmanager.ansible;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Scanner;
import java.util.Set;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Inventory;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Playbook;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleSerializeHelper;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.AlgorithmSet;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
import org.gcube.dataanalysis.dataminer.poolmanager.service.DataminerPoolManager;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.auth.ISVNAuthenticationManager;
import org.tmatesoft.svn.core.internal.wc.SVNFileUtil;
import org.tmatesoft.svn.core.internal.wc.admin.SVNChecksumInputStream;
import org.tmatesoft.svn.core.io.ISVNEditor;
import org.tmatesoft.svn.core.io.SVNRepository;
import org.tmatesoft.svn.core.io.SVNRepositoryFactory;
import org.tmatesoft.svn.core.io.diff.SVNDeltaGenerator;
import org.tmatesoft.svn.core.wc.SVNWCUtil;
/**
* This class is responsible for the interface with ansible, retrieving log,
@@ -72,12 +47,12 @@ public class AnsibleWorker {
this.ensureWorkStructure();
}
public File getWorkdir() {
return this.workerRoot;
}
// public File getWorkdir() {
// return this.workerRoot;
// }
public File getRolesDir() {
return new File(this.getWorkdir(), ROLES_DIR);
return new File(this.workerRoot, ROLES_DIR);
}
public String getWorkerId() {
@@ -86,20 +61,20 @@
public void ensureWorkStructure() {
// generate root
this.getWorkdir().mkdirs();
this.workerRoot.mkdirs();
}
public void removeWorkStructure() {
// remove the working dir
this.getWorkdir().delete();
this.workerRoot.delete();
}
public File getPlaybookFile() {
return new File(this.getWorkdir(), PLAYBOOK_NAME);
return new File(this.workerRoot, PLAYBOOK_NAME);
}
public File getInventoryFile() {
return new File(this.getWorkdir(), INVENTORY_NAME);
return new File(this.workerRoot, INVENTORY_NAME);
}
@@ -122,7 +97,7 @@
public int execute(PrintStream ps)
throws IOException, InterruptedException, SVNException {
System.out.println(this.getWorkdir());
System.out.println(this.workerRoot);
try {
Process p = Runtime.getRuntime().exec("ansible-playbook -v -i " + this.getInventoryFile().getAbsolutePath()
+ " " + this.getPlaybookFile().getAbsolutePath());
@@ -149,6 +124,7 @@
while (sc.hasNextLine()) {
dest.println(sc.nextLine());
}
sc.close();
}
}).start();
}
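For reference, the output-pumping idiom this hunk fixes, as a standalone hedged sketch (class and method names are illustrative, not part of the commit): a background thread copies the ansible-playbook process output into the job log, and the newly added sc.close() releases the underlying stream once the output is exhausted.

import java.io.InputStream;
import java.io.PrintStream;
import java.util.Scanner;

// Illustrative sketch of the pattern used by AnsibleWorker.execute():
// copy a process stream to the job log on a background thread, closing
// the Scanner (and with it the stream) when the output ends.
public class StreamPump {
    public static void pump(final InputStream src, final PrintStream dest) {
        new Thread(new Runnable() {
            @Override
            public void run() {
                Scanner sc = new Scanner(src);
                while (sc.hasNextLine()) {
                    dest.println(sc.nextLine());
                }
                sc.close(); // the fix this hunk introduces
            }
        }).start();
    }
}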

View File

@@ -1,10 +1,7 @@
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.util.Collection;
import java.util.List;
import java.util.Map;
@@ -32,21 +29,19 @@ import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.comparator.HostComparator;
import org.slf4j.LoggerFactory;
import org.tmatesoft.svn.core.SVNException;
public class AnsibleBridge {
private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(AnsibleBridge.class);
//private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(AnsibleBridge.class);
private String dpmRoot;
public AnsibleBridge() {
this(System.getProperty("user.home")+File.separator+"dataminer-pool-manager");
//this(System.getProperty("/home/gcube/dataminer-pool-manager"));
}
// public AnsibleBridge() {
// this(System.getProperty("user.home")+File.separator+"dataminer-pool-manager");
// //this(System.getProperty("/home/gcube/dataminer-pool-manager"));
//
// }
public AnsibleBridge(String root) {
this.dpmRoot = root;
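With the no-argument constructor commented out, callers now inject the work directory explicitly, as DMPMJob does with its dmpmHomeDirectory field. A two-line usage sketch (dpmRoot is an assumed local variable, not part of the commit):

// Hypothetical caller: the work directory is injected rather than
// derived inside AnsibleBridge itself.
String dpmRoot = System.getProperty("user.home")
        + java.io.File.separator + "dataminer-pool-manager";
AnsibleBridge bridge = new AnsibleBridge(dpmRoot);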

View File

@@ -5,34 +5,71 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.util.LinkedList;
import java.util.List;
import java.util.Collection;
import java.util.UUID;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.AnsibleWorker;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleBridge;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.Configuration;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
import org.gcube.dataanalysis.dataminer.poolmanager.util.CheckMethod;
import org.gcube.dataanalysis.dataminer.poolmanager.util.NotificationHelper;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SendMail;
import org.gcube.dataanalysis.dataminer.poolmanager.util.exception.EMailException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tmatesoft.svn.core.SVNException;
public abstract class DMPMJob {
private Configuration configuration;
private String dmpmHomeDirectory;
private SVNUpdater svnUpdater;
private File jobLogs;
private String id;
private Algorithm algorithm;
private Cluster cluster;
private String vREName;
private String category;
private String algorithm_type;
private Logger logger;
protected SVNUpdater svnUpdater;
protected File jobLogs;
private enum STATUS
{
PROGRESS ("IN PROGRESS"),
COMPLETED ("COMPLETED"),
FAILED ("FAILED");
private String status;
STATUS (String status)
{
this.status = status;
}
}
protected String id;
public DMPMJob(SVNUpdater svnUpdater,Configuration configuration,Algorithm algorithm, Cluster cluster,String vREName,
String category, String algorithm_type){
this.logger = LoggerFactory.getLogger(DMPMJob.class);
public DMPMJob(SVNUpdater svnUpdater){
this.configuration = configuration;
this.algorithm = algorithm;
this.cluster = cluster;
this.vREName = vREName;
this.category = category;
this.algorithm_type = algorithm_type;
this.svnUpdater = svnUpdater;
this.dmpmHomeDirectory = System.getProperty("user.home")+File.separator+"dataminer-pool-manager";
this.id = UUID.randomUUID().toString();
//TODO: dmpm work directory should be loaded from configuration file
this.jobLogs = new File(System.getProperty("user.home")+File.separator+"dataminer-pool-manager"+File.separator+"jobs");
this.jobLogs = new File(this.dmpmHomeDirectory+File.separator+"jobs");
this.jobLogs.mkdirs();
}
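The STATUS enum above replaces the numeric exit codes used by the getStatus(int) methods removed from StagingJob and ProductionPublishingJob (0 = IN PROGRESS, 9 = COMPLETED, 2 = FAILED). A self-contained sketch of the same enum-with-label idiom (the label() accessor is an illustrative addition; the commit reads the field directly, which is legal inside the enclosing class):

// Sketch of the enum-with-label idiom replacing magic int codes.
enum Status {
    PROGRESS("IN PROGRESS"),
    COMPLETED("COMPLETED"),
    FAILED("FAILED");

    private final String label;

    Status(String label) {
        this.label = label;
    }

    String label() {
        return label;
    }
}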
@@ -56,7 +93,7 @@ public abstract class DMPMJob {
Cluster dataminerCluster,
boolean includeAlgorithmDependencies,
String user){
AnsibleBridge ansibleBridge = new AnsibleBridge();
AnsibleBridge ansibleBridge = new AnsibleBridge(this.dmpmHomeDirectory);
try {
return ansibleBridge.createWorker(algo, dataminerCluster, includeAlgorithmDependencies, user);
} catch (IOException e) {
@@ -66,14 +103,148 @@
}
protected abstract void execute();
protected int executeAnsibleWorker(AnsibleWorker worker) throws IOException, InterruptedException, SVNException{
File path = new File(worker.getWorkdir() + File.separator + "jobs");
path.mkdirs();
public void setStatusInformation(STATUS exitStatus) {
File n = new File(this.jobLogs + File.separator + this.id);
FileOutputStream fos = new FileOutputStream(n, true);
try
{
File statusFile = new File (this.jobLogs,this.id + "_exitStatus");
//File m = new File ( this.jobLogs + File.separator + this.id + "_exitStatus");
PrintWriter writer = new PrintWriter(statusFile, "UTF-8");
writer.println(exitStatus.status);
writer.close();
} catch (Exception e)
{
this.logger.error ("Unable to update exit status file with status "+exitStatus.status,e);
}
}
private void updateLogFile (File logFile, String message)
{
try
{
PrintWriter writer = new PrintWriter(logFile,"UTF-8");
writer.print(message);
writer.close();
} catch (Exception e)
{
this.logger.error("Unable to log the error message: "+message,e);
}
}
protected abstract void execute ();
private boolean preInstallation (SendMail sm,NotificationHelper nh, File logFile ) throws SVNException, EMailException
{
this.logger.debug("Checking dependencies...");
Collection<String> undefinedDependencies = this.svnUpdater.getUndefinedDependencies(
this.svnUpdater.getDependencyFile(this.algorithm.getLanguage()),
this.algorithm.getDependencies());
if (!undefinedDependencies.isEmpty())
{
this.logger.debug("Some dependencies are not defined");
String message = "Following dependencies are not defined:\n";
for (String n : undefinedDependencies) {
message += "\n" + n +"\n";
}
this.setStatusInformation(STATUS.FAILED);
String errorMessage = nh.getFailedBody(message+"\n\n"+this.buildInfo());
this.updateLogFile(logFile, errorMessage);
sm.sendNotification(nh.getFailedSubject() +" for "+this.algorithm.getName()+ " algorithm", errorMessage);
return false;
}
else return true;
}
private void installation (SendMail sm,NotificationHelper nh,CheckMethod methodChecker,File logFile ) throws Exception
{
methodChecker.deleteFiles(this.algorithm/*, env*/);
int ret = this.executeAnsibleWorker(createWorker(this.algorithm, this.cluster, false, "root"),logFile);
System.out.println("Return code= "+ret);
if (ret != 0)
{
this.logger.debug("Ansible work failed, return code "+ret);
this.setStatusInformation(STATUS.FAILED);
String errorMessage = nh.getFailedBody("Installation failed. Return code=" + ret)+"\n\n"+this.buildInfo();
sm.sendNotification(nh.getFailedSubject() + " for "+this.algorithm.getName()+ " algorithm",errorMessage);
}
else if (ret == 0)
{
this.logger.debug("Operation completed");
this.setStatusInformation(STATUS.PROGRESS);
this.logger.debug("Checking the method...");
if (methodChecker.checkMethod(this.configuration.getHost(), SecurityTokenProvider.instance.get())&&(methodChecker.algoExists(this.algorithm)))
{
this.logger.debug("Method OK and algo exists");
System.out.println("Interface check ok!");
System.out.println("Both the files exist at the correct path!");
this.svnUpdater.updateSVNAlgorithmList(this.algorithm, this.vREName,this.category, this.algorithm_type,
this.algorithm.getFullname());
this.setStatusInformation(STATUS.COMPLETED);
sm.sendNotification(nh.getSuccessSubject() + " for "+this.algorithm.getName()+ " algorithm", nh.getSuccessBody("\n\n"+this.buildInfo()));
return;
} else
{
this.logger.debug("Operation failed");
this.setStatusInformation(STATUS.FAILED);
sm.sendNotification(nh.getFailedSubject() + " for "+this.algorithm.getName()+ " algorithm",
nh.getFailedBody(
"\n"+
"Installation completed but DataMiner Interface not working correctly or files "
+ this.algorithm.getName() + ".jar and " + this.algorithm.getName()
+ "_interface.jar not availables at the expected path")+"\n\n"+this.buildInfo());
}
}
}
protected void execute(NotificationHelper nh, CheckMethod methodChecker)
{
SendMail sm = new SendMail();
try {
this.logger.debug("Pre installation operations");
File logFile = new File(this.jobLogs,this.id);
//File logFile = new File(this.jobLogs + File.separator + this.id);
boolean preInstallationResponse = preInstallation(sm, nh, logFile);
this.logger.debug("Pre installation operation completed with result "+preInstallationResponse);
if (preInstallationResponse)
{
this.logger.debug("Installation...");
installation(sm, nh, methodChecker, logFile);
this.logger.debug("Installation completed");
}
} catch (EMailException eme)
{
this.logger.error("Operation failed and unable to send notification email",eme);
}
catch (Exception e) {
e.printStackTrace();
}
}
protected int executeAnsibleWorker(AnsibleWorker worker, File logFile) throws IOException, InterruptedException, SVNException{
FileOutputStream fos = new FileOutputStream(logFile, true);
PrintStream ps = new PrintStream(fos);
// File m = new File(this.jobLogs + File.separator + this.id + "_exitStatus");
@@ -81,5 +252,19 @@ public abstract class DMPMJob {
return worker.execute(ps);
}
public String buildInfo() {
return
"\n"+
"Algorithm details:\n"+"\n"+
"User: "+this.algorithm.getFullname()+"\n"+
"Algorithm name: "+this.algorithm.getName()+"\n"+
"Staging DataMiner Host: "+ this.configuration.getHost()+"\n"+
"Caller VRE: "+ScopeProvider.instance.get()+"\n"+
"Target VRE: "+this.vREName+"\n";
}
}
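Taken together, the refactoring turns DMPMJob.execute(NotificationHelper, CheckMethod) into a template method. A hedged sketch of what a concrete job reduces to under this design (ExampleJob is illustrative; the real subclasses are StagingJob and ProductionPublishingJob, shown later in this diff):

package org.gcube.dataanalysis.dataminer.poolmanager.service;

import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.Configuration;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.CheckMethodStaging;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.NotificationHelperStaging;

// Illustrative subclass, not part of the commit: all shared logic lives
// in DMPMJob; a concrete job only names its environment-specific helpers.
public class ExampleJob extends DMPMJob {

    public ExampleJob(SVNUpdater svnUpdater, Configuration configuration,
            Algorithm algorithm, Cluster cluster, String vreName,
            String category, String algorithmType) {
        super(svnUpdater, configuration, algorithm, cluster, vreName,
                category, algorithmType);
    }

    @Override
    protected void execute() {
        // delegate to the shared pre-installation/installation flow
        super.execute(new NotificationHelperStaging(), new CheckMethodStaging());
    }
}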

View File

@@ -8,7 +8,10 @@ import java.util.Scanner;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.util.ClusterBuilder;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.ClusterBuilderProduction;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.ClusterBuilderStaging;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.SVNUpdaterProduction;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.SVNUpdaterStaging;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tmatesoft.svn.core.SVNException;
@@ -17,14 +20,15 @@ import org.tmatesoft.svn.core.SVNException;
public class DataminerPoolManager {
private Logger logger;
private SVNUpdater svnUpdater;
private SVNUpdaterStaging svnUpdaterStaging;
private SVNUpdaterProduction svnUpdaterProduction;
public DataminerPoolManager() {
this.logger = LoggerFactory.getLogger(this.getClass());
try {
//TODO: read this from configuration
this.svnUpdater = new SVNUpdater();
this.svnUpdaterStaging = new SVNUpdaterStaging();
this.svnUpdaterProduction = new SVNUpdaterProduction();
} catch (SVNException e) {
e.printStackTrace();
}
@@ -37,9 +41,10 @@
this.logger.debug("Algo "+algo);
this.logger.debug("Category "+category);
this.logger.debug("Algo type "+algorithm_type);
Cluster stagingCluster = ClusterBuilder.getStagingDataminerCluster();
ClusterBuilder stagingClusterBuilder = new ClusterBuilderStaging();
Cluster stagingCluster = stagingClusterBuilder.getDataminerCluster();
//Cluster rProtoCluster = ClusterBuilder.getRProtoCluster();
DMPMJob job = new StagingJob(this.svnUpdater, algo, stagingCluster, /*rProtoCluster,*/ targetVRE, category, algorithm_type/*,env*/);
DMPMJob job = new StagingJob(this.svnUpdaterStaging, algo, stagingCluster, /*rProtoCluster,*/ targetVRE, category, algorithm_type/*,env*/);
String id = job.start();
return id;
}
@@ -50,8 +55,9 @@
this.logger.debug("Algo "+algo);
this.logger.debug("Category "+category);
this.logger.debug("Algo type "+algorithm_type);
Cluster prodCluster = ClusterBuilder.getProdDataminerCluster();
DMPMJob job = new ProductionPublishingJob(this.svnUpdater, algo, prodCluster, targetVRE, category, algorithm_type/*,env*/);
ClusterBuilder productionClusterBuilder = new ClusterBuilderProduction();
Cluster prodCluster = productionClusterBuilder.getDataminerCluster();
DMPMJob job = new ProductionPublishingJob(this.svnUpdaterProduction, algo, prodCluster, targetVRE, category, algorithm_type/*,env*/);
String id = job.start();
return id;
}
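After this change the two entry points differ only in which implementations they instantiate. A condensed paraphrase of the two methods above (the boolean flag and method name are illustrative only, not part of the commit):

// Hedged paraphrase of stageAlgorithm/publishAlgorithm.
private DMPMJob createJob(boolean staging, Algorithm algo, String targetVRE,
        String category, String algorithmType) throws Exception {
    ClusterBuilder builder = staging
            ? new ClusterBuilderStaging()
            : new ClusterBuilderProduction();
    Cluster cluster = builder.getDataminerCluster();
    return staging
            ? new StagingJob(this.svnUpdaterStaging, algo, cluster,
                    targetVRE, category, algorithmType)
            : new ProductionPublishingJob(this.svnUpdaterProduction, algo,
                    cluster, targetVRE, category, algorithmType);
}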

View File

@@ -1,164 +1,44 @@
package org.gcube.dataanalysis.dataminer.poolmanager.service;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.util.Collection;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.util.CheckMethod;
import org.gcube.dataanalysis.dataminer.poolmanager.util.NotificationHelper;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SendMail;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.CheckMethodProduction;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.NotificationHelperProduction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ProductionPublishingJob extends DMPMJob {
private Algorithm algorithm;
private Cluster prodCluster;
private String targetVREName;
private String category;
private String algorithm_type;
//private String targetVREToken;
//private String env;
private Logger logger;
public ProductionPublishingJob(SVNUpdater svnUpdater, Algorithm algorithm,
Cluster prodCluster, String targetVREName, String category,String algorithm_type/*, String env*/) throws FileNotFoundException, UnsupportedEncodingException {
super(svnUpdater);
this.jobLogs = new File(
System.getProperty("user.home") + File.separator + "dataminer-pool-manager" + File.separator + "jobs");
this.jobLogs.mkdirs();
this.algorithm = algorithm;
this.prodCluster = prodCluster;
this.targetVREName = targetVREName;
this.category = category;
this.algorithm_type = algorithm_type;
//this.targetVREToken = targetVREToken;
//this.env= env;
this.getStatus(0);
super(svnUpdater,DMPMClientConfiguratorManager.getInstance().getProductionConfiguration(),algorithm,prodCluster,targetVREName,category,algorithm_type);
this.logger = LoggerFactory.getLogger(ProductionPublishingJob.class);
}
@Override
protected void execute() {
SendMail sm = new SendMail();
NotificationHelper nh = new NotificationHelper();
CheckMethod b = new CheckMethod();
try {
Collection<String> undefinedDependencies = this.svnUpdater.getUndefinedDependencies(
this.svnUpdater.getDependencyFileProd(this.algorithm.getLanguage()/*,env*/),
this.algorithm.getDependencies());
if (!undefinedDependencies.isEmpty()) {
String message = "Following dependencies are not defined:\n";
for (String n : undefinedDependencies) {
message += "\n" + n +"\n";
}
this.getStatus(2);
sm.sendNotification(nh.getFailedSubjectRelease() +" for "+this.algorithm.getName()+ " algorithm", nh.getFailedBody(message+"\n\n"+this.buildInfo()));
return;
}
b.deleteFilesProd(this.algorithm);
int ret = this.executeAnsibleWorker(createWorker(this.algorithm, this.prodCluster, false, "root"));
if (ret != 0) {
this.getStatus(2);
sm.sendNotification(nh.getFailedSubjectRelease() + " for "+this.algorithm.getName()+ " algorithm", nh.getFailedBody("Installation failed. Return code=" + ret)+"\n\n"+this.buildInfo());
return;
}
if (ret == 0) {
this.getStatus(0);
if (b.checkMethod(DMPMClientConfiguratorManager.getInstance().getProductionConfiguration().getHost(), SecurityTokenProvider.instance.get())&&(b.algoExistsProd(this.algorithm/*, env*/))) {
System.out.println("Interface check ok!");
System.out.println("Both the files exist at the correct path!");
this.svnUpdater.updateSVNProdAlgorithmList(this.algorithm, this.targetVREName,this.category, this.algorithm_type,
this.algorithm.getFullname()/*, env*/);
this.getStatus(9);
sm.sendNotification(nh.getSuccessSubjectRelease() + " for "+this.algorithm.getName()+ " algorithm", nh.getSuccessBody("\n\n"+this.buildInfo()));
return;
} else
this.getStatus(2);
sm.sendNotification(nh.getFailedSubjectRelease() + " for "+this.algorithm.getName()+ " algorithm",
nh.getFailedBody(
"\n"+
"Installation completed but DataMiner Interface not working correctly or files "
+ this.algorithm.getName() + ".jar and " + this.algorithm.getName()
+ "_interface.jar not availables at the expected path")+"\n\n"+this.buildInfo());
return;
}
} catch (Exception e) {
try {
this.getStatus(0);
} catch (FileNotFoundException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
} catch (UnsupportedEncodingException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
e.printStackTrace();
}
this.logger.debug("Executing staging job...");
super.execute(new NotificationHelperProduction(), new CheckMethodProduction());
}
public String getStatus(int exitstatus) throws FileNotFoundException, UnsupportedEncodingException {
File m = new File(this.jobLogs + File.separator + this.id + "_exitStatus");
PrintWriter writer = new PrintWriter(m, "UTF-8");
String response = "";
if (exitstatus == 0) {
response = "IN PROGRESS";
writer.println(response);
//writer.close();
}
if (exitstatus == 9) {
response = "COMPLETED";
writer.println(response);
//writer.close();
}
if (exitstatus == 2) {
response = "FAILED";
writer.println(response);
//writer.close();
}
writer.close();
return response;
}
public String buildInfo() throws UnsupportedEncodingException{
return
"\n"+
"Algorithm details:\n"+"\n"+
"User: "+this.algorithm.getFullname()+"\n"+
"Algorithm name: "+this.algorithm.getName()+"\n"+
"Staging DataMiner Host: "+ DMPMClientConfiguratorManager.getInstance().getProductionConfiguration().getHost()+"\n"+
"Caller VRE: "+ScopeProvider.instance.get()+"\n"+
"Target VRE: "+this.targetVREName+"\n";
}

View File

@@ -1,184 +1,42 @@
package org.gcube.dataanalysis.dataminer.poolmanager.service;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.util.Collection;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.util.CheckMethod;
import org.gcube.dataanalysis.dataminer.poolmanager.util.NotificationHelper;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SendMail;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.CheckMethodStaging;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.NotificationHelperStaging;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class StagingJob extends DMPMJob {
private Algorithm algorithm;
private Cluster stagingCluster;
// private Cluster rProtoCluster;
private String rProtoVREName;
//private String env;
private String category;
private String algorithm_type;
private Logger logger;
public StagingJob(SVNUpdater svnUpdater, Algorithm algorithm,
Cluster stagingCluster, /* Cluster rProtoCluster, */
String rProtoVREName, String category, String algorithm_type/*, String env*/) throws FileNotFoundException, UnsupportedEncodingException {
super(svnUpdater);
super(svnUpdater,DMPMClientConfiguratorManager.getInstance().getStagingConfiguration(),algorithm,stagingCluster,rProtoVREName,category,algorithm_type);
this.logger = LoggerFactory.getLogger(StagingJob.class);
this.jobLogs = new File(
System.getProperty("user.home") + File.separator + "dataminer-pool-manager" + File.separator + "jobs");
this.jobLogs.mkdirs();
this.algorithm = algorithm;
this.stagingCluster = stagingCluster;
// this.rProtoCluster = rProtoCluster;
this.rProtoVREName = rProtoVREName;
//this.env = env;
this.category = category;
this.algorithm_type = algorithm_type;
//File m = new File(this.jobLogs + File.separator + this.id + "_exitStatus");
//PrintWriter writer = new PrintWriter(m, "UTF-8");
this.getStatus(0);
//writer.close();
}
@Override
protected void execute() {
this.logger.debug("Executing staging job...");
super.execute(new NotificationHelperStaging(), new CheckMethodStaging());
CheckMethod methodChecker = new CheckMethod();
SendMail sm = new SendMail();
NotificationHelper nh = new NotificationHelper();
try {
this.logger.debug("Checking dependencies...");
Collection<String> undefinedDependencies = this.svnUpdater.getUndefinedDependencies(
this.svnUpdater.getDependencyFile(this.algorithm.getLanguage()/*,env*/),
this.algorithm.getDependencies());
if (!undefinedDependencies.isEmpty())
{
this.logger.debug("Some dependencies are not defined");
String message = "Following dependencies are not defined:\n";
for (String n : undefinedDependencies) {
message += "\n" + n +"\n";
}
this.getStatus(2);
sm.sendNotification(nh.getFailedSubject() +" for "+this.algorithm.getName()+ " algorithm", nh.getFailedBody(message+"\n\n"+this.buildInfo()));
return;
}
//before the installation to check if the files exist
methodChecker.deleteFiles(this.algorithm/*, env*/);
int ret = this.executeAnsibleWorker(createWorker(this.algorithm, this.stagingCluster, false, "root"));
System.out.println("Return code= "+ret);
if (ret != 0)
{
this.logger.debug("Ansible work failed, return code "+ret);
this.getStatus(2);
sm.sendNotification(nh.getFailedSubject() + " for "+this.algorithm.getName()+ " algorithm", nh.getFailedBody("Installation failed. Return code=" + ret)+"\n\n"+this.buildInfo());
return;
}
if (ret == 0)
{
this.logger.debug("Operation completed");
this.getStatus(0);
//System.out.println("1 - Checking existing in env: "+ env);
//System.out.println("2 - Checking existing in env: "+ this.env);
this.logger.debug("Checking the method...");
if (methodChecker.checkMethod(DMPMClientConfiguratorManager.getInstance().getStagingConfiguration().getHost(), SecurityTokenProvider.instance.get())&&(methodChecker.algoExists(this.algorithm)))
{
this.logger.debug("Method OK and algo exists");
System.out.println("Interface check ok!");
System.out.println("Both the files exist at the correct path!");
this.svnUpdater.updateSVNStagingAlgorithmList(this.algorithm, this.rProtoVREName,this.category, this.algorithm_type,
this.algorithm.getFullname()/*, env*/);
this.getStatus(9);
sm.sendNotification(nh.getSuccessSubject() + " for "+this.algorithm.getName()+ " algorithm", nh.getSuccessBody("\n\n"+this.buildInfo()));
return;
} else
{
this.logger.debug("Operation failed");
this.getStatus(2);
sm.sendNotification(nh.getFailedSubject() + " for "+this.algorithm.getName()+ " algorithm",
nh.getFailedBody(
"\n"+
"Installation completed but DataMiner Interface not working correctly or files "
+ this.algorithm.getName() + ".jar and " + this.algorithm.getName()
+ "_interface.jar not availables at the expected path")+"\n\n"+this.buildInfo());
return;
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
public String getStatus(int exitstatus) throws FileNotFoundException, UnsupportedEncodingException {
File m = new File(this.jobLogs + File.separator + this.id + "_exitStatus");
PrintWriter writer = new PrintWriter(m, "UTF-8");
String response = "";
if (exitstatus == 0) {
response = "IN PROGRESS";
writer.println(response);
//writer.close();
}
if (exitstatus == 9) {
response = "COMPLETED";
writer.println(response);
//writer.close();
}
if (exitstatus == 2) {
response = "FAILED";
writer.println(response);
//writer.close();
}
writer.close();
return response;
}
public String buildInfo() throws UnsupportedEncodingException{
return
"\n"+
"Algorithm details:\n"+"\n"+
"User: "+this.algorithm.getFullname()+"\n"+
"Algorithm name: "+this.algorithm.getName()+"\n"+
"Staging DataMiner Host: "+ DMPMClientConfiguratorManager.getInstance().getStagingConfiguration().getHost()+"\n"+
"Caller VRE: "+ScopeProvider.instance.get()+"\n"+
"Target VRE: "+rProtoVREName+"\n";
}

View File

@@ -10,20 +10,11 @@ import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.Configuration;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.jcraft.jsch.Channel;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;
import com.jcraft.jsch.SftpATTRS;
import com.jcraft.jsch.SftpException;
public class CheckMethod {
public abstract class CheckMethod {
private Logger logger;
@@ -37,14 +28,14 @@
this.logger.debug("Checking method for machine "+machine);
this.logger.debug("By using tocken "+token);
System.out.println("Machine: " + machine);
String getCapabilitesRequest = new String();
String getCapabilitesResponse = new String();
// String getCapabilitesRequest = new String();
// String getCapabilitesResponse = new String();
System.out.println(" Token: " + token);
String request = "http://" + machine
+ "/wps/WebProcessingService?Request=GetCapabilities&Service=WPS&gcube-token=" + token;
String response = machine + "___" + token + ".xml";
getCapabilitesRequest = request;
getCapabilitesResponse = response;
// getCapabilitesRequest = request;
// getCapabilitesResponse = response;
String baseDescriptionRequest = "http://" + machine
+ "/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0" + "&gcube-token="
+ token + "&Identifier=";
@@ -111,359 +102,22 @@
public boolean algoExists(Algorithm algo/*, String env*/) throws Exception{
public abstract boolean algoExists(Algorithm algo/*, String env*/) throws Exception;
this.logger.debug("Looking if algo "+algo.getName()+ " exists");
Configuration stagingConfiguration = DMPMClientConfiguratorManager.getInstance().getStagingConfiguration();
File file = new File(stagingConfiguration.getGhostAlgoDirectory()+"/"+algo.getName()+".jar");
File file2 = new File(stagingConfiguration.getGhostAlgoDirectory()+"/"+algo.getName()+"_interface.jar");
this.logger.debug("Looking for files "+file.getPath()+ " "+file.getPath());
System.out.println("First file is located to: "+file.getPath());
System.out.println("Second file is located to: "+file2.getPath());
if ((this.doesExist(file.getPath()/*,env*/)) && (this.doesExist(file2.getPath()/*,env*/)))
{
this.logger.debug("Files found");
this.copyFromDmToSVN(file/*,env*/);
this.copyFromDmToSVN(file2/*,env*/);
System.out.println("Files have been copied to SVN");
return true;
}
else
{
this.logger.debug("Files not found");
System.out.println("Algorithm "+algo.getName()+".jar"+ " and "+algo.getName()+"_interface.jar files are not present at the expected path");
return false;
}
}
public abstract void deleteFiles(Algorithm a/*,String env*/) throws Exception;
public boolean algoExistsProd(Algorithm a/*, String env*/) throws Exception{
Configuration productionConfiguration = DMPMClientConfiguratorManager.getInstance().getProductionConfiguration();
File file = new File(productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+".jar");
File file2 = new File(productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+"_interface.jar");
System.out.println("First file is located to: "+file.getPath());
System.out.println("Second file is located to: "+file2.getPath());
if ((this.doesExistProd(file.getPath()/*,env*/)) && (this.doesExistProd(file2.getPath()/*,env*/))){
this.copyFromDmToSVNProd(file/*,env*/);
this.copyFromDmToSVNProd(file2/*,env*/);
return true;
}
else
System.out.println("Algorithm "+a.getName()+".jar"+ " and "+a.getName()+"_interface.jar files are not present at the expected path");
return false;
}
public abstract boolean doesExist(String path/*, String env*/) throws Exception;
public void deleteFiles(Algorithm a/*,String env*/) throws Exception{
JSch jsch = new JSch();
Session session = null;
Channel channel = null;
ChannelSftp c = null;
Configuration stagingConfiguration = DMPMClientConfiguratorManager.getInstance().getStagingConfiguration();
System.out.println("checking existing in env: " + stagingConfiguration.getHost());
File file = new File(stagingConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+".jar");
File file2 = new File(stagingConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+"_interface.jar");
System.out.println("First file is located to: "+file.getPath());
System.out.println("Second file is located to: "+file2.getPath());
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", stagingConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
channel = session.openChannel("sftp");
channel.connect();
System.out.println("shell channel connected....");
c = (ChannelSftp) channel;
if(doesExist(file.getPath()/*,env*/)&&(doesExist(file2.getPath()/*,env*/))){
c.rm(file.getPath());
c.rm(file2.getPath());
System.out.println("Both the files have been deleted");
}
else System.out.println("Files not found");
channel.disconnect();
c.disconnect();
session.disconnect();
}
public void deleteFilesProd(Algorithm a/*,String env*/) throws Exception{
JSch jsch = new JSch();
Session session = null;
Channel channel = null;
ChannelSftp c = null;
Configuration productionConfiguration = DMPMClientConfiguratorManager.getInstance().getProductionConfiguration();
System.out.println("checking existing in env: " + productionConfiguration.getHost());
File file = new File(productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+".jar");
File file2 = new File(productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+"_interface.jar");
System.out.println("First file is located to: "+file.getPath());
System.out.println("Second file is located to: "+file2.getPath());
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", productionConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
channel = session.openChannel("sftp");
channel.connect();
System.out.println("shell channel connected....");
c = (ChannelSftp) channel;
if(doesExistProd(file.getPath()/*,env*/)&&(doesExistProd(file2.getPath()/*,env*/))){
c.rm(file.getPath());
c.rm(file2.getPath());
System.out.println("Both the files have been deleted");
}
else System.out.println("Files not found");
channel.disconnect();
c.disconnect();
session.disconnect();
}
public boolean doesExist(String path/*, String env*/) throws Exception {
this.logger.debug("Looking if file "+path + " exists");
JSch jsch = new JSch();
Session session = null;
Channel channel = null;
ChannelSftp c = null;
boolean success = false;
Configuration stagingConfiguration = DMPMClientConfiguratorManager.getInstance().getStagingConfiguration();
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
this.logger.debug("Staging configuration host "+stagingConfiguration.getHost());
session = jsch.getSession("root",stagingConfiguration.getHost() );
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
channel = session.openChannel("sftp");
channel.connect();
System.out.println("shell channel connected....");
c = (ChannelSftp) channel;
SftpATTRS is = null;
System.out.println(path);
try {
is = c.lstat(path);
this.logger.debug("File found");
success = true;
} catch (SftpException e)
{
this.logger.error("File not found",e);
if (e.id == ChannelSftp.SSH_FX_NO_SUCH_FILE)
{
// file doesn't exist
success = false;
}
//success = true; // something else went wrong
}
channel.disconnect();
c.disconnect();
session.disconnect();
this.logger.debug("Operation result "+success);
return success;
}
public boolean doesExistProd(String path/*, String env*/) throws Exception {
JSch jsch = new JSch();
Session session = null;
Channel channel = null;
ChannelSftp c = null;
boolean success = false;
Configuration productionConfiguration = DMPMClientConfiguratorManager.getInstance().getProductionConfiguration();
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", productionConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
channel = session.openChannel("sftp");
channel.connect();
System.out.println("shell channel connected....");
c = (ChannelSftp) channel;
SftpATTRS is = null;
System.out.println(path);
try {
is = c.lstat(path);
success = true;
} catch (SftpException e) {
if (e.id == ChannelSftp.SSH_FX_NO_SUCH_FILE) {
// file doesn't exist
success = false;
}
//success = true; // something else went wrong
}
channel.disconnect();
c.disconnect();
session.disconnect();
return success;
}
public void copyFromDmToSVN(File a/*,String env*/) throws Exception {
JSch jsch = new JSch();
Session session = null;
SVNUpdater svnUpdater = new SVNUpdater();
Configuration stagingConfiguration = DMPMClientConfiguratorManager.getInstance().getStagingConfiguration();
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", stagingConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
Channel channel = session.openChannel("sftp");
channel.connect();
ChannelSftp sftp = (ChannelSftp) channel;
sftp.cd(stagingConfiguration.getGhostAlgoDirectory());
System.out.println("REMOTE : "+stagingConfiguration.getGhostAlgoDirectory()+"/"+a.getName());
System.out.println("LOCAL : /tmp/"+a.getName());
sftp.get(stagingConfiguration.getGhostAlgoDirectory()+"/"+a.getName(),"/tmp/"+a.getName());
channel.disconnect();
session.disconnect();
File f = new File("/tmp/"+a.getName());
svnUpdater.updateAlgorithmFiles(f);
f.delete();
}
public abstract void copyFromDmToSVN(File a/*,String env*/) throws Exception;
public void copyFromDmToSVNProd(File a/*,String env*/) throws Exception {
JSch jsch = new JSch();
Session session = null;
SVNUpdater svnUpdater = new SVNUpdater();
Configuration productionConfiguration = DMPMClientConfiguratorManager.getInstance().getProductionConfiguration();
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", productionConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
Channel channel = session.openChannel("sftp");
channel.connect();
ChannelSftp sftp = (ChannelSftp) channel;
sftp.cd(productionConfiguration.getGhostAlgoDirectory());
System.out.println("REMOTE : "+productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName());
System.out.println("LOCAL : /tmp/"+a.getName());
sftp.get(productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName(),"/tmp/"+a.getName());
channel.disconnect();
session.disconnect();
File f = new File("/tmp/"+a.getName());
svnUpdater.updateAlgorithmFilesProd(f);
f.delete();
}
public List<String> getFiles(String a){
public static List<String> getFiles(String a){
String[] array = a.split(",");
ArrayList<String> list = new ArrayList<>(Arrays.asList(array));
@@ -477,71 +131,5 @@ public class CheckMethod {
}
public static void main(String[] args) throws Exception {
// ServiceConfiguration a = new ServiceConfiguration();
// System.out.println(a.getStagingHost());
CheckMethod a = new CheckMethod();
//a.getFiles("/trunk/data-analysis/RConfiguration/RPackagesManagement/r_deb_pkgs.txt, /trunk/data-analysis/RConfiguration/RPackagesManagement/r_cran_pkgs.txt, /trunk/data-analysis/RConfiguration/RPackagesManagement/r_github_pkgs.txt");
// File aa = new File("OCTAVEBLACKBOX.jar");
// System.out.println(aa.getName());
// System.out.println(aa.getPath());
//a.copyFromDmToSVN(aa);
// if (a.checkMethod("dataminer-ghost-d.dev.d4science.org", "708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548")){
// System.out.println("AAA"); }
//
// if (a.doesExist("/home/gcube/wps_algorithms/algorithms/WINDOWS_BLACK_BOX_EXAMPLE.jar")){
// System.out.println("BBBB");
//
// }
// if (a.doesExist("/home/gcube/wps_algorithms/algorithms/WINDOWS_BLACK_BOX_EXAMPLE_interface.jar")){
// System.out.println("CCCC");}
//
// File aa = new File("/home/gcube/wps_algorithms/algorithms/RBLACKBOX_interface.jar");
// a.copyFromDmToSVN(aa, "Dev");
//
//System.out.println(a.checkMethod("dataminer-ghost-t.pre.d4science.org",
// "2eceaf27-0e22-4dbe-8075-e09eff199bf9-98187548"));
//System.out.println(a.checkMethod("dataminer-proto-ghost.d4science.org",
// "3a23bfa4-4dfe-44fc-988f-194b91071dd2-843339462"));
System.out.println(a.checkMethod("dataminer-ghost-d.dev.d4science.org",
"708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548"));
//Algorithm aa = new Algorithm();
//aa.setName("UDPIPE_WRAPPER");
//System.out.println(a.algoExists(aa));
////
//ServiceConfiguration bp = new ServiceConfiguration();
////
//SecurityTokenProvider.instance.set("708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548");
////
//if (a.checkMethod(bp.getStagingHost(), SecurityTokenProvider.instance.get())&&a.algoExists(aa)); {
//System.out.println("ciao");
//
//}
//
//Algorithm al = new Algorithm();
// al.setName("UDPIPE_WRAPPER");
// a.deleteFiles(al);
}
}
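The new abstract methods remove the staging/production duplication visible in the deleted doesExist/doesExistProd, deleteFiles/deleteFilesProd and copyFromDmToSVN/copyFromDmToSVNProd pairs. A hedged sketch of the environment-neutral SFTP check the subclasses could share, parameterized by Configuration (illustrative helper, not part of the commit; uses the com.jcraft.jsch types this class already imports):

// Illustrative helper: each subclass could call this with its own
// Configuration instead of duplicating the JSch boilerplate.
protected boolean fileExistsOnHost(Configuration conf, String path) throws Exception {
    JSch jsch = new JSch();
    jsch.setKnownHosts("~/.ssh/known_hosts");
    jsch.addIdentity("~/.ssh/id_rsa");
    Session session = jsch.getSession("root", conf.getHost());
    java.util.Properties config = new java.util.Properties();
    config.put("StrictHostKeyChecking", "no");
    session.setConfig(config);
    session.connect();
    ChannelSftp sftp = (ChannelSftp) session.openChannel("sftp");
    sftp.connect();
    try {
        sftp.lstat(path);   // throws SftpException if the path is absent
        return true;
    } catch (SftpException e) {
        if (e.id == ChannelSftp.SSH_FX_NO_SUCH_FILE) {
            return false;   // file does not exist
        }
        throw e;            // any other SFTP error is a real failure
    } finally {
        sftp.disconnect();
        session.disconnect();
    }
}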

View File

@@ -1,11 +1,9 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util;
import static org.gcube.common.authorization.client.Constants.authorizationService;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.common.authorization.library.AuthorizationEntry;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import static org.gcube.common.authorization.client.Constants.authorizationService;
public class CheckPermission {

View File

@@ -6,58 +6,37 @@ import java.io.IOException;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.HAProxy;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.Configuration;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;
public class ClusterBuilder {
public abstract class ClusterBuilder {
private Configuration configuration;
public ClusterBuilder (Configuration configuration)
{
this.configuration = configuration;
}
//1. to complete
public static Cluster getStagingDataminerCluster() throws FileNotFoundException{
public Cluster getDataminerCluster() throws FileNotFoundException{
Cluster cluster = new Cluster();
Host h = new Host();
h.setName(DMPMClientConfiguratorManager.getInstance().getStagingConfiguration().getHost());
h.setName(this.configuration.getHost());
cluster.addHost(h);
// if (env.equals("Dev")){
// h.setName(p.getDevStagingHost());
// cluster.addHost(h);
// }
//
// if ((env.equals("Prod")||(env.equals("Proto")))){
// h.setName(p.getProtoProdStagingHost());
// cluster.addHost(h);
// }
return cluster;
}
public static Cluster getProdDataminerCluster() throws FileNotFoundException{
Cluster cluster = new Cluster();
Host h = new Host();
h.setName(DMPMClientConfiguratorManager.getInstance().getProductionConfiguration().getHost());
cluster.addHost(h);
// if (env.equals("Dev")){
// h.setName(p.getDevStagingHost());
// cluster.addHost(h);
// }
//
// if ((env.equals("Prod")||(env.equals("Proto")))){
// h.setName(p.getProtoProdStagingHost());
// cluster.addHost(h);
// }
return cluster;
}
public static Cluster getVRECluster(String targetVREToken, String targetVRE) throws IOException{
public Cluster getVRECluster(String targetVREToken, String targetVRE) throws IOException{
Cluster cluster = new Cluster();
for (Host h : new HAProxy().listDataMinersByCluster(targetVREToken,targetVRE)) {
cluster.addHost(h);
@@ -65,13 +44,13 @@ public class ClusterBuilder {
return cluster;
}
public static Cluster getRProtoCluster() throws IOException{
public Cluster getRProtoCluster() throws IOException{
//Assumes the service is running in RPrototypingLab
String token = SecurityTokenProvider.instance.get();
String targetVRE = ScopeProvider.instance.get();
return getVRECluster(token, targetVRE);
return this.getVRECluster(token, targetVRE);
}
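A concrete builder is now only responsible for choosing a Configuration. A plausible sketch of a subclass (the actual ClusterBuilderStaging/ClusterBuilderProduction sources are not in this diff):

// Hypothetical subclass: pick the configuration, inherit everything else.
public class ExampleClusterBuilder extends ClusterBuilder {
    public ExampleClusterBuilder() {
        super(DMPMClientConfiguratorManager.getInstance().getStagingConfiguration());
    }
}
// usage: Cluster cluster = new ExampleClusterBuilder().getDataminerCluster();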

View File

@@ -1,53 +1,28 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import org.apache.commons.lang.StringUtils;
//import scala.actors.threadpool.Arrays;
public class NotificationHelper {
public abstract class NotificationHelper {
private Exception executionException;
// private Exception executionException;
public NotificationHelper() {
}
private String getSubjectHeader() {
return "[DataMinerGhostStagingInstallationRequestReport]";
}
// private boolean isError() {
// return this.executionException!=null;
// }
private String getSubjectHeaderRelease() {
return "[DataMinerGhostProductionInstallationRequestReport]";
}
// public void setExecutionException(Exception executionException) {
// this.executionException = executionException;
// }
private boolean isError() {
return this.executionException!=null;
}
public void setExecutionException(Exception executionException) {
this.executionException = executionException;
}
public abstract String getSuccessSubject();
public String getSuccessSubject() {
return this.getSubjectHeader()+" is SUCCESS";
}
public String getSuccessSubjectRelease() {
return this.getSubjectHeaderRelease()+" is SUCCESS";
}
public String getFailedSubjectRelease() {
return this.getSubjectHeaderRelease()+" is FAILED";
}
public abstract String getFailedSubject();
public String getFailedSubject() {
return String.format(this.getSubjectHeader()+" is FAILED");
}
public String getSuccessBody(String info) {
String message = String.format("The installation of the algorithm is completed successfully.");
message+="\n\nYou can retrieve experiment results under the '/DataMiner' e-Infrastructure Workspace folder or from the DataMiner interface.\n\n"+ info;

View File

@@ -0,0 +1,38 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.Configuration;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.auth.ISVNAuthenticationManager;
import org.tmatesoft.svn.core.io.SVNRepository;
import org.tmatesoft.svn.core.io.SVNRepositoryFactory;
import org.tmatesoft.svn.core.wc.SVNWCUtil;
public class SVNRepositoryManager {
private SVNRepository svnRepository;
private static SVNRepositoryManager instance;
private SVNRepositoryManager (Configuration configuration) throws SVNException
{
this.svnRepository = SVNRepositoryFactory.create(SVNURL.parseURIEncoded(configuration.getSVNRepository()));
ISVNAuthenticationManager authManager = SVNWCUtil.createDefaultAuthenticationManager();
this.svnRepository.setAuthenticationManager(authManager);
}
public static SVNRepositoryManager getInstance (Configuration configuration) throws SVNException
{
if (instance == null) instance = new SVNRepositoryManager(configuration);
return instance;
}
public SVNRepository getSvnRepository() {
return svnRepository;
}
}
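A hedged usage sketch (variable names are illustrative): every updater obtains the shared repository through the manager, so the process holds a single authenticated connection. Note that the singleton is built from the first Configuration passed in; configurations supplied on later calls are silently ignored.

SVNRepository repository = SVNRepositoryManager
        .getInstance(configuration)
        .getSvnRepository();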

View File

@@ -21,33 +21,28 @@ import java.util.TimeZone;
import org.apache.commons.io.FileUtils;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.Configuration;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNNodeKind;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.auth.ISVNAuthenticationManager;
import org.tmatesoft.svn.core.internal.wc.SVNFileUtil;
import org.tmatesoft.svn.core.internal.wc.admin.SVNChecksumInputStream;
import org.tmatesoft.svn.core.io.ISVNEditor;
import org.tmatesoft.svn.core.io.SVNRepository;
import org.tmatesoft.svn.core.io.SVNRepositoryFactory;
import org.tmatesoft.svn.core.io.diff.SVNDeltaGenerator;
import org.tmatesoft.svn.core.wc.SVNWCUtil;
/**
* Created by ggiammat on 5/9/17.
*/
public class SVNUpdater {
public abstract class SVNUpdater {
private SVNRepository svnRepository;
private Configuration configuration;
public SVNUpdater() throws SVNException {
this.svnRepository = SVNRepositoryFactory.create(SVNURL.parseURIEncoded(DMPMClientConfiguratorManager.getInstance().getProductionConfiguration().getSVNRepository()));
ISVNAuthenticationManager authManager = SVNWCUtil.createDefaultAuthenticationManager();
this.svnRepository.setAuthenticationManager(authManager);
public SVNUpdater(Configuration configuration) throws SVNException {
this.configuration = configuration;
this.svnRepository = SVNRepositoryManager.getInstance(configuration).getSvnRepository();
}
// public void updateRPRotoDeps(Algorithm algorithm) {
@@ -58,113 +53,14 @@ public class SVNUpdater {
public String getDependencyFile(String language/*, String env*/){
String a = "";
// if (env.equals("Dev")){
// a= this.getDevDependencyFile(language);
// }
//
// if (env.equals("Prod")){
// a= this.getProdDependencyFile(language);
// }
//
// if (env.equals("Proto")){
// a= this.getRProtoDependencyFile(language);
// }
//
// if (env.equals("Preprod")){
// a= this.getPreProdDependencyFile(language);
// }
a = this.getStagingDependencyFile(language);
return a;
public String getDependencyFile(String language/*, String env*/)
{
return getDependencyFile(this.configuration,language);
}
public String getDependencyFileProd(String language/*, String env*/){
String a = "";
// if (env.equals("Dev")){
// a= this.getDevDependencyFile(language);
// }
//
// if (env.equals("Prod")){
// a= this.getProdDependencyFile(language);
// }
//
// if (env.equals("Proto")){
// a= this.getRProtoDependencyFile(language);
// }
//
// if (env.equals("Preprod")){
// a= this.getPreProdDependencyFile(language);
// }
a = this.getProdDependencyFile(language);
return a;
}
public String getStagingDependencyFile(String language) {
return getDependencyFile(DMPMClientConfiguratorManager.getInstance().getStagingConfiguration(),language);
// switch (language) {
// case "R":
// return DMPMClientConfigurator.getInstance().getSVNStagingCRANDepsList();
// case "R-blackbox":
// return DMPMClientConfigurator.getInstance().getSVNStagingRBDepsList();
// case "Java":
// return DMPMClientConfigurator.getInstance().getSVNStagingJavaDepsList();
// case "Knime-Workflow":
// return DMPMClientConfigurator.getInstance().getSVNStagingKWDepsList();
// case "Linux-compiled":
// return DMPMClientConfigurator.getInstance().getSVNStagingLinuxCompiledDepsList();
// case "Octave":
// return DMPMClientConfigurator.getInstance().getSVNStagingOctaveDepsList();
// case "Python":
// return DMPMClientConfigurator.getInstance().getSVNStagingPythonDepsList();
// case "Pre-Installed":
// return DMPMClientConfigurator.getInstance().getSVNStagingPreInstalledDepsList();
// case "Windows-compiled":
// return DMPMClientConfigurator.getInstance().getSVNStagingWCDepsList();
// default:
// return null;
// }
}
public String getProdDependencyFile(String language) {
return getDependencyFile(DMPMClientConfiguratorManager.getInstance().getProductionConfiguration(),language);
// switch (language) {
// case "R":
// return this.configuration.getSVNProdCRANDepsList();
// case "R-blackbox":
// return this.configuration.getSVNProdRBDepsList();
// case "Java":
// return this.configuration.getSVNProdJavaDepsList();
// case "Knime-Workflow":
// return this.configuration.getSVNProdKWDepsList();
// case "Linux-compiled":
// return this.configuration.getSVNProdLinuxCompiledDepsList();
// case "Octave":
// return this.configuration.getSVNProdOctaveDepsList();
// case "Python":
// return this.configuration.getSVNProdPythonDepsList();
// case "Pre-Installed":
// return this.configuration.getSVNProdPreInstalledDepsList();
// case "Windows-compiled":
// return this.configuration.getSVNProdWCDepsList();
// default:
// return null;
// }
}
private String getDependencyFile (Configuration configuration, String language)
{
@@ -193,29 +89,24 @@ public String getDependencyFileProd(String language/*, String env*/){
}
public void updateSVNStagingAlgorithmList(Algorithm algorithm, String targetVRE, String category, String algorithm_type, String user/*, String env*/) {
this.updateSVNAlgorithmList(DMPMClientConfiguratorManager.getInstance().getStagingConfiguration().getSVNAlgorithmsList(), algorithm, targetVRE, category, algorithm_type, user);
}
public void updateSVNProdAlgorithmList(Algorithm algorithm, String targetVRE, String category, String algorithm_type, String user/*, String env*/) {
this.updateSVNAlgorithmList(DMPMClientConfiguratorManager.getInstance().getProductionConfiguration().getSVNAlgorithmsList(), algorithm, targetVRE, category, algorithm_type, user);
public void updateSVNAlgorithmList(Algorithm algorithm, String targetVRE, String category, String algorithm_type, String user/*, String env*/)
{
this.updateSVNAlgorithmList(this.configuration.getSVNAlgorithmsList(), algorithm, targetVRE, category, algorithm_type, user);
}
public void updateAlgorithmFiles(File a) throws SVNException{
//this.updateAlgorithmList(this.configuration.getSVNMainAlgoRepo(), a);
this.updateAlgorithmList(DMPMClientConfiguratorManager.getInstance().getStagingConfiguration().getRepository(), a);
this.updateAlgorithmList(this.configuration.getRepository(), a);
}
public void updateAlgorithmFilesProd(File a) throws SVNException{
//this.updateAlgorithmList(this.configuration.getSVNMainAlgoRepo(), a);
this.updateAlgorithmList(DMPMClientConfiguratorManager.getInstance().getProductionConfiguration().getRepository(), a);
}
public void updateAlgorithmList(String svnMainAlgoRepo, File a) throws SVNException {
private void updateAlgorithmList(String svnMainAlgoRepo, File a) throws SVNException {
try {
System.out.println("Adding .jar file: " + a + " to repository " + svnMainAlgoRepo);
@@ -312,7 +203,7 @@ public String getDependencyFileProd(String language/*, String env*/){
}
public void updateSVNAlgorithmList(String file, Algorithm algorithm, String targetVRE, String category, String algorithm_type, String user/*, String env*/) {
private void updateSVNAlgorithmList(String file, Algorithm algorithm, String targetVRE, String category, String algorithm_type, String user/*, String env*/) {
try {
System.out.println("Updating algorithm list: " + file);
final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
@@ -404,10 +295,10 @@ public String getDependencyFileProd(String language/*, String env*/){
System.out.println("Checking dependencies list: " + file);
CheckMethod cm = new CheckMethod();
List<String> validDependencies = new LinkedList<String>();
for (String singlefile: cm.getFiles(file)){
for (String singlefile: CheckMethod.getFiles(file)){
final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
svnRepository.getFile(singlefile, SVNRepository.INVALID_REVISION, null, byteArrayOutputStream);
@@ -477,8 +368,8 @@ public String getDependencyFileProd(String language/*, String env*/){
}
public boolean checkIfAvaialable(String file, Collection<Dependency> deps) throws SVNException {
SendMail sm = new SendMail();
NotificationHelper nh = new NotificationHelper();
//SendMail sm = new SendMail();
//NotificationHelper nh = new NotificationHelper();
boolean check = false;
try {
@ -608,7 +499,7 @@ public String getDependencyFileProd(String language/*, String env*/){
SVNUpdater c = new SVNUpdater();
// SVNUpdater c = new SVNUpdater();
//File a = new File("/home/ngalante/Desktop/testCiro");
//File b = new File ("/home/ngalante/Desktop/testB");

View File

@ -8,6 +8,7 @@ import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
@ -23,16 +24,10 @@ import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.common.authorization.library.AuthorizationEntry;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.resources.gcore.GCoreEndpoint;
import org.gcube.common.resources.gcore.GenericResource;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;
import org.gcube.dataanalysis.dataminer.poolmanager.util.exception.EMailException;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.gcube.smartgears.ContextProvider;
import org.gcube.smartgears.context.application.ApplicationContext;
import org.json.JSONArray;
import org.json.JSONObject;
@ -43,12 +38,14 @@ public class SendMail {
// TODO Auto-generated constructor stub
}
public void sendNotification(String subject, String body) throws Exception {
public void sendNotification(String subject, String body) throws EMailException {
//AnalysisLogger.getLogger().debug("Emailing System->Starting request of email in scope " + ScopeProvider.instance.get());
//String serviceAddress = InfraRetrieval.findEmailingSystemAddress(ScopeProvider.instance.get());
String serviceAddress = this.getSocialService();
@ -62,18 +59,25 @@ public class SendMail {
//AnalysisLogger.getLogger().debug("Emailing System->Request url is going to be " + requestForMessage);
// put the sender, the recipients, subject and body of the mail here
subject = URLEncoder.encode(subject, "UTF-8");
body = URLEncoder.encode(body, "UTF-8");
try
{
subject = URLEncoder.encode(subject, "UTF-8");
body = URLEncoder.encode(body, "UTF-8");
}
catch (UnsupportedEncodingException e)
{
throw new EMailException(e);
}
String requestParameters = "&sender=dataminer&recipients=" + this.getAdmins() + "&subject=" + subject + "&body="
+ body;
String response = this.sendPostRequest(requestForMessage, requestParameters);
//AnalysisLogger.getLogger().debug("Emailing System->Emailing response OK ");
if (response == null) {
Exception e = new Exception("Error in email sending response");
throw e;
}
if (response == null) throw new EMailException();
}
// public void notifySubmitter(String a, String b) throws Exception {
@ -191,7 +195,7 @@ public class SendMail {
public String getAdmins() throws Exception {
public String getAdmins(){
try{
List<String> s = new LinkedList<String>();
JSONObject obj = new JSONObject(this.getAdminRoles());
@ -206,7 +210,7 @@ public class SendMail {
}
return s.toString().replace("[", "").replace("]", "");
}
catch(Exception a){return "ngalante, lucio.lelii, roberto.cirillo, gianpaolo.coro, giancarlo.panichi, scarponi"; }
catch(Exception a){return "ciro.formisano, lucio.lelii, roberto.cirillo, gianpaolo.coro, giancarlo.panichi, scarponi"; }
}
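sendNotification now throws the single checked EMailException (added below) instead of a bare Exception, covering both a failed UTF-8 encoding and a null response from the social service. A minimal caller sketch, with placeholder subject and body text:

SendMail sm = new SendMail();
try {
    sm.sendNotification("[DataMinerGhostStagingInstallationRequestReport] is SUCCESS", "Installation completed");
} catch (EMailException e) {
    // the notification could not be delivered; callers decide whether this is fatal
}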

View File

@ -0,0 +1,19 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util.exception;
public class EMailException extends Exception {
private static final long serialVersionUID = 1L;
public EMailException() {
super ("Unable to send email notification");
}
public EMailException(Throwable cause) {
super ("Unable to send email notification",cause);
}
}

View File

@ -0,0 +1,272 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util.impl;
import java.io.File;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.Configuration;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.util.CheckMethod;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.jcraft.jsch.Channel;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;
import com.jcraft.jsch.SftpException;
public class CheckMethodProduction extends CheckMethod{
private Logger logger;
public CheckMethodProduction()
{
this.logger = LoggerFactory.getLogger(CheckMethodProduction.class);
}
@Override
public boolean algoExists(Algorithm algo/*, String env*/) throws Exception{
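// the algorithm counts as deployed only if both <name>.jar and <name>_interface.jar exist on the host; both are then mirrored to SVN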
this.logger.debug("Looking if algo "+algo.getName()+ " exists in production");
Configuration productionConfiguration = DMPMClientConfiguratorManager.getInstance().getProductionConfiguration();
File file = new File(productionConfiguration.getGhostAlgoDirectory()+"/"+algo.getName()+".jar");
File file2 = new File(productionConfiguration.getGhostAlgoDirectory()+"/"+algo.getName()+"_interface.jar");
System.out.println("First file is located to: "+file.getPath());
System.out.println("Second file is located to: "+file2.getPath());
if ((this.doesExist(file.getPath()/*,env*/)) && (this.doesExist(file2.getPath()/*,env*/))){
this.copyFromDmToSVN(file/*,env*/);
this.copyFromDmToSVN(file2/*,env*/);
return true;
}
else
System.out.println("Algorithm "+algo.getName()+".jar"+ " and "+algo.getName()+"_interface.jar files are not present at the expected path");
return false;
}
@Override
public void deleteFiles(Algorithm a/*,String env*/) throws Exception
{
JSch jsch = new JSch();
Session session = null;
Channel channel = null;
ChannelSftp c = null;
Configuration productionConfiguration = DMPMClientConfiguratorManager.getInstance().getProductionConfiguration();
System.out.println("checking existing in env: " + productionConfiguration.getHost());
File file = new File(productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+".jar");
File file2 = new File(productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+"_interface.jar");
System.out.println("First file is located to: "+file.getPath());
System.out.println("Second file is located to: "+file2.getPath());
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", productionConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
channel = session.openChannel("sftp");
channel.connect();
System.out.println("shell channel connected....");
c = (ChannelSftp) channel;
if(doesExist(file.getPath()/*,env*/)&&(doesExist(file2.getPath()/*,env*/))){
c.rm(file.getPath());
c.rm(file2.getPath());
System.out.println("Both the files have been deleted");
}
else System.out.println("Files not found");
channel.disconnect();
c.disconnect();
session.disconnect();
}
@Override
public boolean doesExist(String path/*, String env*/) throws Exception {
JSch jsch = new JSch();
Session session = null;
Channel channel = null;
ChannelSftp c = null;
boolean success = false;
Configuration productionConfiguration = DMPMClientConfiguratorManager.getInstance().getProductionConfiguration();
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", productionConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
channel = session.openChannel("sftp");
channel.connect();
System.out.println("shell channel connected....");
c = (ChannelSftp) channel;
System.out.println(path);
try {
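// lstat succeeds only if the remote path exists; a missing file raises SftpException with id SSH_FX_NO_SUCH_FILE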
c.lstat(path);
success = true;
} catch (SftpException e) {
if (e.id == ChannelSftp.SSH_FX_NO_SUCH_FILE) {
// file doesn't exist
success = false;
}
//success = true; // something else went wrong
}
channel.disconnect();
c.disconnect();
session.disconnect();
return success;
}
@Override
public void copyFromDmToSVN(File a/*,String env*/) throws Exception {
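// despite the name, this downloads the jar from the DataMiner host into /tmp via SFTP,
// commits it to SVN through SVNUpdaterProduction, then deletes the temporary copy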
JSch jsch = new JSch();
Session session = null;
SVNUpdater svnUpdater = new SVNUpdaterProduction();
Configuration productionConfiguration = DMPMClientConfiguratorManager.getInstance().getProductionConfiguration();
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", productionConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
Channel channel = session.openChannel("sftp");
channel.connect();
ChannelSftp sftp = (ChannelSftp) channel;
sftp.cd(productionConfiguration.getGhostAlgoDirectory());
System.out.println("REMOTE : "+productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName());
System.out.println("LOCAL : /tmp/"+a.getName());
sftp.get(productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName(),"/tmp/"+a.getName());
channel.disconnect();
session.disconnect();
File f = new File("/tmp/"+a.getName());
svnUpdater.updateAlgorithmFiles(f);
f.delete();
}
public static void main(String[] args) throws Exception {
// ServiceConfiguration a = new ServiceConfiguration();
// System.out.println(a.getStagingHost());
CheckMethodProduction a = new CheckMethodProduction();
//a.getFiles("/trunk/data-analysis/RConfiguration/RPackagesManagement/r_deb_pkgs.txt, /trunk/data-analysis/RConfiguration/RPackagesManagement/r_cran_pkgs.txt, /trunk/data-analysis/RConfiguration/RPackagesManagement/r_github_pkgs.txt");
// File aa = new File("OCTAVEBLACKBOX.jar");
// System.out.println(aa.getName());
// System.out.println(aa.getPath());
//a.copyFromDmToSVN(aa);
// if (a.checkMethod("dataminer-ghost-d.dev.d4science.org", "708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548")){
// System.out.println("AAA"); }
//
// if (a.doesExist("/home/gcube/wps_algorithms/algorithms/WINDOWS_BLACK_BOX_EXAMPLE.jar")){
// System.out.println("BBBB");
//
// }
// if (a.doesExist("/home/gcube/wps_algorithms/algorithms/WINDOWS_BLACK_BOX_EXAMPLE_interface.jar")){
// System.out.println("CCCC");}
//
// File aa = new File("/home/gcube/wps_algorithms/algorithms/RBLACKBOX_interface.jar");
// a.copyFromDmToSVN(aa, "Dev");
//
//System.out.println(a.checkMethod("dataminer-ghost-t.pre.d4science.org",
// "2eceaf27-0e22-4dbe-8075-e09eff199bf9-98187548"));
//System.out.println(a.checkMethod("dataminer-proto-ghost.d4science.org",
// "3a23bfa4-4dfe-44fc-988f-194b91071dd2-843339462"));
System.out.println(a.checkMethod("dataminer-ghost-d.dev.d4science.org",
"708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548"));
//Algorithm aa = new Algorithm();
//aa.setName("UDPIPE_WRAPPER");
//System.out.println(a.algoExists(aa));
////
//ServiceConfiguration bp = new ServiceConfiguration();
////
//SecurityTokenProvider.instance.set("708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548");
////
//if (a.checkMethod(bp.getStagingHost(), SecurityTokenProvider.instance.get())&&a.algoExists(aa)); {
//System.out.println("ciao");
//
//}
//
//Algorithm al = new Algorithm();
// al.setName("UDPIPE_WRAPPER");
// a.deleteFiles(al);
}
}

View File

@ -0,0 +1,288 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util.impl;
import java.io.File;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.Configuration;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.util.CheckMethod;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.jcraft.jsch.Channel;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;
import com.jcraft.jsch.SftpException;
public class CheckMethodStaging extends CheckMethod{
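// staging counterpart of CheckMethodProduction: the same SFTP-based checks, pointed at the staging configuration and with extra debug logging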
private Logger logger;
public CheckMethodStaging()
{
this.logger = LoggerFactory.getLogger(CheckMethodStaging.class);
}
@Override
public boolean algoExists(Algorithm algo/*, String env*/) throws Exception{
this.logger.debug("Looking if algo "+algo.getName()+ " exists");
Configuration stagingConfiguration = DMPMClientConfiguratorManager.getInstance().getStagingConfiguration();
File file = new File(stagingConfiguration.getGhostAlgoDirectory()+"/"+algo.getName()+".jar");
File file2 = new File(stagingConfiguration.getGhostAlgoDirectory()+"/"+algo.getName()+"_interface.jar");
this.logger.debug("Looking for files "+file.getPath()+ " "+file.getPath());
System.out.println("First file is located to: "+file.getPath());
System.out.println("Second file is located to: "+file2.getPath());
if ((this.doesExist(file.getPath()/*,env*/)) && (this.doesExist(file2.getPath()/*,env*/)))
{
this.logger.debug("Files found");
this.copyFromDmToSVN(file/*,env*/);
this.copyFromDmToSVN(file2/*,env*/);
System.out.println("Files have been copied to SVN");
return true;
}
else
{
this.logger.debug("Files not found");
System.out.println("Algorithm "+algo.getName()+".jar"+ " and "+algo.getName()+"_interface.jar files are not present at the expected path");
return false;
}
}
@Override
public void deleteFiles(Algorithm a/*,String env*/) throws Exception{
JSch jsch = new JSch();
Session session = null;
Channel channel = null;
ChannelSftp c = null;
Configuration stagingConfiguration = DMPMClientConfiguratorManager.getInstance().getStagingConfiguration();
System.out.println("checking existing in env: " + stagingConfiguration.getHost());
File file = new File(stagingConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+".jar");
File file2 = new File(stagingConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+"_interface.jar");
System.out.println("First file is located to: "+file.getPath());
System.out.println("Second file is located to: "+file2.getPath());
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", stagingConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
channel = session.openChannel("sftp");
channel.connect();
System.out.println("shell channel connected....");
c = (ChannelSftp) channel;
if(doesExist(file.getPath()/*,env*/)&&(doesExist(file2.getPath()/*,env*/))){
c.rm(file.getPath());
c.rm(file2.getPath());
System.out.println("Both the files have been deleted");
}
else System.out.println("Files not found");
channel.disconnect();
c.disconnect();
session.disconnect();
}
@Override
public boolean doesExist(String path/*, String env*/) throws Exception {
this.logger.debug("Looking if file "+path + " exists");
JSch jsch = new JSch();
Session session = null;
Channel channel = null;
ChannelSftp c = null;
boolean success = false;
Configuration stagingConfiguration = DMPMClientConfiguratorManager.getInstance().getStagingConfiguration();
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
this.logger.debug("Staging configuration host "+stagingConfiguration.getHost());
session = jsch.getSession("root",stagingConfiguration.getHost() );
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
channel = session.openChannel("sftp");
channel.connect();
System.out.println("shell channel connected....");
c = (ChannelSftp) channel;
//SftpATTRS is = null;
System.out.println(path);
try {
c.lstat(path);
this.logger.debug("File found");
success = true;
} catch (SftpException e)
{
this.logger.error("File not found",e);
if (e.id == ChannelSftp.SSH_FX_NO_SUCH_FILE)
{
// file doesn't exist
success = false;
}
//success = true; // something else went wrong
}
channel.disconnect();
c.disconnect();
session.disconnect();
this.logger.debug("Operation result "+success);
return success;
}
@Override
public void copyFromDmToSVN(File a/*,String env*/) throws Exception {
JSch jsch = new JSch();
Session session = null;
SVNUpdater svnUpdater = new SVNUpdaterStaging();
Configuration stagingConfiguration = DMPMClientConfiguratorManager.getInstance().getStagingConfiguration();
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", stagingConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
Channel channel = session.openChannel("sftp");
channel.connect();
ChannelSftp sftp = (ChannelSftp) channel;
sftp.cd(stagingConfiguration.getGhostAlgoDirectory());
System.out.println("REMOTE : "+stagingConfiguration.getGhostAlgoDirectory()+"/"+a.getName());
System.out.println("LOCAL : /tmp/"+a.getName());
sftp.get(stagingConfiguration.getGhostAlgoDirectory()+"/"+a.getName(),"/tmp/"+a.getName());
channel.disconnect();
session.disconnect();
File f = new File("/tmp/"+a.getName());
svnUpdater.updateAlgorithmFiles(f);
f.delete();
}
public static void main(String[] args) throws Exception {
// ServiceConfiguration a = new ServiceConfiguration();
// System.out.println(a.getStagingHost());
CheckMethodStaging a = new CheckMethodStaging();
//a.getFiles("/trunk/data-analysis/RConfiguration/RPackagesManagement/r_deb_pkgs.txt, /trunk/data-analysis/RConfiguration/RPackagesManagement/r_cran_pkgs.txt, /trunk/data-analysis/RConfiguration/RPackagesManagement/r_github_pkgs.txt");
// File aa = new File("OCTAVEBLACKBOX.jar");
// System.out.println(aa.getName());
// System.out.println(aa.getPath());
//a.copyFromDmToSVN(aa);
// if (a.checkMethod("dataminer-ghost-d.dev.d4science.org", "708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548")){
// System.out.println("AAA"); }
//
// if (a.doesExist("/home/gcube/wps_algorithms/algorithms/WINDOWS_BLACK_BOX_EXAMPLE.jar")){
// System.out.println("BBBB");
//
// }
// if (a.doesExist("/home/gcube/wps_algorithms/algorithms/WINDOWS_BLACK_BOX_EXAMPLE_interface.jar")){
// System.out.println("CCCC");}
//
// File aa = new File("/home/gcube/wps_algorithms/algorithms/RBLACKBOX_interface.jar");
// a.copyFromDmToSVN(aa, "Dev");
//
//System.out.println(a.checkMethod("dataminer-ghost-t.pre.d4science.org",
// "2eceaf27-0e22-4dbe-8075-e09eff199bf9-98187548"));
//System.out.println(a.checkMethod("dataminer-proto-ghost.d4science.org",
// "3a23bfa4-4dfe-44fc-988f-194b91071dd2-843339462"));
System.out.println(a.checkMethod("dataminer-ghost-d.dev.d4science.org",
"708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548"));
//Algorithm aa = new Algorithm();
//aa.setName("UDPIPE_WRAPPER");
//System.out.println(a.algoExists(aa));
////
//ServiceConfiguration bp = new ServiceConfiguration();
////
//SecurityTokenProvider.instance.set("708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548");
////
//if (a.checkMethod(bp.getStagingHost(), SecurityTokenProvider.instance.get())&&a.algoExists(aa)); {
//System.out.println("ciao");
//
//}
//
//Algorithm al = new Algorithm();
// al.setName("UDPIPE_WRAPPER");
// a.deleteFiles(al);
}
}

View File

@ -0,0 +1,14 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util.impl;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.util.ClusterBuilder;
public class ClusterBuilderProduction extends ClusterBuilder{
public ClusterBuilderProduction() {
super (DMPMClientConfiguratorManager.getInstance().getProductionConfiguration());
}
}

View File

@ -0,0 +1,13 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util.impl;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.util.ClusterBuilder;
public class ClusterBuilderStaging extends ClusterBuilder{
public ClusterBuilderStaging() {
super (DMPMClientConfiguratorManager.getInstance().getStagingConfiguration());
}
}
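The two builders differ only in the Configuration handed to the ClusterBuilder base class, so environment selection reduces to picking a constructor. A minimal sketch, assuming a hypothetical boolean staging flag supplied by the caller:

ClusterBuilder builder = staging ? new ClusterBuilderStaging() : new ClusterBuilderProduction();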

View File

@ -0,0 +1,30 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util.impl;
import org.gcube.dataanalysis.dataminer.poolmanager.util.NotificationHelper;
//import scala.actors.threadpool.Arrays;
public class NotificationHelperProduction extends NotificationHelper{
// private Exception executionException;
private String getSubjectHeader() {
return "[DataMinerGhostProductionInstallationRequestReport]";
}
@Override
public String getSuccessSubject() {
return this.getSubjectHeader()+" is SUCCESS";
}
@Override
public String getFailedSubject() {
return this.getSubjectHeader()+" is FAILED";
}
}

View File

@ -0,0 +1,32 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util.impl;
import org.gcube.dataanalysis.dataminer.poolmanager.util.NotificationHelper;
//import scala.actors.threadpool.Arrays;
public class NotificationHelperStaging extends NotificationHelper {
// private Exception executionException;
private String getSubjectHeader() {
return "[DataMinerGhostStagingInstallationRequestReport]";
}
@Override
public String getSuccessSubject() {
return this.getSubjectHeader()+" is SUCCESS";
}
@Override
public String getFailedSubject() {
return this.getSubjectHeader()+" is FAILED";
}
}
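Both helpers expose the same overrides, so callers can stay environment-agnostic once the right instance is constructed. A minimal sketch, assuming a hypothetical boolean success flag:

NotificationHelper nh = new NotificationHelperStaging();
String subject = success ? nh.getSuccessSubject() : nh.getFailedSubject();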

View File

@ -0,0 +1,21 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util.impl;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.tmatesoft.svn.core.SVNException;
/**
* Created by ggiammat on 5/9/17.
*/
public class SVNUpdaterProduction extends SVNUpdater{
public SVNUpdaterProduction() throws SVNException {
super (DMPMClientConfiguratorManager.getInstance().getProductionConfiguration());
}
}

View File

@ -0,0 +1,21 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util.impl;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.tmatesoft.svn.core.SVNException;
/**
* Created by ggiammat on 5/9/17.
*/
public class SVNUpdaterStaging extends SVNUpdater{
public SVNUpdaterStaging() throws SVNException {
super (DMPMClientConfiguratorManager.getInstance().getStagingConfiguration());
}
}
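Both subclass constructors declare SVNException, presumably because the SVNUpdater base class initialises the repository connection at construction time. A minimal caller sketch, with a placeholder jar path:

try {
    SVNUpdater updater = new SVNUpdaterStaging();
    updater.updateAlgorithmFiles(new File("/tmp/SOME_ALGORITHM.jar")); // placeholder path
} catch (SVNException e) {
    // repository unreachable or authentication rejected; abort the installation step
}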

View File

@ -6,6 +6,7 @@ import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.util.AlgorithmBuilder;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.SVNUpdaterProduction;
import org.tmatesoft.svn.core.SVNException;
public class JobTest {
@ -14,7 +15,7 @@ public class JobTest {
ScopeProvider.instance.set("/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab");
SVNUpdater svnUpdater = new SVNUpdater();
SVNUpdater svnUpdater = new SVNUpdaterProduction();
Algorithm algo = AlgorithmBuilder.create("http://data.d4science.org/dENQTTMxdjNZcGRpK0NHd2pvU0owMFFzN0VWemw3Zy9HbWJQNStIS0N6Yz0");
//test phase

View File

@ -5,6 +5,7 @@ import java.io.IOException;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.util.AlgorithmBuilder;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.SVNUpdaterStaging;
import org.tmatesoft.svn.core.SVNException;
/**
@ -17,7 +18,7 @@ public class SVNTests {
public static void main(String[] args) throws SVNException, IOException, InterruptedException {
SVNUpdater svnUpdater = new SVNUpdater();
SVNUpdater svnUpdater = new SVNUpdaterStaging();
Algorithm algo = AlgorithmBuilder.create("http://data.d4science.org/YjJ3TmJab1dqYzVoTmppdjlsK0l0b1ZXWGtzWlQ1NHNHbWJQNStIS0N6Yz0");
//algo.setClazz(algo.getClazz() + "TEST");
@ -27,7 +28,7 @@ public class SVNTests {
//System.out.println(algo.getCategory());
//System.out.println(algo.getSkipJava());
svnUpdater.updateSVNAlgorithmList("/trunk/data-analysis/DataMinerConfiguration/algorithms/dev/algorithms", algo, "/gcube/devNext/NextNext", algo.getCategory(), algo.getAlgorithmType(), "");
//svnUpdater.updateSVNAlgorithmList("/trunk/data-analysis/DataMinerConfiguration/algorithms/dev/algorithms", algo, "/gcube/devNext/NextNext", algo.getCategory(), algo.getAlgorithmType(), "");
//svnUpdater.updateSVNRProtoAlgorithmList(algo, "/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab", "Dataminer Pool Manager", "Proto");
//svnUpdater.readRPRotoDeps(algo);