This commit is contained in:
Ciro Formisano 2017-11-29 15:29:01 +00:00
parent f61562bcd9
commit 45edc13078
25 changed files with 1087 additions and 1002 deletions

View File

@ -12,7 +12,7 @@
<groupId>org.gcube.dataAnalysis</groupId>
<artifactId>dataminer-pool-manager</artifactId>
<packaging>war</packaging>
<version>2.2.0-SNAPSHOT</version>
<version>2.3.0-SNAPSHOT</version>
<name>dataminer-pool-manager</name>
<description>

View File

@ -1,41 +1,16 @@
package org.gcube.dataanalysis.dataminer.poolmanager.ansible;
***REMOVED***
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
***REMOVED***
import java.io.InputStream;
***REMOVED***
import java.io.PrintStream;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
***REMOVED***
***REMOVED***
import java.util.Scanner;
import java.util.Set;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Inventory;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Playbook;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleSerializeHelper;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.AlgorithmSet;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
import org.gcube.dataanalysis.dataminer.poolmanager.service.DataminerPoolManager;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.auth.ISVNAuthenticationManager;
import org.tmatesoft.svn.core.internal.wc.SVNFileUtil;
import org.tmatesoft.svn.core.internal.wc.admin.SVNChecksumInputStream;
import org.tmatesoft.svn.core.io.ISVNEditor;
import org.tmatesoft.svn.core.io.SVNRepository;
import org.tmatesoft.svn.core.io.SVNRepositoryFactory;
import org.tmatesoft.svn.core.io.diff.SVNDeltaGenerator;
import org.tmatesoft.svn.core.wc.SVNWCUtil;
/**
* This class is responsible for the interface with ansible, retrieving log,
@ -72,12 +47,12 @@ public class AnsibleWorker {
this.ensureWorkStructure();
***REMOVED***
public File getWorkdir() {
return this.workerRoot;
***REMOVED***
***REMOVED*** public File getWorkdir() {
***REMOVED*** return this.workerRoot;
***REMOVED*** ***REMOVED***
public File getRolesDir() {
return new File(this.getWorkdir(), ROLES_DIR);
return new File(this.workerRoot, ROLES_DIR);
***REMOVED***
public String getWorkerId() {
@ -86,20 +61,20 @@ public class AnsibleWorker {
public void ensureWorkStructure() {
***REMOVED*** generate root
this.getWorkdir().mkdirs();
this.workerRoot.mkdirs();
***REMOVED***
public void removeWorkStructure() {
***REMOVED*** remove the working dir
this.getWorkdir().delete();
this.workerRoot.delete();
***REMOVED***
public File getPlaybookFile() {
return new File(this.getWorkdir(), PLAYBOOK_NAME);
return new File(this.workerRoot, PLAYBOOK_NAME);
***REMOVED***
public File getInventoryFile() {
return new File(this.getWorkdir(), INVENTORY_NAME);
return new File(this.workerRoot, INVENTORY_NAME);
***REMOVED***
@ -122,7 +97,7 @@ public class AnsibleWorker {
public int execute(PrintStream ps)
throws IOException, InterruptedException, SVNException {
System.out.println(this.getWorkdir());
System.out.println(this.workerRoot);
try {
Process p = Runtime.getRuntime().exec("ansible-playbook -v -i " + this.getInventoryFile().getAbsolutePath()
+ " " + this.getPlaybookFile().getAbsolutePath());
@ -149,6 +124,7 @@ public class AnsibleWorker {
while (sc.hasNextLine()) {
dest.println(sc.nextLine());
***REMOVED***
sc.close();
***REMOVED***
***REMOVED***).start();
***REMOVED***
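
Note: the hunk above adds sc.close() to the background thread that copies the ansible-playbook output into the job log. A minimal, JDK-only sketch of that pattern follows; the inventory and playbook paths are placeholders, not values from the commit.

import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.util.Scanner;

public class ProcessLogPump {

    // Copy everything the child process writes into `dest`,
    // closing the Scanner once the stream is exhausted (the fix this hunk introduces).
    static void pump(InputStream src, PrintStream dest) {
        new Thread(() -> {
            Scanner sc = new Scanner(src);
            while (sc.hasNextLine()) {
                dest.println(sc.nextLine());
            }
            sc.close();
        }).start();
    }

    public static void main(String[] args) throws IOException, InterruptedException {
        // "inventory.ini" and "playbook.yml" are placeholder paths.
        Process p = Runtime.getRuntime()
                .exec("ansible-playbook -v -i inventory.ini playbook.yml");
        pump(p.getInputStream(), System.out);
        pump(p.getErrorStream(), System.err);
        int rc = p.waitFor();
        System.out.println("ansible-playbook exited with code " + rc);
    }
}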

View File

@ -1,10 +1,7 @@
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge;
import java.io.File;
import java.io.FileOutputStream;
***REMOVED***
import java.io.InputStream;
import java.io.PrintStream;
import java.util.Collection;
***REMOVED***
import java.util.Map;
@ -32,21 +29,19 @@ import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.AlgorithmSet;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.comparator.HostComparator;
***REMOVED***
import org.tmatesoft.svn.core.SVNException;
public class AnsibleBridge {
private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(AnsibleBridge.class);
***REMOVED***private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(AnsibleBridge.class);
private String dpmRoot;
public AnsibleBridge() {
this(System.getProperty("user.home")+File.separator+"dataminer-pool-manager");
***REMOVED***this(System.getProperty("/home/gcube/dataminer-pool-manager"));
***REMOVED***
***REMOVED*** public AnsibleBridge() {
***REMOVED*** this(System.getProperty("user.home")+File.separator+"dataminer-pool-manager");
***REMOVED*** ***REMOVED***this(System.getProperty("/home/gcube/dataminer-pool-manager"));
***REMOVED***
***REMOVED*** ***REMOVED***
public AnsibleBridge(String root) {
this.dpmRoot = root;

View File

@ -5,34 +5,71 @@ import java.io.FileOutputStream;
***REMOVED***
import java.io.PrintStream;
import java.io.PrintWriter;
***REMOVED***
***REMOVED***
import java.util.Collection;
import java.util.UUID;
***REMOVED***
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.AnsibleWorker;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleBridge;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.Configuration;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
import org.gcube.dataanalysis.dataminer.poolmanager.util.CheckMethod;
import org.gcube.dataanalysis.dataminer.poolmanager.util.NotificationHelper;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SendMail;
import org.gcube.dataanalysis.dataminer.poolmanager.util.exception.EMailException;
***REMOVED***
***REMOVED***
import org.tmatesoft.svn.core.SVNException;
public abstract class DMPMJob {
private Configuration configuration;
private String dmpmHomeDirectory;
private SVNUpdater svnUpdater;
private File jobLogs;
private String id;
private Algorithm algorithm;
private Cluster cluster;
private String vREName;
private String category;
private String algorithm_type;
***REMOVED***
protected SVNUpdater svnUpdater;
protected File jobLogs;
private enum STATUS
{
PROGRESS ("IN PROGRESS"),
COMPLETED ("COMPLETED"),
FAILED ("FAILED");
private String status;
STATUS (String status)
{
this.status = status;
***REMOVED***
***REMOVED***
protected String id;
public DMPMJob(SVNUpdater svnUpdater,Configuration configuration,Algorithm algorithm, Cluster cluster,String vREName,
String category, String algorithm_type){
this.logger = LoggerFactory.getLogger(DMPMJob.class);
public DMPMJob(SVNUpdater svnUpdater){
this.configuration = configuration;
this.algorithm = algorithm;
this.cluster = cluster;
this.vREName = vREName;
this.category = category;
this.algorithm_type = algorithm_type;
this.svnUpdater = svnUpdater;
this.dmpmHomeDirectory = new String (System.getProperty("user.home")+File.separator+"dataminer-pool-manager");
this.id = UUID.randomUUID().toString();
***REMOVED***TODO: dmpm work directory should be loaded from configuration file
this.jobLogs = new File(System.getProperty("user.home")+File.separator+"dataminer-pool-manager"+File.separator+"jobs");
this.jobLogs = new File(this.dmpmHomeDirectory+File.separator+"jobs");
this.jobLogs.mkdirs();
***REMOVED***
@ -56,7 +93,7 @@ public abstract class DMPMJob {
Cluster dataminerCluster,
boolean includeAlgorithmDependencies,
String user){
AnsibleBridge ansibleBridge = new AnsibleBridge();
AnsibleBridge ansibleBridge = new AnsibleBridge(this.dmpmHomeDirectory);
try {
return ansibleBridge.createWorker(algo, dataminerCluster, includeAlgorithmDependencies, user);
***REMOVED*** catch (IOException e) {
@ -66,14 +103,148 @@ public abstract class DMPMJob {
***REMOVED***
protected abstract void execute();
protected int executeAnsibleWorker(AnsibleWorker worker) throws IOException, InterruptedException, SVNException{
File path = new File(worker.getWorkdir() + File.separator + "jobs");
path.mkdirs();
public void setStatusInformation(STATUS exitStatus) {
File n = new File(this.jobLogs + File.separator + this.id);
FileOutputStream fos = new FileOutputStream(n, true);
try
{
File statusFile = new File (this.jobLogs,this.id + "_exitStatus");
***REMOVED***File m = new File ( this.jobLogs + File.separator + this.id + "_exitStatus");
PrintWriter writer = new PrintWriter(statusFile, "UTF-8");
writer.println(exitStatus.status);
writer.close();
***REMOVED*** catch (Exception e)
{
this.logger.error ("Unable to update exit status file with status "+exitStatus.status,e);
***REMOVED***
***REMOVED***
private void updateLogFile (File logFile, String message)
{
try
{
PrintWriter writer = new PrintWriter(logFile,"UTF-8");
writer.print(message);
writer.close();
***REMOVED*** catch (Exception e)
{
this.logger.error("Unable to log the error message: "+message,e);
***REMOVED***
***REMOVED***
protected abstract void execute ();
private boolean preInstallation (SendMail sm,NotificationHelper nh, File logFile ) throws SVNException, EMailException
{
this.logger.debug("Checking dependencies...");
Collection<String> undefinedDependencies = this.svnUpdater.getUndefinedDependencies(
this.svnUpdater.getDependencyFile(this.algorithm.getLanguage()),
this.algorithm.getDependencies());
if (!undefinedDependencies.isEmpty())
{
this.logger.debug("Some dependencies are not defined");
String message = "Following dependencies are not defined:\n";
for (String n : undefinedDependencies) {
message += "\n" + n +"\n";
***REMOVED***
this.setStatusInformation(STATUS.FAILED);
String errorMessage = nh.getFailedBody(message+"\n\n"+this.buildInfo());
this.updateLogFile(logFile, errorMessage);
sm.sendNotification(nh.getFailedSubject() +" for "+this.algorithm.getName()+ " algorithm", errorMessage);
return false;
***REMOVED***
else return true;
***REMOVED***
private void installation (SendMail sm,NotificationHelper nh,CheckMethod methodChecker,File logFile ) throws Exception
{
methodChecker.deleteFiles(this.algorithm/*, env*/);
int ret = this.executeAnsibleWorker(createWorker(this.algorithm, this.cluster, false, "root"),logFile);
System.out.println("Return code= "+ret);
if (ret != 0)
{
this.logger.debug("Ansible work failed, return code "+ret);
this.setStatusInformation(STATUS.FAILED);
String errorMessage = nh.getFailedBody("Installation failed. Return code=" + ret)+"\n\n"+this.buildInfo();
sm.sendNotification(nh.getFailedSubject() + " for "+this.algorithm.getName()+ " algorithm",errorMessage);
***REMOVED***
else if (ret == 0)
{
this.logger.debug("Operation completed");
this.setStatusInformation(STATUS.PROGRESS);
this.logger.debug("Checking the method...");
if (methodChecker.checkMethod(this.configuration.getHost(), SecurityTokenProvider.instance.get())&&(methodChecker.algoExists(this.algorithm)))
{
this.logger.debug("Method OK and algo exists");
System.out.println("Interface check ok!");
System.out.println("Both the files exist at the correct path!");
this.svnUpdater.updateSVNAlgorithmList(this.algorithm, this.vREName,this.category, this.algorithm_type,
this.algorithm.getFullname());
this.setStatusInformation(STATUS.COMPLETED);
sm.sendNotification(nh.getSuccessSubject() + " for "+this.algorithm.getName()+ " algorithm", nh.getSuccessBody("\n\n"+this.buildInfo()));
return;
***REMOVED*** else
{
this.logger.debug("Operation failed");
this.setStatusInformation(STATUS.FAILED);
sm.sendNotification(nh.getFailedSubject() + " for "+this.algorithm.getName()+ " algorithm",
nh.getFailedBody(
"\n"+
"Installation completed but DataMiner Interface not working correctly or files "
+ this.algorithm.getName() + ".jar and " + this.algorithm.getName()
+ "_interface.jar not availables at the expected path")+"\n\n"+this.buildInfo());
***REMOVED***
***REMOVED***
***REMOVED***
protected void execute(NotificationHelper nh, CheckMethod methodChecker)
{
SendMail sm = new SendMail();
try {
this.logger.debug("Pre installation operations");
File logFile = new File(this.jobLogs,this.id);
***REMOVED***File logFile = new File(this.jobLogs + File.separator + this.id);
boolean preInstallationResponse = preInstallation(sm, nh, logFile);
this.logger.debug("Pre installation operation completed with result "+preInstallationResponse);
if (preInstallationResponse)
{
this.logger.debug("Installation...");
installation(sm, nh, methodChecker, logFile);
this.logger.debug("Installation completed");
***REMOVED***
***REMOVED*** catch (EMailException eme)
{
this.logger.error("Operation failed and unable to send notification email",eme);
***REMOVED***
catch (Exception e) {
e.printStackTrace();
***REMOVED***
***REMOVED***
protected int executeAnsibleWorker(AnsibleWorker worker, File logFile) throws IOException, InterruptedException, SVNException{
FileOutputStream fos = new FileOutputStream(logFile, true);
PrintStream ps = new PrintStream(fos);
***REMOVED*** File m = new File(this.jobLogs + File.separator + this.id + "_exitStatus");
@ -81,5 +252,19 @@ public abstract class DMPMJob {
return worker.execute(ps);
***REMOVED***
public String buildInfo() {
return
"\n"+
"Algorithm details:\n"+"\n"+
"User: "+this.algorithm.getFullname()+"\n"+
"Algorithm name: "+this.algorithm.getName()+"\n"+
"Staging DataMiner Host: "+ this.configuration.getHost()+"\n"+
"Caller VRE: "+ScopeProvider.instance.get()+"\n"+
"Target VRE: "+this.vREName+"\n";
***REMOVED***
***REMOVED***
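
Note: the refactored DMPMJob above centralizes job state in a STATUS enum and writes it to a <job-id>_exitStatus file under the jobs directory. Below is a minimal, JDK-only sketch of that mechanism; the class and method names (JobStatusFile, setStatus) are illustrative and not part of the commit.

import java.io.File;
import java.io.PrintWriter;
import java.util.UUID;

public class JobStatusFile {

    // Mirrors the commit's STATUS enum: each constant carries the text written to the status file.
    enum Status {
        PROGRESS("IN PROGRESS"), COMPLETED("COMPLETED"), FAILED("FAILED");
        final String label;
        Status(String label) { this.label = label; }
    }

    private final File jobLogs;
    private final String id = UUID.randomUUID().toString();

    JobStatusFile(File jobLogs) {
        this.jobLogs = jobLogs;
        this.jobLogs.mkdirs();
    }

    // Overwrite <jobLogs>/<id>_exitStatus with the current status label.
    void setStatus(Status status) {
        File statusFile = new File(jobLogs, id + "_exitStatus");
        try (PrintWriter writer = new PrintWriter(statusFile, "UTF-8")) {
            writer.println(status.label);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        JobStatusFile job = new JobStatusFile(
                new File(System.getProperty("user.home"), "dataminer-pool-manager/jobs"));
        job.setStatus(Status.PROGRESS);
        job.setStatus(Status.COMPLETED);
    }
}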

View File

@ -8,7 +8,10 @@ import java.util.Scanner;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.util.ClusterBuilder;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.ClusterBuilderProduction;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.ClusterBuilderStaging;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.SVNUpdaterProduction;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.SVNUpdaterStaging;
***REMOVED***
***REMOVED***
import org.tmatesoft.svn.core.SVNException;
@ -17,14 +20,15 @@ import org.tmatesoft.svn.core.SVNException;
public class DataminerPoolManager {
***REMOVED***
private SVNUpdater svnUpdater;
private SVNUpdaterStaging svnUpdaterStaging;
private SVNUpdaterProduction svnUpdaterProduction;
public DataminerPoolManager() {
this.logger = LoggerFactory.getLogger(this.getClass());
try {
***REMOVED***TODO: read this from configuration
this.svnUpdater = new SVNUpdater();
this.svnUpdaterStaging = new SVNUpdaterStaging();
this.svnUpdaterProduction = new SVNUpdaterProduction();
***REMOVED*** catch (SVNException e) {
e.printStackTrace();
***REMOVED***
@ -37,9 +41,10 @@ public class DataminerPoolManager {
this.logger.debug("Algo "+algo);
this.logger.debug("Category "+category);
this.logger.debug("Algo type "+algorithm_type);
Cluster stagingCluster = ClusterBuilder.getStagingDataminerCluster();
ClusterBuilder stagingClusterBuilder = new ClusterBuilderStaging();
Cluster stagingCluster = stagingClusterBuilder.getDataminerCluster();
***REMOVED***Cluster rProtoCluster = ClusterBuilder.getRProtoCluster();
DMPMJob job = new StagingJob(this.svnUpdater, algo, stagingCluster, /*rProtoCluster,*/ targetVRE, category, algorithm_type/*,env*/);
DMPMJob job = new StagingJob(this.svnUpdaterStaging, algo, stagingCluster, /*rProtoCluster,*/ targetVRE, category, algorithm_type/*,env*/);
String id = job.start();
return id;
***REMOVED***
@ -50,8 +55,9 @@ public class DataminerPoolManager {
this.logger.debug("Algo "+algo);
this.logger.debug("Category "+category);
this.logger.debug("Algo type "+algorithm_type);
Cluster prodCluster = ClusterBuilder.getProdDataminerCluster();
DMPMJob job = new ProductionPublishingJob(this.svnUpdater, algo, prodCluster, targetVRE, category, algorithm_type/*,env*/);
ClusterBuilder productionClusterBuilder = new ClusterBuilderProduction();
Cluster prodCluster = productionClusterBuilder.getDataminerCluster();
DMPMJob job = new ProductionPublishingJob(this.svnUpdaterProduction, algo, prodCluster, targetVRE, category, algorithm_type/*,env*/);
String id = job.start();
return id;
***REMOVED***

View File

@ -1,164 +1,44 @@
package org.gcube.dataanalysis.dataminer.poolmanager.service;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.util.Collection;
***REMOVED***
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.util.CheckMethod;
import org.gcube.dataanalysis.dataminer.poolmanager.util.NotificationHelper;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SendMail;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.CheckMethodProduction;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.NotificationHelperProduction;
***REMOVED***
***REMOVED***
public class ProductionPublishingJob extends DMPMJob {
private Algorithm algorithm;
private Cluster prodCluster;
private String targetVREName;
private String category;
private String algorithm_type;
***REMOVED***private String targetVREToken;
***REMOVED***private String env;
***REMOVED***
public ProductionPublishingJob(SVNUpdater svnUpdater, Algorithm algorithm,
Cluster prodCluster, String targetVREName, String category,String algorithm_type/*, String env*/) throws FileNotFoundException, UnsupportedEncodingException {
super(svnUpdater);
this.jobLogs = new File(
System.getProperty("user.home") + File.separator + "dataminer-pool-manager" + File.separator + "jobs");
this.jobLogs.mkdirs();
this.algorithm = algorithm;
this.prodCluster = prodCluster;
this.targetVREName = targetVREName;
this.category = category;
this.algorithm_type = algorithm_type;
***REMOVED***this.targetVREToken = targetVREToken;
***REMOVED***this.env= env;
this.getStatus(0);
super(svnUpdater,DMPMClientConfiguratorManager.getInstance().getProductionConfiguration(),algorithm,prodCluster,targetVREName,category,algorithm_type);
this.logger = LoggerFactory.getLogger(StagingJob.class);***REMOVED*** this.jobLogs = new File(
***REMOVED***
@Override
protected void execute() {
SendMail sm = new SendMail();
NotificationHelper nh = new NotificationHelper();
CheckMethod b = new CheckMethod();
try {
Collection<String> undefinedDependencies = this.svnUpdater.getUndefinedDependencies(
this.svnUpdater.getDependencyFileProd(this.algorithm.getLanguage()/*,env*/),
this.algorithm.getDependencies());
if (!undefinedDependencies.isEmpty()) {
String message = "Following dependencies are not defined:\n";
for (String n : undefinedDependencies) {
message += "\n" + n +"\n";
***REMOVED***
this.getStatus(2);
sm.sendNotification(nh.getFailedSubjectRelease() +" for "+this.algorithm.getName()+ " algorithm", nh.getFailedBody(message+"\n\n"+this.buildInfo()));
return;
***REMOVED***
b.deleteFilesProd(this.algorithm);
int ret = this.executeAnsibleWorker(createWorker(this.algorithm, this.prodCluster, false, "root"));
if (ret != 0) {
this.getStatus(2);
sm.sendNotification(nh.getFailedSubjectRelease() + " for "+this.algorithm.getName()+ " algorithm", nh.getFailedBody("Installation failed. Return code=" + ret)+"\n\n"+this.buildInfo());
return;
***REMOVED***
if (ret == 0) {
this.getStatus(0);
if (b.checkMethod(DMPMClientConfiguratorManager.getInstance().getProductionConfiguration().getHost(), SecurityTokenProvider.instance.get())&&(b.algoExistsProd(this.algorithm/*, env*/))) {
System.out.println("Interface check ok!");
System.out.println("Both the files exist at the correct path!");
this.svnUpdater.updateSVNProdAlgorithmList(this.algorithm, this.targetVREName,this.category, this.algorithm_type,
this.algorithm.getFullname()/*, env*/);
this.getStatus(9);
sm.sendNotification(nh.getSuccessSubjectRelease() + " for "+this.algorithm.getName()+ " algorithm", nh.getSuccessBody("\n\n"+this.buildInfo()));
return;
***REMOVED*** else
this.getStatus(2);
sm.sendNotification(nh.getFailedSubjectRelease() + " for "+this.algorithm.getName()+ " algorithm",
nh.getFailedBody(
"\n"+
"Installation completed but DataMiner Interface not working correctly or files "
+ this.algorithm.getName() + ".jar and " + this.algorithm.getName()
+ "_interface.jar not availables at the expected path")+"\n\n"+this.buildInfo());
return;
***REMOVED***
***REMOVED*** catch (Exception e) {
try {
this.getStatus(0);
***REMOVED*** catch (FileNotFoundException e1) {
***REMOVED*** TODO Auto-generated catch block
e1.printStackTrace();
***REMOVED*** catch (UnsupportedEncodingException e1) {
***REMOVED*** TODO Auto-generated catch block
e1.printStackTrace();
***REMOVED***
e.printStackTrace();
***REMOVED***
this.logger.debug("Executing staging job...");
super.execute(new NotificationHelperProduction(), new CheckMethodProduction());
***REMOVED***
public String getStatus(int exitstatus) throws FileNotFoundException, UnsupportedEncodingException {
File m = new File(this.jobLogs + File.separator + this.id + "_exitStatus");
PrintWriter writer = new PrintWriter(m, "UTF-8");
String response = "";
if (exitstatus == 0) {
response = "IN PROGRESS";
writer.println(response);
***REMOVED***writer.close();
***REMOVED***
if (exitstatus == 9) {
response = "COMPLETED";
writer.println(response);
***REMOVED***writer.close();
***REMOVED***
if (exitstatus == 2) {
response = "FAILED";
writer.println(response);
***REMOVED***writer.close();
***REMOVED***
writer.close();
return response;
***REMOVED***
public String buildInfo() throws UnsupportedEncodingException{
return
"\n"+
"Algorithm details:\n"+"\n"+
"User: "+this.algorithm.getFullname()+"\n"+
"Algorithm name: "+this.algorithm.getName()+"\n"+
"Staging DataMiner Host: "+ DMPMClientConfiguratorManager.getInstance().getProductionConfiguration().getHost()+"\n"+
"Caller VRE: "+ScopeProvider.instance.get()+"\n"+
"Target VRE: "+this.targetVREName+"\n";
***REMOVED***

View File

@ -1,184 +1,42 @@
package org.gcube.dataanalysis.dataminer.poolmanager.service;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.util.Collection;
***REMOVED***
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.util.CheckMethod;
import org.gcube.dataanalysis.dataminer.poolmanager.util.NotificationHelper;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SendMail;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.CheckMethodStaging;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.NotificationHelperStaging;
***REMOVED***
***REMOVED***
public class StagingJob extends DMPMJob {
private Algorithm algorithm;
private Cluster stagingCluster;
***REMOVED*** private Cluster rProtoCluster;
private String rProtoVREName;
***REMOVED***private String env;
private String category;
private String algorithm_type;
***REMOVED***
public StagingJob(SVNUpdater svnUpdater, Algorithm algorithm,
Cluster stagingCluster, /* Cluster rProtoCluster, */
String rProtoVREName, String category, String algorithm_type/*, String env*/) throws FileNotFoundException, UnsupportedEncodingException {
super(svnUpdater);
super(svnUpdater,DMPMClientConfiguratorManager.getInstance().getStagingConfiguration(),algorithm,stagingCluster,rProtoVREName,category,algorithm_type);
this.logger = LoggerFactory.getLogger(StagingJob.class);
this.jobLogs = new File(
System.getProperty("user.home") + File.separator + "dataminer-pool-manager" + File.separator + "jobs");
this.jobLogs.mkdirs();
this.algorithm = algorithm;
this.stagingCluster = stagingCluster;
***REMOVED*** this.rProtoCluster = rProtoCluster;
this.rProtoVREName = rProtoVREName;
***REMOVED***this.env = env;
this.category = category;
this.algorithm_type = algorithm_type;
***REMOVED***File m = new File(this.jobLogs + File.separator + this.id + "_exitStatus");
***REMOVED***PrintWriter writer = new PrintWriter(m, "UTF-8");
this.getStatus(0);
***REMOVED***writer.close();
***REMOVED***
@Override
protected void execute() {
this.logger.debug("Executing staging job...");
super.execute(new NotificationHelperStaging(), new CheckMethodStaging());
CheckMethod methodChecker = new CheckMethod();
SendMail sm = new SendMail();
NotificationHelper nh = new NotificationHelper();
try {
this.logger.debug("Checking dependencies...");
Collection<String> undefinedDependencies = this.svnUpdater.getUndefinedDependencies(
this.svnUpdater.getDependencyFile(this.algorithm.getLanguage()/*,env*/),
this.algorithm.getDependencies());
if (!undefinedDependencies.isEmpty())
{
this.logger.debug("Some dependencies are not defined");
String message = "Following dependencies are not defined:\n";
for (String n : undefinedDependencies) {
message += "\n" + n +"\n";
***REMOVED***
this.getStatus(2);
sm.sendNotification(nh.getFailedSubject() +" for "+this.algorithm.getName()+ " algorithm", nh.getFailedBody(message+"\n\n"+this.buildInfo()));
return;
***REMOVED***
***REMOVED***before the installation to check if the files exist
methodChecker.deleteFiles(this.algorithm/*, env*/);
int ret = this.executeAnsibleWorker(createWorker(this.algorithm, this.stagingCluster, false, "root"));
System.out.println("Return code= "+ret);
if (ret != 0)
{
this.logger.debug("Ansible work failed, return code "+ret);
this.getStatus(2);
sm.sendNotification(nh.getFailedSubject() + " for "+this.algorithm.getName()+ " algorithm", nh.getFailedBody("Installation failed. Return code=" + ret)+"\n\n"+this.buildInfo());
return;
***REMOVED***
if (ret == 0)
{
this.logger.debug("Operation completed");
this.getStatus(0);
***REMOVED***System.out.println("1 - Checking existing in env: "+ env);
***REMOVED***System.out.println("2 - Checking existing in env: "+ this.env);
this.logger.debug("Checking the method...");
if (methodChecker.checkMethod(DMPMClientConfiguratorManager.getInstance().getStagingConfiguration().getHost(), SecurityTokenProvider.instance.get())&&(methodChecker.algoExists(this.algorithm)))
{
this.logger.debug("Method OK and algo exists");
System.out.println("Interface check ok!");
System.out.println("Both the files exist at the correct path!");
this.svnUpdater.updateSVNStagingAlgorithmList(this.algorithm, this.rProtoVREName,this.category, this.algorithm_type,
this.algorithm.getFullname()/*, env*/);
this.getStatus(9);
sm.sendNotification(nh.getSuccessSubject() + " for "+this.algorithm.getName()+ " algorithm", nh.getSuccessBody("\n\n"+this.buildInfo()));
return;
***REMOVED*** else
{
this.logger.debug("Operation failed");
this.getStatus(2);
sm.sendNotification(nh.getFailedSubject() + " for "+this.algorithm.getName()+ " algorithm",
nh.getFailedBody(
"\n"+
"Installation completed but DataMiner Interface not working correctly or files "
+ this.algorithm.getName() + ".jar and " + this.algorithm.getName()
+ "_interface.jar not availables at the expected path")+"\n\n"+this.buildInfo());
return;
***REMOVED***
***REMOVED***
***REMOVED*** catch (Exception e) {
e.printStackTrace();
***REMOVED***
***REMOVED***
public String getStatus(int exitstatus) throws FileNotFoundException, UnsupportedEncodingException {
File m = new File(this.jobLogs + File.separator + this.id + "_exitStatus");
PrintWriter writer = new PrintWriter(m, "UTF-8");
String response = "";
if (exitstatus == 0) {
response = "IN PROGRESS";
writer.println(response);
***REMOVED***writer.close();
***REMOVED***
if (exitstatus == 9) {
response = "COMPLETED";
writer.println(response);
***REMOVED***writer.close();
***REMOVED***
if (exitstatus == 2) {
response = "FAILED";
writer.println(response);
***REMOVED***writer.close();
***REMOVED***
writer.close();
return response;
***REMOVED***
public String buildInfo() throws UnsupportedEncodingException{
return
"\n"+
"Algorithm details:\n"+"\n"+
"User: "+this.algorithm.getFullname()+"\n"+
"Algorithm name: "+this.algorithm.getName()+"\n"+
"Staging DataMiner Host: "+ DMPMClientConfiguratorManager.getInstance().getStagingConfiguration().getHost()+"\n"+
"Caller VRE: "+ScopeProvider.instance.get()+"\n"+
"Target VRE: "+rProtoVREName+"\n";
***REMOVED***
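
Note: both StagingJob (above) and ProductionPublishingJob now collapse to a call to the shared DMPMJob.execute(NotificationHelper, CheckMethod), a template-method arrangement. The following self-contained sketch shows that shape with simplified stand-in types; all names here (Notifier, Job, StagingInstallJob) are illustrative and not taken from the project.

// Stand-ins for the project's NotificationHelper and CheckMethod hierarchies.
abstract class Notifier {
    abstract String successSubject();
    abstract String failedSubject();
}

class StagingNotifier extends Notifier {
    String successSubject() { return "[Staging] is SUCCESS"; }
    String failedSubject()  { return "[Staging] is FAILED"; }
}

abstract class Job {
    // Template method: the installation flow lives here once; subclasses only
    // choose which Notifier (and, in the real code, which CheckMethod) to plug in.
    protected void execute(Notifier notifier) {
        boolean installed = install();
        System.out.println(installed ? notifier.successSubject() : notifier.failedSubject());
    }

    protected abstract void execute();

    // Placeholder for the ansible-driven installation step.
    protected boolean install() { return true; }
}

class StagingInstallJob extends Job {
    protected void execute() {
        execute(new StagingNotifier());
    }
}

public class TemplateMethodSketch {
    public static void main(String[] args) {
        new StagingInstallJob().execute();
    }
}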

View File

@ -10,20 +10,11 @@ import java.util.Arrays;
***REMOVED***
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.Configuration;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
***REMOVED***
***REMOVED***
import com.jcraft.jsch.Channel;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;
import com.jcraft.jsch.SftpATTRS;
import com.jcraft.jsch.SftpException;
public class CheckMethod {
public abstract class CheckMethod {
***REMOVED***
@ -37,14 +28,14 @@ public class CheckMethod {
this.logger.debug("Checking method for machine "+machine);
this.logger.debug("By using tocken "+token);
System.out.println("Machine: " + machine);
String getCapabilitesRequest = new String();
String getCapabilitesResponse = new String();
***REMOVED*** String getCapabilitesRequest = new String();
***REMOVED*** String getCapabilitesResponse = new String();
System.out.println(" Token: " + token);
String request = "http:***REMOVED***" + machine
+ "/wps/WebProcessingService?Request=GetCapabilities&Service=WPS&gcube-token=" + token;
String response = machine + "___" + token + ".xml";
getCapabilitesRequest = request;
getCapabilitesResponse = response;
***REMOVED*** getCapabilitesRequest = request;
***REMOVED*** getCapabilitesResponse = response;
String baseDescriptionRequest = "http://" + machine
+ "/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0" + "&gcube-token="
+ token + "&Identifier=";
@ -111,359 +102,22 @@ public class CheckMethod {
public boolean algoExists(Algorithm algo/*, String env*/) throws Exception{
public abstract boolean algoExists(Algorithm algo/*, String env*/) throws Exception;
this.logger.debug("Looking if algo "+algo.getName()+ " exists");
Configuration stagingConfiguration = DMPMClientConfiguratorManager.getInstance().getStagingConfiguration();
File file = new File(stagingConfiguration.getGhostAlgoDirectory()+"/"+algo.getName()+".jar");
File file2 = new File(stagingConfiguration.getGhostAlgoDirectory()+"/"+algo.getName()+"_interface.jar");
this.logger.debug("Looking for files "+file.getPath()+ " "+file.getPath());
System.out.println("First file is located to: "+file.getPath());
System.out.println("Second file is located to: "+file2.getPath());
if ((this.doesExist(file.getPath()/*,env*/)) && (this.doesExist(file2.getPath()/*,env*/)))
{
this.logger.debug("Files found");
this.copyFromDmToSVN(file/*,env*/);
this.copyFromDmToSVN(file2/*,env*/);
System.out.println("Files have been copied to SVN");
return true;
***REMOVED***
else
{
this.logger.debug("Files not found");
System.out.println("Algorithm "+algo.getName()+".jar"+ " and "+algo.getName()+"_interface.jar files are not present at the expected path");
return false;
***REMOVED***
***REMOVED***
public abstract void deleteFiles(Algorithm a/*,String env*/) throws Exception;
public boolean algoExistsProd(Algorithm a/*, String env*/) throws Exception{
Configuration productionConfiguration = DMPMClientConfiguratorManager.getInstance().getProductionConfiguration();
File file = new File(productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+".jar");
File file2 = new File(productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+"_interface.jar");
System.out.println("First file is located to: "+file.getPath());
System.out.println("Second file is located to: "+file2.getPath());
if ((this.doesExistProd(file.getPath()/*,env*/)) && (this.doesExistProd(file2.getPath()/*,env*/))){
this.copyFromDmToSVNProd(file/*,env*/);
this.copyFromDmToSVNProd(file2/*,env*/);
return true;
***REMOVED***
else
System.out.println("Algorithm "+a.getName()+".jar"+ " and "+a.getName()+"_interface.jar files are not present at the expected path");
return false;
***REMOVED***
public abstract boolean doesExist(String path/*, String env*/) throws Exception;
public void deleteFiles(Algorithm a/*,String env*/) throws Exception{
JSch jsch = new JSch();
Session session = null;
Channel channel = null;
ChannelSftp c = null;
Configuration stagingConfiguration = DMPMClientConfiguratorManager.getInstance().getStagingConfiguration();
System.out.println("checking existing in env: " + stagingConfiguration.getHost());
File file = new File(stagingConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+".jar");
File file2 = new File(stagingConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+"_interface.jar");
System.out.println("First file is located to: "+file.getPath());
System.out.println("Second file is located to: "+file2.getPath());
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", stagingConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
channel = session.openChannel("sftp");
channel.connect();
System.out.println("shell channel connected....");
c = (ChannelSftp) channel;
if(doesExist(file.getPath()/*,env*/)&&(doesExist(file2.getPath()/*,env*/))){
c.rm(file.getPath());
c.rm(file2.getPath());
System.out.println("Both the files have been deleted");
***REMOVED***
else System.out.println("Files not found");
channel.disconnect();
c.disconnect();
session.disconnect();
***REMOVED***
public void deleteFilesProd(Algorithm a/*,String env*/) throws Exception{
JSch jsch = new JSch();
Session session = null;
Channel channel = null;
ChannelSftp c = null;
Configuration productionConfiguration = DMPMClientConfiguratorManager.getInstance().getProductionConfiguration();
System.out.println("checking existing in env: " + productionConfiguration.getHost());
File file = new File(productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+".jar");
File file2 = new File(productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+"_interface.jar");
System.out.println("First file is located to: "+file.getPath());
System.out.println("Second file is located to: "+file2.getPath());
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", productionConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
channel = session.openChannel("sftp");
channel.connect();
System.out.println("shell channel connected....");
c = (ChannelSftp) channel;
if(doesExistProd(file.getPath()/*,env*/)&&(doesExistProd(file2.getPath()/*,env*/))){
c.rm(file.getPath());
c.rm(file2.getPath());
System.out.println("Both the files have been deleted");
***REMOVED***
else System.out.println("Files not found");
channel.disconnect();
c.disconnect();
session.disconnect();
***REMOVED***
public boolean doesExist(String path/*, String env*/) throws Exception {
this.logger.debug("Looking if file "+path + " exists");
JSch jsch = new JSch();
Session session = null;
Channel channel = null;
ChannelSftp c = null;
boolean success = false;
Configuration stagingConfiguration = DMPMClientConfiguratorManager.getInstance().getStagingConfiguration();
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
this.logger.debug("Staging configuration host "+stagingConfiguration.getHost());
session = jsch.getSession("root",stagingConfiguration.getHost() );
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
channel = session.openChannel("sftp");
channel.connect();
System.out.println("shell channel connected....");
c = (ChannelSftp) channel;
SftpATTRS is = null;
System.out.println(path);
try {
is = c.lstat(path);
this.logger.debug("File found");
success = true;
***REMOVED*** catch (SftpException e)
{
this.logger.error("File not found",e);
if (e.id == ChannelSftp.SSH_FX_NO_SUCH_FILE)
{
***REMOVED*** file doesn't exist
success = false;
***REMOVED***
***REMOVED***success = true; ***REMOVED*** something else went wrong
***REMOVED***
channel.disconnect();
c.disconnect();
session.disconnect();
this.logger.debug("Operation result "+success);
return success;
***REMOVED***
public boolean doesExistProd(String path/*, String env*/) throws Exception {
JSch jsch = new JSch();
Session session = null;
Channel channel = null;
ChannelSftp c = null;
boolean success = false;
Configuration productionConfiguration = DMPMClientConfiguratorManager.getInstance().getProductionConfiguration();
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", productionConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
channel = session.openChannel("sftp");
channel.connect();
System.out.println("shell channel connected....");
c = (ChannelSftp) channel;
SftpATTRS is = null;
System.out.println(path);
try {
is = c.lstat(path);
success = true;
***REMOVED*** catch (SftpException e) {
if (e.id == ChannelSftp.SSH_FX_NO_SUCH_FILE) {
***REMOVED*** file doesn't exist
success = false;
***REMOVED***
***REMOVED***success = true; ***REMOVED*** something else went wrong
***REMOVED***
channel.disconnect();
c.disconnect();
session.disconnect();
return success;
***REMOVED***
public void copyFromDmToSVN(File a/*,String env*/) throws Exception {
JSch jsch = new JSch();
Session session = null;
SVNUpdater svnUpdater = new SVNUpdater();
Configuration stagingConfiguration = DMPMClientConfiguratorManager.getInstance().getStagingConfiguration();
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", stagingConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
Channel channel = session.openChannel("sftp");
channel.connect();
ChannelSftp sftp = (ChannelSftp) channel;
sftp.cd(stagingConfiguration.getGhostAlgoDirectory());
System.out.println("REMOTE : "+stagingConfiguration.getGhostAlgoDirectory()+"/"+a.getName());
System.out.println("LOCAL : /tmp/"+a.getName());
sftp.get(stagingConfiguration.getGhostAlgoDirectory()+"/"+a.getName(),"/tmp/"+a.getName());
channel.disconnect();
session.disconnect();
File f = new File("/tmp/"+a.getName());
svnUpdater.updateAlgorithmFiles(f);
f.delete();
***REMOVED***
public abstract void copyFromDmToSVN(File a/*,String env*/) throws Exception;
public void copyFromDmToSVNProd(File a/*,String env*/) throws Exception {
JSch jsch = new JSch();
Session session = null;
SVNUpdater svnUpdater = new SVNUpdater();
Configuration productionConfiguration = DMPMClientConfiguratorManager.getInstance().getProductionConfiguration();
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", productionConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
Channel channel = session.openChannel("sftp");
channel.connect();
ChannelSftp sftp = (ChannelSftp) channel;
sftp.cd(productionConfiguration.getGhostAlgoDirectory());
System.out.println("REMOTE : "+productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName());
System.out.println("LOCAL : /tmp/"+a.getName());
sftp.get(productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName(),"/tmp/"+a.getName());
channel.disconnect();
session.disconnect();
File f = new File("/tmp/"+a.getName());
svnUpdater.updateAlgorithmFilesProd(f);
f.delete();
***REMOVED***
public List<String> getFiles(String a){
public static List<String> getFiles(String a){
String[] array = a.split(",");
ArrayList<String> list = new ArrayList<>(Arrays.asList(array));
@ -477,71 +131,5 @@ public class CheckMethod {
***REMOVED***
public static void main(String[] args) throws Exception {
***REMOVED*** ServiceConfiguration a = new ServiceConfiguration();
***REMOVED*** System.out.println(a.getStagingHost());
CheckMethod a = new CheckMethod();
***REMOVED***a.getFiles("/trunk/data-analysis/RConfiguration/RPackagesManagement/r_deb_pkgs.txt, /trunk/data-analysis/RConfiguration/RPackagesManagement/r_cran_pkgs.txt, /trunk/data-analysis/RConfiguration/RPackagesManagement/r_github_pkgs.txt");
***REMOVED*** File aa = new File("OCTAVEBLACKBOX.jar");
***REMOVED*** System.out.println(aa.getName());
***REMOVED*** System.out.println(aa.getPath());
***REMOVED***a.copyFromDmToSVN(aa);
***REMOVED*** if (a.checkMethod("dataminer-ghost-d.dev.d4science.org", "708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548")){
***REMOVED*** System.out.println("AAA");***REMOVED***
***REMOVED***
***REMOVED*** if (a.doesExist("/home/gcube/wps_algorithms/algorithms/WINDOWS_BLACK_BOX_EXAMPLE.jar")){
***REMOVED*** System.out.println("BBBB");
***REMOVED***
***REMOVED******REMOVED***
***REMOVED*** if (a.doesExist("/home/gcube/wps_algorithms/algorithms/WINDOWS_BLACK_BOX_EXAMPLE_interface.jar")){
***REMOVED*** System.out.println("CCCC");***REMOVED***
***REMOVED***
***REMOVED*** File aa = new File("/home/gcube/wps_algorithms/algorithms/RBLACKBOX_interface.jar");
***REMOVED*** a.copyFromDmToSVN(aa, "Dev");
***REMOVED***
***REMOVED***System.out.println(a.checkMethod("dataminer-ghost-t.pre.d4science.org",
***REMOVED*** "2eceaf27-0e22-4dbe-8075-e09eff199bf9-98187548"));
***REMOVED***System.out.println(a.checkMethod("dataminer-proto-ghost.d4science.org",
***REMOVED*** "3a23bfa4-4dfe-44fc-988f-194b91071dd2-843339462"));
System.out.println(a.checkMethod("dataminer-ghost-d.dev.d4science.org",
"708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548"));
***REMOVED***Algorithm aa = new Algorithm();
***REMOVED***aa.setName("UDPIPE_WRAPPER");
***REMOVED***System.out.println(a.algoExists(aa));
***REMOVED******REMOVED***
***REMOVED***ServiceConfiguration bp = new ServiceConfiguration();
***REMOVED******REMOVED***
***REMOVED***SecurityTokenProvider.instance.set("708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548");
***REMOVED******REMOVED***
***REMOVED***if (a.checkMethod(bp.getStagingHost(), SecurityTokenProvider.instance.get())&&a.algoExists(aa)); {
***REMOVED***System.out.println("ciao");
***REMOVED***
***REMOVED******REMOVED***
***REMOVED***
***REMOVED***Algorithm al = new Algorithm();
***REMOVED*** al.setName("UDPIPE_WRAPPER");
***REMOVED*** a.deleteFiles(al);
***REMOVED***
***REMOVED***
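
Note: CheckMethod keeps the JSch-based SFTP plumbing while delegating the environment-specific pieces (algoExists, deleteFiles, doesExist, copyFromDmToSVN) to subclasses. A condensed sketch of the existence check the class relies on is below; it uses the same JSch calls as the code above (lstat plus SSH_FX_NO_SUCH_FILE), but the host, key paths and file path in main are placeholders.

import com.jcraft.jsch.Channel;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;
import com.jcraft.jsch.SftpException;

public class SftpFileCheck {

    // Returns true if `path` exists on `host`: lstat() succeeding means the file
    // is there, SSH_FX_NO_SUCH_FILE means it is not.
    static boolean exists(String host, String path) throws Exception {
        JSch jsch = new JSch();
        jsch.setKnownHosts("~/.ssh/known_hosts");
        jsch.addIdentity("~/.ssh/id_rsa");
        Session session = jsch.getSession("root", host);
        java.util.Properties config = new java.util.Properties();
        config.put("StrictHostKeyChecking", "no");
        session.setConfig(config);
        session.connect();
        Channel channel = session.openChannel("sftp");
        channel.connect();
        ChannelSftp sftp = (ChannelSftp) channel;
        boolean found;
        try {
            sftp.lstat(path);
            found = true;
        } catch (SftpException e) {
            if (e.id == ChannelSftp.SSH_FX_NO_SUCH_FILE) {
                found = false;
            } else {
                throw e; // any other SFTP error is unexpected
            }
        } finally {
            channel.disconnect();
            session.disconnect();
        }
        return found;
    }

    public static void main(String[] args) throws Exception {
        // Host and path are placeholders, not values from the commit.
        System.out.println(exists("dataminer.example.org",
                "/home/gcube/wps_algorithms/algorithms/SOME_ALGO.jar"));
    }
}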

View File

@ -1,11 +1,9 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util;
import static org.gcube.common.authorization.client.Constants.authorizationService;
***REMOVED***
import org.gcube.common.authorization.library.AuthorizationEntry;
***REMOVED***
***REMOVED***
import static org.gcube.common.authorization.client.Constants.authorizationService;
public class CheckPermission {

View File

@ -6,58 +6,37 @@ import java.io.FileNotFoundException;
***REMOVED***
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.clients.HAProxy;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.Configuration;
***REMOVED***
***REMOVED***
public class ClusterBuilder {
public abstract class ClusterBuilder {
private Configuration configuration;
public ClusterBuilder (Configuration configuration)
{
this.configuration = configuration;
***REMOVED***
***REMOVED***1. to complete
public static Cluster getStagingDataminerCluster() throws FileNotFoundException{
public Cluster getDataminerCluster() throws FileNotFoundException{
Cluster cluster = new Cluster();
Host h = new Host();
h.setName(DMPMClientConfiguratorManager.getInstance().getStagingConfiguration().getHost());
h.setName(this.configuration.getHost());
cluster.addHost(h);
***REMOVED*** if (env.equals("Dev")){
***REMOVED*** h.setName(p.getDevStagingHost());
***REMOVED*** cluster.addHost(h);
***REMOVED*** ***REMOVED***
***REMOVED***
***REMOVED*** if ((env.equals("Prod")||(env.equals("Proto")))){
***REMOVED*** h.setName(p.getProtoProdStagingHost());
***REMOVED*** cluster.addHost(h);
***REMOVED*** ***REMOVED***
return cluster;
***REMOVED***
public static Cluster getProdDataminerCluster() throws FileNotFoundException{
Cluster cluster = new Cluster();
Host h = new Host();
h.setName(DMPMClientConfiguratorManager.getInstance().getProductionConfiguration().getHost());
cluster.addHost(h);
***REMOVED*** if (env.equals("Dev")){
***REMOVED*** h.setName(p.getDevStagingHost());
***REMOVED*** cluster.addHost(h);
***REMOVED*** ***REMOVED***
***REMOVED***
***REMOVED*** if ((env.equals("Prod")||(env.equals("Proto")))){
***REMOVED*** h.setName(p.getProtoProdStagingHost());
***REMOVED*** cluster.addHost(h);
***REMOVED*** ***REMOVED***
return cluster;
***REMOVED***
public static Cluster getVRECluster(String targetVREToken, String targetVRE) throws IOException{
public Cluster getVRECluster(String targetVREToken, String targetVRE) throws IOException{
Cluster cluster = new Cluster();
for (Host h : new HAProxy().listDataMinersByCluster(targetVREToken,targetVRE)) {
cluster.addHost(h);
@ -65,13 +44,13 @@ public class ClusterBuilder {
return cluster;
***REMOVED***
public static Cluster getRProtoCluster() throws IOException{
public Cluster getRProtoCluster() throws IOException{
***REMOVED***Assumes the service is running in RPrototypingLab
String token = SecurityTokenProvider.instance.get();
String targetVRE = ScopeProvider.instance.get();
return getVRECluster(token, targetVRE);
return this.getVRECluster(token, targetVRE);
***REMOVED***
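
Note: ClusterBuilder above now receives a Configuration through its constructor and builds a one-host cluster from configuration.getHost(), instead of consulting DMPMClientConfiguratorManager statically. A minimal sketch of that constructor-injection shape follows, using stand-in types (Config, Node, NodeCluster) because the project classes are not reproduced here.

import java.util.ArrayList;
import java.util.List;

// Minimal stand-ins for the project's Configuration, Host and Cluster types.
interface Config { String getHost(); }

class Node {
    private final String name;
    Node(String name) { this.name = name; }
    public String toString() { return name; }
}

class NodeCluster {
    private final List<Node> hosts = new ArrayList<>();
    void addHost(Node h) { hosts.add(h); }
    public String toString() { return hosts.toString(); }
}

// The base class no longer hard-codes staging vs. production: the environment
// comes in through the injected configuration, as in the refactored ClusterBuilder.
abstract class NodeClusterBuilder {
    private final Config configuration;
    NodeClusterBuilder(Config configuration) { this.configuration = configuration; }

    NodeCluster getDataminerCluster() {
        NodeCluster cluster = new NodeCluster();
        cluster.addHost(new Node(configuration.getHost()));
        return cluster;
    }
}

class StagingNodeClusterBuilder extends NodeClusterBuilder {
    // Placeholder host name, not taken from the commit.
    StagingNodeClusterBuilder() { super(() -> "staging-dataminer.example.org"); }
}

public class ClusterBuilderSketch {
    public static void main(String[] args) {
        System.out.println(new StagingNodeClusterBuilder().getDataminerCluster());
    }
}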

View File

@ -1,53 +1,28 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import org.apache.commons.lang.StringUtils;
***REMOVED***import scala.actors.threadpool.Arrays;
public class NotificationHelper {
public abstract class NotificationHelper {
private Exception executionException;
***REMOVED*** private Exception executionException;
public NotificationHelper() {
***REMOVED***
private String getSubjectHeader() {
return "[DataMinerGhostStagingInstallationRequestReport]";
***REMOVED***
***REMOVED*** private boolean isError() {
***REMOVED*** return this.executionException!=null;
***REMOVED*** ***REMOVED***
private String getSubjectHeaderRelease() {
return "[DataMinerGhostProductionInstallationRequestReport]";
***REMOVED***
***REMOVED*** public void setExecutionException(Exception executionException) {
***REMOVED*** this.executionException = executionException;
***REMOVED*** ***REMOVED***
private boolean isError() {
return this.executionException!=null;
***REMOVED***
public void setExecutionException(Exception executionException) {
this.executionException = executionException;
***REMOVED***
public abstract String getSuccessSubject();
public String getSuccessSubject() {
return this.getSubjectHeader()+" is SUCCESS";
***REMOVED***
public String getSuccessSubjectRelease() {
return this.getSubjectHeaderRelease()+" is SUCCESS";
***REMOVED***
public String getFailedSubjectRelease() {
return this.getSubjectHeaderRelease()+" is FAILED";
***REMOVED***
public abstract String getFailedSubject();
public String getFailedSubject() {
return String.format(this.getSubjectHeader()+" is FAILED");
***REMOVED***
public String getSuccessBody(String info) {
String message = String.format("The installation of the algorithm is completed successfully.");
message+="\n\nYou can retrieve experiment results under the '/DataMiner' e-Infrastructure Workspace folder or from the DataMiner interface.\n\n"+ info;

View File

@ -0,0 +1,38 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.Configuration;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.auth.ISVNAuthenticationManager;
import org.tmatesoft.svn.core.io.SVNRepository;
import org.tmatesoft.svn.core.io.SVNRepositoryFactory;
import org.tmatesoft.svn.core.wc.SVNWCUtil;
public class SVNRepositoryManager {
private SVNRepository svnRepository;
private static SVNRepositoryManager instance;
private SVNRepositoryManager (Configuration configuration) throws SVNException
{
this.svnRepository = SVNRepositoryFactory.create(SVNURL.parseURIEncoded(configuration.getSVNRepository()));
ISVNAuthenticationManager authManager = SVNWCUtil.createDefaultAuthenticationManager();
this.svnRepository.setAuthenticationManager(authManager);
***REMOVED***
public static SVNRepositoryManager getInstance (Configuration configuration) throws SVNException
{
if (instance == null) instance = new SVNRepositoryManager(configuration);
return instance;
***REMOVED***
public SVNRepository getSvnRepository() {
return svnRepository;
***REMOVED***
***REMOVED***
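
Note: the new SVNRepositoryManager above wraps a single SVNRepository behind a lazily created singleton. The sketch below reproduces the same SVNKit construction steps, keyed on a plain URL string rather than the project's Configuration; the URL in main is a placeholder, and the sketch adds synchronized to getInstance, which the commit's version does not.

import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.auth.ISVNAuthenticationManager;
import org.tmatesoft.svn.core.io.SVNRepository;
import org.tmatesoft.svn.core.io.SVNRepositoryFactory;
import org.tmatesoft.svn.core.wc.SVNWCUtil;

public class SvnRepositorySingleton {

    private static SvnRepositorySingleton instance;
    private final SVNRepository svnRepository;

    // Same construction steps as SVNRepositoryManager: build the repository from
    // an encoded URL and attach the default authentication manager.
    private SvnRepositorySingleton(String repositoryUrl) throws SVNException {
        this.svnRepository = SVNRepositoryFactory.create(SVNURL.parseURIEncoded(repositoryUrl));
        ISVNAuthenticationManager authManager = SVNWCUtil.createDefaultAuthenticationManager();
        this.svnRepository.setAuthenticationManager(authManager);
    }

    // Lazy singleton: the first caller's URL wins, later calls reuse the instance.
    // The commit's getInstance is unsynchronized; synchronized is added here as a conservative choice.
    public static synchronized SvnRepositorySingleton getInstance(String repositoryUrl) throws SVNException {
        if (instance == null) {
            instance = new SvnRepositorySingleton(repositoryUrl);
        }
        return instance;
    }

    public SVNRepository getSvnRepository() {
        return svnRepository;
    }

    public static void main(String[] args) throws SVNException {
        // Placeholder repository URL.
        SVNRepository repo = SvnRepositorySingleton
                .getInstance("https://svn.example.org/repos/algorithms").getSvnRepository();
        System.out.println(repo.getLocation());
    }
}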

View File

@ -21,33 +21,28 @@ import java.util.TimeZone;
import org.apache.commons.io.FileUtils;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.Configuration;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNNodeKind;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.auth.ISVNAuthenticationManager;
import org.tmatesoft.svn.core.internal.wc.SVNFileUtil;
import org.tmatesoft.svn.core.internal.wc.admin.SVNChecksumInputStream;
import org.tmatesoft.svn.core.io.ISVNEditor;
import org.tmatesoft.svn.core.io.SVNRepository;
import org.tmatesoft.svn.core.io.SVNRepositoryFactory;
import org.tmatesoft.svn.core.io.diff.SVNDeltaGenerator;
import org.tmatesoft.svn.core.wc.SVNWCUtil;
/**
* Created by ggiammat on 5/9/17.
*/
public class SVNUpdater {
public abstract class SVNUpdater {
private SVNRepository svnRepository;
private Configuration configuration;
public SVNUpdater() throws SVNException {
this.svnRepository = SVNRepositoryFactory.create(SVNURL.parseURIEncoded(DMPMClientConfiguratorManager.getInstance().getProductionConfiguration().getSVNRepository()));
ISVNAuthenticationManager authManager = SVNWCUtil.createDefaultAuthenticationManager();
this.svnRepository.setAuthenticationManager(authManager);
public SVNUpdater(Configuration configuration) throws SVNException {
this.configuration = configuration;
this.svnRepository = SVNRepositoryManager.getInstance(configuration).getSvnRepository();
***REMOVED***
***REMOVED*** public void updateRPRotoDeps(Algorithm algorithm) {
@ -58,113 +53,14 @@ public class SVNUpdater {
public String getDependencyFile(String language/*, String env*/){
String a = "";
***REMOVED*** if (env.equals("Dev")){
***REMOVED*** a= this.getDevDependencyFile(language);
***REMOVED*** ***REMOVED***
***REMOVED***
***REMOVED*** if (env.equals("Prod")){
***REMOVED*** a= this.getProdDependencyFile(language);
***REMOVED*** ***REMOVED***
***REMOVED***
***REMOVED*** if (env.equals("Proto")){
***REMOVED*** a= this.getRProtoDependencyFile(language);
***REMOVED*** ***REMOVED***
***REMOVED***
***REMOVED*** if (env.equals("Preprod")){
***REMOVED*** a= this.getPreProdDependencyFile(language);
***REMOVED*** ***REMOVED***
a = this.getStagingDependencyFile(language);
return a;
public String getDependencyFile(String language/*, String env*/)
{
return getDependencyFile(this.configuration,language);
***REMOVED***
public String getDependencyFileProd(String language/*, String env*/){
String a = "";
***REMOVED*** if (env.equals("Dev")){
***REMOVED*** a= this.getDevDependencyFile(language);
***REMOVED*** ***REMOVED***
***REMOVED***
***REMOVED*** if (env.equals("Prod")){
***REMOVED*** a= this.getProdDependencyFile(language);
***REMOVED*** ***REMOVED***
***REMOVED***
***REMOVED*** if (env.equals("Proto")){
***REMOVED*** a= this.getRProtoDependencyFile(language);
***REMOVED*** ***REMOVED***
***REMOVED***
***REMOVED*** if (env.equals("Preprod")){
***REMOVED*** a= this.getPreProdDependencyFile(language);
***REMOVED*** ***REMOVED***
a = this.getProdDependencyFile(language);
return a;
***REMOVED***
public String getStagingDependencyFile(String language) {
return getDependencyFile(DMPMClientConfiguratorManager.getInstance().getStagingConfiguration(),language);
***REMOVED*** switch (language) {
***REMOVED*** case "R":
***REMOVED*** return DMPMClientConfigurator.getInstance().getSVNStagingCRANDepsList();
***REMOVED*** case "R-blackbox":
***REMOVED*** return DMPMClientConfigurator.getInstance().getSVNStagingRBDepsList();
***REMOVED*** case "Java":
***REMOVED*** return DMPMClientConfigurator.getInstance().getSVNStagingJavaDepsList();
***REMOVED*** case "Knime-Workflow":
***REMOVED*** return DMPMClientConfigurator.getInstance().getSVNStagingKWDepsList();
***REMOVED*** case "Linux-compiled":
***REMOVED*** return DMPMClientConfigurator.getInstance().getSVNStagingLinuxCompiledDepsList();
***REMOVED*** case "Octave":
***REMOVED*** return DMPMClientConfigurator.getInstance().getSVNStagingOctaveDepsList();
***REMOVED*** case "Python":
***REMOVED*** return DMPMClientConfigurator.getInstance().getSVNStagingPythonDepsList();
***REMOVED*** case "Pre-Installed":
***REMOVED*** return DMPMClientConfigurator.getInstance().getSVNStagingPreInstalledDepsList();
***REMOVED*** case "Windows-compiled":
***REMOVED*** return DMPMClientConfigurator.getInstance().getSVNStagingWCDepsList();
***REMOVED*** default:
***REMOVED*** return null;
***REMOVED*** ***REMOVED***
***REMOVED***
public String getProdDependencyFile(String language) {
return getDependencyFile(DMPMClientConfiguratorManager.getInstance().getProductionConfiguration(),language);
***REMOVED*** switch (language) {
***REMOVED*** case "R":
***REMOVED*** return this.configuration.getSVNProdCRANDepsList();
***REMOVED*** case "R-blackbox":
***REMOVED*** return this.configuration.getSVNProdRBDepsList();
***REMOVED*** case "Java":
***REMOVED*** return this.configuration.getSVNProdJavaDepsList();
***REMOVED*** case "Knime-Workflow":
***REMOVED*** return this.configuration.getSVNProdKWDepsList();
***REMOVED*** case "Linux-compiled":
***REMOVED*** return this.configuration.getSVNProdLinuxCompiledDepsList();
***REMOVED*** case "Octave":
***REMOVED*** return this.configuration.getSVNProdOctaveDepsList();
***REMOVED*** case "Python":
***REMOVED*** return this.configuration.getSVNProdPythonDepsList();
***REMOVED*** case "Pre-Installed":
***REMOVED*** return this.configuration.getSVNProdPreInstalledDepsList();
***REMOVED*** case "Windows-compiled":
***REMOVED*** return this.configuration.getSVNProdWCDepsList();
***REMOVED*** default:
***REMOVED*** return null;
***REMOVED*** ***REMOVED***
***REMOVED***
private String getDependencyFile (Configuration configuration, String language)
{
@ -193,29 +89,24 @@ public String getDependencyFileProd(String language/*, String env*/){
***REMOVED***
public void updateSVNStagingAlgorithmList(Algorithm algorithm, String targetVRE, String category, String algorithm_type, String user/*, String env*/) {
this.updateSVNAlgorithmList(DMPMClientConfiguratorManager.getInstance().getStagingConfiguration().getSVNAlgorithmsList(), algorithm, targetVRE, category, algorithm_type, user);
***REMOVED***
public void updateSVNProdAlgorithmList(Algorithm algorithm, String targetVRE, String category, String algorithm_type, String user/*, String env*/) {
this.updateSVNAlgorithmList(DMPMClientConfiguratorManager.getInstance().getProductionConfiguration().getSVNAlgorithmsList(), algorithm, targetVRE, category, algorithm_type, user);
public void updateSVNAlgorithmList(Algorithm algorithm, String targetVRE, String category, String algorithm_type, String user/*, String env*/)
{
this.updateSVNAlgorithmList(this.configuration.getSVNAlgorithmsList(), algorithm, targetVRE, category, algorithm_type, user);
***REMOVED***
public void updateAlgorithmFiles(File a) throws SVNException{
***REMOVED***this.updateAlgorithmList(this.configuration.getSVNMainAlgoRepo(), a);
this.updateAlgorithmList(DMPMClientConfiguratorManager.getInstance().getStagingConfiguration().getRepository(), a);
this.updateAlgorithmList(this.configuration.getRepository(), a);
***REMOVED***
public void updateAlgorithmFilesProd(File a) throws SVNException{
***REMOVED***this.updateAlgorithmList(this.configuration.getSVNMainAlgoRepo(), a);
this.updateAlgorithmList(DMPMClientConfiguratorManager.getInstance().getProductionConfiguration().getRepository(), a);
***REMOVED***
public void updateAlgorithmList(String svnMainAlgoRepo, File a) throws SVNException {
private void updateAlgorithmList(String svnMainAlgoRepo, File a) throws SVNException {
try {
System.out.println("Adding .jar file: " + a + " to repository " + svnMainAlgoRepo);
@@ -312,7 +203,7 @@ public String getDependencyFileProd(String language/*, String env*/){
***REMOVED***
public void updateSVNAlgorithmList(String file, Algorithm algorithm, String targetVRE, String category, String algorithm_type, String user/*, String env*/) {
private void updateSVNAlgorithmList(String file, Algorithm algorithm, String targetVRE, String category, String algorithm_type, String user/*, String env*/) {
try {
System.out.println("Updating algorithm list: " + file);
final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
@@ -404,10 +295,10 @@ public String getDependencyFileProd(String language/*, String env*/){
System.out.println("Checking dependencies list: " + file);
CheckMethod cm = new CheckMethod();
List<String> validDependencies = new LinkedList<String>();
for (String singlefile: cm.getFiles(file)){
for (String singlefile: CheckMethod.getFiles(file)){
final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
svnRepository.getFile(singlefile, SVNRepository.INVALID_REVISION, null, byteArrayOutputStream);
@@ -477,8 +368,8 @@ public String getDependencyFileProd(String language/*, String env*/){
***REMOVED***
public boolean checkIfAvaialable(String file, Collection<Dependency> deps) throws SVNException {
SendMail sm = new SendMail();
NotificationHelper nh = new NotificationHelper();
***REMOVED***SendMail sm = new SendMail();
***REMOVED***NotificationHelper nh = new NotificationHelper();
boolean check = false;
try {
@@ -608,7 +499,7 @@ public String getDependencyFileProd(String language/*, String env*/){
SVNUpdater c = new SVNUpdater();
***REMOVED*** SVNUpdater c = new SVNUpdater();
***REMOVED***File a = new File("/home/ngalante/Desktop/testCiro");
***REMOVED***File b = new File ("/home/ngalante/Desktop/testB");

View File

@@ -8,6 +8,7 @@ import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
***REMOVED***
***REMOVED***
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
***REMOVED***
import java.net.URLConnection;
@@ -23,16 +24,10 @@ import org.apache.http.util.EntityUtils;
import org.gcube.common.authorization.library.AuthorizationEntry;
***REMOVED***
import org.gcube.common.resources.gcore.GCoreEndpoint;
import org.gcube.common.resources.gcore.GenericResource;
import org.gcube.common.resources.gcore.ServiceEndpoint;
***REMOVED***
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.util.exception.EMailException;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.gcube.smartgears.ContextProvider;
import org.gcube.smartgears.context.application.ApplicationContext;
import org.json.JSONArray;
import org.json.JSONObject;
@@ -43,12 +38,14 @@ public class SendMail {
***REMOVED*** TODO Auto-generated constructor stub
***REMOVED***
public void sendNotification(String subject, String body) throws Exception {
public void sendNotification(String subject, String body) throws EMailException {
***REMOVED***AnalysisLogger.getLogger().debug("Emailing System->Starting request of email in scope " + ScopeProvider.instance.get());
***REMOVED***String serviceAddress = InfraRetrieval.findEmailingSystemAddress(ScopeProvider.instance.get());
String serviceAddress = this.getSocialService();
@@ -62,18 +59,25 @@ public class SendMail {
***REMOVED***AnalysisLogger.getLogger().debug("Emailing System->Request url is going to be " + requestForMessage);
***REMOVED*** put the sender, the recipients, subject and body of the mail here
subject = URLEncoder.encode(subject, "UTF-8");
body = URLEncoder.encode(body, "UTF-8");
try
{
subject = URLEncoder.encode(subject, "UTF-8");
body = URLEncoder.encode(body, "UTF-8");
***REMOVED***
catch (UnsupportedEncodingException e)
{
throw new EMailException(e);
***REMOVED***
String requestParameters = "&sender=dataminer&recipients=" + this.getAdmins() + "&subject=" + subject + "&body="
+ body;
String response = this.sendPostRequest(requestForMessage, requestParameters);
***REMOVED***AnalysisLogger.getLogger().debug("Emailing System->Emailing response OK ");
if (response == null) {
Exception e = new Exception("Error in email sending response");
throw e;
***REMOVED***
if (response == null) throw new EMailException();
***REMOVED***
***REMOVED*** public void notifySubmitter(String a, String b) throws Exception {
@@ -191,7 +195,7 @@ public class SendMail {
public String getAdmins() throws Exception {
public String getAdmins(){
try{
List<String> s = new LinkedList<String>();
JSONObject obj = new JSONObject(this.getAdminRoles());
@@ -206,7 +210,7 @@ public class SendMail {
***REMOVED***
return s.toString().replace("[", "").replace("]", "");
***REMOVED***
catch(Exception a){return "ngalante, lucio.lelii, roberto.cirillo, gianpaolo.coro, giancarlo.panichi, scarponi"; ***REMOVED***
catch(Exception a){return "ciro.formisano, lucio.lelii, roberto.cirillo, gianpaolo.coro, giancarlo.panichi, scarponi"; ***REMOVED***
***REMOVED***

View File

@@ -0,0 +1,19 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util.exception;
public class EMailException extends Exception {
/**
*
*/
private static final long serialVersionUID = 1L;
public EMailException() {
super ("Unable to send email notification");
***REMOVED***
public EMailException(Throwable cause) {
super ("Unable to send email notification",cause);
***REMOVED***
***REMOVED***
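sendNotification in SendMail above now declares the checked EMailException instead of a bare Exception, wrapping URL-encoding failures and a null response from the social service. A minimal caller sketch, assuming the no-arg SendMail constructor shown above (subject and body values are illustrative):

SendMail mail = new SendMail();
try {
    mail.sendNotification("[DataMinerGhostStagingInstallationRequestReport] is SUCCESS",
            "Installation completed on the staging ghost");
} catch (EMailException e) {
    // a failed notification is reported, but no longer surfaces as a generic Exception
    e.printStackTrace();
}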

View File

@@ -0,0 +1,272 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util.impl;
import java.io.File;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.Configuration;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.util.CheckMethod;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
***REMOVED***
***REMOVED***
import com.jcraft.jsch.Channel;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;
import com.jcraft.jsch.SftpException;
public class CheckMethodProduction extends CheckMethod{
***REMOVED***
public CheckMethodProduction()
{
this.logger = LoggerFactory.getLogger(CheckMethodProduction.class);
***REMOVED***
@Override
public boolean algoExists(Algorithm algo/*, String env*/) throws Exception{
this.logger.debug("Looking if algo "+algo.getName()+ " exists in production");
Configuration productionConfiguration = DMPMClientConfiguratorManager.getInstance().getProductionConfiguration();
File file = new File(productionConfiguration.getGhostAlgoDirectory()+"/"+algo.getName()+".jar");
File file2 = new File(productionConfiguration.getGhostAlgoDirectory()+"/"+algo.getName()+"_interface.jar");
System.out.println("First file is located to: "+file.getPath());
System.out.println("Second file is located to: "+file2.getPath());
if ((this.doesExist(file.getPath()/*,env*/)) && (this.doesExist(file2.getPath()/*,env*/))){
this.copyFromDmToSVN(file/*,env*/);
this.copyFromDmToSVN(file2/*,env*/);
return true;
***REMOVED***
else
System.out.println("Algorithm "+algo.getName()+".jar"+ " and "+algo.getName()+"_interface.jar files are not present at the expected path");
return false;
***REMOVED***
@Override
public void deleteFiles(Algorithm a/*,String env*/) throws Exception
{
JSch jsch = new JSch();
Session session = null;
Channel channel = null;
ChannelSftp c = null;
Configuration productionConfiguration = DMPMClientConfiguratorManager.getInstance().getProductionConfiguration();
System.out.println("checking existing in env: " + productionConfiguration.getHost());
File file = new File(productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+".jar");
File file2 = new File(productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+"_interface.jar");
System.out.println("First file is located to: "+file.getPath());
System.out.println("Second file is located to: "+file2.getPath());
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", productionConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
channel = session.openChannel("sftp");
channel.connect();
System.out.println("shell channel connected....");
c = (ChannelSftp) channel;
if(doesExist(file.getPath()/*,env*/)&&(doesExist(file2.getPath()/*,env*/))){
c.rm(file.getPath());
c.rm(file2.getPath());
System.out.println("Both the files have been deleted");
***REMOVED***
else System.out.println("Files not found");
channel.disconnect();
c.disconnect();
session.disconnect();
***REMOVED***
@Override
public boolean doesExist(String path/*, String env*/) throws Exception {
JSch jsch = new JSch();
Session session = null;
Channel channel = null;
ChannelSftp c = null;
boolean success = false;
Configuration productionConfiguration = DMPMClientConfiguratorManager.getInstance().getProductionConfiguration();
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", productionConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
channel = session.openChannel("sftp");
channel.connect();
System.out.println("shell channel connected....");
c = (ChannelSftp) channel;
System.out.println(path);
try {
c.lstat(path);
success = true;
***REMOVED*** catch (SftpException e) {
if (e.id == ChannelSftp.SSH_FX_NO_SUCH_FILE) {
***REMOVED*** file doesn't exist
success = false;
***REMOVED***
***REMOVED***success = true; ***REMOVED*** something else went wrong
***REMOVED***
channel.disconnect();
c.disconnect();
session.disconnect();
return success;
***REMOVED***
@Override
public void copyFromDmToSVN(File a/*,String env*/) throws Exception {
JSch jsch = new JSch();
Session session = null;
SVNUpdater svnUpdater = new SVNUpdaterProduction();
Configuration productionConfiguration = DMPMClientConfiguratorManager.getInstance().getProductionConfiguration();
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", productionConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
Channel channel = session.openChannel("sftp");
channel.connect();
ChannelSftp sftp = (ChannelSftp) channel;
sftp.cd(productionConfiguration.getGhostAlgoDirectory());
System.out.println("REMOTE : "+productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName());
System.out.println("LOCAL : /tmp/"+a.getName());
sftp.get(productionConfiguration.getGhostAlgoDirectory()+"/"+a.getName(),"/tmp/"+a.getName());
channel.disconnect();
session.disconnect();
File f = new File("/tmp/"+a.getName());
svnUpdater.updateAlgorithmFiles(f);
f.delete();
***REMOVED***
public static void main(String[] args) throws Exception {
***REMOVED*** ServiceConfiguration a = new ServiceConfiguration();
***REMOVED*** System.out.println(a.getStagingHost());
CheckMethodProduction a = new CheckMethodProduction();
***REMOVED***a.getFiles("/trunk/data-analysis/RConfiguration/RPackagesManagement/r_deb_pkgs.txt, /trunk/data-analysis/RConfiguration/RPackagesManagement/r_cran_pkgs.txt, /trunk/data-analysis/RConfiguration/RPackagesManagement/r_github_pkgs.txt");
***REMOVED*** File aa = new File("OCTAVEBLACKBOX.jar");
***REMOVED*** System.out.println(aa.getName());
***REMOVED*** System.out.println(aa.getPath());
***REMOVED***a.copyFromDmToSVN(aa);
***REMOVED*** if (a.checkMethod("dataminer-ghost-d.dev.d4science.org", "708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548")){
***REMOVED*** System.out.println("AAA");***REMOVED***
***REMOVED***
***REMOVED*** if (a.doesExist("/home/gcube/wps_algorithms/algorithms/WINDOWS_BLACK_BOX_EXAMPLE.jar")){
***REMOVED*** System.out.println("BBBB");
***REMOVED***
***REMOVED******REMOVED***
***REMOVED*** if (a.doesExist("/home/gcube/wps_algorithms/algorithms/WINDOWS_BLACK_BOX_EXAMPLE_interface.jar")){
***REMOVED*** System.out.println("CCCC");***REMOVED***
***REMOVED***
***REMOVED*** File aa = new File("/home/gcube/wps_algorithms/algorithms/RBLACKBOX_interface.jar");
***REMOVED*** a.copyFromDmToSVN(aa, "Dev");
***REMOVED***
***REMOVED***System.out.println(a.checkMethod("dataminer-ghost-t.pre.d4science.org",
***REMOVED*** "2eceaf27-0e22-4dbe-8075-e09eff199bf9-98187548"));
***REMOVED***System.out.println(a.checkMethod("dataminer-proto-ghost.d4science.org",
***REMOVED*** "3a23bfa4-4dfe-44fc-988f-194b91071dd2-843339462"));
System.out.println(a.checkMethod("dataminer-ghost-d.dev.d4science.org",
"708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548"));
***REMOVED***Algorithm aa = new Algorithm();
***REMOVED***aa.setName("UDPIPE_WRAPPER");
***REMOVED***System.out.println(a.algoExists(aa));
***REMOVED******REMOVED***
***REMOVED***ServiceConfiguration bp = new ServiceConfiguration();
***REMOVED******REMOVED***
***REMOVED***SecurityTokenProvider.instance.set("708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548");
***REMOVED******REMOVED***
***REMOVED***if (a.checkMethod(bp.getStagingHost(), SecurityTokenProvider.instance.get())&&a.algoExists(aa)); {
***REMOVED***System.out.println("ciao");
***REMOVED***
***REMOVED******REMOVED***
***REMOVED***
***REMOVED***Algorithm al = new Algorithm();
***REMOVED*** al.setName("UDPIPE_WRAPPER");
***REMOVED*** a.deleteFiles(al);
***REMOVED***
***REMOVED***

View File

@@ -0,0 +1,288 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util.impl;
import java.io.File;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.Configuration;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.util.CheckMethod;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
***REMOVED***
***REMOVED***
import com.jcraft.jsch.Channel;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;
import com.jcraft.jsch.SftpException;
public class CheckMethodStaging extends CheckMethod{
***REMOVED***
public CheckMethodStaging()
{
this.logger = LoggerFactory.getLogger(CheckMethodStaging.class);
***REMOVED***
@Override
public boolean algoExists(Algorithm algo/*, String env*/) throws Exception{
this.logger.debug("Looking if algo "+algo.getName()+ " exists");
Configuration stagingConfiguration = DMPMClientConfiguratorManager.getInstance().getStagingConfiguration();
File file = new File(stagingConfiguration.getGhostAlgoDirectory()+"/"+algo.getName()+".jar");
File file2 = new File(stagingConfiguration.getGhostAlgoDirectory()+"/"+algo.getName()+"_interface.jar");
this.logger.debug("Looking for files "+file.getPath()+ " "+file.getPath());
System.out.println("First file is located to: "+file.getPath());
System.out.println("Second file is located to: "+file2.getPath());
if ((this.doesExist(file.getPath()/*,env*/)) && (this.doesExist(file2.getPath()/*,env*/)))
{
this.logger.debug("Files found");
this.copyFromDmToSVN(file/*,env*/);
this.copyFromDmToSVN(file2/*,env*/);
System.out.println("Files have been copied to SVN");
return true;
***REMOVED***
else
{
this.logger.debug("Files not found");
System.out.println("Algorithm "+algo.getName()+".jar"+ " and "+algo.getName()+"_interface.jar files are not present at the expected path");
return false;
***REMOVED***
***REMOVED***
@Override
public void deleteFiles(Algorithm a/*,String env*/) throws Exception{
JSch jsch = new JSch();
Session session = null;
Channel channel = null;
ChannelSftp c = null;
Configuration stagingConfiguration = DMPMClientConfiguratorManager.getInstance().getStagingConfiguration();
System.out.println("checking existing in env: " + stagingConfiguration.getHost());
File file = new File(stagingConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+".jar");
File file2 = new File(stagingConfiguration.getGhostAlgoDirectory()+"/"+a.getName()+"_interface.jar");
System.out.println("First file is located to: "+file.getPath());
System.out.println("Second file is located to: "+file2.getPath());
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", stagingConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
channel = session.openChannel("sftp");
channel.connect();
System.out.println("shell channel connected....");
c = (ChannelSftp) channel;
if(doesExist(file.getPath()/*,env*/)&&(doesExist(file2.getPath()/*,env*/))){
c.rm(file.getPath());
c.rm(file2.getPath());
System.out.println("Both the files have been deleted");
***REMOVED***
else System.out.println("Files not found");
channel.disconnect();
c.disconnect();
session.disconnect();
***REMOVED***
@Override
public boolean doesExist(String path/*, String env*/) throws Exception {
this.logger.debug("Looking if file "+path + " exists");
JSch jsch = new JSch();
Session session = null;
Channel channel = null;
ChannelSftp c = null;
boolean success = false;
Configuration stagingConfiguration = DMPMClientConfiguratorManager.getInstance().getStagingConfiguration();
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
this.logger.debug("Staging configuration host "+stagingConfiguration.getHost());
session = jsch.getSession("root",stagingConfiguration.getHost() );
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
channel = session.openChannel("sftp");
channel.connect();
System.out.println("shell channel connected....");
c = (ChannelSftp) channel;
***REMOVED***SftpATTRS is = null;
System.out.println(path);
try {
c.lstat(path);
this.logger.debug("File found");
success = true;
***REMOVED*** catch (SftpException e)
{
this.logger.error("File not found",e);
if (e.id == ChannelSftp.SSH_FX_NO_SUCH_FILE)
{
***REMOVED*** file doesn't exist
success = false;
***REMOVED***
***REMOVED***success = true; ***REMOVED*** something else went wrong
***REMOVED***
channel.disconnect();
c.disconnect();
session.disconnect();
this.logger.debug("Operation result "+success);
return success;
***REMOVED***
@Override
public void copyFromDmToSVN(File a/*,String env*/) throws Exception {
JSch jsch = new JSch();
Session session = null;
SVNUpdater svnUpdater = new SVNUpdaterStaging();
Configuration stagingConfiguration = DMPMClientConfiguratorManager.getInstance().getStagingConfiguration();
jsch.setKnownHosts("~/.ssh/known_hosts");
String privateKey = "~/.ssh/id_rsa";
jsch.addIdentity(privateKey);
System.out.println("Private Key Added.");
session = jsch.getSession("root", stagingConfiguration.getHost());
System.out.println("session created.");
java.util.Properties config = new java.util.Properties();
config.put("StrictHostKeyChecking", "no");
session.setConfig(config);
session.connect();
Channel channel = session.openChannel("sftp");
channel.connect();
ChannelSftp sftp = (ChannelSftp) channel;
sftp.cd(stagingConfiguration.getGhostAlgoDirectory());
System.out.println("REMOTE : "+stagingConfiguration.getGhostAlgoDirectory()+"/"+a.getName());
System.out.println("LOCAL : /tmp/"+a.getName());
sftp.get(stagingConfiguration.getGhostAlgoDirectory()+"/"+a.getName(),"/tmp/"+a.getName());
channel.disconnect();
session.disconnect();
File f = new File("/tmp/"+a.getName());
svnUpdater.updateAlgorithmFiles(f);
f.delete();
***REMOVED***
public static void main(String[] args) throws Exception {
***REMOVED*** ServiceConfiguration a = new ServiceConfiguration();
***REMOVED*** System.out.println(a.getStagingHost());
CheckMethodStaging a = new CheckMethodStaging();
***REMOVED***a.getFiles("/trunk/data-analysis/RConfiguration/RPackagesManagement/r_deb_pkgs.txt, /trunk/data-analysis/RConfiguration/RPackagesManagement/r_cran_pkgs.txt, /trunk/data-analysis/RConfiguration/RPackagesManagement/r_github_pkgs.txt");
***REMOVED*** File aa = new File("OCTAVEBLACKBOX.jar");
***REMOVED*** System.out.println(aa.getName());
***REMOVED*** System.out.println(aa.getPath());
***REMOVED***a.copyFromDmToSVN(aa);
***REMOVED*** if (a.checkMethod("dataminer-ghost-d.dev.d4science.org", "708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548")){
***REMOVED*** System.out.println("AAA");***REMOVED***
***REMOVED***
***REMOVED*** if (a.doesExist("/home/gcube/wps_algorithms/algorithms/WINDOWS_BLACK_BOX_EXAMPLE.jar")){
***REMOVED*** System.out.println("BBBB");
***REMOVED***
***REMOVED******REMOVED***
***REMOVED*** if (a.doesExist("/home/gcube/wps_algorithms/algorithms/WINDOWS_BLACK_BOX_EXAMPLE_interface.jar")){
***REMOVED*** System.out.println("CCCC");***REMOVED***
***REMOVED***
***REMOVED*** File aa = new File("/home/gcube/wps_algorithms/algorithms/RBLACKBOX_interface.jar");
***REMOVED*** a.copyFromDmToSVN(aa, "Dev");
***REMOVED***
***REMOVED***System.out.println(a.checkMethod("dataminer-ghost-t.pre.d4science.org",
***REMOVED*** "2eceaf27-0e22-4dbe-8075-e09eff199bf9-98187548"));
***REMOVED***System.out.println(a.checkMethod("dataminer-proto-ghost.d4science.org",
***REMOVED*** "3a23bfa4-4dfe-44fc-988f-194b91071dd2-843339462"));
System.out.println(a.checkMethod("dataminer-ghost-d.dev.d4science.org",
"708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548"));
***REMOVED***Algorithm aa = new Algorithm();
***REMOVED***aa.setName("UDPIPE_WRAPPER");
***REMOVED***System.out.println(a.algoExists(aa));
***REMOVED******REMOVED***
***REMOVED***ServiceConfiguration bp = new ServiceConfiguration();
***REMOVED******REMOVED***
***REMOVED***SecurityTokenProvider.instance.set("708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548");
***REMOVED******REMOVED***
***REMOVED***if (a.checkMethod(bp.getStagingHost(), SecurityTokenProvider.instance.get())&&a.algoExists(aa)); {
***REMOVED***System.out.println("ciao");
***REMOVED***
***REMOVED******REMOVED***
***REMOVED***
***REMOVED***Algorithm al = new Algorithm();
***REMOVED*** al.setName("UDPIPE_WRAPPER");
***REMOVED*** a.deleteFiles(al);
***REMOVED***
***REMOVED***
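CheckMethodProduction and CheckMethodStaging differ only in which Configuration they read from DMPMClientConfiguratorManager and which SVNUpdater subclass they use when copying jars to SVN. A selection sketch in the spirit of the test mains above (the useStaging flag is illustrative; algoExists throws Exception, so this belongs in a method that declares it):

boolean useStaging = true; // illustrative flag
CheckMethod checker = useStaging ? new CheckMethodStaging() : new CheckMethodProduction();
Algorithm algo = new Algorithm();
algo.setName("UDPIPE_WRAPPER");
// algoExists also copies <name>.jar and <name>_interface.jar to SVN when both are found
System.out.println("installed: " + checker.algoExists(algo));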

View File

@@ -0,0 +1,14 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util.impl;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.util.ClusterBuilder;
public class ClusterBuilderProduction extends ClusterBuilder{
public ClusterBuilderProduction() {
super (DMPMClientConfiguratorManager.getInstance().getProductionConfiguration());
***REMOVED***
***REMOVED***

View File

@@ -0,0 +1,13 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util.impl;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.util.ClusterBuilder;
public class ClusterBuilderStaging extends ClusterBuilder{
public ClusterBuilderStaging() {
super (DMPMClientConfiguratorManager.getInstance().getStagingConfiguration());
***REMOVED***
***REMOVED***
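The two ClusterBuilder subclasses only bind the matching Configuration in their constructors, so the environment is chosen once at construction time and every later call operates against that configuration. A construction-only sketch (no other ClusterBuilder methods are shown in this commit, so none are invoked here):

ClusterBuilder stagingBuilder = new ClusterBuilderStaging();       // bound to the staging Configuration
ClusterBuilder productionBuilder = new ClusterBuilderProduction(); // bound to the production Configuration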

View File

@@ -0,0 +1,30 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util.impl;
import org.gcube.dataanalysis.dataminer.poolmanager.util.NotificationHelper;
***REMOVED***import scala.actors.threadpool.Arrays;
public class NotificationHelperProduction extends NotificationHelper{
***REMOVED*** private Exception executionException;
private String getSubjectHeader() {
return "[DataMinerGhostProductionInstallationRequestReport]";
***REMOVED***
@Override
public String getSuccessSubject() {
return this.getSubjectHeader()+" is SUCCESS";
***REMOVED***
@Override
public String getFailedSubject() {
return this.getSubjectHeader()+" is FAILED";
***REMOVED***
***REMOVED***

View File

@@ -0,0 +1,32 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util.impl;
import org.gcube.dataanalysis.dataminer.poolmanager.util.NotificationHelper;
***REMOVED***import scala.actors.threadpool.Arrays;
public class NotificationHelperStaging extends NotificationHelper {
***REMOVED*** private Exception executionException;
private String getSubjectHeader() {
return "[DataMinerGhostStagingInstallationRequestReport]";
***REMOVED***
@Override
public String getSuccessSubject() {
return this.getSubjectHeader()+" is SUCCESS";
***REMOVED***
@Override
public String getFailedSubject() {
return this.getSubjectHeader()+" is FAILED";
***REMOVED***
***REMOVED***
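Both NotificationHelper implementations vary only the subject header; the success and failure subjects are derived from it. A short sketch of the strings they produce (the helper variable is illustrative):

NotificationHelper helper = new NotificationHelperStaging();
// prints "[DataMinerGhostStagingInstallationRequestReport] is SUCCESS"
System.out.println(helper.getSuccessSubject());
// prints "[DataMinerGhostStagingInstallationRequestReport] is FAILED"
System.out.println(helper.getFailedSubject());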

View File

@@ -0,0 +1,21 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util.impl;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.tmatesoft.svn.core.SVNException;
/**
* Created by ggiammat on 5/9/17.
*/
public class SVNUpdaterProduction extends SVNUpdater{
public SVNUpdaterProduction() throws SVNException {
super (DMPMClientConfiguratorManager.getInstance().getProductionConfiguration());
***REMOVED***
***REMOVED***

View File

@@ -0,0 +1,21 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util.impl;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.configuration.DMPMClientConfiguratorManager;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.tmatesoft.svn.core.SVNException;
/**
* Created by ggiammat on 5/9/17.
*/
public class SVNUpdaterStaging extends SVNUpdater{
public SVNUpdaterStaging() throws SVNException {
super (DMPMClientConfiguratorManager.getInstance().getStagingConfiguration());
***REMOVED***
***REMOVED***
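With the Configuration now passed through the constructor, callers instantiate SVNUpdaterStaging or SVNUpdaterProduction instead of the old no-arg SVNUpdater, as the updated JobTest and SVNTests below show. A minimal sketch, meant for a test-style main like those below (the constructor throws SVNException; the descriptor URL, target VRE and user values are illustrative):

String algorithmDescriptorUrl = "..."; // illustrative: algorithm descriptor URL, as used in SVNTests below
Algorithm algo = AlgorithmBuilder.create(algorithmDescriptorUrl);
SVNUpdater svnUpdater = new SVNUpdaterStaging();
// registers the algorithm in the staging SVN algorithm list
svnUpdater.updateSVNStagingAlgorithmList(algo, "/gcube/devNext/NextNext",
        algo.getCategory(), algo.getAlgorithmType(), "");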

View File

@@ -6,6 +6,7 @@ package org.gcube.dataanalysis.dataminerpoolmanager;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.util.AlgorithmBuilder;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.SVNUpdaterProduction;
import org.tmatesoft.svn.core.SVNException;
public class JobTest {
@@ -14,7 +15,7 @@ public class JobTest {
ScopeProvider.instance.set("/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab");
SVNUpdater svnUpdater = new SVNUpdater();
SVNUpdater svnUpdater = new SVNUpdaterProduction();
Algorithm algo = AlgorithmBuilder.create("http:***REMOVED***data.d4science.org/dENQTTMxdjNZcGRpK0NHd2pvU0owMFFzN0VWemw3Zy9HbWJQNStIS0N6Yz0");
***REMOVED***test phase

View File

@@ -5,6 +5,7 @@ package org.gcube.dataanalysis.dataminerpoolmanager;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.util.AlgorithmBuilder;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.gcube.dataanalysis.dataminer.poolmanager.util.impl.SVNUpdaterStaging;
import org.tmatesoft.svn.core.SVNException;
/**
@@ -17,7 +18,7 @@ public class SVNTests {
public static void main(String[] args) throws SVNException, IOException, InterruptedException {
SVNUpdater svnUpdater = new SVNUpdater();
SVNUpdater svnUpdater = new SVNUpdaterStaging();
Algorithm algo = AlgorithmBuilder.create("http:***REMOVED***data.d4science.org/YjJ3TmJab1dqYzVoTmppdjlsK0l0b1ZXWGtzWlQ1NHNHbWJQNStIS0N6Yz0");
***REMOVED***algo.setClazz(algo.getClazz() + "TEST");
@@ -27,7 +28,7 @@ public class SVNTests {
***REMOVED***System.out.println(algo.getCategory());
***REMOVED***System.out.println(algo.getSkipJava());
svnUpdater.updateSVNAlgorithmList("/trunk/data-analysis/DataMinerConfiguration/algorithms/dev/algorithms", algo, "/gcube/devNext/NextNext", algo.getCategory(), algo.getAlgorithmType(), "");
***REMOVED***svnUpdater.updateSVNAlgorithmList("/trunk/data-analysis/DataMinerConfiguration/algorithms/dev/algorithms", algo, "/gcube/devNext/NextNext", algo.getCategory(), algo.getAlgorithmType(), "");
***REMOVED***svnUpdater.updateSVNRProtoAlgorithmList(algo, "/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab", "Dataminer Pool Manager", "Proto");
***REMOVED***svnUpdater.readRPRotoDeps(algo);