Nunzio Andrea Galante 2017-05-12 15:32:14 +00:00
parent 5dbe0f441e
commit 7fe368f8be
14 changed files with 367 additions and 298 deletions

View File: HAProxy.java

@@ -5,6 +5,8 @@ import java.io.BufferedReader;
 import java.io.InputStreamReader;
 ***REMOVED***
 import java.util.LinkedList;
+***REMOVED***
 ***REMOVED***
 ***REMOVED***
 import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
@@ -74,7 +76,11 @@ public class HAProxy {
 	}

-	public List<Host> listDataMinersByCluster() throws IOException {
+	public List<Host> listDataMinersByCluster(String targetVREToken,String targetVRE) throws IOException {
+		SecurityTokenProvider.instance.set(targetVREToken);
+		ScopeProvider.instance.set(targetVRE);
 		// next op to use when Cluster info available in the IS
 		// Cluster cluster = this.getClusterByHProxy();
 		Cluster cluster = this.MapCluster();
@@ -94,9 +100,9 @@ public class HAProxy {
 			}}
 		else {
 			// prod
-			//URL stockURL = new
-			//URL("http://data.d4science.org/Yk4zSFF6V3JOSytNd3JkRDlnRFpDUUR5TnRJZEw2QjRHbWJQNStIS0N6Yz0");
-			URL stockURL = new URL("http://"+ ISClient.getHProxy() +":8880/;csv");
+			URL stockURL = new
+			URL("http://data.d4science.org/Yk4zSFF6V3JOSytNd3JkRDlnRFpDUUR5TnRJZEw2QjRHbWJQNStIS0N6Yz0");
+			//URL stockURL = new URL("http://"+ ISClient.getHProxy() +":8880/;csv");
 			//URL stockURL = new URL("http://data.d4science.org/c29KTUluTkZnRlB0WXE5NVNaZnRoR0dtYThUSmNTVlhHbWJQNStIS0N6Yz0");
 			//System.out.println(stockURL);
 			// dev
@@ -124,11 +130,14 @@ public class HAProxy {
 	public static void main(String[] args) throws IOException, SVNException {
 		HAProxy a = new HAProxy();
-		ScopeProvider.instance.set("/gcube/devNext/NextNext");
+		//ScopeProvider.instance.set("/gcube/devNext/NextNext");
+		ScopeProvider.instance.set("/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab");
+		//SecurityTokenProvider.instance.set("***REMOVED***");
 		//ScopeProvider.instance.set("/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab");
 		// System.out.println(a.getHProxy());
 		// System.out.println(a.MapCluster());
-		System.out.println(a.listDataMinersByCluster());
+		System.out.println(a.listDataMinersByCluster("***REMOVED***","/gcube/devNext/NextNext"));
 		// System.out.println(a.listDataMinersByCluster());
 		// List<Dependency> list = new LinkedList<Dependency>();

View File: Algorithm.java

@@ -12,11 +12,11 @@ public class Algorithm {
 	private String name;
 	private String description;
-	private String category;
 	private String clazz;
+	private String category;
 	private String algorithmType;
 	private String skipJava;
@@ -31,6 +31,10 @@ public class Algorithm {
 		this.actions = new Vector<>();
 		this.dependencies = new Vector<>();
 		Dependency p = new Dependency();
+		//init with default values
+		this.skipJava = "N";
+		this.algorithmType = "transducerers";
 	}

 	public void addDependency(Dependency dep) {

View File: Host.java

@@ -6,10 +6,15 @@ public class Host {
 	private Domain domain;

-	public Host() {
+	public Host(String hostname) {
+		this.setName(hostname);
 	}

+	public Host() {
+		// TODO Auto-generated constructor stub
+	}
+
 	public String getFullyQualifiedName() {
 		if(this.domain!=null && this.domain.getName()!=null)
 			return this.getName()+"."+this.getDomain().getName();
 		else

View File: PoolManager.java

@@ -44,8 +44,8 @@ public interface PoolManager {
 	String addAlgorithmToHost(Algorithm algo, String host,boolean test) throws IOException, InterruptedException;

-	String stageAlgorithm(String algorithmPackageURL) throws IOException, InterruptedException;
-	String publishAlgorithm(String algorithmPackageURL, String targetVREToken) throws IOException, InterruptedException;
+	String stageAlgorithm(String algorithmPackageURL, String category) throws IOException, InterruptedException;
+	String publishAlgorithm(String algorithmPackageURL, String targetVREToken, String targetVRE, String category) throws IOException, InterruptedException;

 	Algorithm extractAlgorithm(String url) throws IOException;

View File: DMPMJob.java

@@ -0,0 +1,100 @@
package org.gcube.dataanalysis.dataminer.poolmanager.service;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;

import org.gcube.dataanalysis.dataminer.poolmanager.ansible.AnsibleWorker;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleBridge;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.tmatesoft.svn.core.SVNException;

public abstract class DMPMJob {

	protected SVNUpdater svnUpdater;
	protected File jobLogs;
	protected String id;

	public DMPMJob(SVNUpdater svnUpdater){
		this.svnUpdater = svnUpdater;
		this.id = UUID.randomUUID().toString();
		//TODO: dmpm work directory should be loaded from configuration file
		this.jobLogs = new File(System.getProperty("user.home")+File.separator+"dataminer-pool-manager"+File.separator+"jobs");
		this.jobLogs.mkdirs();
	}

	public String start(){
		new Thread(new Runnable() {
			@Override
			public void run() {
				try {
					execute();
				} catch (Exception e) {
					e.printStackTrace();
				}
			}
		}).start();
		return this.id;
	}

	protected AnsibleWorker createWorker(Algorithm algo,
			Cluster dataminerCluster,
			boolean includeAlgorithmDependencies,
			String user){
		AnsibleBridge ansibleBridge = new AnsibleBridge();
		try {
			return ansibleBridge.createWorker(algo, dataminerCluster, includeAlgorithmDependencies, user);
		} catch (IOException e) {
			e.printStackTrace();
		}
		return null;
	}

	protected abstract void execute();

	protected int executeAnsibleWorker(AnsibleWorker worker) throws IOException, InterruptedException, SVNException{
		File path = new File(worker.getWorkdir() + File.separator + "jobs");
		path.mkdirs();
		File n = new File(this.jobLogs + File.separator + this.id);
		FileOutputStream fos = new FileOutputStream(n);
		PrintStream ps = new PrintStream(fos);
		return worker.execute(ps);
	}

	protected void updateSVNDependencies(Algorithm algo, boolean stagingVRE) throws IOException, SVNException {
		for (Dependency d : algo.getDependencies()) {
			if (d.getType().equals("os")) {
				List<String> ls = new LinkedList<String>();
				ls.add(d.getName());
				this.svnUpdater.updateSVN((stagingVRE ? "test_": "") + "r_deb_pkgs.txt", ls);
			}
			if (d.getType().equals("cran")) {
				List<String> ls = new LinkedList<String>();
				ls.add(d.getName());
				this.svnUpdater.updateSVN((stagingVRE ? "test_": "") + "r_cran_pkgs.txt", ls);
			}
			if (d.getType().equals("github")) {
				List<String> ls = new LinkedList<String>();
				ls.add(d.getName());
				this.svnUpdater.updateSVN((stagingVRE ? "test_": "") + "r_github_pkgs.txt", ls);
			}
		}
	}
}

View File: DataminerPoolManager.java

@@ -1,40 +1,20 @@
 package org.gcube.dataanalysis.dataminer.poolmanager.service;

+import java.io.File;
+import java.io.FileNotFoundException;
 ***REMOVED***
-import org.gcube.common.resources.gcore.GenericResource;
-import org.gcube.common.resources.gcore.Resources;
+import java.util.Scanner;
 ***REMOVED***
-import org.gcube.dataanalysis.dataminer.poolmanager.ansible.AnsibleWorker;
-import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleBridge;
-import org.gcube.dataanalysis.dataminer.poolmanager.clients.HAProxy;
-import org.gcube.dataanalysis.dataminer.poolmanager.clients.ISClient;
-import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.*;
+import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
+import org.gcube.dataanalysis.dataminer.poolmanager.util.ClusterBuilder;
 import org.gcube.dataanalysis.dataminer.poolmanager.util.Props;
 import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
-import org.gcube.informationsystem.publisher.AdvancedScopedPublisher;
-import org.gcube.informationsystem.publisher.RegistryPublisherFactory;
-import org.gcube.informationsystem.publisher.ScopedPublisher;
-import org.gcube.informationsystem.publisher.exception.RegistryNotFoundException;
-import org.gcube.resources.discovery.client.api.DiscoveryClient;
-import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
 ***REMOVED***
-***REMOVED***
-***REMOVED***
-import java.io.*;
-***REMOVED***
-***REMOVED***
-import java.net.URLConnection;
-***REMOVED***
-import java.util.*;
-import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
-import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;

 public class DataminerPoolManager {

 	private SVNUpdater svnUpdater;

 	public DataminerPoolManager(){
@@ -50,260 +30,30 @@ public class DataminerPoolManager {
 			***REMOVED***Algorithm algo) throws IOException, InterruptedException {

-		Cluster cluster = getStagingDataminerCluster();
-		//Assumes the service is running in RPrototypingLab
-		String token = SecurityTokenProvider.instance.get();
-		return addAlgorithmToCluster(algo, cluster, true, "root", true, token);
+		Cluster stagingCluster = ClusterBuilder.getStagingDataminerCluster();
+		Cluster rProtoCluster = ClusterBuilder.getRProtoCluster();
+		DMPMJob job = new StagingJob(this.svnUpdater, algo, stagingCluster, rProtoCluster);
+		String id = job.start();
+		return id;
 	}

-	***REMOVED***Algorithm algo, String targetVREToken) throws IOException, InterruptedException {
-		Cluster cluster = new Cluster();
-		SecurityTokenProvider.instance.set(targetVREToken);
-		for (Host h : new HAProxy().listDataMinersByCluster()) {
-			cluster.addHost(h);
-		}
-		return addAlgorithmToCluster(algo, cluster, false, "gcube", false, targetVREToken);
+	***REMOVED***Algorithm algo, String targetVREToken, String targetVRE) throws IOException, InterruptedException {
+		Cluster prodCluster = ClusterBuilder.getVRECluster(targetVREToken, targetVRE);
+		DMPMJob job = new ProductionPublishingJob(this.svnUpdater, algo, prodCluster);
+		String id = job.start();
+		return id;
 	}

-	//1. to complete
-	private Cluster getStagingDataminerCluster(){
-		Cluster cluster = new Cluster();
-		Host h = new Host();
-		Props p = new Props();
-		//TODO: read this from configuration or IS?
-		h.setName(p.getStagingHost());
-		cluster.addHost(h);
-		return cluster;
-	}
-
-	private void createISResource(Algorithm algo, String vreToken) {
-		ISClient client = new ISClient();
-		for (Algorithm a : client.getAlgoFromIs()) {
-			if (a.getName().equals(algo.getName())) {
-				continue;
-			}
-			if (!a.getName().equals(algo.getName())){
-				new ISClient().addAlgToIs(algo, vreToken);
-			}
-		}
-		// TODO: create the resource only if not already present
 	}

-	private void updateSVNDependencies(Algorithm algo, boolean stagingVRE) throws IOException, SVNException {
-		for (Dependency d : algo.getDependencies()) {
-			if (d.getType().equals("os")) {
-				List<String> ls = new LinkedList<String>();
-				ls.add(d.getName());
-				this.svnUpdater.updateSVN(stagingVRE ? "test_": "" + "r_deb_pkgs.txt", ls);
-			}
-			if (d.getType().equals("cran")) {
-				List<String> ls = new LinkedList<String>();
-				ls.add(d.getName());
-				this.svnUpdater.updateSVN(stagingVRE ? "test_": "" + "r_cran_pkgs.txt", ls);
-			}
-			if (d.getType().equals("github")) {
-				List<String> ls = new LinkedList<String>();
-				ls.add(d.getName());
-				this.svnUpdater.updateSVN(stagingVRE ? "test_": "" + "r_github_pkgs.txt", ls);
-			}
-		}
+	public String getLogById(String id) throws FileNotFoundException{
+		//TODO: load dir from configuration file
+		File path = new File(System.getProperty("user.home") + File.separator + "dataminer-pool-manager/jobs/"
+				+ id);
+		return new Scanner(path).useDelimiter("\\Z").next();
 	}

-	/**
-	 *
-	 * @param algo
-	 * @param dataminerCluster
-	 * @return uuid of the execution
-	 */
-	public String addAlgorithmToCluster(
-			final Algorithm algo,
-			Cluster dataminerCluster,
-			boolean includeAlgorithmDependencies,
-			String user,
-			final boolean stagingVRE,
-			final String targetVREToken) throws IOException {
-		AnsibleBridge ansibleBridge = new AnsibleBridge();
-		final AnsibleWorker worker = ansibleBridge.createWorker(algo, dataminerCluster, includeAlgorithmDependencies, user);
-
-		new Thread(new Runnable() {
-			@Override
-			public void run() {
-				try {
-					File path = new File(worker.getWorkdir() + File.separator + "logs");
-					path.mkdirs();
-					File n = new File(path + File.separator + worker.getWorkerId());
-					FileOutputStream fos = new FileOutputStream(n);
-					PrintStream ps = new PrintStream(fos);
-					int retValue = worker.execute(ps);
-					System.out.println("Log stored to to " + n.getAbsolutePath());
-					if(retValue == 0) {
-						updateSVNDependencies(algo, stagingVRE);
-						//createISResource(algo, targetVREToken);
-					}
-					// destroy the worker
-					worker.destroy();
-				} catch (Exception e) {
-					e.printStackTrace();
-				}
-			}
-		}).start();
-		// this line will execute immediately, not waiting for task to
-		// complete
-		return worker.getWorkerId();
-	}
-
-	public String getScriptFromURL(URL url) throws IOException {
-		if (url == null) {
-			***REMOVED***
-		}
-		URLConnection yc = url.openConnection();
-		BufferedReader input = new BufferedReader(new InputStreamReader(yc.getInputStream()));
-		String line;
-		StringBuffer buffer = new StringBuffer();
-		while ((line = input.readLine()) != null) {
-			buffer.append(line + "\n");
-		}
-		String bufferScript = buffer.substring(0, buffer.length());
-		input.close();
-		return bufferScript;
-	}
-
-//	public String addAlgorithmToVRE(Algorithm algorithm, final String vre, /*final boolean updateSVN*/ final boolean test) throws IOException {
-//		// create a fake algorithm set
-//		final AlgorithmSet algoSet = new AlgorithmSet();
-//		algoSet.setName("fake");
-//		algoSet.addAlgorithm(algorithm);
-//		final String uuid = UUID.randomUUID().toString();
-//
-//		new Thread(new Runnable() {
-//			@Override
-//			public void run() {
-//				***REMOVED***
-//				try {
-//					try {
-//						addAlgorithmsToVRE(algoSet, vre, uuid, /*updateSVN*/test);
-//					} catch (SVNException e) {
-//						// TODO Auto-generated catch block
-//						e.printStackTrace();
-//					}
-//				} catch (IOException e) {
-//					// TODO Auto-generated catch block
-//					e.printStackTrace();
-//				} catch (InterruptedException e) {
-//					// TODO Auto-generated catch block
-//					e.printStackTrace();
-//				}
-//			}
-//		}).start();
-//		// this line will execute immediately, not waiting for task to
-//		// complete
-//		System.out.println(uuid);
-//		return uuid;
-//	}
-
-//	public String addAlgorithmToHost(Algorithm algorithm, final String hostname, /*final boolean updateSVN*/ final boolean test) throws IOException {
-//		// create a fake algorithm set
-//		final AlgorithmSet algoSet = new AlgorithmSet();
-//		algoSet.setName("fake");
-//		algoSet.addAlgorithm(algorithm);
-//		final String uuid = UUID.randomUUID().toString();
-//
-//		new Thread(new Runnable() {
-//			@Override
-//			public void run() {
-//				***REMOVED***
-//				try {
-//					if(test){
-//						addAlgorithmsToStagingHost(algoSet, hostname, uuid, /*updateSVN,*/test);}
-//				} catch (IOException e) {
-//					// TODO Auto-generated catch block
-//					e.printStackTrace();
-//				} catch (InterruptedException e) {
-//					// TODO Auto-generated catch block
-//					e.printStackTrace();
-//				} catch (SVNException e) {
-//					// TODO Auto-generated catch block
-//					e.printStackTrace();
-//				}
-//			}
-//		}).start();
-//		// this line will execute immediately, not waiting for your task to
-//		// complete
-//		System.out.println(uuid);
-//		return uuid;
-//	}
-
-	public URL getURLfromWorkerLog(String a) throws MalformedURLException, UnknownHostException {
-		File path = new File(System.getProperty("user.home") + File.separator + "dataminer-pool-manager/work/"
-				+ a + File.separator + "logs");
-		path.mkdirs();
-		File n = new File(path + File.separator + a);
-		// String addr = InetAddress.getLocalHost().getHostAddress();
-		return new File(n.getPath()).toURI().toURL();
-	}
-
-//	public String addAlgorithmsToVRE(AlgorithmSet algorithms, String vre, String uuid, /*boolean updateSVN,*/ boolean test) throws IOException, InterruptedException, SVNException {
-//
-//		// create the cluster (dataminers in the vre)
-//		Cluster cluster = new Cluster();
-//		for (Host h : new HAProxy().listDataMinersByCluster()) {
-//		//for (Host h : new ISClient().listDataminersInVRE()) {
-//			cluster.addHost(h);
-//		}
-//
-//		// apply the changes
-//		AnsibleBridge a = new AnsibleBridge();
-//		return a.applyAlgorithmSetToCluster(algorithms, cluster, uuid, /*updateSVN,*/ test).getWorkerId();
-//
-//	}
-
-//	public String addAlgorithmsToHost(AlgorithmSet algorithms, String hostname, String uuid, /*boolean updateSVN,*/boolean test)
-//			throws IOException, InterruptedException, SVNException {
-//
-//		// create the cluster (dataminers in the vre)
-//		Cluster cluster = new Cluster();
-//		for (Host h : new HAProxy().listDataMinersByCluster()) {
-//			if (h.getName().equals(hostname)) {
-//				cluster.addHost(h);
-//			}
-//		}
-//		// if(ISClient.getHProxy().equals(hostname)){
-//		// cluster.addHost(new ISClient().getDataminer(hostname));
-//		// }
-//		// apply the changes
-//		AnsibleBridge a = new AnsibleBridge();
-//		return a.applyAlgorithmSetToCluster(algorithms, cluster, uuid, /*updateSVN,*/test).getWorkerId();
-//
-//	}
-//
-//	public String addAlgorithmsToStagingHost(AlgorithmSet algorithms, String hostname, String uuid, /*boolean updateSVN,*/boolean test)
-//			throws IOException, InterruptedException, SVNException {
-//		Cluster cluster = new Cluster();
-//		Host h = new Host();
-//		h.setName(hostname);
-//		cluster.addHost(h);
-//
-//		AnsibleBridge a = new AnsibleBridge();
-//		return a.applyAlgorithmSetToCluster(algorithms, cluster, uuid, /*updateSVN,*/test).getWorkerId();
-//
-//	}

 }

View File: ProductionPublishingJob.java

@@ -0,0 +1,33 @@
package org.gcube.dataanalysis.dataminer.poolmanager.service;

import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;

public class ProductionPublishingJob extends DMPMJob {

	private Algorithm algorithm;
	private Cluster prodCluster;

	public ProductionPublishingJob(SVNUpdater svnUpdater, Algorithm algorithm, Cluster prodCluster) {
		super(svnUpdater);
		this.algorithm = algorithm;
		this.prodCluster = prodCluster;
	}

	@Override
	protected void execute() {
		try {
			this.updateSVNDependencies(this.algorithm, false);
			int ret = this.executeAnsibleWorker(
					createWorker(this.algorithm, this.prodCluster, false, "gcube"));
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}

View File: StagingJob.java

@@ -0,0 +1,46 @@
package org.gcube.dataanalysis.dataminer.poolmanager.service;

import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;

public class StagingJob extends DMPMJob {

	private Algorithm algorithm;
	private Cluster stagingCluster;
	private Cluster rProtoCluster;

	public StagingJob(SVNUpdater svnUpdater, Algorithm algorithm, Cluster stagingCluster, Cluster rProtoCluster) {
		super(svnUpdater);
		this.algorithm = algorithm;
		this.stagingCluster = stagingCluster;
		this.rProtoCluster = rProtoCluster;
	}

	@Override
	protected void execute() {
		try {
			int ret = this.executeAnsibleWorker(
					createWorker(this.algorithm, this.stagingCluster, true, "root"));
			if(ret == 0){
				this.updateSVNDependencies(this.algorithm, true);
				int ret2 = this.executeAnsibleWorker(
						createWorker(this.algorithm, this.rProtoCluster, false, "gcube"));
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}

View File: AlgorithmBuilder.java

@@ -11,13 +11,15 @@ import org.gcube.dataanalysis.dataminer.poolmanager.process.AlgorithmPackageParser;
 public class AlgorithmBuilder {

-	public static Algorithm create(String algorithmPackageURL) throws IOException, InterruptedException {
-		return create(algorithmPackageURL, null, null, null, null, null, null, null);
+	public static Algorithm create(String algorithmPackageURL, String category) throws IOException, InterruptedException {
+		return create(algorithmPackageURL, null, null, null, null, category, null, null);
 	}

 	public static Algorithm create(String algorithmPackageURL, String vre, String hostname, String name, String description,
 			String category, String algorithmType, String skipJava) throws IOException, InterruptedException {
-		Algorithm algo = new AlgorithmPackageParser().parsePackage(algorithmPackageURL);
+
+		Algorithm algo = new AlgorithmPackageParser().parsePackage(algorithmPackageURL);

 		if(category != null){

View File: ClusterBuilder.java

@@ -0,0 +1,44 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util;

import java.io.IOException;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.HAProxy;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;

public class ClusterBuilder {

	//1. to complete
	public static Cluster getStagingDataminerCluster(){
		Cluster cluster = new Cluster();
		Props p = new Props();
		Host h = new Host(p.getStagingHost());
		//TODO: read this from configuration or IS?
		h.setName(p.getStagingHost());
		cluster.addHost(h);
		return cluster;
	}

	public static Cluster getVRECluster(String targetVREToken, String targetVRE) throws IOException{
		Cluster cluster = new Cluster();
		for (Host h : new HAProxy().listDataMinersByCluster(targetVREToken,targetVRE)) {
			cluster.addHost(h);
		}
		return cluster;
	}

	public static Cluster getRProtoCluster() throws IOException{
		//Assumes the service is running in RPrototypingLab
		String token = SecurityTokenProvider.instance.get();
		String targetVRE = ScopeProvider.instance.get();
		return getVRECluster(token, targetVRE);
	}
}

View File: SVNUpdater.java

@@ -25,9 +25,8 @@ public class SVNUpdater {
 	private SVNRepository svnRepository;

 	public SVNUpdater(String rootURL) throws SVNException {
-		final SVNRepository svnRepository = this.getSvnRepository(
+		this.svnRepository = this.getSvnRepository(
 				rootURL);
 	}
@@ -35,7 +34,7 @@ public class SVNUpdater {
 		SVNRepository repository = SVNRepositoryFactory.create(SVNURL.parseURIEncoded(url));
 		ISVNAuthenticationManager authManager = SVNWCUtil.createDefaultAuthenticationManager();
 		repository.setAuthenticationManager(authManager);
-		//System.out.println(repository.getLocation());
+		System.out.println(repository.getLocation());
 		return repository;
 	}
@@ -43,7 +42,7 @@ public class SVNUpdater {
-	public List<String> updateSVN(String file, List<String> ldep) throws SVNException, IOException {
+	public void updateSVN(String file, List<String> ldep) {
 		try {
 			final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
 			svnRepository.getFile(file, SVNRepository.INVALID_REVISION, null, byteArrayOutputStream);
@@ -73,9 +72,13 @@ public class SVNUpdater {
 					new ByteArrayInputStream(bytes), commitEditor, true);
 			commitEditor.closeFile(file, checksum);
 			commitEditor.closeEdit();
-			return aa;
+		} catch(Exception ex){
+			ex.printStackTrace();
+		}
 		finally {
 			svnRepository.closeSession();
 		}
 	}

View File

@@ -1,4 +1,4 @@
 #YML node file
-STAGING_HOST: dataminer1-devnext.d4science.org
+STAGING_HOST: dataminer-proto-ghost.d4science.org
 SVN_REPO: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/RConfiguration/RPackagesManagement/
 #HAPROXY_CSV: http://data.d4science.org/Yk4zSFF6V3JOSytNd3JkRDlnRFpDUUR5TnRJZEw2QjRHbWJQNStIS0N6Yz0

View File: JobTest.java

@@ -0,0 +1,46 @@
package org.gcube.dataanalysis.dataminerpoolmanager;

import java.io.IOException;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.service.DMPMJob;
import org.gcube.dataanalysis.dataminer.poolmanager.service.ProductionPublishingJob;
import org.gcube.dataanalysis.dataminer.poolmanager.service.StagingJob;
import org.gcube.dataanalysis.dataminer.poolmanager.util.AlgorithmBuilder;
import org.gcube.dataanalysis.dataminer.poolmanager.util.ClusterBuilder;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.tmatesoft.svn.core.SVNException;

public class JobTest {

	public static void main(String[] args) throws SVNException, IOException, InterruptedException{

		ScopeProvider.instance.set("/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab");

		SVNUpdater svnUpdater = new SVNUpdater("https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/RConfiguration/RPackagesManagement/");

		Algorithm algo = AlgorithmBuilder.create("http://data.d4science.org/dENQTTMxdjNZcGRpK0NHd2pvU0owMFFzN0VWemw3Zy9HbWJQNStIS0N6Yz0", "ICHTHYOP_MODEL");

		Cluster stagingCluster = ClusterBuilder.getStagingDataminerCluster();
		Cluster rProtoCluster = ClusterBuilder.getRProtoCluster();

		DMPMJob job = new StagingJob(svnUpdater, algo, stagingCluster, rProtoCluster);
		job.start();

		// Cluster prodCluster = ClusterBuilder.getVRECluster(targetVREToken, targetVRE);
		//
		// DMPMJob job2 = new ProductionPublishingJob(svnUpdater, algo, prodCluster);
	}
}

View File: SVNUpdaterTest.java

@@ -0,0 +1,27 @@
package org.gcube.dataanalysis.dataminerpoolmanager;

import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.tmatesoft.svn.core.SVNException;

public class SVNUpdaterTest {

	public static void main(String[] args) throws SVNException, IOException {

		ScopeProvider.instance.set("/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab");
		SVNUpdater svnUpdater = new SVNUpdater("https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/RConfiguration/RPackagesManagement/");

		String test = "testDep";
		List<String> ldep = new LinkedList<>();
		ldep.add(test);
		svnUpdater.updateSVN("test_r_cran_pkgs.txt", ldep);
	}
}