commit 407a097b50 (parent 08188449c1)
Author: Nunzio Andrea Galante
Date:   2017-05-12 15:32:14 +00:00

15 changed files with 398 additions and 328 deletions

View File

@@ -6,6 +6,8 @@ import java.io.InputStreamReader;
import java.net.URL;
import java.util.LinkedList;
import java.util.List;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;
@@ -74,7 +76,11 @@ public class HAProxy {
}
public List<Host> listDataMinersByCluster() throws IOException {
public List<Host> listDataMinersByCluster(String targetVREToken,String targetVRE) throws IOException {
SecurityTokenProvider.instance.set(targetVREToken);
ScopeProvider.instance.set(targetVRE);
// next op to use once Cluster info is available in the IS
// Cluster cluster = this.getClusterByHProxy();
Cluster cluster = this.MapCluster();
@@ -94,9 +100,9 @@ public class HAProxy {
//}
else {
// prod
//URL stockURL = new
//URL("http://data.d4science.org/Yk4zSFF6V3JOSytNd3JkRDlnRFpDUUR5TnRJZEw2QjRHbWJQNStIS0N6Yz0");
URL stockURL = new URL("http://"+ ISClient.getHProxy() +":8880/;csv");
URL stockURL = new
URL("http://data.d4science.org/Yk4zSFF6V3JOSytNd3JkRDlnRFpDUUR5TnRJZEw2QjRHbWJQNStIS0N6Yz0");
//URL stockURL = new URL("http://"+ ISClient.getHProxy() +":8880/;csv");
//URL stockURL = new URL("http://data.d4science.org/c29KTUluTkZnRlB0WXE5NVNaZnRoR0dtYThUSmNTVlhHbWJQNStIS0N6Yz0");
//System.out.println(stockURL);
// dev
@@ -124,11 +130,14 @@ public class HAProxy {
public static void main(String[] args) throws IOException, SVNException {
HAProxy a = new HAProxy();
ScopeProvider.instance.set("/gcube/devNext/NextNext");
//ScopeProvider.instance.set("/gcube/devNext/NextNext");
ScopeProvider.instance.set("/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab");
//SecurityTokenProvider.instance.set("3a23bfa4-4dfe-44fc-988f-194b91071dd2-843339462");
//ScopeProvider.instance.set("/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab");
// System.out.println(a.getHProxy());
// System.out.println(a.MapCluster());
System.out.println(a.listDataMinersByCluster());
System.out.println(a.listDataMinersByCluster("708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548","/gcube/devNext/NextNext"));
// System.out.println(a.listDataMinersByCluster());
// List<Dependency> list = new LinkedList<Dependency>();
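A minimal usage sketch of the new two-argument signature (the wrapper class, token, and scope below are placeholders, not values from this commit):

import java.io.IOException;
import java.util.List;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.HAProxy;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;

public class HAProxyUsageSketch {
    public static void main(String[] args) throws IOException {
        HAProxy proxy = new HAProxy();
        // the method now sets SecurityTokenProvider and ScopeProvider itself,
        // so callers pass the target VRE token and scope explicitly
        List<Host> dataminers = proxy.listDataMinersByCluster("<target-vre-token>", "<target-vre-scope>");
        for (Host h : dataminers) {
            System.out.println(h.getFullyQualifiedName());
        }
    }
}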

View File

@@ -12,11 +12,11 @@ public class Algorithm {
private String name;
private String description;
private String category;
private String clazz;
private String category;
private String algorithmType;
private String skipJava;
@@ -31,6 +31,10 @@ public class Algorithm {
this.actions = new Vector<>();
this.dependencies = new Vector<>();
Dependency p = new Dependency();
//init with default values
this.skipJava = "N";
this.algorithmType = "transducerers";
}
public void addDependency(Dependency dep) {
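With the constructor defaults added above, a freshly built Algorithm is a transducerer that does not skip Java; a tiny sketch (the getter names are assumed from the field names, they do not appear in this diff):

import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;

public class AlgorithmDefaultsSketch {
    public static void main(String[] args) {
        Algorithm a = new Algorithm();
        System.out.println(a.getSkipJava());      // "N" (assumed getter)
        System.out.println(a.getAlgorithmType()); // "transducerers" (assumed getter)
    }
}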

View File

@@ -6,10 +6,15 @@ public class Host {
private Domain domain;
public Host() {
public Host(String hostname) {
this.setName(hostname);
}
public String getFullyQualifiedName() {
public Host() {
// TODO Auto-generated constructor stub
}
public String getFullyQualifiedName() {
if(this.domain!=null && this.domain.getName()!=null)
return this.getName()+"."+this.getDomain().getName();
else

View File

@@ -44,8 +44,8 @@ public interface PoolManager {
String addAlgorithmToHost(Algorithm algo, String host,boolean test) throws IOException, InterruptedException;
String stageAlgorithm(String algorithmPackageURL) throws IOException, InterruptedException;
String publishAlgorithm(String algorithmPackageURL, String targetVREToken) throws IOException, InterruptedException;
String stageAlgorithm(String algorithmPackageURL, String category) throws IOException, InterruptedException;
String publishAlgorithm(String algorithmPackageURL, String targetVREToken, String targetVRE, String category) throws IOException, InterruptedException;
Algorithm extractAlgorithm(String url) throws IOException;

View File

@@ -1,15 +1,5 @@
package org.gcube.dataanalysis.dataminer.poolmanager.rest;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.service.DataminerPoolManager;
import org.gcube.dataanalysis.dataminer.poolmanager.util.AlgorithmBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tmatesoft.svn.core.SVNException;
import javax.ws.rs.*;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
@@ -17,6 +7,18 @@ import java.net.UnknownHostException;
import java.util.List;
import java.util.Set;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.service.DataminerPoolManager;
import org.gcube.dataanalysis.dataminer.poolmanager.util.AlgorithmBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tmatesoft.svn.core.SVNException;
@Path("/")
public class RestPoolManager implements PoolManager {
@@ -26,12 +28,11 @@ public class RestPoolManager implements PoolManager {
//TODO: for all methods accept also a callback url to send the log.
@GET
@Path("/algorithm/stage")
@Produces("text/plain")
public String stageAlgorithm(String algorithmPackageURL) throws IOException, InterruptedException {
Algorithm algo = AlgorithmBuilder.create(algorithmPackageURL);
public String stageAlgorithm(String algorithmPackageURL, String category) throws IOException, InterruptedException {
Algorithm algo = AlgorithmBuilder.create(algorithmPackageURL, category);
return this.service.stageAlgorithm(algo);
}
@@ -39,9 +40,9 @@ public class RestPoolManager implements PoolManager {
@GET
@Path("/algorithm/add")
@Produces("text/plain")
public String publishAlgorithm(String algorithmPackageURL, String targetVREToken) throws IOException, InterruptedException {
Algorithm algo = AlgorithmBuilder.create(algorithmPackageURL);
return this.service.publishAlgorithm(algo, targetVREToken);
public String publishAlgorithm(String algorithmPackageURL, String targetVREToken, String targetVRE, String category) throws IOException, InterruptedException {
Algorithm algo = AlgorithmBuilder.create(algorithmPackageURL, category);
return this.service.publishAlgorithm(algo, targetVREToken, targetVRE);
}
/*
@@ -55,7 +56,7 @@ public class RestPoolManager implements PoolManager {
public String getLogById(@QueryParam("logUrl") String logUrl) throws IOException {
// TODO Auto-generated method stub
LOGGER.debug("Returning Log =" + logUrl);
return service.getScriptFromURL(service.getURLfromWorkerLog(logUrl));
return service.getLogById(logUrl);
}
@Override
@@ -65,17 +66,17 @@
}
public static void main(String[] args) throws IOException, InterruptedException, SVNException {
// System.out.println(System.getProperty("user.home")+File.separator+"/gcube/dataminer-pool-manager");
// ProxySelector.setDefault(new
// PropertiesBasedProxySelector("/home/ngalante/.proxy-settings"));
ScopeProvider.instance.set("/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab");
//SecurityTokenProvider.instance.set("3a23bfa4-4dfe-44fc-988f-194b91071dd2-843339462");
RestPoolManager a = new RestPoolManager();
//a.stageAlgorithm("http://data.d4science.org/MnovRjZIdGV5WlB0WXE5NVNaZnRoRVg0SU8xZWpWQlFHbWJQNStIS0N6Yz0");
a.publishAlgorithm("http://data.d4science.org/MnovRjZIdGV5WlB0WXE5NVNaZnRoRVg0SU8xZWpWQlFHbWJQNStIS0N6Yz0", "708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548");
// PoolManager aa = new DataminerPoolManager();
// // System.out.println(System.getProperty("user.home")+File.separator+"/gcube/dataminer-pool-manager");
// // ProxySelector.setDefault(new
// // PropertiesBasedProxySelector("/home/ngalante/.proxy-settings"));
//
// ScopeProvider.instance.set("/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab");
// //SecurityTokenProvider.instance.set("3a23bfa4-4dfe-44fc-988f-194b91071dd2-843339462");
// RestPoolManager a = new RestPoolManager();
// a.stageAlgorithm("http://data.d4science.org/MnovRjZIdGV5WlB0WXE5NVNaZnRoRVg0SU8xZWpWQlFHbWJQNStIS0N6Yz0", category);
// //a.publishAlgorithm("http://data.d4science.org/MnovRjZIdGV5WlB0WXE5NVNaZnRoRVg0SU8xZWpWQlFHbWJQNStIS0N6Yz0", "708e7eb8-11a7-4e9a-816b-c9ed7e7e99fe-98187548","/gcube/devNext/NextNext");
// // PoolManager aa = new DataminerPoolManager();
//
}
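With both endpoints reworked, staging returns a job id at once instead of blocking until deployment ends; a hypothetical client-side sketch (placeholder arguments, assuming scope and token are set beforehand as in the commented-out main above):

public class RestPoolManagerSketch {
    public static void main(String[] args) throws Exception {
        RestPoolManager pm = new RestPoolManager();
        // returns the job id immediately; the deployment runs on a background thread
        String jobId = pm.stageAlgorithm("<algorithm-package-url>", "<category>");
        // the reworked getLogById resolves the log file from that id
        System.out.println(pm.getLogById(jobId));
    }
}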

View File

@@ -0,0 +1,100 @@
package org.gcube.dataanalysis.dataminer.poolmanager.service;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.AnsibleWorker;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleBridge;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.tmatesoft.svn.core.SVNException;
public abstract class DMPMJob {
protected SVNUpdater svnUpdater;
protected File jobLogs;
protected String id;
public DMPMJob(SVNUpdater svnUpdater){
this.svnUpdater = svnUpdater;
this.id = UUID.randomUUID().toString();
//TODO: dmpm work directory should be loaded from configuration file
this.jobLogs = new File(System.getProperty("user.home")+File.separator+"dataminer-pool-manager"+File.separator+"jobs");
this.jobLogs.mkdirs();
}
public String start(){
new Thread(new Runnable() {
@Override
public void run() {
try {
execute();
} catch (Exception e) {
e.printStackTrace();
}
}
}).start();
return this.id;
}
protected AnsibleWorker createWorker(Algorithm algo,
Cluster dataminerCluster,
boolean includeAlgorithmDependencies,
String user){
AnsibleBridge ansibleBridge = new AnsibleBridge();
try {
return ansibleBridge.createWorker(algo, dataminerCluster, includeAlgorithmDependencies, user);
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
protected abstract void execute();
protected int executeAnsibleWorker(AnsibleWorker worker) throws IOException, InterruptedException, SVNException{
File path = new File(worker.getWorkdir() + File.separator + "jobs");
path.mkdirs();
File n = new File(this.jobLogs + File.separator + this.id);
FileOutputStream fos = new FileOutputStream(n);
PrintStream ps = new PrintStream(fos);
return worker.execute(ps);
}
protected void updateSVNDependencies(Algorithm algo, boolean stagingVRE) throws IOException, SVNException {
for (Dependency d : algo.getDependencies()) {
if (d.getType().equals("os")) {
List<String> ls = new LinkedList<String>();
ls.add(d.getName());
this.svnUpdater.updateSVN((stagingVRE ? "test_": "") + "r_deb_pkgs.txt", ls);
}
if (d.getType().equals("cran")) {
List<String> ls = new LinkedList<String>();
ls.add(d.getName());
this.svnUpdater.updateSVN((stagingVRE ? "test_": "") + "r_cran_pkgs.txt", ls);
}
if (d.getType().equals("github")) {
List<String> ls = new LinkedList<String>();
ls.add(d.getName());
this.svnUpdater.updateSVN((stagingVRE ? "test_": "") + "r_github_pkgs.txt", ls);
}
}
}
}
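DMPMJob is a small template: start() spawns a thread, runs the subclass execute(), and returns the generated UUID immediately, so callers poll the log written under ~/dataminer-pool-manager/jobs/<id>. A minimal sketch of a hypothetical subclass, assuming a configured SVNUpdater:

import org.gcube.dataanalysis.dataminer.poolmanager.service.DMPMJob;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;

public class NoOpJob extends DMPMJob {
    public NoOpJob(SVNUpdater svnUpdater) {
        super(svnUpdater);
    }
    @Override
    protected void execute() {
        // this.id is assigned in the base constructor and names the job log file
        System.out.println("running job " + this.id);
    }
}

// usage: new NoOpJob(svnUpdater).start() returns the id before execute() completes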

View File

@@ -1,40 +1,20 @@
package org.gcube.dataanalysis.dataminer.poolmanager.service;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.resources.gcore.GenericResource;
import org.gcube.common.resources.gcore.Resources;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.AnsibleWorker;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleBridge;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.HAProxy;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.ISClient;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.*;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Scanner;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.util.ClusterBuilder;
import org.gcube.dataanalysis.dataminer.poolmanager.util.Props;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.gcube.informationsystem.publisher.AdvancedScopedPublisher;
import org.gcube.informationsystem.publisher.RegistryPublisherFactory;
import org.gcube.informationsystem.publisher.ScopedPublisher;
import org.gcube.informationsystem.publisher.exception.RegistryNotFoundException;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tmatesoft.svn.core.SVNException;
import java.io.*;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.net.UnknownHostException;
import java.util.*;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
public class DataminerPoolManager {
private SVNUpdater svnUpdater;
public DataminerPoolManager(){
@@ -50,260 +30,30 @@ public class DataminerPoolManager {
public String stageAlgorithm(Algorithm algo) throws IOException, InterruptedException {
Cluster cluster = getStagingDataminerCluster();
//Assumes the service is running in RPrototypingLab
String token = SecurityTokenProvider.instance.get();
return addAlgorithmToCluster(algo, cluster, true, "root", true, token);
Cluster stagingCluster = ClusterBuilder.getStagingDataminerCluster();
Cluster rProtoCluster = ClusterBuilder.getRProtoCluster();
DMPMJob job = new StagingJob(this.svnUpdater, algo, stagingCluster, rProtoCluster);
String id = job.start();
return id;
}
public String publishAlgorithm(Algorithm algo, String targetVREToken) throws IOException, InterruptedException {
Cluster cluster = new Cluster();
SecurityTokenProvider.instance.set(targetVREToken);
for (Host h : new HAProxy().listDataMinersByCluster()) {
cluster.addHost(h);
}
return addAlgorithmToCluster(algo, cluster, false, "gcube", false, targetVREToken);
}
//1. to complete
private Cluster getStagingDataminerCluster(){
Cluster cluster = new Cluster();
Host h = new Host();
Props p = new Props();
//TODO: read this from configuration or IS?
h.setName(p.getStagingHost());
cluster.addHost(h);
return cluster;
}
private void createISResource(Algorithm algo, String vreToken) {
ISClient client = new ISClient();
for (Algorithm a : client.getAlgoFromIs()) {
if (a.getName().equals(algo.getName())) {
continue;
}
if (!a.getName().equals(algo.getName())){
new ISClient().addAlgToIs(algo, vreToken);
}
}
// TODO: create the resource only if not already present
public String publishAlgorithm(Algorithm algo, String targetVREToken, String targetVRE) throws IOException, InterruptedException {
Cluster prodCluster = ClusterBuilder.getVRECluster(targetVREToken, targetVRE);
DMPMJob job = new ProductionPublishingJob(this.svnUpdater, algo, prodCluster);
String id = job.start();
return id;
}
private void updateSVNDependencies(Algorithm algo, boolean stagingVRE) throws IOException, SVNException {
for (Dependency d : algo.getDependencies()) {
if (d.getType().equals("os")) {
List<String> ls = new LinkedList<String>();
ls.add(d.getName());
this.svnUpdater.updateSVN(stagingVRE ? "test_": "" + "r_deb_pkgs.txt", ls);
}
if (d.getType().equals("cran")) {
List<String> ls = new LinkedList<String>();
ls.add(d.getName());
this.svnUpdater.updateSVN(stagingVRE ? "test_": "" + "r_cran_pkgs.txt", ls);
}
if (d.getType().equals("github")) {
List<String> ls = new LinkedList<String>();
ls.add(d.getName());
this.svnUpdater.updateSVN(stagingVRE ? "test_": "" + "r_github_pkgs.txt", ls);
}
}
public String getLogById(String id) throws FileNotFoundException{
//TODO: load dir from configuration file
File path = new File(System.getProperty("user.home") + File.separator + "dataminer-pool-manager/jobs/"
+ id);
return new Scanner(path).useDelimiter("\\Z").next();
}
/**
*
* @param algo
* @param dataminerCluster
* @return uuid of the execution
*/
public String addAlgorithmToCluster(
final Algorithm algo,
Cluster dataminerCluster,
boolean includeAlgorithmDependencies,
String user,
final boolean stagingVRE,
final String targetVREToken) throws IOException {
AnsibleBridge ansibleBridge = new AnsibleBridge();
final AnsibleWorker worker = ansibleBridge.createWorker(algo, dataminerCluster, includeAlgorithmDependencies, user);
new Thread(new Runnable() {
@Override
public void run() {
try {
File path = new File(worker.getWorkdir() + File.separator + "logs");
path.mkdirs();
File n = new File(path + File.separator + worker.getWorkerId());
FileOutputStream fos = new FileOutputStream(n);
PrintStream ps = new PrintStream(fos);
int retValue = worker.execute(ps);
System.out.println("Log stored to to " + n.getAbsolutePath());
if(retValue == 0) {
updateSVNDependencies(algo, stagingVRE);
//createISResource(algo, targetVREToken);
}
// destroy the worker
worker.destroy();
} catch (Exception e) {
e.printStackTrace();
}
}
}).start();
// this line will execute immediately, not waiting for task to
// complete
return worker.getWorkerId();
}
public String getScriptFromURL(URL url) throws IOException {
if (url == null) {
return null;
}
URLConnection yc = url.openConnection();
BufferedReader input = new BufferedReader(new InputStreamReader(yc.getInputStream()));
String line;
StringBuffer buffer = new StringBuffer();
while ((line = input.readLine()) != null) {
buffer.append(line + "\n");
}
String bufferScript = buffer.substring(0, buffer.length());
input.close();
return bufferScript;
}
// public String addAlgorithmToVRE(Algorithm algorithm, final String vre, /*final boolean updateSVN*/ final boolean test) throws IOException {
// // create a fake algorithm set
// final AlgorithmSet algoSet = new AlgorithmSet();
// algoSet.setName("fake");
// algoSet.addAlgorithm(algorithm);
// final String uuid = UUID.randomUUID().toString();
//
// new Thread(new Runnable() {
// @Override
// public void run() {
// // TODO Auto-generated method stub
// try {
// try {
// addAlgorithmsToVRE(algoSet, vre, uuid, /*updateSVN*/test);
// } catch (SVNException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
// } catch (IOException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// } catch (InterruptedException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
// }
// }).start();
// // this line will execute immediately, not waiting for task to
// // complete
// System.out.println(uuid);
// return uuid;
// }
// public String addAlgorithmToHost(Algorithm algorithm, final String hostname, /*final boolean updateSVN*/ final boolean test) throws IOException {
// // create a fake algorithm set
// final AlgorithmSet algoSet = new AlgorithmSet();
// algoSet.setName("fake");
// algoSet.addAlgorithm(algorithm);
// final String uuid = UUID.randomUUID().toString();
//
// new Thread(new Runnable() {
// @Override
// public void run() {
// // TODO Auto-generated method stub
// try {
// if(test){
// addAlgorithmsToStagingHost(algoSet, hostname, uuid, /*updateSVN,*/test);}
// } catch (IOException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// } catch (InterruptedException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// } catch (SVNException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
// }
// }).start();
// // this line will execute immediately, not waiting for your task to
// // complete
// System.out.println(uuid);
// return uuid;
// }
public URL getURLfromWorkerLog(String a) throws MalformedURLException, UnknownHostException {
File path = new File(System.getProperty("user.home") + File.separator + "dataminer-pool-manager/work/"
+ a + File.separator + "logs");
path.mkdirs();
File n = new File(path + File.separator + a);
// String addr = InetAddress.getLocalHost().getHostAddress();
return new File(n.getPath()).toURI().toURL();
}
// public String addAlgorithmsToVRE(AlgorithmSet algorithms, String vre, String uuid, /*boolean updateSVN,*/ boolean test) throws IOException, InterruptedException, SVNException {
//
// // create the cluster (dataminers in the vre)
// Cluster cluster = new Cluster();
// for (Host h : new HAProxy().listDataMinersByCluster()) {
// //for (Host h : new ISClient().listDataminersInVRE()) {
// cluster.addHost(h);
// }
//
// // apply the changes
// AnsibleBridge a = new AnsibleBridge();
// return a.applyAlgorithmSetToCluster(algorithms, cluster, uuid, /*updateSVN,*/ test).getWorkerId();
//
// }
// public String addAlgorithmsToHost(AlgorithmSet algorithms, String hostname, String uuid, /*boolean updateSVN,*/boolean test)
// throws IOException, InterruptedException, SVNException {
//
// // create the cluster (dataminers in the vre)
// Cluster cluster = new Cluster();
// for (Host h : new HAProxy().listDataMinersByCluster()) {
// if (h.getName().equals(hostname)) {
// cluster.addHost(h);
// }
// }
// // if(ISClient.getHProxy().equals(hostname)){
// // cluster.addHost(new ISClient().getDataminer(hostname));
// // }
// // apply the changes
// AnsibleBridge a = new AnsibleBridge();
// return a.applyAlgorithmSetToCluster(algorithms, cluster, uuid, /*updateSVN,*/test).getWorkerId();
//
// }
//
// public String addAlgorithmsToStagingHost(AlgorithmSet algorithms, String hostname, String uuid, /*boolean updateSVN,*/boolean test)
// throws IOException, InterruptedException, SVNException {
// Cluster cluster = new Cluster();
// Host h = new Host();
// h.setName(hostname);
// cluster.addHost(h);
//
// AnsibleBridge a = new AnsibleBridge();
// return a.applyAlgorithmSetToCluster(algorithms, cluster, uuid, /*updateSVN,*/test).getWorkerId();
//
// }
}
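After the rewrite, the service only builds clusters, hands work to a job, and returns the job id; an end-to-end sketch with placeholder inputs:

import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.service.DataminerPoolManager;
import org.gcube.dataanalysis.dataminer.poolmanager.util.AlgorithmBuilder;

public class PoolManagerServiceSketch {
    public static void main(String[] args) throws Exception {
        DataminerPoolManager service = new DataminerPoolManager();
        Algorithm algo = AlgorithmBuilder.create("<algorithm-package-url>", "<category>");
        String stagingJobId = service.stageAlgorithm(algo);   // returns at once; a StagingJob does the work
        System.out.println(service.getLogById(stagingJobId)); // poll the log by job id
        String publishJobId = service.publishAlgorithm(algo, "<target-vre-token>", "<target-vre-scope>");
        System.out.println(publishJobId);
    }
}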

View File

@@ -0,0 +1,33 @@
package org.gcube.dataanalysis.dataminer.poolmanager.service;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
public class ProductionPublishingJob extends DMPMJob {
private Algorithm algorithm;
private Cluster prodCluster;
public ProductionPublishingJob(SVNUpdater svnUpdater, Algorithm algorithm, Cluster prodCluster) {
super(svnUpdater);
this.algorithm = algorithm;
this.prodCluster = prodCluster;
}
@Override
protected void execute() {
try {
this.updateSVNDependencies(this.algorithm, false);
int ret = this.executeAnsibleWorker(
createWorker(this.algorithm, this.prodCluster, false, "gcube"));
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -0,0 +1,46 @@
package org.gcube.dataanalysis.dataminer.poolmanager.service;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
public class StagingJob extends DMPMJob {
private Algorithm algorithm;
private Cluster stagingCluster;
private Cluster rProtoCluster;
public StagingJob(SVNUpdater svnUpdater, Algorithm algorithm, Cluster stagingCluster, Cluster rProtoCluster) {
super(svnUpdater);
this.algorithm = algorithm;
this.stagingCluster = stagingCluster;
this.rProtoCluster = rProtoCluster;
}
@Override
protected void execute() {
try {
int ret = this.executeAnsibleWorker(
createWorker(this.algorithm, this.stagingCluster, true, "root"));
if(ret == 0){
this.updateSVNDependencies(this.algorithm, true);
int ret2 = this.executeAnsibleWorker(
createWorker(this.algorithm, this.rProtoCluster, false, "gcube"));
}
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -11,13 +11,15 @@ import java.io.IOException;
public class AlgorithmBuilder {
public static Algorithm create(String algorithmPackageURL) throws IOException, InterruptedException {
return create(algorithmPackageURL, null, null, null, null, null, null, null);
public static Algorithm create(String algorithmPackageURL, String category) throws IOException, InterruptedException {
return create(algorithmPackageURL, null, null, null, null, category, null, null);
}
public static Algorithm create(String algorithmPackageURL, String vre, String hostname, String name, String description,
String category, String algorithmType, String skipJava) throws IOException, InterruptedException {
Algorithm algo = new AlgorithmPackageParser().parsePackage(algorithmPackageURL);
Algorithm algo = new AlgorithmPackageParser().parsePackage(algorithmPackageURL);
if(category != null){
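The new two-argument create is a shorthand for the full factory call shown above; a sketch of the equivalence with placeholder inputs:

// both lines build the same Algorithm, with only the category overridden:
Algorithm algo1 = AlgorithmBuilder.create("<algorithm-package-url>", "<category>");
Algorithm algo2 = AlgorithmBuilder.create("<algorithm-package-url>", null, null, null, null, "<category>", null, null);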

View File

@@ -0,0 +1,44 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util;
import java.io.IOException;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.HAProxy;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;
public class ClusterBuilder {
//1. TODO: to be completed
public static Cluster getStagingDataminerCluster(){
Cluster cluster = new Cluster();
Props p = new Props();
Host h = new Host(p.getStagingHost());
//TODO: read this from configuration or IS?
h.setName(p.getStagingHost());
cluster.addHost(h);
return cluster;
}
public static Cluster getVRECluster(String targetVREToken, String targetVRE) throws IOException{
Cluster cluster = new Cluster();
for (Host h : new HAProxy().listDataMinersByCluster(targetVREToken,targetVRE)) {
cluster.addHost(h);
}
return cluster;
}
public static Cluster getRProtoCluster() throws IOException{
//Assumes the service is running in RPrototypingLab
String token = SecurityTokenProvider.instance.get();
String targetVRE = ScopeProvider.instance.get();
return getVRECluster(token, targetVRE);
}
}
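Each builder targets one deployment environment; a short sketch (token and scope are placeholders):

import java.io.IOException;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.util.ClusterBuilder;

public class ClusterBuilderSketch {
    public static void main(String[] args) throws IOException {
        Cluster staging = ClusterBuilder.getStagingDataminerCluster(); // single host read from the properties file
        Cluster rProto = ClusterBuilder.getRProtoCluster();            // token and scope from the current providers
        Cluster vre = ClusterBuilder.getVRECluster("<target-vre-token>", "<target-vre-scope>"); // hosts behind HAProxy
    }
}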

View File

@@ -25,9 +25,8 @@ public class SVNUpdater {
private SVNRepository svnRepository;
public SVNUpdater(String rootURL) throws SVNException {
final SVNRepository svnRepository = this.getSvnRepository(
this.svnRepository = this.getSvnRepository(
rootURL);
}
@@ -35,7 +34,7 @@
SVNRepository repository = SVNRepositoryFactory.create(SVNURL.parseURIEncoded(url));
ISVNAuthenticationManager authManager = SVNWCUtil.createDefaultAuthenticationManager();
repository.setAuthenticationManager(authManager);
//System.out.println(repository.getLocation());
System.out.println(repository.getLocation());
return repository;
}
@@ -43,7 +42,7 @@
public List<String> updateSVN(String file, List<String> ldep) throws SVNException, IOException {
public void updateSVN(String file, List<String> ldep) {
try {
final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
svnRepository.getFile(file, SVNRepository.INVALID_REVISION, null, byteArrayOutputStream);
@@ -73,9 +72,13 @@
new ByteArrayInputStream(bytes), commitEditor, true);
commitEditor.closeFile(file, checksum);
commitEditor.closeEdit();
return aa;
}catch(Exception ex){
ex.printStackTrace();
}
} finally {
finally {
svnRepository.closeSession();
}
}

View File

@@ -1,4 +1,4 @@
#YML node file
STAGING_HOST: dataminer1-devnext.d4science.org
STAGING_HOST: dataminer-proto-ghost.d4science.org
SVN_REPO: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/RConfiguration/RPackagesManagement/
#HAPROXY_CSV: http://data.d4science.org/Yk4zSFF6V3JOSytNd3JkRDlnRFpDUUR5TnRJZEw2QjRHbWJQNStIS0N6Yz0

View File

@@ -0,0 +1,46 @@
package org.gcube.dataanalysis.dataminerpoolmanager;
import java.io.IOException;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.service.DMPMJob;
import org.gcube.dataanalysis.dataminer.poolmanager.service.ProductionPublishingJob;
import org.gcube.dataanalysis.dataminer.poolmanager.service.StagingJob;
import org.gcube.dataanalysis.dataminer.poolmanager.util.AlgorithmBuilder;
import org.gcube.dataanalysis.dataminer.poolmanager.util.ClusterBuilder;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.tmatesoft.svn.core.SVNException;
public class JobTest {
public static void main(String[] args) throws SVNException, IOException, InterruptedException{
ScopeProvider.instance.set("/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab");
SVNUpdater svnUpdater = new SVNUpdater("https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/RConfiguration/RPackagesManagement/");
Algorithm algo = AlgorithmBuilder.create("http://data.d4science.org/dENQTTMxdjNZcGRpK0NHd2pvU0owMFFzN0VWemw3Zy9HbWJQNStIS0N6Yz0", "ICHTHYOP_MODEL");
Cluster stagingCluster = ClusterBuilder.getStagingDataminerCluster();
Cluster rProtoCluster = ClusterBuilder.getRProtoCluster();
DMPMJob job = new StagingJob(svnUpdater, algo, stagingCluster, rProtoCluster);
job.start();
// Cluster prodCluster = ClusterBuilder.getVRECluster(targetVREToken, targetVRE);
//
// DMPMJob job2 = new ProductionPublishingJob(svnUpdater, algo, prodCluster);
}
}

View File

@@ -0,0 +1,27 @@
package org.gcube.dataanalysis.dataminerpoolmanager;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
import org.tmatesoft.svn.core.SVNException;
public class SVNUpdaterTest {
public static void main(String[] args) throws SVNException, IOException {
ScopeProvider.instance.set("/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab");
SVNUpdater svnUpdater = new SVNUpdater("https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/RConfiguration/RPackagesManagement/");
String test = "testDep";
List<String> ldep = new LinkedList<>();
ldep.add(test);
svnUpdater.updateSVN("test_r_cran_pkgs.txt", ldep);
}
}