This commit is contained in:
Paolo Fabriani 2017-02-15 14:45:07 +00:00
parent 1b6cea2db3
commit 3edeeac4fa
4 changed files with 401 additions and 0 deletions

View File

@@ -0,0 +1,119 @@
package org.gcube.dataanalysis.dataminerpoolmanager;
import java.io.IOException;
import java.io.InputStream;
import java.net.ProxySelector;
import java.net.URL;
import java.util.List;
import java.util.Vector;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Algorithm;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.AlgorithmSet;
import org.gcube.dataanalysys.dataminerpoolmanager.process.AddAlgorithmCommand;
import org.gcube.dataanalysys.dataminerpoolmanager.process.AlgorithmPackageParser;
import org.gcube.dataanalysys.dataminerpoolmanager.service.DataminerPoolManager;
import org.gcube.dataanalysys.dataminerpoolmanager.util.PropertiesBasedProxySelector;
public class AlgorithmPackageParserTest {
private static final int BUFFER_SIZE = 2048;
private void extractAllAlgorithms() throws IOException {
String url = "http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/algorithms/dev/algorithms";
List<String> commands = this.extractAddAlgorithmCommands(url);
AlgorithmSet algorithms = new AlgorithmSet();
for (String cmd : commands) {
System.out.println("-----------------------------------------");
System.out.println(cmd);
AddAlgorithmCommand aac = new AddAlgorithmCommand(cmd);
System.out.println(aac);
// start creating the algo from the command
Algorithm algo = new Algorithm();
algo.setAlgorithmType(aac.getAlgorithmType());
algo.setCategory(aac.getCategory());
algo.setClazz(aac.getClazz());
algo.setDescription(aac.getDescription());
algo.setName(aac.getName());
algo.setPackageURL(aac.getUrl());
algo.setSkipJava(aac.getSkipjava());
// then override with info from the package
if (aac.getUrl().length() > 4) {
Algorithm packagedAlgo = this.extractAlgorithm(aac.getUrl());
if (packagedAlgo != null) {
algo.setDependencies(packagedAlgo.getDependencies());
}
}
algorithms.addAlgorithm(algo);
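// stop after the first command: this test only processes a single algorithm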
break;
}
new DataminerPoolManager().addAlgorithmsToVRE(algorithms,
"/gcube/devNext/NextNext");
}
/**
* Extract 'addAlgorithm' commands from a file containing wiki-table-style
* entries for algorithms.
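* Rows are expected in wiki table markup, e.g. (illustrative values only):
* | SomeAlgorithm | <notextile>./addAlgorithm.sh ...</notextile> |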
*
* @return the list of 'addAlgorithm' command lines found at the given URL
* @throws IOException if the list cannot be retrieved or read
*/
private List<String> extractAddAlgorithmCommands(String listUrl)
throws IOException {
URL url = new URL(listUrl);
InputStream is = url.openStream();
StringBuilder s = new StringBuilder();
byte[] buffer = new byte[BUFFER_SIZE];
int read = 0;
while ((read = is.read(buffer)) != -1) {
s.append(new String(buffer, 0, read));
}
is.close();
List<String> out = new Vector<>();
String[] lines = s.toString().split("\n");
for (String line : lines) {
System.out.println("--------------------");
if (!line.isEmpty()) {
String[] parts = line.split("\\|");
int c = 1;
for (String part : parts) {
if (part == null || part.trim().isEmpty()) {
continue;
}
System.out.println(c + ". " + part);
c++;
if (part.contains("addAlgorithm.sh")) {
String cmd = part.trim();
cmd = cmd.replaceAll("<notextile>", "");
cmd = cmd.replaceAll("</notextile>", "");
System.out.println(cmd);
// AddAlgorithmCommand aac = new AddAlgorithmCommand(cmd);
// System.out.println(aac);
out.add(cmd);
}
}
}
}
return out;
}
/**
* Create an Algorithm starting from the algorithm jar.
*
* @param url the URL of the algorithm package (jar)
* @return the Algorithm built by parsing the package
* @throws IOException
*/
private Algorithm extractAlgorithm(String url) throws IOException {
return new AlgorithmPackageParser().parsePackage(url);
}
public static void main(String[] args) throws Exception {
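// route HTTP connections through the proxy described in the given properties file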
ProxySelector.setDefault(new PropertiesBasedProxySelector(
"/home/paolo/.proxy-settings"));
new AlgorithmPackageParserTest().extractAllAlgorithms();
}
}

View File

@@ -0,0 +1,33 @@
package org.gcube.dataanalysis.dataminerpoolmanager;
import java.io.File;
import java.io.IOException;
import java.util.UUID;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.AnsibleWorker;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Inventory;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Playbook;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Role;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.RoleFile;
public class AnsibleWorkerTest {
public static void main(String[] args) throws IOException {
AnsibleWorker worker = new AnsibleWorker(new File("/tmp/dataminer-pool-manager/work/"+UUID.randomUUID().toString()));
System.out.println("created worker named " + worker.getWorkerId());
worker.setInventory(new Inventory());
worker.setPlaybook(new Playbook());
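// define a sample role ("latex") with a single task file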
Role r = new Role();
r.setName("latex");
RoleFile tf = new RoleFile("main", "do something special for " + r.getName());
r.addTaskFile(tf);
worker.addRole(r);
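// apply the inventory, playbook and role configured above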
worker.apply();
}
}

View File

@@ -0,0 +1,232 @@
package org.gcube.dataanalysis.dataminerpoolmanager;
import java.net.ProxySelector;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeSet;
import java.util.Vector;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.AnsibleWorker;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Role;
import org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.AnsibleBridge;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Algorithm;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.AlgorithmSet;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Cluster;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Dependency;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Domain;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Host;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.comparator.AlgorithmComparator;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.comparator.DependencyComparator;
import org.gcube.dataanalysys.dataminerpoolmanager.service.DataminerPoolManager;
import org.gcube.dataanalysys.dataminerpoolmanager.util.PropertiesBasedProxySelector;
public class DataminerPoolManagerTest {
/*
private static Map<String, Domain> domains = new HashMap<>();
private static Map<String, Cluster> clusters = new HashMap<>();
private static Dependency createDependency(String depName) {
String[] parts = depName.split(":");
Dependency out = new Dependency();
if(parts.length>1) {
out.setType(parts[0]);
out.setName(parts[1]);
} else {
out.setType("os");
out.setName(depName);
}
return out;
}
private static Algorithm createAlgorithm(String name, String ... deps) {
Algorithm a = new Algorithm();
a.setName(name);
for(String dep:deps) {
a.addDependency(createDependency(dep));
}
return a;
}
private static AlgorithmSet createAlgorithmSet(String name, Algorithm ... algs) {
AlgorithmSet out = new AlgorithmSet();
out.setName(name);
for(Algorithm a:algs) {
out.addAlgorithm(a);
}
return out;
}
private static Domain getDomain(String name) {
if(domains.get(name)==null) {
Domain d = new Domain();
d.setName(name);
domains.put(name, d);
return d;
} else {
return domains.get(name);
}
}
private static Host createHost(String hostname, String domainName) {
Host out = new Host();
out.setName(hostname);
Domain d = getDomain(domainName);
out.setDomain(d);
return out;
}
private static Cluster getCluster(String name) {
if(clusters.get(name)==null) {
Cluster d = new Cluster();
d.setName(name);
clusters.put(name, d);
return d;
} else {
return clusters.get(name);
}
}
private static Collection<Dependency> extractDependencies() {
Collection<Dependency> out = new TreeSet<>(new DependencyComparator());
for(Cluster c:clusters.values()) {
for(AlgorithmSet as:c.getAlgorithmSets()) {
for(Algorithm a:as.getAlgorithms()) {
for(Dependency d:a.getDependencies()) {
out.add(d);
}
}
}
}
return out;
}
private static Collection<Algorithm> extractAlgorithms() {
Collection<Algorithm> out = new TreeSet<>(new AlgorithmComparator());
for(Cluster c:clusters.values()) {
for(AlgorithmSet as:c.getAlgorithmSets()) {
for(Algorithm a:as.getAlgorithms()) {
out.add(a);
}
}
}
return out;
}
static {
Algorithm ewe = createAlgorithm("ewe", "mono", "latex", "cran:some_R_package", "custom:some_git_package");
Algorithm ensemble = createAlgorithm("ensemble", "python");
Algorithm voodoo = createAlgorithm("voodoo", "os:latex", "custom:blah");
AlgorithmSet as1 = createAlgorithmSet("as1-fishes", ewe);
AlgorithmSet as2 = createAlgorithmSet("as2-stat", ensemble);
AlgorithmSet as3 = createAlgorithmSet("as3-blackmagic", voodoo, ewe);
Cluster cluster1 = getCluster("cluster-1");
cluster1.addHost(createHost("host1", "domain1"));
cluster1.addHost(createHost("host2", "domain1"));
cluster1.addHost(createHost("host3", "domain1"));
cluster1.addHost(createHost("host1", "domain2"));
cluster1.addHost(createHost("host2", "domain2"));
Cluster cluster2 = getCluster("cluster-2");
cluster2.addHost(createHost("host4", "domain1"));
cluster2.addHost(createHost("host5", "domain1"));
cluster2.addHost(createHost("host6", "domain1"));
cluster2.addHost(createHost("host3", "domain2"));
cluster2.addHost(createHost("host4", "domain2"));
cluster2.addHost(createHost("host5", "domain2"));
cluster1.addAlgorithmSet(as1);
cluster1.addAlgorithmSet(as2);
cluster2.addAlgorithmSet(as1);
cluster2.addAlgorithmSet(as3);
}
*/
public static void main(String[] args) throws Exception {
/*
AnsibleBridge ab = new AnsibleBridge();
ab.printInventoryByDomainAndSets(clusters.values());
System.out.println("-----------");
ab.printInventoryBySets(clusters.values());
AnsibleWorker worker = ab.createWorker();
for(Algorithm a:extractAlgorithms()) {
for(Role r:ab.generateRoles(a)) {
worker.addRole(r);
}
}
for(Dependency d:extractDependencies()) {
for(Role r:ab.generateRoles(d)) {
worker.addRole(r);
}
}
*/
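// set the target gCube scope (VRE) used by the calls below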
ScopeProvider.instance.set("/gcube/devNext/NextNext");
ProxySelector.setDefault(new PropertiesBasedProxySelector("/home/paolo/.proxy-settings"));
// create the algorithm (download it, etc etc)
Algorithm algorithm = new Algorithm();
algorithm.setName("ichtyop");
algorithm.setClazz("org.gcube...");
algorithm.setDescription("some description");
Dependency d = new Dependency();
d.setName("libpng");
d.setType("os");
algorithm.addDependency(d);
d = new Dependency();
d.setName("some-r-package");
d.setType("cran");
algorithm.addDependency(d);
d = new Dependency();
d.setName("some-other-r-package");
d.setType("cran");
algorithm.addDependency(d);
d = new Dependency();
d.setName("voodoo");
d.setType("custom");
algorithm.addDependency(d);
// create the algorithm (download it, etc etc)
Algorithm ewe = new Algorithm();
ewe.setName("ewe");
d = new Dependency();
d.setName("voodoo");
d.setType("custom");
ewe.addDependency(d);
AlgorithmSet algorithms = new AlgorithmSet();
algorithms.setName("dummy-set");
algorithms.addAlgorithm(algorithm);
algorithms.addAlgorithm(ewe);
Algorithm ensemble = new Algorithm();
ensemble.setName("ensemble");
d = new Dependency();
d.setName("libpng");
d.setType("os");
ensemble.addDependency(d);
algorithms.addAlgorithm(ensemble);
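// add the whole algorithm set to the given VRE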
new DataminerPoolManager().addAlgorithmsToVRE(algorithms, "/gcube/devNext/NextNext");
}
}

View File

@@ -0,0 +1,17 @@
package org.gcube.dataanalysis.dataminerpoolmanager;
import java.net.ProxySelector;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysys.dataminerpoolmanager.clients.ISClient;
import org.gcube.dataanalysys.dataminerpoolmanager.util.PropertiesBasedProxySelector;
public class ISClientTest {
public static void main(String[] args) {
ProxySelector.setDefault(new PropertiesBasedProxySelector("/home/paolo/.proxy-settings"));
ScopeProvider.instance.set("/gcube/devNext/NextNext");
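// list the dataminers available in the current scope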
System.out.println(new ISClient().listDataminersInVRE());
}
}