refactoring

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/dataminer-pool-manager@148420 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
Gabriele Giammatteo 2017-05-09 16:38:15 +00:00
parent 6315560648
commit a7ddc72bc8
7 changed files with 249 additions and 153 deletions

View File

@ -1,34 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jst.j2ee.internal.web.container"/>
<classpathentry kind="con" path="org.eclipse.jst.j2ee.internal.module.container"/>
<classpathentry kind="lib" path="/home/ngalante/Downloads/servlet-api-2.5.jar"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>

View File

@ -1,42 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>dataminer-pool-manager</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.wst.jsdt.core.javascriptValidator</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.wst.common.project.facet.core.builder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.wst.validation.validationbuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.m2e.core.maven2Builder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jem.workbench.JavaEMFNature</nature>
<nature>org.eclipse.wst.common.modulecore.ModuleCoreNature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.m2e.core.maven2Nature</nature>
<nature>org.eclipse.wst.common.project.facet.core.nature</nature>
<nature>org.eclipse.wst.jsdt.core.jsNature</nature>
</natures>
</projectDescription>

View File

@ -116,9 +116,32 @@ public class AnsibleWorker ***REMOVED***
***REMOVED*** Serialize role in the workdir
AnsibleSerializeHelper.serializeRole(r, this.getRolesDir());
***REMOVED***
public int execute(PrintStream ps)
throws IOException, InterruptedException, SVNException ***REMOVED***
System.out.println(this.getWorkdir());
try ***REMOVED***
Process p = Runtime.getRuntime().exec("ansible-playbook -v -i " + this.getInventoryFile().getAbsolutePath()
+ " " + this.getPlaybookFile().getAbsolutePath());
inheritIO(p.getInputStream(), ps);
inheritIO(p.getErrorStream(), ps);
return p.waitFor();
***REMOVED*** catch (IOException e) ***REMOVED***
e.printStackTrace();
***REMOVED***
return -1;
***REMOVED***
public void apply(AlgorithmSet as, PrintStream ps, boolean test)
throws IOException, InterruptedException, SVNException ***REMOVED***
***REMOVED*** TODO execute the playbook and return output

View File

@ -89,9 +89,55 @@ public class AnsibleBridge ***REMOVED***
public AnsibleWorker createWorker() ***REMOVED***
// Builds a fully-configured AnsibleWorker for deploying the given algorithm on
// every host of the cluster: algorithm roles (plus, optionally, dependency
// roles and the static roles), the inventory and the playbook are all
// generated here. The worker is rooted in a fresh random directory under
// getWorkDir(); the caller is responsible for executing and destroying it.
public AnsibleWorker createWorker(Algorithm algorithm, Cluster dataminerCluster, boolean includeAlgorithmDependencies, String user) throws IOException ***REMOVED***
// one random, disposable working directory per deployment run
File workerRoot = new File(this.getWorkDir(), UUID.randomUUID().toString());
AnsibleWorker worker = new AnsibleWorker(workerRoot);
// NOTE(review): Vector is legacy — ArrayList would normally be preferred here.
List<Role> algoRoles = new Vector<>();
***REMOVED*** add algorithms and dependencies to the worker
// roles generated from the algorithm itself are tracked in algoRoles so the
// playbook section below can filter them
for (Role r : this.generateRoles(algorithm)) ***REMOVED***
algoRoles.add(r);
worker.addRole(r);
***REMOVED***
***REMOVED***to comment the for in case of just install algo
// dependency roles are added to the worker but NOT to algoRoles, so they do
// not appear in the playbook's role list below
if(includeAlgorithmDependencies)***REMOVED***
for (Dependency d : algorithm.getDependencies()) ***REMOVED***
for (Role r : this.generateRoles(d)) ***REMOVED***
worker.addRole(r);
***REMOVED***
***REMOVED***
***REMOVED***
***REMOVED*** add static roles
for(Role r:this.getStaticRoleManager().getStaticRoles()) ***REMOVED***
worker.addRole(r);
***REMOVED***
***REMOVED*** generate the inventory
// every cluster host is registered in both the "universe" and "d4science"
// groups; the playbook below targets "universe"
Inventory inventory = new Inventory();
for (Host h : dataminerCluster.getHosts()) ***REMOVED***
AnsibleHost ah = new AnsibleHost(h.getName());
inventory.addHost(ah, "universe");
inventory.addHost(ah, "d4science");
***REMOVED***
worker.setInventory(inventory);
***REMOVED*** generate the playbook
Playbook playbook = new Playbook();
playbook.setRemote_user(user);
playbook.applyTo("universe");
for(Role r:algoRoles) ***REMOVED***
***REMOVED*** add only 'add' roles
// roles whose name ends with "remove" are uninstall counterparts and are
// deliberately excluded from the playbook
if(!r.getName().endsWith("remove")) ***REMOVED***
playbook.addRole(r.getName());
***REMOVED***
***REMOVED***
worker.setPlaybook(playbook);
return worker;
***REMOVED***

View File

@ -42,7 +42,12 @@ public interface PoolManager ***REMOVED***
String addAlgorithmToVRE(Algorithm algo, String vre, boolean test ) throws IOException, InterruptedException;
String addAlgorithmToHost(Algorithm algo, String host,boolean test) throws IOException, InterruptedException;
String stageAlgorithm(String algorithmPackageURL) throws IOException, InterruptedException;
String publishAlgorithm(String algorithmPackageURL, String targetVREToken) throws IOException, InterruptedException;
Algorithm extractAlgorithm(String url) throws IOException;
String getLogById(String logId) throws IOException;

View File

@ -1,68 +1,142 @@
package org.gcube.dataanalysis.dataminer.poolmanager.service;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
***REMOVED***
import java.io.InputStreamReader;
import java.io.StringWriter;
***REMOVED***
***REMOVED***
import java.net.URLConnection;
***REMOVED***
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
***REMOVED***
***REMOVED***
import java.util.SortedSet;
import java.util.UUID;
import org.gcube.common.resources.gcore.GenericResource;
import org.gcube.common.resources.gcore.HostingNode;
import org.gcube.common.resources.gcore.Resources;
import org.gcube.common.resources.gcore.Software.Profile.Dependency;
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.AnsibleWorker;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleBridge;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.HAProxy;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.ISClient;
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.AlgorithmSet;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;
import org.gcube.dataanalysis.dataminer.poolmanager.process.AlgorithmPackageParser;
import org.gcube.dataanalysis.dataminer.poolmanager.rest.PoolManager;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.*;
import org.gcube.informationsystem.publisher.AdvancedScopedPublisher;
import org.gcube.informationsystem.publisher.RegistryPublisherFactory;
import org.gcube.informationsystem.publisher.ScopedPublisher;
import org.gcube.informationsystem.publisher.exception.RegistryNotFoundException;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
***REMOVED***
***REMOVED***
***REMOVED***
import java.io.*;
***REMOVED***
***REMOVED***
import org.tmatesoft.svn.core.SVNCommitInfo;
import java.net.URLConnection;
***REMOVED***
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.auth.ISVNAuthenticationManager;
import org.tmatesoft.svn.core.internal.wc.SVNFileUtil;
import org.tmatesoft.svn.core.internal.wc.admin.SVNChecksumInputStream;
import org.tmatesoft.svn.core.io.ISVNEditor;
import org.tmatesoft.svn.core.io.SVNRepository;
import org.tmatesoft.svn.core.io.SVNRepositoryFactory;
import org.tmatesoft.svn.core.io.diff.SVNDeltaGenerator;
import org.tmatesoft.svn.core.wc.SVNWCUtil;
import java.util.*;
public class DataminerPoolManager implements PoolManager ***REMOVED***
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
public class DataminerPoolManager ***REMOVED***
private static final Logger LOGGER = LoggerFactory.getLogger(DataminerPoolManager.class);
***REMOVED***Algorithm algo) throws IOException, InterruptedException ***REMOVED***
Cluster cluster = new Cluster();
Host h = new Host();
h.setName(getStagingDataminerHostname());
cluster.addHost(h);
***REMOVED***Assumes the service is running in RPrototypingLab
String token = SecurityTokenProvider.instance.get();
return addAlgorithmToCluster(algo, cluster, true, "root", true, token);
***REMOVED***
***REMOVED***Algorithm algo, String targetVREToken) throws IOException, InterruptedException ***REMOVED***
Cluster cluster = new Cluster();
for (Host h : new HAProxy().listDataMinersByCluster()) ***REMOVED***
cluster.addHost(h);
***REMOVED***
return addAlgorithmToCluster(algo, cluster, false, "gcube", false, targetVREToken);
***REMOVED***
private String getStagingDataminerHostname()***REMOVED***
***REMOVED***
***REMOVED***
private void updateSVNDependencies(Algorithm algo, boolean stagingVRE) throws IOException, SVNException ***REMOVED***
for (Dependency d : algo.getDependencies()) ***REMOVED***
if (d.getType().equals("os")) ***REMOVED***
List<String> ls = new LinkedList<String>();
ls.add(d.getName());
this.updateSVN(stagingVRE ? "test_": "" + "r_deb_pkgs.txt", ls);
***REMOVED***
if (d.getType().equals("cran")) ***REMOVED***
List<String> ls = new LinkedList<String>();
ls.add(d.getName());
this.updateSVN(stagingVRE ? "test_": "" + "r_cran_pkgs.txt", ls);
***REMOVED***
if (d.getType().equals("github")) ***REMOVED***
List<String> ls = new LinkedList<String>();
ls.add(d.getName());
this.updateSVN(stagingVRE ? "test_": "" + "r_github_pkgs.txt", ls);
***REMOVED***
***REMOVED***
***REMOVED***
// Intended to publish the algorithm as a resource in the Information System of
// the VRE identified by the given token. Currently a stub: the body contains
// only TODOs, so calling it has no effect.
private void createISResource(Algorithm algo, String vreToken)***REMOVED***
***REMOVED***TODO: implement method
***REMOVED***TODO: create the resource only if not already present
***REMOVED***
***REMOVED****
*
* @param algo
* @param dataminerCluster
* @return uuid of the execution
***REMOVED***
public String addAlgorithmToCluster(
final Algorithm algo,
Cluster dataminerCluster,
boolean includeAlgorithmDependencies,
String user,
final boolean stagingVRE,
final String targetVREToken) throws IOException ***REMOVED***
AnsibleBridge ansibleBridge = new AnsibleBridge();
final AnsibleWorker worker = ansibleBridge.createWorker(algo, dataminerCluster, includeAlgorithmDependencies, user);
new Thread(new Runnable() ***REMOVED***
***REMOVED***
public void run() ***REMOVED***
try ***REMOVED***
File path = new File(worker.getWorkdir() + File.separator + "logs");
path.mkdirs();
File n = new File(path + File.separator + worker.getWorkerId());
FileOutputStream fos = new FileOutputStream(n);
PrintStream ps = new PrintStream(fos);
int retValue = worker.execute(ps);
System.out.println("Log stored to to " + n.getAbsolutePath());
if(retValue == 0) ***REMOVED***
updateSVNDependencies(algo, stagingVRE);
createISResource(algo, targetVREToken);
***REMOVED***
***REMOVED*** destroy the worker
worker.destroy();
***REMOVED*** catch (Exception e) ***REMOVED***
e.printStackTrace();
***REMOVED***
***REMOVED***
***REMOVED***).start();
***REMOVED*** this line will execute immediately, not waiting for task to
***REMOVED*** complete
return worker.getWorkerId();
***REMOVED***
public String getScriptFromURL(URL url) throws IOException ***REMOVED***
if (url == null) ***REMOVED***
@ -152,6 +226,7 @@ public class DataminerPoolManager implements PoolManager ***REMOVED***
return uuid;
***REMOVED***
public URL getURLfromWorkerLog(String a) throws MalformedURLException, UnknownHostException ***REMOVED***
File path = new File(System.getProperty("user.home") + File.separator + "dataminer-pool-manager/work/"
@ -209,23 +284,7 @@ public class DataminerPoolManager implements PoolManager ***REMOVED***
return a.applyAlgorithmSetToCluster(algorithms, cluster, uuid, /*updateSVN,*/test).getWorkerId();
***REMOVED***
public Algorithm extractAlgorithm(String url) throws IOException ***REMOVED***
return new AlgorithmPackageParser().parsePackage(url);
***REMOVED***
***REMOVED***
public void getLogId(Algorithm algo, String vre) ***REMOVED***
***REMOVED***
***REMOVED***
***REMOVED***
public String getLogById(String logId) throws IOException ***REMOVED***
***REMOVED***
***REMOVED***
***REMOVED***
***REMOVED*** 2017 March 29
@ -256,7 +315,6 @@ public class DataminerPoolManager implements PoolManager ***REMOVED***
***REMOVED***
***REMOVED***
***REMOVED***
public void addAlgToIs(Algorithm algo) ***REMOVED***
GenericResource a = new GenericResource();
a.newProfile().name(algo.getName()).type("StatisticalManagerAlgorithm").description(algo.getDescription());
@ -317,16 +375,6 @@ public class DataminerPoolManager implements PoolManager ***REMOVED***
return out;
***REMOVED***
***REMOVED*** public Algorithm getAlgoById(String id) ***REMOVED***
***REMOVED*** for (Algorithm aa : this.getAlgoFromIs()) ***REMOVED***
***REMOVED*** if (aa.getId().equals(id)) ***REMOVED***
***REMOVED*** return aa;
***REMOVED*** ***REMOVED***
***REMOVED*** ***REMOVED***
***REMOVED*** return null;
***REMOVED*** ***REMOVED***
***REMOVED***
public Set<Algorithm> getAlgoFromIs() ***REMOVED***
***REMOVED***
@ -343,8 +391,6 @@ public class DataminerPoolManager implements PoolManager ***REMOVED***
***REMOVED***
***REMOVED***
public List<String> updateSVN(String file, List<String> ldep) throws SVNException, IOException ***REMOVED***
***REMOVED***
***REMOVED***

View File

@ -0,0 +1,52 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util;
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.process.AlgorithmPackageParser;
***REMOVED***
/**
* Created by ggiammat on 5/9/17.
*/
public class AlgorithmBuilder ***REMOVED***
public static Algorithm create(String algorithmPackageURL) throws IOException, InterruptedException ***REMOVED***
return create(algorithmPackageURL, null, null, null, null, null, null, null);
***REMOVED***
public static Algorithm create(String algorithmPackageURL, String vre, String hostname, String name, String description,
String category, String algorithmType, String skipJava) throws IOException, InterruptedException ***REMOVED***
Algorithm algo = new AlgorithmPackageParser().parsePackage(algorithmPackageURL);
if(category != null)***REMOVED***
algo.setCategory(category);
***REMOVED***
***REMOVED***FIXME: do the same done for category
if (algo.getAlgorithmType() == null) ***REMOVED***
algo.setAlgorithmType(algorithmType);
***REMOVED*** else
algo.setAlgorithmType(algo.getCategory());
if (algo.getSkipJava() == null) ***REMOVED***
algo.setSkipJava(skipJava);
***REMOVED*** else
algo.setSkipJava(algo.getSkipJava());
if (algo.getName() == null) ***REMOVED***
algo.setName(name);
***REMOVED*** else
algo.setName(algo.getName());
if (algo.getDescription() == null) ***REMOVED***
algo.setDescription(description);
;
***REMOVED*** else
algo.setDescription(algo.getDescription());
return algo;
***REMOVED***
***REMOVED***