Share project "dataminer-pool-manager" into "http://svn.research-infrastructures.eu/d4science/gcube"
git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/dataminer-pool-manager@144668 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
parent
1ec0af6eb3
commit
ad7006ef2d
34
.classpath
34
.classpath
|
@ -1,34 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<classpath>
|
||||
<classpathentry kind="src" output="target/classes" path="src/main/java">
|
||||
<attributes>
|
||||
<attribute name="optional" value="true"/>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
|
||||
<attributes>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
|
||||
<attributes>
|
||||
<attribute name="optional" value="true"/>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7">
|
||||
<attributes>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
|
||||
<attributes>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="con" path="org.eclipse.jst.j2ee.internal.web.container"/>
|
||||
<classpathentry kind="con" path="org.eclipse.jst.j2ee.internal.module.container"/>
|
||||
<classpathentry kind="lib" path="/home/ngalante/Downloads/servlet-api-2.5.jar"/>
|
||||
<classpathentry kind="output" path="target/classes"/>
|
||||
</classpath>
|
42
.project
42
.project
|
@ -1,42 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<projectDescription>
|
||||
<name>dataminer-pool-manager</name>
|
||||
<comment></comment>
|
||||
<projects>
|
||||
</projects>
|
||||
<buildSpec>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.wst.jsdt.core.javascriptValidator</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.wst.common.project.facet.core.builder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.jdt.core.javabuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.wst.validation.validationbuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.m2e.core.maven2Builder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
</buildSpec>
|
||||
<natures>
|
||||
<nature>org.eclipse.jem.workbench.JavaEMFNature</nature>
|
||||
<nature>org.eclipse.wst.common.modulecore.ModuleCoreNature</nature>
|
||||
<nature>org.eclipse.jdt.core.javanature</nature>
|
||||
<nature>org.eclipse.m2e.core.maven2Nature</nature>
|
||||
<nature>org.eclipse.wst.common.project.facet.core.nature</nature>
|
||||
<nature>org.eclipse.wst.jsdt.core.jsNature</nature>
|
||||
</natures>
|
||||
</projectDescription>
|
|
@ -1 +0,0 @@
|
|||
${gcube.license}
|
|
@ -1,60 +0,0 @@
|
|||
The gCube System - ${name}
|
||||
--------------------------------------------------
|
||||
|
||||
${description}
|
||||
|
||||
${gcube.description}
|
||||
|
||||
${gcube.funding}
|
||||
|
||||
|
||||
Version
|
||||
--------------------------------------------------
|
||||
${version} (${buildDate})
|
||||
|
||||
Please see the file named "changelog.xml" in this directory for the release notes.
|
||||
|
||||
|
||||
Authors
|
||||
--------------------------------------------------
|
||||
* Paolo Fabriani (paolo.fabriani-AT-eng.it)
|
||||
Engineering Ingegneria Informatica S.p.A., Italy
|
||||
|
||||
|
||||
Maintainers
|
||||
--------------------------------------------------
|
||||
* Paolo Fabriani (paolo.fabriani-AT-eng.it)
|
||||
Engineering Ingegneria Informatica S.p.A., Italy
|
||||
|
||||
|
||||
Download information
|
||||
--------------------------------------------------
|
||||
Source code is available from SVN:
|
||||
${scm.url}
|
||||
|
||||
Binaries can be downloaded from the gCube website:
|
||||
${gcube.website}
|
||||
|
||||
|
||||
Installation
|
||||
--------------------------------------------------
|
||||
Installation documentation is available on-line in the gCube Wiki:
|
||||
${gcube.wikiRoot}/[admin guide page name here]
|
||||
|
||||
|
||||
Documentation
|
||||
--------------------------------------------------
|
||||
Documentation is available on-line in the gCube Wiki:
|
||||
${gcube.wikiRoot}[user guide page name here]
|
||||
${gcube.wikiRoot}[developer guide page name here]
|
||||
|
||||
|
||||
Support
|
||||
--------------------------------------------------
|
||||
Bugs and support requests can be reported in the gCube issue tracking tool:
|
||||
${gcube.issueTracking}
|
||||
|
||||
|
||||
Licensing
|
||||
--------------------------------------------------
|
||||
This software is licensed under the terms you may find in the file named "LICENSE" in this directory.
|
|
@ -1,36 +0,0 @@
|
|||
<assembly
|
||||
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
|
||||
|
||||
<id>servicearchive</id>
|
||||
|
||||
<formats>
|
||||
<format>tar.gz</format>
|
||||
</formats>
|
||||
|
||||
<baseDirectory>/</baseDirectory>
|
||||
|
||||
<fileSets>
|
||||
<fileSet>
|
||||
<directory>${distroDirectory}</directory>
|
||||
<outputDirectory>/</outputDirectory>
|
||||
<useDefaultExcludes>true</useDefaultExcludes>
|
||||
<includes>
|
||||
<include>README</include>
|
||||
<include>LICENSE</include>
|
||||
<include>profile.xml</include>
|
||||
</includes>
|
||||
<fileMode>755</fileMode>
|
||||
<filtered>true</filtered>
|
||||
</fileSet>
|
||||
</fileSets>
|
||||
|
||||
<files>
|
||||
<file>
|
||||
<source>target/${build.finalName}.war</source>
|
||||
<outputDirectory>/${artifactId}</outputDirectory>
|
||||
</file>
|
||||
</files>
|
||||
|
||||
</assembly>
|
|
@ -1,25 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Resource xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
|
||||
<ID />
|
||||
<Type>Library</Type>
|
||||
<Profile>
|
||||
<Description>${description}</Description>
|
||||
<Class>DataminerPoolManager</Class>
|
||||
<Name>${artifactId}</Name>
|
||||
<Version>1.0.0</Version>
|
||||
<Packages>
|
||||
<Software>
|
||||
<Name>${artifactId}</Name>
|
||||
<Version>${version}</Version>
|
||||
<MavenCoordinates>
|
||||
<groupId>${groupId}</groupId>
|
||||
<artifactId>${artifactId}</artifactId>
|
||||
<version>${version}</version>
|
||||
</MavenCoordinates>
|
||||
<Files>
|
||||
<File>${build.finalName}.jar</File>
|
||||
</Files>
|
||||
</Software>
|
||||
</Packages>
|
||||
</Profile>
|
||||
</Resource>
|
125
pom.xml
125
pom.xml
|
@ -1,125 +0,0 @@
|
|||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<parent>
|
||||
<artifactId>maven-parent</artifactId>
|
||||
<groupId>org.gcube.tools</groupId>
|
||||
<version>1.0.0</version>
|
||||
<relativePath />
|
||||
</parent>
|
||||
|
||||
<groupId>org.gcube.dataanalysis</groupId>
|
||||
<artifactId>dataminer-pool-manager</artifactId>
|
||||
<version>1.0.0-SNAPSHOT</version>
|
||||
<packaging>war</packaging>
|
||||
|
||||
<name>dataminer-pool-manager</name>
|
||||
<description>
|
||||
</description>
|
||||
|
||||
<properties>
|
||||
<distroDirectory>distro</distroDirectory>
|
||||
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
|
||||
</properties>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>commons-io</groupId>
|
||||
<artifactId>commons-io</artifactId>
|
||||
<version>[2.5.0,2.6.0)</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.antlr</groupId>
|
||||
<artifactId>stringtemplate</artifactId>
|
||||
<version>[4.0.0, 4.1.0)</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-configuration</groupId>
|
||||
<artifactId>commons-configuration</artifactId>
|
||||
<version>1.10</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.resources.discovery</groupId>
|
||||
<artifactId>ic-client</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.yaml</groupId>
|
||||
<artifactId>snakeyaml</artifactId>
|
||||
<version>1.16</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.glassfish.jersey.media</groupId>
|
||||
<artifactId>jersey-media-json-jackson</artifactId>
|
||||
<version>2.23.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.glassfish.jersey.core</groupId>
|
||||
<artifactId>jersey-client</artifactId>
|
||||
<version>2.22.1</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.glassfish.jersey.containers</groupId>
|
||||
<artifactId>jersey-container-servlet</artifactId>
|
||||
<version>2.22.1</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
|
||||
|
||||
|
||||
</dependencies>
|
||||
|
||||
<dependencyManagement>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.gcube.distribution</groupId>
|
||||
<artifactId>maven-smartgears-bom</artifactId>
|
||||
<version>LATEST</version>
|
||||
<type>pom</type>
|
||||
<scope>import</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</dependencyManagement>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
|
||||
<plugin>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<configuration>
|
||||
<source>1.7</source>
|
||||
<target>1.7</target>
|
||||
</configuration>
|
||||
</plugin>
|
||||
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-assembly-plugin</artifactId>
|
||||
<version>2.2</version>
|
||||
<configuration>
|
||||
<descriptors>
|
||||
<descriptor>${distroDirectory}/descriptor.xml</descriptor>
|
||||
</descriptors>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>servicearchive</id>
|
||||
<phase>install</phase>
|
||||
<goals>
|
||||
<goal>single</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
</plugins>
|
||||
</build>
|
||||
|
||||
</project>
|
|
@ -1,135 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.ansible;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.PrintStream;
|
||||
import java.util.Scanner;
|
||||
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Inventory;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Playbook;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleSerializeHelper;
|
||||
|
||||
/**
|
||||
* This class is responsible for the interface with ansible, retrieving log,
|
||||
* etc. etc. It's not supposed to access templates and static stuff files. It
|
||||
* does not know the service datamodel.
|
||||
*
|
||||
* @author paolo
|
||||
*
|
||||
*/
|
||||
public class AnsibleWorker {
|
||||
|
||||
/**
|
||||
* The name of the inventory
|
||||
*/
|
||||
private static String INVENTORY_NAME = "inventory.yaml";
|
||||
|
||||
/**
|
||||
* The directory containing roles
|
||||
*/
|
||||
private static String ROLES_DIR = "roles";
|
||||
|
||||
/**
|
||||
* The name of the playbook
|
||||
*/
|
||||
private static String PLAYBOOK_NAME = "playbook.yaml";
|
||||
|
||||
/**
|
||||
* The root of the worker. This corresponds to a standard ansible working dir.
|
||||
*/
|
||||
private File workerRoot;
|
||||
|
||||
public AnsibleWorker(File root) {
|
||||
this.workerRoot = root;
|
||||
this.ensureWorkStructure();
|
||||
}
|
||||
|
||||
public File getWorkdir() {
|
||||
return this.workerRoot;
|
||||
}
|
||||
|
||||
public File getRolesDir() {
|
||||
return new File(this.getWorkdir(), ROLES_DIR);
|
||||
}
|
||||
|
||||
public String getWorkerId() {
|
||||
return this.workerRoot.getName();
|
||||
}
|
||||
|
||||
public void ensureWorkStructure() {
|
||||
// generate root
|
||||
this.getWorkdir().mkdirs();
|
||||
}
|
||||
|
||||
public void removeWorkStructure() {
|
||||
// remove the working dir
|
||||
this.getWorkdir().delete();
|
||||
}
|
||||
|
||||
public File getPlaybookFile() {
|
||||
return new File(this.getWorkdir(), PLAYBOOK_NAME);
|
||||
}
|
||||
|
||||
public File getInventoryFile() {
|
||||
return new File(this.getWorkdir(), INVENTORY_NAME);
|
||||
}
|
||||
|
||||
|
||||
public void setInventory(Inventory inventory) throws IOException {
|
||||
// serialize the string to the 'inventory' file
|
||||
AnsibleSerializeHelper.serialize(inventory, this.getInventoryFile());
|
||||
}
|
||||
|
||||
public void setPlaybook(Playbook playbook) throws IOException {
|
||||
// serialize the string to the 'playbook' file
|
||||
AnsibleSerializeHelper.serialize(playbook, this.getPlaybookFile());
|
||||
}
|
||||
|
||||
public void addRole(Role r) throws IOException {
|
||||
// Serialize role in the workdir
|
||||
AnsibleSerializeHelper.serializeRole(r, this.getRolesDir());
|
||||
}
|
||||
|
||||
|
||||
|
||||
public void apply() throws IOException {
|
||||
// TODO execute the playbook and return output
|
||||
System.out.println(this.getWorkdir());
|
||||
try {
|
||||
Process p = Runtime.getRuntime().exec("ansible-playbook -v -i " + this.getInventoryFile().getAbsolutePath() + " " + this.getPlaybookFile().getAbsolutePath());
|
||||
|
||||
inheritIO(p.getInputStream(), System.out);
|
||||
inheritIO(p.getErrorStream(), System.err);
|
||||
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
//System.out.println("TODO: execute: ansible-playbook -v -i " + this.getInventoryFile().getName() + " " + this.getPlaybookFile().getName());
|
||||
}
|
||||
|
||||
|
||||
|
||||
private static void inheritIO(final InputStream src, final PrintStream dest) {
|
||||
new Thread(new Runnable() {
|
||||
public void run() {
|
||||
Scanner sc = new Scanner(src);
|
||||
while (sc.hasNextLine()) {
|
||||
dest.println(sc.nextLine());
|
||||
}
|
||||
}
|
||||
}).start();
|
||||
}
|
||||
|
||||
/**
|
||||
* Destroy the worker:
|
||||
* - remove the working dir
|
||||
*/
|
||||
public void destroy() {
|
||||
this.removeWorkStructure();
|
||||
}
|
||||
|
||||
}
|
|
@ -1,19 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.ansible.model;
|
||||
|
||||
/**
 * A single host entry in an ansible inventory, identified by its name.
 */
public class AnsibleHost {

  // host name exactly as it appears in the inventory
  private String name;

  public AnsibleHost(String name) {
    this.name = name;
  }

  public String getName() {
    return this.name;
  }

  public void setName(String name) {
    this.name = name;
  }

}
|
|
@ -1,29 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.ansible.model;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Vector;
|
||||
|
||||
public class HostGroup {
|
||||
|
||||
private String name;
|
||||
|
||||
private Collection<AnsibleHost> hosts;
|
||||
|
||||
public HostGroup(String name) {
|
||||
this.name = name;
|
||||
this.hosts = new Vector<>();
|
||||
}
|
||||
|
||||
public void addHost(AnsibleHost h) {
|
||||
this.hosts.add(h);
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return this.name;
|
||||
}
|
||||
|
||||
public Collection<AnsibleHost> getHosts() {
|
||||
return new Vector<>(this.hosts);
|
||||
}
|
||||
|
||||
}
|
|
@ -1,37 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.ansible.model;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Vector;
|
||||
|
||||
public class Inventory {
|
||||
|
||||
private Collection<HostGroup> groups;
|
||||
|
||||
public Inventory() {
|
||||
this.groups = new Vector<>();
|
||||
}
|
||||
|
||||
public void addGroup(HostGroup group) {
|
||||
this.groups.add(group);
|
||||
}
|
||||
|
||||
public void addHost(AnsibleHost h, String groupName) {
|
||||
this.getGroup(groupName).addHost(h);
|
||||
}
|
||||
|
||||
private HostGroup getGroup(String groupName) {
|
||||
for (HostGroup hg : this.groups) {
|
||||
if (groupName.equals(hg.getName())) {
|
||||
return hg;
|
||||
}
|
||||
}
|
||||
HostGroup hg = new HostGroup(groupName);
|
||||
this.groups.add(hg);
|
||||
return hg;
|
||||
}
|
||||
|
||||
public Collection<HostGroup> getHostGroups() {
|
||||
return new Vector<>(this.groups);
|
||||
}
|
||||
|
||||
}
|
|
@ -1,50 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.ansible.model;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Vector;
|
||||
|
||||
public class Playbook {
|
||||
|
||||
private String hostGroupName;
|
||||
|
||||
private List<String> roles;
|
||||
|
||||
private String remote_user;
|
||||
|
||||
public Playbook() {
|
||||
this.roles = new Vector<>();
|
||||
}
|
||||
|
||||
public void addRole(String role) {
|
||||
roles.add(role);
|
||||
}
|
||||
|
||||
public void applyTo(String hostGroupName) {
|
||||
this.hostGroupName = hostGroupName;
|
||||
}
|
||||
|
||||
public String getHostGroupName() {
|
||||
return hostGroupName;
|
||||
}
|
||||
|
||||
public List<String> getRoles() {
|
||||
return new Vector<>(roles);
|
||||
}
|
||||
|
||||
public String getRemote_user() {
|
||||
return remote_user;
|
||||
}
|
||||
|
||||
public void setRemote_user(String remote_user) {
|
||||
this.remote_user = remote_user;
|
||||
}
|
||||
|
||||
public void setHostGroupName(String hostGroupName) {
|
||||
this.hostGroupName = hostGroupName;
|
||||
}
|
||||
|
||||
public void setRoles(List<String> roles) {
|
||||
this.roles = roles;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,51 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.ansible.model;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Vector;
|
||||
|
||||
public class Role {
|
||||
|
||||
/**
|
||||
* The name of the role
|
||||
*/
|
||||
private String name;
|
||||
|
||||
private Collection<RoleFile> tasks;
|
||||
|
||||
private Collection<RoleFile> meta;
|
||||
|
||||
public Role() {
|
||||
this.tasks = new Vector<>();
|
||||
this.meta = new Vector<>();
|
||||
}
|
||||
|
||||
public Role(String name) {
|
||||
this();
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public void addTaskFile(RoleFile tf) {
|
||||
this.tasks.add(tf);
|
||||
}
|
||||
|
||||
public void addMeta(RoleFile tf) {
|
||||
this.meta.add(tf);
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public Collection<RoleFile> getTaskFiles() {
|
||||
return new Vector<>(this.tasks);
|
||||
}
|
||||
|
||||
public Collection<RoleFile> getMeta() {
|
||||
return new Vector<>(this.meta);
|
||||
}
|
||||
|
||||
}
|
|
@ -1,54 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.ansible.model;
|
||||
|
||||
/**
 * A single file belonging to an ansible role (e.g. a task or meta file),
 * described by its path within the role, its name and its content.
 */
public class RoleFile {

  // path to the file, starting from the role root
  private String path;

  // name of the file
  private String name;

  // textual content of the file
  private String content;

  public RoleFile() {
  }

  public RoleFile(String name, String content) {
    this();
    this.setName(name);
    this.setContent(content);
  }

  public String getPath() {
    return this.path;
  }

  public void setPath(String path) {
    this.path = path;
  }

  public String getName() {
    return this.name;
  }

  public void setName(String name) {
    this.name = name;
  }

  public String getContent() {
    return this.content;
  }

  public void setContent(String content) {
    this.content = content;
  }

}
|
|
@ -1,276 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.PrintStream;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
import java.util.TreeSet;
|
||||
import java.util.UUID;
|
||||
import java.util.Vector;
|
||||
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.AnsibleWorker;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.AnsibleHost;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Inventory;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Playbook;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template.AlgorithmPackage;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template.CranDependencyPackage;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template.CustomDependencyPackage;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template.CustomRoleManager;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template.OSDependencyPackage;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template.StaticRoleManager;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template.TemplateManager;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.AlgorithmSet;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.comparator.HostComparator;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class AnsibleBridge {
|
||||
private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(AnsibleBridge.class);
|
||||
|
||||
|
||||
/**
|
||||
* The workdir for this service
|
||||
*/
|
||||
private String dpmRoot;
|
||||
|
||||
public AnsibleBridge() {
|
||||
this(System.getProperty("user.home")+File.separator+"/gcube/dataminer-pool-manager");
|
||||
}
|
||||
|
||||
public AnsibleBridge(String root) {
|
||||
this.dpmRoot = root;
|
||||
this.ensureServiceRoot();
|
||||
}
|
||||
|
||||
private void ensureServiceRoot() {
|
||||
// generate root
|
||||
new File(dpmRoot).mkdirs();
|
||||
// 'template' is for template roles
|
||||
this.getTemplatesDir().mkdirs();
|
||||
// 'static' is for custom roles
|
||||
this.getCustomDir().mkdirs();
|
||||
// 'work' is for temporary working directories
|
||||
this.getWorkDir().mkdirs();
|
||||
}
|
||||
|
||||
private File getWorkDir() {
|
||||
return new File(this.dpmRoot, "work");
|
||||
}
|
||||
|
||||
private File getTemplatesDir() {
|
||||
return new File(this.dpmRoot, "templates");
|
||||
}
|
||||
|
||||
private File getCustomDir() {
|
||||
return new File(this.dpmRoot, "custom");
|
||||
}
|
||||
|
||||
public AnsibleWorker createWorker() {
|
||||
File workerRoot = new File(this.getWorkDir(), UUID.randomUUID().toString());
|
||||
AnsibleWorker worker = new AnsibleWorker(workerRoot);
|
||||
return worker;
|
||||
}
|
||||
|
||||
/**
|
||||
* Groups hosts by domain and algorithm sets
|
||||
* @param clusters
|
||||
*/
|
||||
public void printInventoryByDomainAndSets(Collection<Cluster> clusters) {
|
||||
Map<String, Set<Host>> inventory = new TreeMap<>();
|
||||
for(Cluster cluster:clusters) {
|
||||
for(AlgorithmSet as:cluster.getAlgorithmSets()) {
|
||||
String asName = as.getName();
|
||||
for(Host h:cluster.getHosts()) {
|
||||
String domain = h.getDomain().getName();
|
||||
String key = String.format("[%s@%s]", asName, domain);
|
||||
Set<Host> hosts = inventory.get(key);
|
||||
if(hosts==null) {
|
||||
hosts = new TreeSet<>(new HostComparator());
|
||||
inventory.put(key, hosts);
|
||||
}
|
||||
hosts.add(h);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
for(String key:inventory.keySet()) {
|
||||
System.out.println(key);
|
||||
Collection<Host> hosts = inventory.get(key);
|
||||
for(Host h:hosts) {
|
||||
System.out.println(h.getName()+"."+h.getDomain().getName());
|
||||
}
|
||||
System.out.println();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Groups hosts by algorithm sets only
|
||||
* @param clusters
|
||||
*/
|
||||
public void printInventoryBySets(Collection<Cluster> clusters) {
|
||||
Map<String, Set<Host>> inventory = new TreeMap<>();
|
||||
for (Cluster cluster : clusters) {
|
||||
for (AlgorithmSet as : cluster.getAlgorithmSets()) {
|
||||
String asName = as.getName();
|
||||
for (Host h : cluster.getHosts()) {
|
||||
String key = String.format("[%s]", asName);
|
||||
Set<Host> hosts = inventory.get(key);
|
||||
if (hosts == null) {
|
||||
hosts = new TreeSet<>(new HostComparator());
|
||||
inventory.put(key, hosts);
|
||||
}
|
||||
hosts.add(h);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
for (String key : inventory.keySet()) {
|
||||
System.out.println(key);
|
||||
Collection<Host> hosts = inventory.get(key);
|
||||
for (Host h : hosts) {
|
||||
System.out.println(h.getName()+"."+h.getDomain().getName());
|
||||
}
|
||||
System.out.println();
|
||||
}
|
||||
}
|
||||
|
||||
public AnsibleWorker applyAlgorithmSetToCluster(AlgorithmSet as, Cluster cluster) throws IOException {
|
||||
|
||||
|
||||
return applyAlgorithmSetToCluster (as,cluster,UUID.randomUUID().toString());
|
||||
}
|
||||
|
||||
public AnsibleWorker applyAlgorithmSetToCluster(AlgorithmSet as, Cluster cluster,String uuid) throws IOException {
|
||||
AnsibleWorker worker = new AnsibleWorker(new File(this.getWorkDir(), uuid));
|
||||
|
||||
|
||||
List<Role> algoRoles = new Vector<>();
|
||||
|
||||
// add algorithms and dependencies to the worker
|
||||
for (Algorithm a : as.getAlgorithms()) {
|
||||
for (Role r : this.generateRoles(a)) {
|
||||
algoRoles.add(r);
|
||||
worker.addRole(r);
|
||||
}
|
||||
for (Dependency d : a.getDependencies()) {
|
||||
for (Role r : this.generateRoles(d)) {
|
||||
worker.addRole(r);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// add static roles
|
||||
for(Role r:this.getStaticRoleManager().getStaticRoles()) {
|
||||
worker.addRole(r);
|
||||
}
|
||||
|
||||
// generate the inventory
|
||||
Inventory inventory = new Inventory();
|
||||
for (Host h : cluster.getHosts()) {
|
||||
AnsibleHost ah = new AnsibleHost(h.getName());
|
||||
inventory.addHost(ah, "universe");
|
||||
inventory.addHost(ah, "d4science");
|
||||
}
|
||||
worker.setInventory(inventory);
|
||||
|
||||
// generate the playbook
|
||||
Playbook playbook = new Playbook();
|
||||
playbook.setRemote_user("root");
|
||||
playbook.applyTo("universe");
|
||||
for(Role r:algoRoles) {
|
||||
// add only 'add' roles
|
||||
if(!r.getName().endsWith("remove")) {
|
||||
playbook.addRole(r.getName());
|
||||
}
|
||||
}
|
||||
|
||||
worker.setPlaybook(playbook);
|
||||
|
||||
// execute and save log locally
|
||||
PrintStream console = System.out;
|
||||
File path = new File(worker.getWorkdir() + File.separator + "logs");
|
||||
path.mkdirs();
|
||||
File n = new File(path + File.separator + worker.getWorkerId());
|
||||
FileOutputStream fos = new FileOutputStream(n);
|
||||
PrintStream ps = new PrintStream(fos);
|
||||
System.setOut(ps);
|
||||
System.setErr(ps);
|
||||
//System.setErr(console);
|
||||
|
||||
worker.apply();
|
||||
//System.setOut(console);
|
||||
//worker.apply();
|
||||
System.out.println("Log stored to to " + n.getAbsolutePath());
|
||||
|
||||
// destroy the worker
|
||||
worker.destroy();
|
||||
return worker;
|
||||
}
|
||||
|
||||
|
||||
|
||||
private TemplateManager getTemplateManager() {
|
||||
return new TemplateManager(this.dpmRoot+"/templates");
|
||||
}
|
||||
|
||||
private CustomRoleManager getCustomRoleManager() {
|
||||
return new CustomRoleManager(this.dpmRoot+"/custom");
|
||||
}
|
||||
|
||||
private StaticRoleManager getStaticRoleManager() {
|
||||
return new StaticRoleManager(this.dpmRoot+"/static");
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate all roles for this dependency
|
||||
* @param d
|
||||
*/
|
||||
public Collection<Role> generateRoles(Dependency d) {
|
||||
Collection<Role> roles = new Vector<>();
|
||||
|
||||
|
||||
if("os".equalsIgnoreCase(d.getType())) {
|
||||
OSDependencyPackage pkg = new OSDependencyPackage(d);
|
||||
if(pkg!=null) {
|
||||
roles.addAll(pkg.getRoles(this.getTemplateManager()));
|
||||
}
|
||||
|
||||
} else if("custom".equalsIgnoreCase(d.getType())) {
|
||||
CustomDependencyPackage pkg = new CustomDependencyPackage(d);
|
||||
if(pkg!=null) {
|
||||
roles.addAll(pkg.getRoles(this.getCustomRoleManager()));
|
||||
}
|
||||
}
|
||||
|
||||
else if("github".equalsIgnoreCase(d.getType())) {
|
||||
CranDependencyPackage pkg = new CranDependencyPackage(d);
|
||||
if(pkg!=null) {
|
||||
roles.addAll(pkg.getRoles(this.getTemplateManager()));
|
||||
}
|
||||
}
|
||||
else if("cran".equalsIgnoreCase(d.getType())) {
|
||||
CranDependencyPackage pkg = new CranDependencyPackage(d);
|
||||
if(pkg!=null) {
|
||||
roles.addAll(pkg.getRoles(this.getTemplateManager()));
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
return roles;
|
||||
}
|
||||
|
||||
public Collection<Role> generateRoles(Algorithm a) {
|
||||
AlgorithmPackage pkg = new AlgorithmPackage(a);
|
||||
return pkg.getRoles(this.getTemplateManager());
|
||||
}
|
||||
|
||||
}
|
|
@ -1,119 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.io.PrintWriter;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.AnsibleHost;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.HostGroup;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Inventory;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Playbook;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.RoleFile;
|
||||
|
||||
public class AnsibleSerializeHelper {
|
||||
|
||||
public static void serialize(Inventory inventory, File inventoryFile) throws IOException {
|
||||
String out = "";
|
||||
for(HostGroup hg:inventory.getHostGroups()) {
|
||||
out+=String.format("[%s]\n", hg.getName());
|
||||
for(AnsibleHost h:hg.getHosts()) {
|
||||
out+=h.getName()+"\n";
|
||||
}
|
||||
out+="\n";
|
||||
}
|
||||
out = out.trim();
|
||||
serialize(out, inventoryFile);
|
||||
}
|
||||
|
||||
public static void serialize(Playbook playbook, File playbookFile) throws IOException {
|
||||
String out = "- hosts: " + playbook.getHostGroupName() + "\n";
|
||||
out += " remote_user: "+playbook.getRemote_user()+"\n";
|
||||
out+=" roles:\n";
|
||||
for(String r:playbook.getRoles()) {
|
||||
out+=" - " + r+"\n";
|
||||
}
|
||||
out+=" vars:\n";
|
||||
out+=" os_package_state: present\n";
|
||||
out = out.trim();
|
||||
serialize(out, playbookFile);
|
||||
}
|
||||
|
||||
public static void serializeRole(Role r, File dir) throws IOException {
|
||||
// create root
|
||||
File root = new File(dir, r.getName());
|
||||
root.mkdirs();
|
||||
|
||||
// create tasks
|
||||
if(r.getTaskFiles().size()>0) {
|
||||
File tasks = new File(root, "tasks");
|
||||
tasks.mkdirs();
|
||||
for(RoleFile tf: r.getTaskFiles()) {
|
||||
serializeTask(tf, tasks);
|
||||
}
|
||||
}
|
||||
|
||||
// create meta
|
||||
if(r.getMeta().size()>0) {
|
||||
File meta = new File(root, "meta");
|
||||
meta.mkdirs();
|
||||
for(RoleFile tf: r.getMeta()) {
|
||||
serializeTask(tf, meta);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static void serializeTask(RoleFile tf, File dir) throws IOException {
|
||||
File f = new File(dir, tf.getName());
|
||||
serialize(tf.getContent().trim(), f);
|
||||
}
|
||||
|
||||
public static void serialize(String s, File f) throws IOException {
|
||||
PrintWriter out = new PrintWriter(f);
|
||||
out.println(s);
|
||||
out.close();
|
||||
}
|
||||
|
||||
public static Role deserializeRoleFromFilesystem(File roleDir) throws IOException {
|
||||
Role out = new Role();
|
||||
out.setName(roleDir.getName());
|
||||
|
||||
if(!roleDir.exists()) {
|
||||
throw new FileNotFoundException();
|
||||
}
|
||||
|
||||
try {
|
||||
File tasksDir = new File(roleDir, "tasks");
|
||||
if(tasksDir.exists()) {
|
||||
for(File main:tasksDir.listFiles()) {
|
||||
String content = IOUtils.toString(new FileInputStream(main), "UTF-8");
|
||||
RoleFile tf = new RoleFile(main.getName(), content);
|
||||
tf.setPath(main.getAbsolutePath().substring(roleDir.getAbsolutePath().length()+1));
|
||||
out.addTaskFile(tf);
|
||||
}
|
||||
}
|
||||
} catch(FileNotFoundException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
try {
|
||||
File metaDir = new File(roleDir, "meta");
|
||||
if(metaDir.exists()) {
|
||||
for(File main:metaDir.listFiles()) {
|
||||
String content = IOUtils.toString(new FileInputStream(main), "UTF-8");
|
||||
RoleFile tf = new RoleFile(main.getName(), content);
|
||||
tf.setPath(main.getAbsolutePath().substring(roleDir.getAbsolutePath().length()+1));
|
||||
out.addMeta(tf);
|
||||
}
|
||||
}
|
||||
} catch(FileNotFoundException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
return out;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,71 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.Vector;
|
||||
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
|
||||
|
||||
public class AlgorithmPackage {
|
||||
|
||||
private Algorithm algorithm;
|
||||
|
||||
public AlgorithmPackage(Algorithm a) {
|
||||
this.algorithm = a;
|
||||
}
|
||||
|
||||
protected Map<String, String> getDictionary(Algorithm a) {
|
||||
Map<String, String> out = new HashMap<String, String>();
|
||||
out.put("name", a.getName());
|
||||
out.put("category", a.getCategory());
|
||||
out.put("class", a.getClazz());
|
||||
out.put("atype", a.getAlgorithmType());
|
||||
out.put("skipjava", a.getSkipJava());
|
||||
out.put("vre", ScopeProvider.instance.get());
|
||||
//out.put("vre", "FAKE_VRE");
|
||||
out.put("packageurl", a.getPackageURL());
|
||||
out.put("description", a.getDescription());
|
||||
String deps = "";
|
||||
|
||||
for(Dependency d:a.getDependencies()) {
|
||||
deps+=String.format("- { role: %s }\n", d.getType()+"-"+d.getName().replaceAll("/", "-"));
|
||||
}
|
||||
deps = deps.trim();
|
||||
out.put("dependencies", deps);
|
||||
return out;
|
||||
}
|
||||
|
||||
protected Algorithm getAlgorithm() {
|
||||
return this.algorithm;
|
||||
}
|
||||
|
||||
public Collection<Role> getRoles(TemplateManager tm) {
|
||||
Collection<Role> out = new Vector<>();
|
||||
for(String mode:new String[]{"add"}) { // "remove", "update"
|
||||
String roleName = "gcube-algorithm-"+this.getAlgorithm().getName()+("add".equals(mode) ? "" : "-"+mode);
|
||||
try {
|
||||
// find template
|
||||
Role template = tm.getRoleTemplate("gcube-algorithm-" + mode);
|
||||
//
|
||||
if(template!=null) {
|
||||
Map<String, String> dictionary = this.getDictionary(this.getAlgorithm());
|
||||
Role r = tm.fillRoleTemplate(template, dictionary);
|
||||
r.setName(roleName);
|
||||
out.add(r);
|
||||
} else {
|
||||
System.out.println("WARNING: template is null");
|
||||
}
|
||||
} catch (NoSuchElementException e) {
|
||||
// e.printStackTrace();
|
||||
System.out.println("WARNING: no template found for " + roleName);
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,11 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template;
|
||||
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
|
||||
|
||||
/**
 * A dependency package coming from CRAN (the R package repository).
 * Behavior is entirely inherited from DependencyPackage; the subclass exists
 * so that CRAN dependencies can be distinguished by type where needed.
 */
public class CranDependencyPackage extends DependencyPackage {

	public CranDependencyPackage(Dependency d) {
		super(d);
	}

}
|
|
@ -1,65 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.Vector;
|
||||
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
|
||||
|
||||
public class CustomDependencyPackage extends DependencyPackage {
|
||||
|
||||
public CustomDependencyPackage(Dependency d) {
|
||||
super(d);
|
||||
}
|
||||
|
||||
private String getCustomRepositoryLocation(String ansibleRoot) {
|
||||
return ansibleRoot+"/custom";
|
||||
}
|
||||
|
||||
/*
|
||||
public void serializeTo(String ansibleRoot) {
|
||||
for(String mode:new String[]{"add", "remove", "update"}) {
|
||||
// look for roles in the 'custom' repository
|
||||
try {
|
||||
// role name
|
||||
String roleName = this.getDependency().getType()+"-"+this.getDependency().getName()+("add".equals(mode) ? "" : "-"+mode);
|
||||
// look for the custom role
|
||||
File src = new File(this.getCustomRepositoryLocation(ansibleRoot)+"/"+roleName);
|
||||
System.out.println("** CUSTOM ** " + src);
|
||||
if(src.exists()) {
|
||||
// do copy
|
||||
System.out.println("copying CUSTOM role");
|
||||
File dest = new File(ansibleRoot+"/work/"+roleName);
|
||||
FileUtils.copyDirectory(src, dest);
|
||||
}
|
||||
} catch(IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
public Collection<Role> getRoles(CustomRoleManager crm) {
|
||||
Collection<Role> out = new Vector<>();
|
||||
// for(String mode:new String[]{"add", "remove", "update"}) {
|
||||
for(String mode:new String[]{"add"}) { // "remove", "update"
|
||||
// role name
|
||||
String roleName = this.getDependency().getType()+"-"+this.getDependency().getName()+("add".equals(mode) ? "" : "-"+mode);
|
||||
try {
|
||||
// look for custom role
|
||||
Role role = crm.getRole(roleName);
|
||||
if(role!=null) {
|
||||
out.add(role);
|
||||
}
|
||||
} catch (NoSuchElementException e) {
|
||||
// e.printStackTrace();
|
||||
System.out.println("WARNING: no custom role found for " + roleName);
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
|
@ -1,32 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.NoSuchElementException;
|
||||
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleSerializeHelper;
|
||||
|
||||
public class CustomRoleManager {
|
||||
|
||||
private String root;
|
||||
|
||||
public CustomRoleManager(String root) {
|
||||
this.root = root;
|
||||
}
|
||||
|
||||
public String getRoot() {
|
||||
return this.root;
|
||||
}
|
||||
|
||||
public Role getRole(String roleName) throws NoSuchElementException {
|
||||
File f = new File(this.getRoot(), roleName);
|
||||
try {
|
||||
return AnsibleSerializeHelper.deserializeRoleFromFilesystem(f);
|
||||
} catch (IOException e) {
|
||||
// e.printStackTrace();
|
||||
throw new NoSuchElementException("unable to find " + roleName);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -1,55 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.Vector;
|
||||
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
|
||||
|
||||
public class DependencyPackage {
|
||||
|
||||
private Dependency dependency;
|
||||
|
||||
public DependencyPackage(Dependency d) {
|
||||
this.dependency = d;
|
||||
}
|
||||
|
||||
protected Map<String, String> getDictionary(Dependency d) {
|
||||
Map<String, String> out = new HashMap<String, String>();
|
||||
out.put("name", d.getName());
|
||||
out.put("type", d.getType());
|
||||
return out;
|
||||
}
|
||||
|
||||
protected Dependency getDependency() {
|
||||
return this.dependency;
|
||||
}
|
||||
|
||||
public Collection<Role> getRoles(TemplateManager tm) {
|
||||
Collection<Role> out = new Vector<>();
|
||||
for(String mode:new String[]{"add"}) { // "remove", "update"
|
||||
String roleName = this.getDependency().getType()+"-"+this.getDependency().getName().replaceAll("/", "-")+("add".equals(mode) ? "" : "-"+mode);
|
||||
try {
|
||||
// find template
|
||||
Role template = tm.getRoleTemplate(this.getDependency().getType()+"-package-"+mode);
|
||||
//
|
||||
if(template!=null) {
|
||||
Map<String, String> dictionary = this.getDictionary(this.getDependency());
|
||||
Role r = tm.fillRoleTemplate(template, dictionary);
|
||||
r.setName(roleName);
|
||||
out.add(r);
|
||||
} else {
|
||||
System.out.println("WARNING: template is null");
|
||||
}
|
||||
} catch (NoSuchElementException e) {
|
||||
// e.printStackTrace();
|
||||
System.out.println("WARNING: no template found for " + roleName);
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,11 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template;
|
||||
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
|
||||
|
||||
/**
 * A dependency provided by the operating system's package manager.
 * Behavior is entirely inherited from DependencyPackage; the subclass exists
 * so that OS dependencies can be distinguished by type where needed.
 */
public class OSDependencyPackage extends DependencyPackage {

	public OSDependencyPackage(Dependency d) {
		super(d);
	}

}
|
|
@ -1,37 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
import java.util.Vector;
|
||||
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleSerializeHelper;
|
||||
|
||||
public class StaticRoleManager {
|
||||
|
||||
private String root;
|
||||
|
||||
public StaticRoleManager(String root) {
|
||||
this.root = root;
|
||||
}
|
||||
|
||||
public String getRoot() {
|
||||
return this.root;
|
||||
}
|
||||
|
||||
public Collection<Role> getStaticRoles() {
|
||||
Collection<Role> out = new Vector<>();
|
||||
for(File f: new File(this.getRoot()).listFiles()) {
|
||||
try {
|
||||
out.add(AnsibleSerializeHelper.deserializeRoleFromFilesystem(f));
|
||||
} catch(IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
|
@ -1,95 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import java.util.NoSuchElementException;
|
||||
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.RoleFile;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleSerializeHelper;
|
||||
import org.stringtemplate.v4.ST;
|
||||
|
||||
public class TemplateManager {
|
||||
|
||||
private String root;
|
||||
|
||||
public TemplateManager(String root) {
|
||||
this.root = root;
|
||||
}
|
||||
|
||||
public String getTemplateRoot() {
|
||||
return this.root;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read the given template
|
||||
* @param templateName
|
||||
* @return
|
||||
* @throws IOException
|
||||
*/
|
||||
// private String readTemplate(String templateName) throws IOException {
|
||||
// File templateFile = new File(this.getTemplateRoot(), templateName + ".yaml");
|
||||
// System.out.println("looking for file " + templateFile.getName());
|
||||
// String out = IOUtils.toString(new FileInputStream(templateFile), "UTF-8");
|
||||
// return out;
|
||||
// }
|
||||
|
||||
/**
|
||||
* Return the content of the given template
|
||||
* @param templateName
|
||||
* @return
|
||||
* @throws NoSuchElementException if no such template exists
|
||||
*/
|
||||
// public String getTemplate(String templateName) throws NoSuchElementException {
|
||||
// String template = null;
|
||||
// try {
|
||||
// template = this.readTemplate(templateName);
|
||||
// } catch (IOException e) {
|
||||
// throw new NoSuchElementException();
|
||||
// }
|
||||
// return template;
|
||||
// }
|
||||
|
||||
public Role fillRoleTemplate(Role template, Map<String, String> dictionary) {
|
||||
Role out = new Role();
|
||||
out.setName(template.getName());
|
||||
for(RoleFile tf:template.getTaskFiles()) {
|
||||
out.addTaskFile(this.fillTaskTemplate(tf, dictionary));
|
||||
}
|
||||
for(RoleFile tf:template.getMeta()) {
|
||||
out.addMeta(this.fillTaskTemplate(tf, dictionary));
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
private RoleFile fillTaskTemplate(RoleFile template, Map<String, String> dictionary) {
|
||||
RoleFile out = new RoleFile();
|
||||
out.setName(template.getName());
|
||||
out.setContent(this.fillTemplate(template.getContent(), dictionary));
|
||||
return out;
|
||||
}
|
||||
|
||||
private String fillTemplate(String template, Map<String, String> dictionary) {
|
||||
if (template != null) {
|
||||
ST t = new ST(template);
|
||||
for (String key : dictionary.keySet()) {
|
||||
t.add(key, dictionary.get(key));
|
||||
}
|
||||
String output = t.render();
|
||||
return output;
|
||||
}
|
||||
return template;
|
||||
}
|
||||
|
||||
public Role getRoleTemplate(String roleName) throws NoSuchElementException {
|
||||
File f = new File(this.getTemplateRoot(), roleName);
|
||||
try {
|
||||
return AnsibleSerializeHelper.deserializeRoleFromFilesystem(f);
|
||||
} catch (IOException e) {
|
||||
// e.printStackTrace();
|
||||
throw new NoSuchElementException("unable to find " + roleName);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -1,66 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.clients;
|
||||
|
||||
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
|
||||
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Vector;
|
||||
|
||||
import org.gcube.common.resources.gcore.ServiceEndpoint;
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Domain;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;
|
||||
import org.gcube.resources.discovery.client.api.DiscoveryClient;
|
||||
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
|
||||
|
||||
/**
 * Client for the gCube Information System: discovers the DataMiner hosts
 * available in the current scope.
 *
 * NOTE(review): discovery is currently disabled by the hardcoded
 * {@code remote = false} flag below — a fixed development host is returned
 * instead of querying the IS. Flip the flag (or remove the stub branch) to
 * restore real discovery.
 */
public class ISClient {

	/**
	 * Return the list of hosts (dataminers) in the current scope.
	 *
	 * @return the discovered hosts; with the current hardcoded flag, a single
	 *         fixed development host
	 */
	public Collection<Host> listDataminersInVRE() {

		// hardcoded switch between the stubbed host list and real IS discovery
		boolean remote = false;

		if (!remote) {
			// stub branch: one fixed development dataminer
			Collection<Host> out = new Vector<>();
			Host h = new Host();
			//h.setName("bb-dataminer.res.eng.it");
			//h.setName("vm101.ui.savba.sk");
			h.setName("dataminer1-devnext.d4science.org");
			out.add(h);
			return out;
		} else {

			// real discovery: query ServiceEndpoint resources in the current
			// scope (implicitly taken from ScopeProvider by the IS client)
			SimpleQuery query = queryFor(ServiceEndpoint.class);

			//old version
			//query.addCondition("$resource/Profile/Category/text() eq 'DataAnalysis'")
			//.addCondition("$resource/Profile/Name/text() eq 'DataMiner'");

			query.addCondition("$resource/Profile/Platform/Name/text() eq 'DataMiner'");

			DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);

			List<ServiceEndpoint> resources = client.submit(query);

			// one Host per endpoint, named after the machine hosting it
			Collection<Host> out = new Vector<>();
			for (ServiceEndpoint r : resources) {
				Host h = new Host();
				h.setName(r.profile().runtime().hostedOn());
				out.add(h);
			}
			return out;
		}
	}

	/** Manual smoke test: prints the dataminers visible in a fixed dev scope. */
	public static void main(String[] args) {
		ISClient a = new ISClient();
		ScopeProvider.instance.set("/gcube/devNext/NextNext");
		System.out.println(a.listDataminersInVRE());
	}
}
|
|
@ -1,35 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel;
|
||||
|
||||
public class Action {
|
||||
|
||||
private String name;
|
||||
|
||||
private String description;
|
||||
|
||||
private String script;
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public String getScript() {
|
||||
return script;
|
||||
}
|
||||
|
||||
public void setScript(String script) {
|
||||
this.script = script;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,118 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Vector;
|
||||
|
||||
public class Algorithm {
|
||||
|
||||
private String name;
|
||||
|
||||
private String description;
|
||||
|
||||
private String category;
|
||||
|
||||
private String clazz;
|
||||
|
||||
private String algorithmType;
|
||||
|
||||
private String skipJava;
|
||||
|
||||
private String packageURL;
|
||||
|
||||
private Collection<Action> actions;
|
||||
|
||||
private Collection<Dependency> dependencies;
|
||||
|
||||
public Algorithm() {
|
||||
this.actions = new Vector<>();
|
||||
this.dependencies = new Vector<>();
|
||||
Dependency p = new Dependency();
|
||||
}
|
||||
|
||||
public void addDependency(Dependency dep) {
|
||||
this.dependencies.add(dep);
|
||||
}
|
||||
|
||||
public void addAction(Action action) {
|
||||
this.actions.add(action);
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public String getCategory() {
|
||||
return category;
|
||||
}
|
||||
|
||||
public void setCategory(String category) {
|
||||
this.category = category;
|
||||
}
|
||||
|
||||
public Collection<Action> getActions() {
|
||||
return actions;
|
||||
}
|
||||
|
||||
public Collection<Dependency> getDependencies() {
|
||||
return dependencies;
|
||||
}
|
||||
|
||||
public void setDependencies(Collection<Dependency> deps) {
|
||||
this.dependencies = deps;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
String out = "Algorithm: " + this.getName()+"\n";
|
||||
out+=" Class Name: " + this.getClazz()+"\n";
|
||||
out+=" Description: " + this.getDescription()+"\n";
|
||||
out+=" Dependencies: " + this.getDependencies()+"\n";
|
||||
return out;
|
||||
}
|
||||
|
||||
public String getClazz() {
|
||||
return clazz;
|
||||
}
|
||||
|
||||
public void setClazz(String clazz) {
|
||||
this.clazz = clazz;
|
||||
}
|
||||
|
||||
public String getPackageURL() {
|
||||
return packageURL;
|
||||
}
|
||||
|
||||
public void setPackageURL(String packageURL) {
|
||||
this.packageURL = packageURL;
|
||||
}
|
||||
|
||||
public String getAlgorithmType() {
|
||||
return algorithmType;
|
||||
}
|
||||
|
||||
public void setAlgorithmType(String algorithmType) {
|
||||
this.algorithmType = algorithmType;
|
||||
}
|
||||
|
||||
public String getSkipJava() {
|
||||
return skipJava;
|
||||
}
|
||||
|
||||
public void setSkipJava(String skipJava) {
|
||||
this.skipJava = skipJava;
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
|
@ -1,49 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Vector;
|
||||
|
||||
public class AlgorithmSet {
|
||||
|
||||
private String name;
|
||||
|
||||
private Collection<Algorithm> algorithms;
|
||||
|
||||
public AlgorithmSet() {
|
||||
this.algorithms = new Vector<>();
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public Collection<Algorithm> getAlgorithms() {
|
||||
return new Vector<>(algorithms);
|
||||
}
|
||||
|
||||
public void addAlgorithm(Algorithm algoritm) {
|
||||
this.algorithms.add(algoritm);
|
||||
}
|
||||
|
||||
public Boolean hasAlgorithm(Algorithm algorithm) {
|
||||
for (Algorithm a : this.algorithms) {
|
||||
if (a.getName().equals(algorithm.getName())) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
String out = "ALGOSET: " + this.name + "\n";
|
||||
for(Algorithm a:this.algorithms) {
|
||||
out+=a+"\n";
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,73 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Vector;
|
||||
|
||||
public class Cluster {
|
||||
|
||||
/**
|
||||
* The set of hosts belonging to the cluster.
|
||||
*/
|
||||
private Collection<Host> hosts;
|
||||
|
||||
/**
|
||||
* A name for this cluster.
|
||||
*/
|
||||
private String name;
|
||||
|
||||
/**
|
||||
* A description of this cluster.
|
||||
*/
|
||||
private String description;
|
||||
|
||||
/**
|
||||
* The set of algorithms deployed on this cluster (i.e. on all its hosts)
|
||||
*/
|
||||
private Collection<AlgorithmSet> algoSets;
|
||||
|
||||
public Cluster() {
|
||||
this.hosts = new Vector<>();
|
||||
this.algoSets = new Vector<>();
|
||||
}
|
||||
|
||||
public void addAlgorithmSet(AlgorithmSet set) {
|
||||
this.algoSets.add(set);
|
||||
}
|
||||
|
||||
public void addHost(Host host) {
|
||||
this.hosts.add(host);
|
||||
}
|
||||
|
||||
public Collection<Host> getHosts() {
|
||||
return hosts;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public Collection<AlgorithmSet> getAlgorithmSets() {
|
||||
return algoSets;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
String out = "Cluster: "+this.name+"\n";
|
||||
for(Host h:this.getHosts()) {
|
||||
out+=" "+h+"\n";
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,29 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel;
|
||||
|
||||
/**
 * A single dependency of an algorithm, identified by its name and the
 * repository type it comes from (e.g. "os", "cran", "github", "custom").
 */
public class Dependency {

	private String name;

	private String type;

	public String getName() {
		return this.name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public String getType() {
		return this.type;
	}

	public void setType(String type) {
		this.type = type;
	}

	/** Rendered as "type:name", matching the role-naming convention. */
	public String toString() {
		return String.format("%s:%s", this.type, this.name);
	}

}
|
|
@ -1,15 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel;
|
||||
|
||||
/**
 * A network domain, used to fully qualify host names.
 */
public class Domain {

	private String name;

	public String getName() {
		return this.name;
	}

	public void setName(String name) {
		this.name = name;
	}

}
|
|
@ -1,39 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel;
|
||||
|
||||
public class Host {
|
||||
|
||||
private String name;
|
||||
|
||||
private Domain domain;
|
||||
|
||||
public Host() {
|
||||
}
|
||||
|
||||
public String getFullyQualifiedName() {
|
||||
if(this.domain!=null && this.domain.getName()!=null)
|
||||
return this.getName()+"."+this.getDomain().getName();
|
||||
else
|
||||
return this.getName();
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public Domain getDomain() {
|
||||
return domain;
|
||||
}
|
||||
|
||||
public void setDomain(Domain domain) {
|
||||
this.domain = domain;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return this.name + "@" + this.domain;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,15 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel.comparator;
|
||||
|
||||
import java.util.Comparator;
|
||||
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
|
||||
|
||||
/**
 * Orders algorithms alphabetically by name.
 * NOTE(review): assumes getName() is non-null for every compared algorithm —
 * confirm against callers.
 */
public class AlgorithmComparator implements Comparator<Algorithm> {

	@Override
	public int compare(Algorithm a1, Algorithm a2) {
		return a1.getName().compareTo(a2.getName());
	}

}
|
|
@ -1,18 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel.comparator;
|
||||
|
||||
import java.util.Comparator;
|
||||
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
|
||||
|
||||
public class DependencyComparator implements Comparator<Dependency> {
|
||||
|
||||
@Override
|
||||
public int compare(Dependency a1, Dependency a2) {
|
||||
int out = a1.getType().compareTo(a2.getType());
|
||||
if(out!=0)
|
||||
return out;
|
||||
return a1.getName().compareTo(a2.getName());
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -1,17 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel.comparator;
|
||||
|
||||
import java.util.Comparator;
|
||||
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;
|
||||
|
||||
public class HostComparator implements Comparator<Host> {
|
||||
|
||||
@Override
|
||||
public int compare(Host h1, Host h2) {
|
||||
int out = h1.getDomain().getName().compareTo(h2.getDomain().getName());
|
||||
if(out!=0)
|
||||
return out;
|
||||
return h1.getName().compareTo(h2.getName());
|
||||
}
|
||||
|
||||
}
|
|
@ -1,103 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.process;
|
||||
|
||||
import java.util.StringTokenizer;
|
||||
|
||||
/**
 * Parses a whitespace-separated "add algorithm" command line into its
 * positional fields: command, name, category, class, scope, algorithm type,
 * skip-java flag, package URL, and a trailing free-text (optionally quoted)
 * description. Missing trailing fields are left null.
 */
public class AddAlgorithmCommand {

	private String command;
	private String name;
	private String category;
	private String clazz;
	private String scope;
	private String algorithmType;
	private String skipJava;
	private String url;
	private String description;

	public AddAlgorithmCommand(String cmd) {
		StringTokenizer st = new StringTokenizer(cmd, " ");
		this.command = nextTokenOrNull(st);
		this.name = nextTokenOrNull(st);
		this.category = nextTokenOrNull(st);
		this.clazz = nextTokenOrNull(st);
		this.scope = nextTokenOrNull(st);
		this.algorithmType = nextTokenOrNull(st);
		this.skipJava = nextTokenOrNull(st);
		this.url = nextTokenOrNull(st);

		// everything left over is the description
		StringBuilder d = new StringBuilder();
		while (st.hasMoreElements()) {
			d.append(st.nextToken()).append(" ");
		}
		this.setDescription(d.toString());
	}

	/** Next token of the command line, or null when it is exhausted. */
	private static String nextTokenOrNull(StringTokenizer st) {
		return st.hasMoreElements() ? st.nextToken() : null;
	}

	/** Stores the description, stripping surrounding whitespace and one pair of double quotes. */
	public void setDescription(String d) {
		if (d != null) {
			d = d.trim();
			if (d.startsWith("\"") && d.endsWith("\"")) {
				d = d.substring(1, d.length() - 1).trim();
			}
		}
		this.description = d;
	}

	public String getCommand() {
		return this.command;
	}

	public String getName() {
		return this.name;
	}

	public String getCategory() {
		return this.category;
	}

	public String getClazz() {
		return this.clazz;
	}

	public String getVRE() {
		return this.scope;
	}

	public String getAlgorithmType() {
		return this.algorithmType;
	}

	public String getSkipjava() {
		return this.skipJava;
	}

	public String getUrl() {
		return this.url;
	}

	public String getDescription() {
		return this.description;
	}

	/** One "field: value" line per parsed field, for logging. */
	public String toString() {
		StringBuilder out = new StringBuilder();
		out.append(String.format("%-12s: %s\n", "command", command));
		out.append(String.format("%-12s: %s\n", "algo name", name));
		out.append(String.format("%-12s: %s\n", "category", category));
		out.append(String.format("%-12s: %s\n", "class", clazz));
		out.append(String.format("%-12s: %s\n", "scope", scope));
		out.append(String.format("%-12s: %s\n", "algo type", algorithmType));
		out.append(String.format("%-12s: %s\n", "skip java", skipJava));
		out.append(String.format("%-12s: %s\n", "url", url));
		out.append(String.format("%-12s: %s\n", "description", this.description));
		return out.toString();
	}

}
|
|
@ -1,290 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.process;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URL;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Vector;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipInputStream;
|
||||
|
||||
import org.apache.commons.lang.StringEscapeUtils;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
|
||||
|
||||
public class AlgorithmPackageParser {
|
||||
|
||||
/**
|
||||
* The name of the file containing algorithm metadata. Expected in the root
|
||||
* directory of the package.
|
||||
*/
|
||||
private static final String METADATA_FILE_NAME = "Info.txt";
|
||||
|
||||
private static final String METADATA_ALGORITHM_NAME = "Algorithm Name";
|
||||
|
||||
private static final String METADATA_ALGORITHM_DESCRIPTION = "Algorithm Description";
|
||||
|
||||
private static final String METADATA_CLASS_NAME = "Class Name";
|
||||
|
||||
private static final String METADATA_PACKAGES = "Packages";
|
||||
|
||||
private static final String METADATA_KEY_VALUE_SEPARATOR = ":";
|
||||
|
||||
private static final int BUFFER_SIZE = 4096;
|
||||
|
||||
/**
|
||||
* Given an URL to an algorithm package, create an Algorithm object with its
|
||||
* metadata. Metadata are extracted from the 'info.txt' file, if any, in the
|
||||
* package.
|
||||
*
|
||||
* @param url
|
||||
* @return An Algorithm object or null if no 'info.txt' is found in the
|
||||
* package.
|
||||
* @throws IOException
|
||||
*/
|
||||
public Algorithm parsePackage(String url) throws IOException {
|
||||
String packageMetadata = this.getPackageMetadata(url);
|
||||
if (packageMetadata == null) {
|
||||
System.out.println("WARNING: No metadata found for " + url);
|
||||
return null;
|
||||
} else {
|
||||
Map<String, List<String>> parsedMetadata = this.parseMetadata(packageMetadata);
|
||||
Algorithm a = this.createAlgorithm(parsedMetadata);
|
||||
a.setPackageURL(url);
|
||||
return a;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the content of the metadata file from the package.
|
||||
*
|
||||
* @param url
|
||||
* @return
|
||||
* @throws IOException
|
||||
*/
|
||||
private String getPackageMetadata(String url) throws IOException {
|
||||
InputStream is = new URL(url).openStream();
|
||||
ZipInputStream zipIs = new ZipInputStream(is);
|
||||
ZipEntry entry = zipIs.getNextEntry();
|
||||
String out = null;
|
||||
while (entry != null) {
|
||||
if (METADATA_FILE_NAME.equalsIgnoreCase(entry.getName())) {
|
||||
out = this.getEntryContent(zipIs);
|
||||
break;
|
||||
}
|
||||
entry = zipIs.getNextEntry();
|
||||
}
|
||||
is.close();
|
||||
zipIs.close();
|
||||
return out;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read the content of a zip entry and place it in a string.
|
||||
* @param zipIn
|
||||
* @return
|
||||
* @throws IOException
|
||||
*/
|
||||
private String getEntryContent(ZipInputStream zipIn) throws IOException {
|
||||
StringBuilder s = new StringBuilder();
|
||||
byte[] buffer = new byte[BUFFER_SIZE];
|
||||
int read = 0;
|
||||
while ((read = zipIn.read(buffer)) != -1) {
|
||||
s.append(new String(buffer, 0, read));
|
||||
}
|
||||
return s.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the content of the metadata file and create a key+multivalue map.
|
||||
* @param metadata
|
||||
* @return
|
||||
*/
|
||||
private Map<String, List<String>> parseMetadata(String metadata) {
|
||||
Map<String, List<String>> out = new HashMap<String, List<String>>();
|
||||
String[] lines = metadata.split("\n");
|
||||
|
||||
String key = null;
|
||||
String value = null;
|
||||
|
||||
for (String line : lines) {
|
||||
// skip empty lines
|
||||
if (line.trim().isEmpty()) {
|
||||
continue;
|
||||
}
|
||||
// scan lines one by one, looking for key and values
|
||||
String[] parts = line.split(METADATA_KEY_VALUE_SEPARATOR);
|
||||
if (parts.length > 1) {
|
||||
// key and value on the same line
|
||||
key = parts[0].trim();
|
||||
value = line.substring(parts[0].length() + 1).trim();
|
||||
} else if (parts.length == 1) {
|
||||
// either a key or a value
|
||||
if (line.trim().endsWith(METADATA_KEY_VALUE_SEPARATOR)) {
|
||||
// key
|
||||
key = parts[0].trim();
|
||||
value = null;
|
||||
} else {
|
||||
// value
|
||||
value = line.trim();
|
||||
}
|
||||
}
|
||||
// add key+value to the map
|
||||
if (key != null && value != null) {
|
||||
List<String> values = out.get(key);
|
||||
if (values == null) {
|
||||
values = new Vector<>();
|
||||
out.put(key, values);
|
||||
}
|
||||
values.add(value);
|
||||
System.out.println(key + METADATA_KEY_VALUE_SEPARATOR + " " + values);
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an Algorithm starting from its metadata
|
||||
* @param metadata
|
||||
* @return
|
||||
*/
|
||||
// private Algorithm createAlgorithm(Map<String, List<String>> metadata) {
|
||||
// Algorithm out = new Algorithm();
|
||||
// out.setName(extractSingleValue(metadata, METADATA_ALGORITHM_NAME));
|
||||
// out.setDescription(extractSingleValue(metadata, METADATA_ALGORITHM_DESCRIPTION));
|
||||
// out.setClazz(extractSingleValue(metadata, METADATA_CLASS_NAME));
|
||||
// List<String> dependencies = extractMultipleValues(metadata, METADATA_PACKAGES);
|
||||
// if (dependencies != null) {
|
||||
// for (String pkg : dependencies) {
|
||||
// Dependency dep = new Dependency();
|
||||
// dep.setName(pkg);
|
||||
// dep.setType("os");
|
||||
// out.addDependency(dep);
|
||||
// }
|
||||
// }
|
||||
// return out;
|
||||
// }
|
||||
|
||||
|
||||
private Algorithm createAlgorithm(Map<String, List<String>> metadata) {
|
||||
Algorithm out = new Algorithm();
|
||||
out.setName(extractSingleValue(metadata, METADATA_ALGORITHM_NAME));
|
||||
out.setDescription(extractSingleValue(metadata, METADATA_ALGORITHM_DESCRIPTION));
|
||||
out.setClazz(extractSingleValue(metadata, METADATA_CLASS_NAME));
|
||||
//List<String> dependencies = extractMultipleValues(metadata, METADATA_PACKAGES);
|
||||
|
||||
|
||||
List<String> rdependencies = extractMultipleValues(metadata, "cran");
|
||||
if (rdependencies != null) {
|
||||
for (String pkg : rdependencies) {
|
||||
Dependency dep = new Dependency();
|
||||
|
||||
//if (pkg.startsWith("os:")){
|
||||
dep.setName(pkg);
|
||||
dep.setType("cran");
|
||||
out.addDependency(dep);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
List<String> defdependencies = extractMultipleValues(metadata, "Packages");
|
||||
if (defdependencies != null) {
|
||||
for (String pkg : defdependencies) {
|
||||
Dependency dep = new Dependency();
|
||||
|
||||
//if (pkg.startsWith("os:")){
|
||||
dep.setName(pkg);
|
||||
dep.setType("os");
|
||||
out.addDependency(dep);
|
||||
}
|
||||
}
|
||||
|
||||
List<String> osdependencies = extractMultipleValues(metadata, "os");
|
||||
if (osdependencies != null) {
|
||||
for (String pkg : osdependencies) {
|
||||
Dependency dep = new Dependency();
|
||||
|
||||
//if (pkg.startsWith("os:")){
|
||||
dep.setName(pkg);
|
||||
dep.setType("os");
|
||||
out.addDependency(dep);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
List<String> gitdependencies = extractMultipleValues(metadata, "github");
|
||||
if (gitdependencies != null) {
|
||||
for (String pkg : gitdependencies) {
|
||||
Dependency dep = new Dependency();
|
||||
|
||||
//if (pkg.startsWith("os:")){
|
||||
dep.setName(pkg);
|
||||
dep.setType("github");
|
||||
out.addDependency(dep);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
List<String> cdependencies = extractMultipleValues(metadata, "custom");
|
||||
if (cdependencies != null) {
|
||||
for (String pkg : cdependencies) {
|
||||
Dependency dep = new Dependency();
|
||||
|
||||
//if (pkg.startsWith("os:")){
|
||||
dep.setName(pkg);
|
||||
dep.setType("custom");
|
||||
out.addDependency(dep);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// if (pkg.startsWith("r:")){
|
||||
// //String results = StringEscapeUtils.escapeJava(pkg);
|
||||
// dep.setName(pkg);
|
||||
// dep.setType("cran");
|
||||
// }
|
||||
// if (pkg.startsWith("custom:")){
|
||||
// dep.setName(pkg);
|
||||
// dep.setType("custom");
|
||||
// }
|
||||
// if (!pkg.startsWith("os:")&&!pkg.startsWith("r:")&&!pkg.startsWith("custom:")){
|
||||
// dep.setName(pkg);
|
||||
// dep.setType("os");
|
||||
// }
|
||||
|
||||
|
||||
|
||||
return out;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
private static String extractSingleValue(Map<String, List<String>> metadata,
|
||||
String key) {
|
||||
List<String> l = metadata.get(key);
|
||||
if (l != null && l.size() == 1) {
|
||||
return l.get(0);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
private static List<String> extractMultipleValues(
|
||||
Map<String, List<String>> metadata, String key) {
|
||||
List<String> l = metadata.get(key);
|
||||
if (l != null) {
|
||||
return new Vector<>(l);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -1,24 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.net.UnknownHostException;
|
||||
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
|
||||
|
||||
/**
 * Operations exposed by the dataminer pool manager: deploying an algorithm
 * package on the dataminers of a VRE and retrieving deployment logs.
 */
public interface PoolManager {

	/**
	 * Deploys the given algorithm on the dataminers of the given VRE.
	 *
	 * @param algo the algorithm to deploy
	 * @param vre  the target VRE scope
	 * @return an identifier of the (asynchronous) deployment job
	 */
	String addAlgorithmToVRE(Algorithm algo, String vre) throws IOException, InterruptedException;

	/**
	 * Creates an Algorithm from the metadata of the package at the given URL.
	 *
	 * @param url URL of the zipped algorithm package
	 */
	Algorithm extractAlgorithm(String url) throws IOException;

	/**
	 * Returns the log content for the given deployment job id.
	 */
	String getLogById(String logId) throws IOException;

	// NOTE(review): contract unclear from this file alone -- implementations
	// visible here are empty stubs; confirm intended semantics with callers.
	void getLogId(Algorithm algo, String vre);

	/**
	 * Downloads the content at the given URL as a single string.
	 */
	String getScriptFromURL(URL logId) throws IOException;

	/**
	 * Builds the URL of the log file for the given worker/job id.
	 */
	URL getURLfromWorkerLog(String logUrl) throws MalformedURLException, UnknownHostException;

}
|
|
@ -1,144 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.ProxySelector;
|
||||
import java.net.URL;
|
||||
import java.net.UnknownHostException;
|
||||
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.Produces;
|
||||
import javax.ws.rs.QueryParam;
|
||||
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.service.DataminerPoolManager;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.util.PropertiesBasedProxySelector;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
|
||||
|
||||
|
||||
@Path("/")
|
||||
public class RestPoolManager implements PoolManager {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(RestPoolManager.class);
|
||||
|
||||
private PoolManager service = new DataminerPoolManager();
|
||||
|
||||
|
||||
@GET
|
||||
@Path("/add")
|
||||
@Produces("text/plain")
|
||||
public String addAlgorithmToVRE(
|
||||
@QueryParam("algorithm") String algorithm,
|
||||
@QueryParam("vre") String vre,
|
||||
@QueryParam("name") String name,
|
||||
@QueryParam("description") String description,
|
||||
@QueryParam("category") String category,
|
||||
@QueryParam("algorithmType") String algorithmType,
|
||||
@QueryParam("skipJava") String skipJava) throws IOException, InterruptedException {
|
||||
// TODO Auto-generated method stub
|
||||
LOGGER.debug("Adding algorithm =" + algorithm + " to VRE =" + vre);
|
||||
Algorithm algo = service.extractAlgorithm(algorithm);
|
||||
|
||||
if (algo.getCategory() == null){
|
||||
algo.setCategory(category);
|
||||
} else category = algo.getCategory();
|
||||
|
||||
if (algo.getAlgorithmType() == null){
|
||||
algo.setAlgorithmType(algorithmType);
|
||||
} else algorithmType = algo.getCategory();
|
||||
|
||||
if (algo.getSkipJava() == null){
|
||||
algo.setSkipJava(skipJava);
|
||||
} else skipJava = algo.getSkipJava();
|
||||
|
||||
if (algo.getName() == null){
|
||||
algo.setCategory(name);
|
||||
} else name = algo.getName();
|
||||
|
||||
if (algo.getDescription() == null){
|
||||
algo.setDescription(description);;
|
||||
} else description = algo.getDescription();
|
||||
|
||||
return service.addAlgorithmToVRE(algo, vre);
|
||||
}
|
||||
|
||||
|
||||
@GET
|
||||
@Path("/log")
|
||||
@Produces("text/plain")
|
||||
public String getLogById(@QueryParam("logUrl") String logUrl) throws IOException {
|
||||
// TODO Auto-generated method stub
|
||||
LOGGER.debug("Returning Log =" + logUrl);
|
||||
return service.getScriptFromURL(service.getURLfromWorkerLog(logUrl));
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public Algorithm extractAlgorithm(String url) throws IOException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
public static void main(String[] args) throws IOException, InterruptedException {
|
||||
RestPoolManager a = new RestPoolManager();
|
||||
//System.out.println(System.getProperty("user.home")+File.separator+"/gcube/dataminer-pool-manager");
|
||||
|
||||
|
||||
//ProxySelector.setDefault(new PropertiesBasedProxySelector("/home/ngalante/.proxy-settings"));
|
||||
|
||||
ScopeProvider.instance.set("/gcube/devNext/NextNext");
|
||||
a.addAlgorithmToVRE(
|
||||
"http://data.d4science.org/S2h1RHZGd0JpWnBjZk9qTytQTndqcDRLVHNrQUt6QjhHbWJQNStIS0N6Yz0",
|
||||
"/gcube/devNext/NextNext",
|
||||
null, null, "test", "transducerers", "N");
|
||||
|
||||
//System.out.println(a.getLogById("34ac474d-b9df-4929-87e1-2a0ae26cf898"));
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void getLogId(Algorithm algo, String vre) {
|
||||
// TODO Auto-generated method stub
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String getScriptFromURL(URL logId) throws IOException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String addAlgorithmToVRE(Algorithm algo, String vre) throws IOException, InterruptedException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public URL getURLfromWorkerLog(String logUrl) throws MalformedURLException, UnknownHostException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
}
|
|
@ -1,262 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.service;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.net.URLConnection;
|
||||
import java.net.UnknownHostException;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleBridge;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.clients.ISClient;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.AlgorithmSet;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.process.AlgorithmPackageParser;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.rest.PoolManager;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class DataminerPoolManager implements PoolManager {
|
||||
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(DataminerPoolManager.class);
|
||||
|
||||
|
||||
// static Collection<Algorithm> algorithms;
|
||||
//
|
||||
// static Collection<AlgorithmSet> sets;
|
||||
//
|
||||
// static {
|
||||
// algorithms = new Vector<>();
|
||||
// }
|
||||
//
|
||||
// public DataminerPoolManager() {
|
||||
// }
|
||||
//
|
||||
// /**
|
||||
// * Add a new algorithm to the set of known ones. No further action is expected
|
||||
// * on the pool.
|
||||
// */
|
||||
// public void publishAlgorithm(Algorithm algorithm) {
|
||||
// algorithms.add(algorithm);
|
||||
// }
|
||||
//
|
||||
// /**
|
||||
// * Re-deploy the given algorithm wherever it's installed
|
||||
// *
|
||||
// * @param algorithm
|
||||
// */
|
||||
// /*
|
||||
// * public void updateAlgorithm(Algorithm algorithm) { // TODO implement this }
|
||||
// */
|
||||
//
|
||||
// /**
|
||||
// * Add the give algorithm to the given set
|
||||
// *
|
||||
// * @param algorithmId
|
||||
// * @param setId
|
||||
// */
|
||||
// public void addAlgorithmToSet(String algorithmName, String setName) {
|
||||
// AlgorithmSet set = this.getAlgorithmSet(setName);
|
||||
// Algorithm algorithm = this.getAlgorithm(algorithmName);
|
||||
// if (set != null && algorithm != null) {
|
||||
// set.addAlgorithm(algorithm);
|
||||
// this.updateClusters();
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// /**
|
||||
// * Apply the given set of algorithms to the given cluster
|
||||
// *
|
||||
// * @param setId
|
||||
// * @param clusterId
|
||||
// */
|
||||
// public void applyAlgorithmSetToCluster(String setName, String clusterName) {
|
||||
// AlgorithmSet set = this.getAlgorithmSet(setName);
|
||||
// Cluster cluster = new ISClient().getCluster(clusterName);
|
||||
// if (set != null && cluster != null) {
|
||||
// cluster.addAlgorithmSet(set);
|
||||
// this.updateClusters();
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// private AlgorithmSet getAlgorithmSet(String name) {
|
||||
// for (AlgorithmSet set : sets) {
|
||||
// if (name.equals(set.getName())) {
|
||||
// return set;
|
||||
// }
|
||||
// }
|
||||
// return null;
|
||||
// }
|
||||
//
|
||||
// private Algorithm getAlgorithm(String name) {
|
||||
// for (Algorithm a : algorithms) {
|
||||
// if (name.equals(a.getName())) {
|
||||
// return a;
|
||||
// }
|
||||
// }
|
||||
// return null;
|
||||
// }
|
||||
|
||||
|
||||
|
||||
|
||||
//
|
||||
// public void getLogId(final Algorithm algorithm, final String vre) {
|
||||
// new Thread() {
|
||||
// public void run() {
|
||||
// while (true) {
|
||||
// try {
|
||||
// addAlgorithmToVRE(algorithm, vre);
|
||||
// } catch (Exception e) {
|
||||
// //log here
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }.start();
|
||||
// }
|
||||
//
|
||||
//
|
||||
|
||||
// public String getLogId(){
|
||||
// PrintStream console = System.out;
|
||||
// File path = new File(worker.getWorkdir() + File.separator + "logs");
|
||||
// path.mkdirs();
|
||||
// File n = new File(path + File.separator + worker.getWorkerId());
|
||||
// FileOutputStream fos = new FileOutputStream(n);
|
||||
// PrintStream ps = new PrintStream(fos);
|
||||
// System.setOut(ps);
|
||||
// worker.apply();
|
||||
// System.setOut(console);
|
||||
// worker.apply();
|
||||
// System.out.println("Log stored to to " + n.getAbsolutePath());
|
||||
// }
|
||||
|
||||
|
||||
|
||||
|
||||
// public String getLogById(String id) throws IOException {
|
||||
// String strLine = null;
|
||||
// try{
|
||||
// FileInputStream fstream = new FileInputStream("/tmp/dataminer-pool-manager/work/"+id+"/logs/"+id);
|
||||
// BufferedReader br = new BufferedReader(new InputStreamReader(fstream));
|
||||
// /* read log line by line */
|
||||
// while ((strLine = br.readLine()) != null) {
|
||||
// /* parse strLine to obtain what you want */
|
||||
// System.out.println (strLine);
|
||||
// }
|
||||
// br.close();
|
||||
// } catch (Exception e) {
|
||||
// System.err.println("Error: " + e.getMessage());
|
||||
// }
|
||||
// return strLine;
|
||||
// }
|
||||
|
||||
|
||||
public String getScriptFromURL(URL url) throws IOException {
|
||||
if (url == null) {
|
||||
return null;
|
||||
}
|
||||
URLConnection yc = url.openConnection();
|
||||
BufferedReader input = new BufferedReader(new InputStreamReader(
|
||||
yc.getInputStream()));
|
||||
String line;
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
while ((line = input.readLine()) != null) {
|
||||
buffer.append(line + "\n");
|
||||
}
|
||||
String bufferScript = buffer.substring(0, buffer.length());
|
||||
input.close();
|
||||
return bufferScript;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Publish the given algorithm in the given VRE
|
||||
*
|
||||
* @param algorithmName
|
||||
* @param vre
|
||||
*
|
||||
*/
|
||||
public String addAlgorithmToVRE(Algorithm algorithm, final String vre) throws IOException {
|
||||
// create a fake algorithm set
|
||||
final AlgorithmSet algoSet = new AlgorithmSet();
|
||||
algoSet.setName("fake");
|
||||
algoSet.addAlgorithm(algorithm);
|
||||
final String uuid = UUID.randomUUID().toString();
|
||||
|
||||
new Thread(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
// TODO Auto-generated method stub
|
||||
try {
|
||||
addAlgorithmsToVRE(algoSet, vre, uuid);
|
||||
} catch (IOException e) {
|
||||
// TODO Auto-generated catch block
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}).start();
|
||||
//this line will execute immediately, not waiting for your task to complete
|
||||
System.out.println(uuid);
|
||||
return uuid;
|
||||
}
|
||||
|
||||
|
||||
|
||||
public URL getURLfromWorkerLog(String a) throws MalformedURLException, UnknownHostException{
|
||||
|
||||
File path = new File(System.getProperty("user.home")+File.separator+"/gcube/dataminer-pool-manager/work/"+a+File.separator+"logs");
|
||||
path.mkdirs();
|
||||
File n = new File(path + File.separator +a);
|
||||
//String addr = InetAddress.getLocalHost().getHostAddress();
|
||||
|
||||
return new File(n.getPath()).toURI().toURL();
|
||||
}
|
||||
|
||||
|
||||
public String addAlgorithmsToVRE(AlgorithmSet algorithms, String vre, String uuid) throws IOException {
|
||||
|
||||
// create the cluster (dataminers in the vre)
|
||||
Cluster cluster = new Cluster();
|
||||
for(Host h:new ISClient().listDataminersInVRE()) {
|
||||
cluster.addHost(h);
|
||||
}
|
||||
|
||||
// apply the changes
|
||||
AnsibleBridge a = new AnsibleBridge();
|
||||
return a.applyAlgorithmSetToCluster(algorithms, cluster,uuid).getWorkerId();
|
||||
|
||||
}
|
||||
|
||||
public Algorithm extractAlgorithm(String url) throws IOException {
|
||||
return new AlgorithmPackageParser().parsePackage(url);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void getLogId(Algorithm algo, String vre) {
|
||||
// TODO Auto-generated method stub
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String getLogById(String logId) throws IOException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
}
|
|
@ -1,145 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminer.poolmanager.util;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.Authenticator;
|
||||
import java.net.InetSocketAddress;
|
||||
import java.net.PasswordAuthentication;
|
||||
import java.net.Proxy;
|
||||
import java.net.ProxySelector;
|
||||
import java.net.SocketAddress;
|
||||
import java.net.URI;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.configuration.ConfigurationException;
|
||||
import org.apache.commons.configuration.PropertiesConfiguration;
|
||||
import org.apache.commons.configuration.reloading.FileChangedReloadingStrategy;
|
||||
|
||||
/**
 * Read-only access to proxy settings: host, port, credentials and the list
 * of hosts that must bypass the proxy.
 */
interface NetworkConfiguration {

	/** @return the proxy host name, or null when no proxy is configured */
	public String getProxyHost();

	/** @return the proxy port as a string (parsed by the consumer; null means default) */
	public String getProxyPort();

	/** @return the user name for proxy authentication, or null when none */
	public String getProxyUser();

	/** @return the password for proxy authentication */
	public String getProxyPassword();

	/** @return a '|'-separated list of hosts to be reached directly, or null */
	public String getNonProxyHosts();

}
|
||||
|
||||
class FileBasedProxyConfiguration implements NetworkConfiguration {
|
||||
|
||||
private static PropertiesConfiguration configuration;
|
||||
|
||||
public FileBasedProxyConfiguration(String path) {
|
||||
try {
|
||||
// load the configuration
|
||||
configuration = new PropertiesConfiguration(path);
|
||||
// set the reloading strategy to enable hot-configuration
|
||||
FileChangedReloadingStrategy fcrs = new FileChangedReloadingStrategy();
|
||||
configuration.setReloadingStrategy(fcrs);
|
||||
} catch (ConfigurationException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getProxyHost() {
|
||||
return configuration.getString("proxyHost");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getProxyPort() {
|
||||
return configuration.getString("proxyPort");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getProxyUser() {
|
||||
return configuration.getString("proxyUser");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getProxyPassword() {
|
||||
return configuration.getString("proxyPassword");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getNonProxyHosts() {
|
||||
return configuration.getString("nonProxyHosts");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public class PropertiesBasedProxySelector extends ProxySelector {
|
||||
|
||||
List<Proxy> proxies = null;
|
||||
|
||||
List<String> nonProxyHosts = null;
|
||||
|
||||
public PropertiesBasedProxySelector(String proxySettingsPath) {
|
||||
this(new FileBasedProxyConfiguration(proxySettingsPath));
|
||||
}
|
||||
|
||||
public PropertiesBasedProxySelector(NetworkConfiguration config) {
|
||||
if (config == null || config.getProxyHost() == null) {
|
||||
this.proxies = null;
|
||||
return;
|
||||
}
|
||||
|
||||
String host = config.getProxyHost();
|
||||
|
||||
int port = 80;
|
||||
|
||||
if (config.getProxyPort() != null) {
|
||||
port = Integer.valueOf(config.getProxyPort());
|
||||
}
|
||||
|
||||
if (config.getNonProxyHosts() != null) {
|
||||
this.nonProxyHosts = Arrays
|
||||
.asList(config.getNonProxyHosts().split("\\|"));
|
||||
}
|
||||
|
||||
this.proxies = new ArrayList<Proxy>();
|
||||
this.proxies.add(new Proxy(Proxy.Type.HTTP, new InetSocketAddress(host,
|
||||
port)));
|
||||
|
||||
if (config.getProxyUser() != null) {
|
||||
final String username = config.getProxyUser();
|
||||
final String password = config.getProxyPassword();
|
||||
|
||||
Authenticator.setDefault(new Authenticator() {
|
||||
@Override
|
||||
protected PasswordAuthentication getPasswordAuthentication() {
|
||||
return new PasswordAuthentication(username, password.toCharArray());
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Proxy> select(URI uri) {
|
||||
if (this.nonProxyHosts == null) {
|
||||
return Arrays.asList(Proxy.NO_PROXY);
|
||||
} else {
|
||||
for (String entry : this.nonProxyHosts) {
|
||||
entry = entry.trim();
|
||||
if (entry.startsWith("*") && uri.getHost().endsWith(entry.substring(1))) {
|
||||
return Arrays.asList(Proxy.NO_PROXY);
|
||||
}
|
||||
if (uri.getHost().equals(entry)) {
|
||||
return Arrays.asList(Proxy.NO_PROXY);
|
||||
}
|
||||
}
|
||||
return this.proxies;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void connectFailed(URI uri, SocketAddress socketAddress, IOException e) {
|
||||
|
||||
}
|
||||
}
|
|
@ -1,13 +0,0 @@
|
|||
<application mode="online">
|
||||
|
||||
<name>dataminer-pool-manager</name>
|
||||
<group>dataanalysis</group>
|
||||
<version>0.0.1</version>
|
||||
|
||||
<!--<description>Lorem ipsum dolor sit amet...</description>-->
|
||||
|
||||
<!--<persistence location="/some/custom/location" />-->
|
||||
|
||||
<!-- <exclude>*</exclude> -->
|
||||
|
||||
</application>
|
|
@ -1,11 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<handlers>
|
||||
<lifecycle>
|
||||
<profile-management />
|
||||
<!-- <plugin-registration-handler /> -->
|
||||
</lifecycle>
|
||||
<request>
|
||||
<request-validation />
|
||||
<request-accounting />
|
||||
</request>
|
||||
</handlers>
|
|
@ -1,24 +0,0 @@
|
|||
<!DOCTYPE web-app PUBLIC
|
||||
"-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
|
||||
"http://java.sun.com/dtd/web-app_2_3.dtd" >
|
||||
|
||||
<web-app>
|
||||
<display-name>Archetype Created Web Application</display-name>
|
||||
|
||||
|
||||
<servlet>
|
||||
<servlet-name>REST-API</servlet-name>
|
||||
<servlet-class>org.glassfish.jersey.servlet.ServletContainer</servlet-class>
|
||||
<init-param>
|
||||
<param-name>jersey.config.server.provider.packages</param-name>
|
||||
<param-value>org.gcube.dataanalysis.dataminer.poolmanager.rest</param-value>
|
||||
</init-param>
|
||||
<load-on-startup>1</load-on-startup>
|
||||
</servlet>
|
||||
|
||||
<servlet-mapping>
|
||||
<servlet-name>REST-API</servlet-name>
|
||||
<url-pattern>/rest/*</url-pattern>
|
||||
</servlet-mapping>
|
||||
|
||||
</web-app>
|
|
@ -1,117 +0,0 @@
|
|||
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URL;
|
||||
import java.util.List;
|
||||
import java.util.Vector;
|
||||
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.AlgorithmSet;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.process.AddAlgorithmCommand;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.process.AlgorithmPackageParser;
|
||||
|
||||
public class AlgorithmPackageParserTest {
|
||||
|
||||
private static int BUFFER_SIZE = 2048;
|
||||
|
||||
public void extractAllAlgorithms() throws IOException {
|
||||
String url = "http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/algorithms/dev/algorithms";
|
||||
List<String> commands = this.extractAddAlgorithmCommands(url);
|
||||
AlgorithmSet algorithms = new AlgorithmSet();
|
||||
for (String cmd : commands) {
|
||||
System.out.println("-----------------------------------------");
|
||||
System.out.println(cmd);
|
||||
AddAlgorithmCommand aac = new AddAlgorithmCommand(cmd);
|
||||
System.out.println(aac);
|
||||
|
||||
// start creating the algo from the command
|
||||
Algorithm algo = new Algorithm();
|
||||
algo.setAlgorithmType(aac.getAlgorithmType());
|
||||
algo.setCategory(aac.getCategory());
|
||||
algo.setClazz(aac.getClazz());
|
||||
algo.setDescription(aac.getDescription());
|
||||
algo.setName(aac.getName());
|
||||
algo.setPackageURL(aac.getUrl());
|
||||
algo.setSkipJava(aac.getSkipjava());
|
||||
|
||||
// then override with info from the package
|
||||
if (aac.getUrl().length() > 4) {
|
||||
Algorithm packagedAlgo = this.extractAlgorithm(aac.getUrl());
|
||||
if (packagedAlgo != null) {
|
||||
algo.setDependencies(packagedAlgo.getDependencies());
|
||||
}
|
||||
}
|
||||
algorithms.addAlgorithm(algo);
|
||||
break;
|
||||
}
|
||||
//to uncomment
|
||||
// new DataminerPoolManager().addAlgorithmsToVRE(algorithms,
|
||||
// "/gcube/devNext/NextNext");
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract 'addAlgorithm' commands from a file containing wiki-table-style
|
||||
* entries for algorithm.
|
||||
*
|
||||
* @return
|
||||
* @throws IOException
|
||||
*/
|
||||
private List<String> extractAddAlgorithmCommands(String listUrl)
|
||||
throws IOException {
|
||||
URL url = new URL(listUrl);
|
||||
InputStream is = url.openStream();
|
||||
|
||||
StringBuilder s = new StringBuilder();
|
||||
byte[] buffer = new byte[BUFFER_SIZE];
|
||||
int read = 0;
|
||||
while ((read = is.read(buffer)) != -1) {
|
||||
s.append(new String(buffer, 0, read));
|
||||
}
|
||||
List<String> out = new Vector<>();
|
||||
String[] lines = s.toString().split("\n");
|
||||
for (String line : lines) {
|
||||
System.out.println("--------------------");
|
||||
if (!line.isEmpty()) {
|
||||
String[] parts = line.split("\\|");
|
||||
int c = 1;
|
||||
for (String part : parts) {
|
||||
if (part == null || part.trim().isEmpty()) {
|
||||
continue;
|
||||
}
|
||||
System.out.println(c + ". " + part);
|
||||
c++;
|
||||
if (part.contains("addAlgorithm.sh")) {
|
||||
String cmd = part.trim();
|
||||
cmd = cmd.replaceAll("<notextile>", "");
|
||||
cmd = cmd.replaceAll("</notextile>", "");
|
||||
System.out.println(cmd);
|
||||
// AddAlgorithmCommand aac = new AddAlgorithmCommand(cmd);
|
||||
// System.out.println(aac);
|
||||
out.add(cmd);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an Algorithm starting from the algorithm jar.
|
||||
*
|
||||
* @param url
|
||||
* @return
|
||||
* @throws IOException
|
||||
*/
|
||||
private Algorithm extractAlgorithm(String url) throws IOException {
|
||||
return new AlgorithmPackageParser().parsePackage(url);
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
// ProxySelector.setDefault(new PropertiesBasedProxySelector(
|
||||
// "/home/ngalante/.proxy-settings"));
|
||||
|
||||
new AlgorithmPackageParserTest().extractAllAlgorithms();
|
||||
}
|
||||
|
||||
}
|
|
@ -1,33 +0,0 @@
|
|||
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.AnsibleWorker;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Inventory;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Playbook;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.RoleFile;
|
||||
|
||||
public class AnsibleWorkerTest {
|
||||
|
||||
public static void main(String[] args) throws IOException {
|
||||
AnsibleWorker worker = new AnsibleWorker(new File("/home/nagalante/gcube/dataminer-pool-manager/work/"+UUID.randomUUID().toString()));
|
||||
|
||||
System.out.println("created worker named " + worker.getWorkerId());
|
||||
|
||||
worker.setInventory(new Inventory());
|
||||
worker.setPlaybook(new Playbook());
|
||||
|
||||
Role r = new Role();
|
||||
r.setName("latex");
|
||||
|
||||
RoleFile tf = new RoleFile("main", "do something special for " + r.getName());
|
||||
r.addTaskFile(tf);
|
||||
worker.addRole(r);
|
||||
|
||||
worker.apply();
|
||||
}
|
||||
|
||||
}
|
|
@ -1,227 +0,0 @@
|
|||
|
||||
|
||||
import java.net.ProxySelector;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.AlgorithmSet;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.service.DataminerPoolManager;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.util.PropertiesBasedProxySelector;
|
||||
|
||||
public class DataminerPoolManagerTest {
|
||||
|
||||
/*
|
||||
private static Map<String, Domain> domains = new HashMap<>();
|
||||
private static Map<String, Cluster> clusters = new HashMap<>();
|
||||
|
||||
private static Dependency createDependency(String depName) {
|
||||
String[] parts = depName.split(":");
|
||||
Dependency out = new Dependency();
|
||||
if(parts.length>1) {
|
||||
out.setType(parts[0]);
|
||||
out.setName(parts[1]);
|
||||
} else {
|
||||
out.setType("os");
|
||||
out.setName(depName);
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
private static Algorithm createAlgorithm(String name, String ... deps) {
|
||||
Algorithm a = new Algorithm();
|
||||
a.setName(name);
|
||||
for(String dep:deps) {
|
||||
a.addDependency(createDependency(dep));
|
||||
}
|
||||
return a;
|
||||
}
|
||||
|
||||
private static AlgorithmSet createAlgorithmSet(String name, Algorithm ... algs) {
|
||||
AlgorithmSet out = new AlgorithmSet();
|
||||
out.setName(name);
|
||||
for(Algorithm a:algs) {
|
||||
out.addAlgorithm(a);
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
private static Domain getDomain(String name) {
|
||||
if(domains.get(name)==null) {
|
||||
Domain d = new Domain();
|
||||
d.setName(name);
|
||||
domains.put(name, d);
|
||||
return d;
|
||||
} else {
|
||||
return domains.get(name);
|
||||
}
|
||||
}
|
||||
|
||||
private static Host createHost(String hostname, String domainName) {
|
||||
Host out = new Host();
|
||||
out.setName(hostname);
|
||||
Domain d = getDomain(domainName);
|
||||
out.setDomain(d);
|
||||
return out;
|
||||
}
|
||||
|
||||
private static Cluster getCluster(String name) {
|
||||
if(clusters.get(name)==null) {
|
||||
Cluster d = new Cluster();
|
||||
d.setName(name);
|
||||
clusters.put(name, d);
|
||||
return d;
|
||||
} else {
|
||||
return clusters.get(name);
|
||||
}
|
||||
}
|
||||
|
||||
private static Collection<Dependency> extractDependencies() {
|
||||
Collection<Dependency> out = new TreeSet<>(new DependencyComparator());
|
||||
for(Cluster c:clusters.values()) {
|
||||
for(AlgorithmSet as:c.getAlgorithmSets()) {
|
||||
for(Algorithm a:as.getAlgorithms()) {
|
||||
for(Dependency d:a.getDependencies()) {
|
||||
out.add(d);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
private static Collection<Algorithm> extractAlgorithms() {
|
||||
Collection<Algorithm> out = new TreeSet<>(new AlgorithmComparator());
|
||||
for(Cluster c:clusters.values()) {
|
||||
for(AlgorithmSet as:c.getAlgorithmSets()) {
|
||||
for(Algorithm a:as.getAlgorithms()) {
|
||||
out.add(a);
|
||||
}
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
static {
|
||||
|
||||
Algorithm ewe = createAlgorithm("ewe", "mono", "latex", "cran:some_R_package", "custom:some_git_package");
|
||||
Algorithm ensemble = createAlgorithm("ensemble", "python");
|
||||
Algorithm voodoo = createAlgorithm("voodoo", "os:latex", "custom:blah");
|
||||
|
||||
AlgorithmSet as1 = createAlgorithmSet("as1-fishes", ewe);
|
||||
AlgorithmSet as2 = createAlgorithmSet("as2-stat", ensemble);
|
||||
AlgorithmSet as3 = createAlgorithmSet("as3-blackmagic", voodoo, ewe);
|
||||
|
||||
Cluster cluster1 = getCluster("cluster-1");
|
||||
cluster1.addHost(createHost("host1", "domain1"));
|
||||
cluster1.addHost(createHost("host2", "domain1"));
|
||||
cluster1.addHost(createHost("host3", "domain1"));
|
||||
cluster1.addHost(createHost("host1", "domain2"));
|
||||
cluster1.addHost(createHost("host2", "domain2"));
|
||||
|
||||
Cluster cluster2 = getCluster("cluster-2");
|
||||
cluster2.addHost(createHost("host4", "domain1"));
|
||||
cluster2.addHost(createHost("host5", "domain1"));
|
||||
cluster2.addHost(createHost("host6", "domain1"));
|
||||
cluster2.addHost(createHost("host3", "domain2"));
|
||||
cluster2.addHost(createHost("host4", "domain2"));
|
||||
cluster2.addHost(createHost("host5", "domain2"));
|
||||
|
||||
cluster1.addAlgorithmSet(as1);
|
||||
cluster1.addAlgorithmSet(as2);
|
||||
|
||||
cluster2.addAlgorithmSet(as1);
|
||||
cluster2.addAlgorithmSet(as3);
|
||||
|
||||
}
|
||||
*/
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
/*
|
||||
AnsibleBridge ab = new AnsibleBridge();
|
||||
ab.printInventoryByDomainAndSets(clusters.values());
|
||||
System.out.println("-----------");
|
||||
ab.printInventoryBySets(clusters.values());
|
||||
|
||||
AnsibleWorker worker = ab.createWorker();
|
||||
|
||||
for(Algorithm a:extractAlgorithms()) {
|
||||
for(Role r:ab.generateRoles(a)) {
|
||||
worker.addRole(r);
|
||||
}
|
||||
}
|
||||
|
||||
for(Dependency d:extractDependencies()) {
|
||||
for(Role r:ab.generateRoles(d)) {
|
||||
worker.addRole(r);
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
ScopeProvider.instance.set("/gcube/devNext/NextNext");
|
||||
|
||||
ProxySelector.setDefault(new PropertiesBasedProxySelector("/home/ngalante/.proxy-settings"));
|
||||
//
|
||||
// // create the algorithm (download it, etc etc)
|
||||
// Algorithm algorithm = new Algorithm();
|
||||
// algorithm.setName("ichtyop");
|
||||
// algorithm.setClazz("org.gcube...");
|
||||
// algorithm.setDescription("some description");
|
||||
//
|
||||
// Dependency d = new Dependency();
|
||||
// d.setName("libpng");
|
||||
// d.setType("os");
|
||||
// algorithm.addDependency(d);
|
||||
//
|
||||
// d = new Dependency();
|
||||
// d.setName("some-r-package");
|
||||
// d.setType("cran");
|
||||
// algorithm.addDependency(d);
|
||||
//
|
||||
// d = new Dependency();
|
||||
// d.setName("some-other-r-package");
|
||||
// d.setType("cran");
|
||||
// algorithm.addDependency(d);
|
||||
//
|
||||
// d = new Dependency();
|
||||
// d.setName("voodoo");
|
||||
// d.setType("custom");
|
||||
// algorithm.addDependency(d);
|
||||
//
|
||||
// // create the algorithm (download it, etc etc)
|
||||
// Algorithm ewe = new Algorithm();
|
||||
// ewe.setName("ewe");
|
||||
//
|
||||
// d = new Dependency();
|
||||
// d.setName("voodoo");
|
||||
// d.setType("custom");
|
||||
// ewe.addDependency(d);
|
||||
|
||||
AlgorithmSet algorithms = new AlgorithmSet();
|
||||
algorithms.setName("dummy-set");
|
||||
|
||||
|
||||
// algorithms.addAlgorithm(algorithm);
|
||||
// algorithms.addAlgorithm(ewe);
|
||||
|
||||
Algorithm ensemble = new Algorithm();
|
||||
ensemble.setName("ewe");
|
||||
ensemble.setCategory("a");
|
||||
ensemble.setAlgorithmType("transducerers");
|
||||
ensemble.setPackageURL("http://data.d4science.org/eDd5b2ovMmpSVEZaZWYvY3g2bDZNcGRFNUIxNi85UFlHbWJQNStIS0N6Yz0");
|
||||
ensemble.setClazz("org.gcube.dataanalysis.executor.rscripts.Ichthyopmodelonebyone");
|
||||
ensemble.setSkipJava("N");
|
||||
ensemble.setDescription("test");
|
||||
|
||||
Dependency d = new Dependency();
|
||||
d.setName("libpng3");
|
||||
d.setType("os");
|
||||
ensemble.addDependency(d);
|
||||
algorithms.addAlgorithm(ensemble);
|
||||
|
||||
new DataminerPoolManager().addAlgorithmsToVRE(algorithms, "/gcube/devNext/NextNext", "test"+UUID.randomUUID());
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -1,17 +0,0 @@
|
|||
|
||||
|
||||
import java.net.ProxySelector;
|
||||
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.clients.ISClient;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.util.PropertiesBasedProxySelector;
|
||||
|
||||
public class ISClientTest {
|
||||
|
||||
public static void main(String[] args) {
|
||||
ProxySelector.setDefault(new PropertiesBasedProxySelector("/home/ngalante/.proxy-settings"));
|
||||
ScopeProvider.instance.set("/gcube/devNext/NextNext");
|
||||
System.out.println(new ISClient().listDataminersInVRE());
|
||||
}
|
||||
|
||||
}
|
|
@ -1,119 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminerpoolmanager;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.ProxySelector;
|
||||
import java.net.URL;
|
||||
import java.util.List;
|
||||
import java.util.Vector;
|
||||
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.AlgorithmSet;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.process.AddAlgorithmCommand;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.process.AlgorithmPackageParser;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.util.PropertiesBasedProxySelector;
|
||||
|
||||
public class AlgorithmPackageParserTest {
|
||||
|
||||
private static int BUFFER_SIZE = 2048;
|
||||
|
||||
public void extractAllAlgorithms() throws IOException {
|
||||
String url = "http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/algorithms/dev/algorithms";
|
||||
List<String> commands = this.extractAddAlgorithmCommands(url);
|
||||
AlgorithmSet algorithms = new AlgorithmSet();
|
||||
for (String cmd : commands) {
|
||||
System.out.println("-----------------------------------------");
|
||||
System.out.println(cmd);
|
||||
AddAlgorithmCommand aac = new AddAlgorithmCommand(cmd);
|
||||
System.out.println(aac);
|
||||
|
||||
// start creating the algo from the command
|
||||
Algorithm algo = new Algorithm();
|
||||
algo.setAlgorithmType(aac.getAlgorithmType());
|
||||
algo.setCategory(aac.getCategory());
|
||||
algo.setClazz(aac.getClazz());
|
||||
algo.setDescription(aac.getDescription());
|
||||
algo.setName(aac.getName());
|
||||
algo.setPackageURL(aac.getUrl());
|
||||
algo.setSkipJava(aac.getSkipjava());
|
||||
|
||||
// then override with info from the package
|
||||
if (aac.getUrl().length() > 4) {
|
||||
Algorithm packagedAlgo = this.extractAlgorithm(aac.getUrl());
|
||||
if (packagedAlgo != null) {
|
||||
algo.setDependencies(packagedAlgo.getDependencies());
|
||||
}
|
||||
}
|
||||
algorithms.addAlgorithm(algo);
|
||||
break;
|
||||
}
|
||||
//to uncomment
|
||||
// new DataminerPoolManager().addAlgorithmsToVRE(algorithms,
|
||||
// "/gcube/devNext/NextNext");
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract 'addAlgorithm' commands from a file containing wiki-table-style
|
||||
* entries for algorithm.
|
||||
*
|
||||
* @return
|
||||
* @throws IOException
|
||||
*/
|
||||
private List<String> extractAddAlgorithmCommands(String listUrl)
|
||||
throws IOException {
|
||||
URL url = new URL(listUrl);
|
||||
InputStream is = url.openStream();
|
||||
|
||||
StringBuilder s = new StringBuilder();
|
||||
byte[] buffer = new byte[BUFFER_SIZE];
|
||||
int read = 0;
|
||||
while ((read = is.read(buffer)) != -1) {
|
||||
s.append(new String(buffer, 0, read));
|
||||
}
|
||||
List<String> out = new Vector<>();
|
||||
String[] lines = s.toString().split("\n");
|
||||
for (String line : lines) {
|
||||
System.out.println("--------------------");
|
||||
if (!line.isEmpty()) {
|
||||
String[] parts = line.split("\\|");
|
||||
int c = 1;
|
||||
for (String part : parts) {
|
||||
if (part == null || part.trim().isEmpty()) {
|
||||
continue;
|
||||
}
|
||||
System.out.println(c + ". " + part);
|
||||
c++;
|
||||
if (part.contains("addAlgorithm.sh")) {
|
||||
String cmd = part.trim();
|
||||
cmd = cmd.replaceAll("<notextile>", "");
|
||||
cmd = cmd.replaceAll("</notextile>", "");
|
||||
System.out.println(cmd);
|
||||
// AddAlgorithmCommand aac = new AddAlgorithmCommand(cmd);
|
||||
// System.out.println(aac);
|
||||
out.add(cmd);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an Algorithm starting from the algorithm jar.
|
||||
*
|
||||
* @param url
|
||||
* @return
|
||||
* @throws IOException
|
||||
*/
|
||||
private Algorithm extractAlgorithm(String url) throws IOException {
|
||||
return new AlgorithmPackageParser().parsePackage(url);
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
// ProxySelector.setDefault(new PropertiesBasedProxySelector(
|
||||
// "/home/ngalante/.proxy-settings"));
|
||||
|
||||
new AlgorithmPackageParserTest().extractAllAlgorithms();
|
||||
}
|
||||
|
||||
}
|
|
@ -1,33 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminerpoolmanager;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.AnsibleWorker;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Inventory;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Playbook;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.RoleFile;
|
||||
|
||||
public class AnsibleWorkerTest {
|
||||
|
||||
public static void main(String[] args) throws IOException {
|
||||
AnsibleWorker worker = new AnsibleWorker(new File("/home/nagalante/gcube/dataminer-pool-manager/work/"+UUID.randomUUID().toString()));
|
||||
|
||||
System.out.println("created worker named " + worker.getWorkerId());
|
||||
|
||||
worker.setInventory(new Inventory());
|
||||
worker.setPlaybook(new Playbook());
|
||||
|
||||
Role r = new Role();
|
||||
r.setName("latex");
|
||||
|
||||
RoleFile tf = new RoleFile("main", "do something special for " + r.getName());
|
||||
r.addTaskFile(tf);
|
||||
worker.addRole(r);
|
||||
|
||||
worker.apply();
|
||||
}
|
||||
|
||||
}
|
|
@ -1,227 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminerpoolmanager;
|
||||
|
||||
import java.net.ProxySelector;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.AlgorithmSet;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.service.DataminerPoolManager;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.util.PropertiesBasedProxySelector;
|
||||
|
||||
public class DataminerPoolManagerTest {
|
||||
|
||||
/*
|
||||
private static Map<String, Domain> domains = new HashMap<>();
|
||||
private static Map<String, Cluster> clusters = new HashMap<>();
|
||||
|
||||
private static Dependency createDependency(String depName) {
|
||||
String[] parts = depName.split(":");
|
||||
Dependency out = new Dependency();
|
||||
if(parts.length>1) {
|
||||
out.setType(parts[0]);
|
||||
out.setName(parts[1]);
|
||||
} else {
|
||||
out.setType("os");
|
||||
out.setName(depName);
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
private static Algorithm createAlgorithm(String name, String ... deps) {
|
||||
Algorithm a = new Algorithm();
|
||||
a.setName(name);
|
||||
for(String dep:deps) {
|
||||
a.addDependency(createDependency(dep));
|
||||
}
|
||||
return a;
|
||||
}
|
||||
|
||||
private static AlgorithmSet createAlgorithmSet(String name, Algorithm ... algs) {
|
||||
AlgorithmSet out = new AlgorithmSet();
|
||||
out.setName(name);
|
||||
for(Algorithm a:algs) {
|
||||
out.addAlgorithm(a);
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
private static Domain getDomain(String name) {
|
||||
if(domains.get(name)==null) {
|
||||
Domain d = new Domain();
|
||||
d.setName(name);
|
||||
domains.put(name, d);
|
||||
return d;
|
||||
} else {
|
||||
return domains.get(name);
|
||||
}
|
||||
}
|
||||
|
||||
private static Host createHost(String hostname, String domainName) {
|
||||
Host out = new Host();
|
||||
out.setName(hostname);
|
||||
Domain d = getDomain(domainName);
|
||||
out.setDomain(d);
|
||||
return out;
|
||||
}
|
||||
|
||||
private static Cluster getCluster(String name) {
|
||||
if(clusters.get(name)==null) {
|
||||
Cluster d = new Cluster();
|
||||
d.setName(name);
|
||||
clusters.put(name, d);
|
||||
return d;
|
||||
} else {
|
||||
return clusters.get(name);
|
||||
}
|
||||
}
|
||||
|
||||
private static Collection<Dependency> extractDependencies() {
|
||||
Collection<Dependency> out = new TreeSet<>(new DependencyComparator());
|
||||
for(Cluster c:clusters.values()) {
|
||||
for(AlgorithmSet as:c.getAlgorithmSets()) {
|
||||
for(Algorithm a:as.getAlgorithms()) {
|
||||
for(Dependency d:a.getDependencies()) {
|
||||
out.add(d);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
private static Collection<Algorithm> extractAlgorithms() {
|
||||
Collection<Algorithm> out = new TreeSet<>(new AlgorithmComparator());
|
||||
for(Cluster c:clusters.values()) {
|
||||
for(AlgorithmSet as:c.getAlgorithmSets()) {
|
||||
for(Algorithm a:as.getAlgorithms()) {
|
||||
out.add(a);
|
||||
}
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
static {
|
||||
|
||||
Algorithm ewe = createAlgorithm("ewe", "mono", "latex", "cran:some_R_package", "custom:some_git_package");
|
||||
Algorithm ensemble = createAlgorithm("ensemble", "python");
|
||||
Algorithm voodoo = createAlgorithm("voodoo", "os:latex", "custom:blah");
|
||||
|
||||
AlgorithmSet as1 = createAlgorithmSet("as1-fishes", ewe);
|
||||
AlgorithmSet as2 = createAlgorithmSet("as2-stat", ensemble);
|
||||
AlgorithmSet as3 = createAlgorithmSet("as3-blackmagic", voodoo, ewe);
|
||||
|
||||
Cluster cluster1 = getCluster("cluster-1");
|
||||
cluster1.addHost(createHost("host1", "domain1"));
|
||||
cluster1.addHost(createHost("host2", "domain1"));
|
||||
cluster1.addHost(createHost("host3", "domain1"));
|
||||
cluster1.addHost(createHost("host1", "domain2"));
|
||||
cluster1.addHost(createHost("host2", "domain2"));
|
||||
|
||||
Cluster cluster2 = getCluster("cluster-2");
|
||||
cluster2.addHost(createHost("host4", "domain1"));
|
||||
cluster2.addHost(createHost("host5", "domain1"));
|
||||
cluster2.addHost(createHost("host6", "domain1"));
|
||||
cluster2.addHost(createHost("host3", "domain2"));
|
||||
cluster2.addHost(createHost("host4", "domain2"));
|
||||
cluster2.addHost(createHost("host5", "domain2"));
|
||||
|
||||
cluster1.addAlgorithmSet(as1);
|
||||
cluster1.addAlgorithmSet(as2);
|
||||
|
||||
cluster2.addAlgorithmSet(as1);
|
||||
cluster2.addAlgorithmSet(as3);
|
||||
|
||||
}
|
||||
*/
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
/*
|
||||
AnsibleBridge ab = new AnsibleBridge();
|
||||
ab.printInventoryByDomainAndSets(clusters.values());
|
||||
System.out.println("-----------");
|
||||
ab.printInventoryBySets(clusters.values());
|
||||
|
||||
AnsibleWorker worker = ab.createWorker();
|
||||
|
||||
for(Algorithm a:extractAlgorithms()) {
|
||||
for(Role r:ab.generateRoles(a)) {
|
||||
worker.addRole(r);
|
||||
}
|
||||
}
|
||||
|
||||
for(Dependency d:extractDependencies()) {
|
||||
for(Role r:ab.generateRoles(d)) {
|
||||
worker.addRole(r);
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
ScopeProvider.instance.set("/gcube/devNext/NextNext");
|
||||
|
||||
ProxySelector.setDefault(new PropertiesBasedProxySelector("/home/ngalante/.proxy-settings"));
|
||||
//
|
||||
// // create the algorithm (download it, etc etc)
|
||||
// Algorithm algorithm = new Algorithm();
|
||||
// algorithm.setName("ichtyop");
|
||||
// algorithm.setClazz("org.gcube...");
|
||||
// algorithm.setDescription("some description");
|
||||
//
|
||||
// Dependency d = new Dependency();
|
||||
// d.setName("libpng");
|
||||
// d.setType("os");
|
||||
// algorithm.addDependency(d);
|
||||
//
|
||||
// d = new Dependency();
|
||||
// d.setName("some-r-package");
|
||||
// d.setType("cran");
|
||||
// algorithm.addDependency(d);
|
||||
//
|
||||
// d = new Dependency();
|
||||
// d.setName("some-other-r-package");
|
||||
// d.setType("cran");
|
||||
// algorithm.addDependency(d);
|
||||
//
|
||||
// d = new Dependency();
|
||||
// d.setName("voodoo");
|
||||
// d.setType("custom");
|
||||
// algorithm.addDependency(d);
|
||||
//
|
||||
// // create the algorithm (download it, etc etc)
|
||||
// Algorithm ewe = new Algorithm();
|
||||
// ewe.setName("ewe");
|
||||
//
|
||||
// d = new Dependency();
|
||||
// d.setName("voodoo");
|
||||
// d.setType("custom");
|
||||
// ewe.addDependency(d);
|
||||
|
||||
AlgorithmSet algorithms = new AlgorithmSet();
|
||||
algorithms.setName("dummy-set");
|
||||
|
||||
|
||||
// algorithms.addAlgorithm(algorithm);
|
||||
// algorithms.addAlgorithm(ewe);
|
||||
|
||||
Algorithm ensemble = new Algorithm();
|
||||
ensemble.setName("ensemble");
|
||||
ensemble.setCategory("ICHTHYOP_MODEL");
|
||||
ensemble.setAlgorithmType("transducerers");
|
||||
ensemble.setPackageURL("http://data.d4science.org/R0FqV2lNOW1jMkxuUEIrWXY4aUhvSENHSmVMQks4NjdHbWJQNStIS0N6Yz0");
|
||||
ensemble.setClazz("org.gcube.dataanalysis.executor.rscripts.Ichthyopmodelonebyone");
|
||||
ensemble.setSkipJava("N");
|
||||
ensemble.setDescription("test");
|
||||
|
||||
Dependency d = new Dependency();
|
||||
d.setName("libpng3");
|
||||
d.setType("os");
|
||||
ensemble.addDependency(d);
|
||||
algorithms.addAlgorithm(ensemble);
|
||||
|
||||
new DataminerPoolManager().addAlgorithmsToVRE(algorithms, "/gcube/devNext/NextNext", "test"+UUID.randomUUID());
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -1,17 +0,0 @@
|
|||
package org.gcube.dataanalysis.dataminerpoolmanager;
|
||||
|
||||
import java.net.ProxySelector;
|
||||
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.clients.ISClient;
|
||||
import org.gcube.dataanalysis.dataminer.poolmanager.util.PropertiesBasedProxySelector;
|
||||
|
||||
public class ISClientTest {
|
||||
|
||||
public static void main(String[] args) {
|
||||
ProxySelector.setDefault(new PropertiesBasedProxySelector("/home/ngalante/.proxy-settings"));
|
||||
ScopeProvider.instance.set("/gcube/devNext/NextNext");
|
||||
System.out.println(new ISClient().listDataminersInVRE());
|
||||
}
|
||||
|
||||
}
|
Loading…
Reference in New Issue