This commit is contained in:
Nunzio Andrea Galante 2017-02-20 13:52:25 +00:00
parent d431298f80
commit 098269242f
62 changed files with 773 additions and 291 deletions

34
.classpath Normal file
View File

@ -0,0 +1,34 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jst.j2ee.internal.web.container"/>
<classpathentry kind="con" path="org.eclipse.jst.j2ee.internal.module.container"/>
<classpathentry kind="lib" path="/home/ngalante/Downloads/servlet-api-2.5.jar"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>

42
.project Normal file
View File

@ -0,0 +1,42 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>dataminer-pool-manager</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.wst.jsdt.core.javascriptValidator</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.wst.common.project.facet.core.builder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.wst.validation.validationbuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.m2e.core.maven2Builder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jem.workbench.JavaEMFNature</nature>
<nature>org.eclipse.wst.common.modulecore.ModuleCoreNature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.m2e.core.maven2Nature</nature>
<nature>org.eclipse.wst.common.project.facet.core.nature</nature>
<nature>org.eclipse.wst.jsdt.core.jsNature</nature>
</natures>
</projectDescription>

12
.settings/.jsdtscope Normal file
View File

@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry excluding="**/*.min.js|**/node_modules/*|**/bower_components/*" kind="src" path="WebContent"/>
<classpathentry kind="con" path="org.eclipse.wst.jsdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.wst.jsdt.launching.WebProject">
<attributes>
<attribute name="hide" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.wst.jsdt.launching.baseBrowserLibrary"/>
<classpathentry kind="output" path=""/>
</classpath>

View File

@ -0,0 +1,5 @@
eclipse.preferences.version=1
encoding//src/main/java=UTF-8
encoding//src/main/resources=UTF-8
encoding//src/test/java=UTF-8
encoding/<project>=UTF-8

View File

@ -0,0 +1,8 @@
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
org.eclipse.jdt.core.compiler.compliance=1.7
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.source=1.7

View File

@ -0,0 +1,4 @@
activeProfiles=
eclipse.preferences.version=1
resolveWorkspaceProjects=true
version=1

View File

@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?><project-modules id="moduleCoreId" project-version="1.5.0">
<wb-module deploy-name="dataminer-pool-manager">
<wb-resource deploy-path="/" source-path="/WebContent" tag="defaultRootSource"/>
<wb-resource deploy-path="/WEB-INF/classes" source-path="/src/main/java"/>
<wb-resource deploy-path="/WEB-INF/classes" source-path="/src/main/resources"/>
<wb-resource deploy-path="/WEB-INF/classes" source-path="/src/test/java"/>
<property name="context-root" value="dataminer-pool-manager"/>
<property name="java-output-path" value="/dataminer-pool-manager/target/classes"/>
</wb-module>
</project-modules>

View File

@ -0,0 +1,7 @@
<root>
<facet id="jst.jaxrs">
<node name="libprov">
<attribute name="provider-id" value="jaxrs-no-op-library-provider"/>
</node>
</facet>
</root>

View File

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<faceted-project>
<installed facet="java" version="1.7"/>
<installed facet="jst.web" version="3.0"/>
<installed facet="wst.jsdt.web" version="1.0"/>
<installed facet="jst.jaxrs" version="2.0"/>
</faceted-project>

View File

@ -0,0 +1 @@
org.eclipse.wst.jsdt.launching.baseBrowserLibrary

View File

@ -0,0 +1 @@
Window

View File

@ -0,0 +1,2 @@
disabled=06target
eclipse.preferences.version=1

View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE log SYSTEM "logger.dtd">
<log>
</log>

View File

@ -0,0 +1,3 @@
Manifest-Version: 1.0
Class-Path:

View File

@ -28,7 +28,7 @@
<files>
<file>
<source>target/${build.finalName}.jar</source>
<source>target/${build.finalName}.war</source>
<outputDirectory>/${artifactId}</outputDirectory>
</file>
</files>

View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE log SYSTEM "logger.dtd">
<log>
</log>

0
log Normal file
View File

32
pom.xml
View File

@ -12,7 +12,7 @@
<groupId>org.gcube.dataanalysis</groupId>
<artifactId>dataminer-pool-manager</artifactId>
<version>1.0.0-SNAPSHOT</version>
<packaging>jar</packaging>
<packaging>war</packaging>
<name>dataminer-pool-manager</name>
<description>
@ -44,6 +44,36 @@
<artifactId>ic-client</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
<version>1.16</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-jackson</artifactId>
<version>2.23.1</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-client</artifactId>
<version>2.22.1</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet</artifactId>
<version>2.22.1</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
<dependencyManagement>

View File

@ -1,14 +1,12 @@
package org.gcube.dataanalysys.dataminerpoolmanager.ansible;
package org.gcube.dataanalysis.dataminer.poolmanager.ansible;
import java.io.File;
import java.io.IOException;
import java.util.UUID;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Inventory;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Playbook;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Role;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.RoleFile;
import org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.AnsibleSerializeHelper;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Inventory;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Playbook;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleSerializeHelper;
/**
* This class is responsible for the interface with ansible, retrieving log,
@ -45,11 +43,11 @@ public class AnsibleWorker {
this.ensureWorkStructure();
}
private File getWorkdir() {
public File getWorkdir() {
return this.workerRoot;
}
private File getRolesDir() {
public File getRolesDir() {
return new File(this.getWorkdir(), ROLES_DIR);
}
@ -57,12 +55,12 @@ public class AnsibleWorker {
return this.workerRoot.getName();
}
private void ensureWorkStructure() {
public void ensureWorkStructure() {
// generate root
this.getWorkdir().mkdirs();
}
private void removeWorkStructure() {
public void removeWorkStructure() {
// remove the working dir
// this.getWorkdir().delete();
}
@ -91,6 +89,8 @@ public class AnsibleWorker {
AnsibleSerializeHelper.serializeRole(r, this.getRolesDir());
}
public void apply() {
// TODO execute the playbook and return output
System.out.println("TODO: execute: ansible-playbook -v -i " + this.getInventoryFile().getName() + " " + this.getPlaybookFile().getName());

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.ansible.model;
package org.gcube.dataanalysis.dataminer.poolmanager.ansible.model;
public class AnsibleHost {

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.ansible.model;
package org.gcube.dataanalysis.dataminer.poolmanager.ansible.model;
import java.util.Collection;
import java.util.Vector;

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.ansible.model;
package org.gcube.dataanalysis.dataminer.poolmanager.ansible.model;
import java.util.Collection;
import java.util.Vector;

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.ansible.model;
package org.gcube.dataanalysis.dataminer.poolmanager.ansible.model;
import java.util.List;
import java.util.Vector;

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.ansible.model;
package org.gcube.dataanalysis.dataminer.poolmanager.ansible.model;
import java.util.Collection;
import java.util.Vector;

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.ansible.model;
package org.gcube.dataanalysis.dataminer.poolmanager.ansible.model;
public class RoleFile {

View File

@ -1,7 +1,9 @@
package org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge;
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.Collection;
import java.util.List;
import java.util.Map;
@ -11,30 +13,29 @@ import java.util.TreeSet;
import java.util.UUID;
import java.util.Vector;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.AnsibleWorker;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.AnsibleHost;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Inventory;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Playbook;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Role;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.RoleFile;
import org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.template.AlgorithmPackage;
import org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.template.CranDependencyPackage;
import org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.template.CustomDependencyPackage;
import org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.template.CustomRoleManager;
import org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.template.DependencyPackage;
import org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.template.OSDependencyPackage;
import org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.template.StaticRoleManager;
import org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.template.TemplateManager;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Algorithm;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.AlgorithmSet;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Cluster;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Dependency;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Host;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.comparator.AlgorithmComparator;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.comparator.DependencyComparator;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.comparator.HostComparator;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.AnsibleWorker;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.AnsibleHost;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Inventory;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Playbook;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template.AlgorithmPackage;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template.CranDependencyPackage;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template.CustomDependencyPackage;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template.CustomRoleManager;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template.OSDependencyPackage;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template.StaticRoleManager;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template.TemplateManager;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.AlgorithmSet;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.comparator.HostComparator;
import org.slf4j.LoggerFactory;
public class AnsibleBridge {
private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(AnsibleBridge.class);
/**
* The workdir for this service
@ -42,7 +43,7 @@ public class AnsibleBridge {
private String dpmRoot;
public AnsibleBridge() {
this("/home/paolo/tmp/dataminer-pool-manager");
this("/tmp/dataminer-pool-manager");
}
public AnsibleBridge(String root) {
@ -142,8 +143,15 @@ public class AnsibleBridge {
}
}
public void applyAlgorithmSetToCluster(AlgorithmSet as, Cluster cluster) throws IOException {
AnsibleWorker worker = new AnsibleWorker(new File(this.getWorkDir(), UUID.randomUUID().toString()));
public AnsibleWorker applyAlgorithmSetToCluster(AlgorithmSet as, Cluster cluster) throws IOException {
return applyAlgorithmSetToCluster (as,cluster,UUID.randomUUID().toString());
}
public AnsibleWorker applyAlgorithmSetToCluster(AlgorithmSet as, Cluster cluster,String uuid) throws IOException {
AnsibleWorker worker = new AnsibleWorker(new File(this.getWorkDir(), uuid));
List<Role> algoRoles = new Vector<>();
@ -186,14 +194,26 @@ public class AnsibleBridge {
worker.setPlaybook(playbook);
// execute
worker.apply();
// execute and save log locally
PrintStream console = System.out;
File path = new File(worker.getWorkdir() + File.separator + "logs");
path.mkdirs();
File n = new File(path + File.separator + worker.getWorkerId());
FileOutputStream fos = new FileOutputStream(n);
PrintStream ps = new PrintStream(fos);
System.setOut(ps);
worker.apply();
System.setOut(console);
worker.apply();
System.out.println("Log stored to to " + n.getAbsolutePath());
// destroy the worker
worker.destroy();
return worker;
}
private TemplateManager getTemplateManager() {
return new TemplateManager(this.dpmRoot+"/templates");
}

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge;
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge;
import java.io.File;
import java.io.FileInputStream;
@ -7,12 +7,12 @@ import java.io.IOException;
import java.io.PrintWriter;
import org.apache.commons.io.IOUtils;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.AnsibleHost;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.HostGroup;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Inventory;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Playbook;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Role;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.RoleFile;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.AnsibleHost;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.HostGroup;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Inventory;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Playbook;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.RoleFile;
public class AnsibleSerializeHelper {

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.template;
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template;
import java.util.Collection;
import java.util.HashMap;
@ -6,10 +6,9 @@ import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Vector;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Role;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.RoleFile;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Algorithm;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Dependency;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
public class AlgorithmPackage {

View File

@ -0,0 +1,11 @@
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template;

import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;

/**
 * Marker subtype of {@link DependencyPackage} used to distinguish
 * CRAN dependencies from other dependency kinds. All behaviour is
 * inherited unchanged from the parent class.
 */
public class CranDependencyPackage extends DependencyPackage {

  /**
   * @param dependency the dependency to wrap; forwarded as-is to the parent
   */
  public CranDependencyPackage(Dependency dependency) {
    super(dependency);
  }

}

View File

@ -1,11 +1,11 @@
package org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.template;
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template;
import java.util.Collection;
import java.util.NoSuchElementException;
import java.util.Vector;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Role;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Dependency;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
public class CustomDependencyPackage extends DependencyPackage {

View File

@ -1,16 +1,11 @@
package org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.template;
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Collection;
import java.util.NoSuchElementException;
import java.util.Vector;
import org.apache.commons.io.IOUtils;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Role;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.RoleFile;
import org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.AnsibleSerializeHelper;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleSerializeHelper;
public class CustomRoleManager {

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.template;
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template;
import java.util.Collection;
import java.util.HashMap;
@ -6,9 +6,8 @@ import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Vector;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Role;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.RoleFile;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Dependency;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
public class DependencyPackage {

View File

@ -0,0 +1,11 @@
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template;

import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;

/**
 * Marker subtype of {@link DependencyPackage} used to distinguish
 * operating-system-level dependencies from other dependency kinds.
 * All behaviour is inherited unchanged from the parent class.
 */
public class OSDependencyPackage extends DependencyPackage {

  /**
   * @param dependency the dependency to wrap; forwarded as-is to the parent
   */
  public OSDependencyPackage(Dependency dependency) {
    super(dependency);
  }

}

View File

@ -1,12 +1,12 @@
package org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.template;
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.Vector;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Role;
import org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.AnsibleSerializeHelper;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleSerializeHelper;
public class StaticRoleManager {

View File

@ -1,17 +1,13 @@
package org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.template;
package org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.template;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.NoSuchElementException;
import org.apache.commons.io.IOUtils;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Role;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.RoleFile;
import org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.AnsibleSerializeHelper;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Dependency;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.RoleFile;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleSerializeHelper;
import org.stringtemplate.v4.ST;
public class TemplateManager {

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.clients;
package org.gcube.dataanalysis.dataminer.poolmanager.clients;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
@ -8,9 +8,7 @@ import java.util.List;
import java.util.Vector;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Cluster;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Host;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.datamodel;
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel;
public class Action {

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.datamodel;
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel;
import java.util.Collection;
import java.util.Vector;

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.datamodel;
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel;
import java.util.Collection;
import java.util.Vector;

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.datamodel;
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel;
import java.util.Collection;
import java.util.Vector;

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.datamodel;
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel;
public class Dependency {

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.datamodel;
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel;
public class Domain {

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.datamodel;
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel;
public class Host {

View File

@ -1,8 +1,8 @@
package org.gcube.dataanalysys.dataminerpoolmanager.datamodel.comparator;
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel.comparator;
import java.util.Comparator;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
public class AlgorithmComparator implements Comparator<Algorithm> {

View File

@ -1,9 +1,8 @@
package org.gcube.dataanalysys.dataminerpoolmanager.datamodel.comparator;
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel.comparator;
import java.util.Comparator;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Algorithm;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Dependency;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
public class DependencyComparator implements Comparator<Dependency> {

View File

@ -1,8 +1,8 @@
package org.gcube.dataanalysys.dataminerpoolmanager.datamodel.comparator;
package org.gcube.dataanalysis.dataminer.poolmanager.datamodel.comparator;
import java.util.Comparator;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Host;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;
public class HostComparator implements Comparator<Host> {

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.process;
package org.gcube.dataanalysis.dataminer.poolmanager.process;
import java.util.StringTokenizer;

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.process;
package org.gcube.dataanalysis.dataminer.poolmanager.process;
import java.io.IOException;
import java.io.InputStream;
@ -10,8 +10,8 @@ import java.util.Vector;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Algorithm;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Dependency;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
public class AlgorithmPackageParser {

View File

@ -0,0 +1,24 @@
package org.gcube.dataanalysis.dataminer.poolmanager.rest;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.UnknownHostException;

import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;

/**
 * Contract for the dataminer pool-manager operations: deploying algorithms
 * onto a VRE and retrieving the installation logs produced by a deployment.
 */
public interface PoolManager {

	/**
	 * Deploys the given algorithm on the dataminer pool of the given VRE.
	 *
	 * @param algo the algorithm to deploy
	 * @param vre  the target VRE (scope) identifier
	 * @return a textual result of the operation
	 *         (presumably a log identifier or status — TODO confirm with implementations)
	 * @throws IOException          on I/O failures during deployment
	 * @throws InterruptedException if the deployment is interrupted
	 */
	String addAlgorithmToVRE(Algorithm algo, String vre) throws IOException, InterruptedException;

	/**
	 * Builds an {@link Algorithm} descriptor from the package published at the given URL.
	 *
	 * @param url location of the algorithm package
	 * @return the parsed algorithm descriptor
	 * @throws IOException if the package cannot be fetched or parsed
	 */
	Algorithm extractAlgorithm(String url) throws IOException;

	/**
	 * Returns the log content associated with the given log identifier.
	 *
	 * @param logId the log identifier
	 * @return the log text
	 * @throws IOException if the log cannot be read
	 */
	String getLogById(String logId) throws IOException;

	// NOTE(review): returns void, so the "id" is presumably communicated as a
	// side effect — confirm the intended semantics with implementations.
	void getLogId(Algorithm algo, String vre);

	/**
	 * Fetches the textual content (e.g. a log or script) available at the given URL.
	 *
	 * @param logId URL to read from
	 * @return the fetched content
	 * @throws IOException on fetch failures
	 */
	String getScriptFromURL(URL logId) throws IOException;

	/**
	 * Resolves the worker-log URL for the given log reference.
	 *
	 * @param logUrl the log reference to resolve
	 * @return the resolved URL
	 * @throws MalformedURLException if the reference is not a valid URL
	 * @throws UnknownHostException  if the host cannot be resolved
	 */
	URL getURLfromWorkerLog(String logUrl) throws MalformedURLException, UnknownHostException;
}

View File

@ -0,0 +1,101 @@
package org.gcube.dataanalysis.dataminer.poolmanager.rest;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.UnknownHostException;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;

import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.service.DataminerPoolManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * REST front-end of the pool manager. Exposes deployment ({@code /add}) and
 * log-retrieval ({@code /log}) endpoints and delegates the actual work to
 * {@link DataminerPoolManager}. The remaining {@link PoolManager} methods are
 * unimplemented stubs that return {@code null} (TODO: implement or remove).
 */
@Path("/")
public class RestPoolManager implements PoolManager {

	private static final Logger LOGGER = LoggerFactory.getLogger(RestPoolManager.class);

	// Backing service that performs the actual pool-management work.
	private final PoolManager service = new DataminerPoolManager();

	/**
	 * Deploys the algorithm published at the given URL onto the given VRE.
	 *
	 * @param algorithm URL of the algorithm package to deploy
	 * @param vre       target VRE (scope) identifier
	 * @return the textual result produced by the backing service
	 * @throws IOException          on I/O failures during extraction/deployment
	 * @throws InterruptedException if the deployment is interrupted
	 */
	@GET
	@Path("/add")
	@Produces("text/plain")
	public String addAlgorithmToVRE(@QueryParam("algorithm") String algorithm, @QueryParam("vre") String vre) throws IOException, InterruptedException {
		// Parameterized logging avoids eager string concatenation when DEBUG is off.
		LOGGER.debug("Adding algorithm = {} to VRE = {}", algorithm, vre);
		Algorithm algo = service.extractAlgorithm(algorithm);
		return service.addAlgorithmToVRE(algo, vre);
	}

	/**
	 * Returns the content of the worker log referenced by {@code logUrl}.
	 *
	 * @param logUrl reference to the log to fetch
	 * @return the log text
	 * @throws IOException if the log cannot be resolved or read
	 */
	@Override
	@GET
	@Path("/log")
	@Produces("text/plain")
	public String getLogById(@QueryParam("logUrl") String logUrl) throws IOException {
		LOGGER.debug("Returning Log = {}", logUrl);
		return service.getScriptFromURL(service.getURLfromWorkerLog(logUrl));
	}

	@Override
	public Algorithm extractAlgorithm(String url) throws IOException {
		// TODO Auto-generated method stub
		return null;
	}

	// Ad-hoc manual smoke test; not part of the service runtime.
	public static void main(String[] args) throws IOException, InterruptedException {
		RestPoolManager a = new RestPoolManager();
		a.addAlgorithmToVRE("http://data.d4science.org/R0FqV2lNOW1jMkxuUEIrWXY4aUhvSENHSmVMQks4NjdHbWJQNStIS0N6Yz0", "/gcube/devNext/NextNext");
		//System.out.println(a.getLogById("dadcb059-69e5-48c3-aa58-3b290ae0419d"));
	}

	@Override
	public void getLogId(Algorithm algo, String vre) {
		// TODO Auto-generated method stub
	}

	@Override
	public String getScriptFromURL(URL logId) throws IOException {
		// TODO Auto-generated method stub
		return null;
	}

	@Override
	public String addAlgorithmToVRE(Algorithm algo, String vre) throws IOException, InterruptedException {
		// TODO Auto-generated method stub
		return null;
	}

	@Override
	public URL getURLfromWorkerLog(String logUrl) throws MalformedURLException, UnknownHostException {
		// TODO Auto-generated method stub
		return null;
	}
}

View File

@ -0,0 +1,265 @@
package org.gcube.dataanalysis.dataminer.poolmanager.service;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.net.UnknownHostException;
import java.util.UUID;
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleBridge;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.ISClient;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.AlgorithmSet;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;
import org.gcube.dataanalysis.dataminer.poolmanager.process.AlgorithmPackageParser;
import org.gcube.dataanalysis.dataminer.poolmanager.rest.PoolManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DataminerPoolManager implements PoolManager {

    private static final Logger LOGGER = LoggerFactory.getLogger(DataminerPoolManager.class);

    // Root directory where deployment workers write their log files.
    private static final String WORK_DIR = "/tmp/dataminer-pool-manager/work";

    /**
     * Downloads the content of the resource at the given URL as a single string.
     *
     * @param url the resource to read; may be {@code null}
     * @return the full content with a {@code '\n'} after each line, or
     *         {@code null} when {@code url} is {@code null}
     * @throws IOException if opening the connection or reading fails
     */
    public String getScriptFromURL(URL url) throws IOException {
        if (url == null) {
            return null;
        }
        URLConnection connection = url.openConnection();
        StringBuilder content = new StringBuilder();
        // try-with-resources closes the reader (and underlying stream) even
        // when readLine() throws — the previous version leaked it on error.
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(connection.getInputStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                content.append(line).append('\n');
            }
        }
        return content.toString();
    }

    /**
     * Publish the given algorithm in the given VRE. The actual deployment runs
     * on a background thread, so this method returns immediately.
     *
     * @param algorithm the algorithm to deploy
     * @param vre       the target VRE
     * @return a freshly generated UUID identifying the deployment worker; its
     *         log location can be obtained via {@link #getURLfromWorkerLog(String)}
     * @throws IOException declared for interface compatibility; failures of the
     *                     asynchronous deployment are reported to the log only
     */
    public String addAlgorithmToVRE(Algorithm algorithm, final String vre) throws IOException {
        // Deployment operates on algorithm sets, so wrap the single algorithm
        // in a one-element set (name kept as "fake" for compatibility).
        final AlgorithmSet algoSet = new AlgorithmSet();
        algoSet.setName("fake");
        algoSet.addAlgorithm(algorithm);
        final String uuid = UUID.randomUUID().toString();
        Runnable deployment = new Runnable() {
            @Override
            public void run() {
                try {
                    addAlgorithmsToVRE(algoSet, vre, uuid);
                } catch (IOException e) {
                    // Log instead of printStackTrace(): the caller has already
                    // returned, so this is the only place the failure surfaces.
                    LOGGER.error("Deployment failed for uuid " + uuid, e);
                }
            }
        };
        // Fire-and-forget: callers poll the worker log by uuid.
        new Thread(deployment).start();
        LOGGER.info("Deployment started, uuid={}", uuid);
        return uuid;
    }

    /**
     * Returns a {@code file:} URL pointing at the log file of the worker with
     * the given id, creating the parent directory when needed. The log file
     * itself may not exist yet.
     *
     * @param workerId the worker (deployment) identifier
     * @return URL of the worker's log file
     */
    public URL getURLfromWorkerLog(String workerId) throws MalformedURLException, UnknownHostException {
        File logDir = new File(WORK_DIR + File.separator + workerId + File.separator + "logs");
        // mkdirs() returns false when the directory already exists, which is
        // fine; only warn when the directory is still missing afterwards.
        if (!logDir.mkdirs() && !logDir.isDirectory()) {
            LOGGER.warn("Could not create log directory {}", logDir);
        }
        return new File(logDir, workerId).toURI().toURL();
    }

    /**
     * Deploys the given algorithm set on every dataminer host of the VRE.
     *
     * @param algorithms the set of algorithms to install
     * @param vre        the target VRE
     *                   (NOTE(review): currently unused here — ISClient
     *                   presumably resolves hosts from the ambient scope;
     *                   confirm)
     * @param uuid       identifier used to label the Ansible worker
     * @return the worker id of the Ansible run
     * @throws IOException if the Ansible bridge fails
     */
    public String addAlgorithmsToVRE(AlgorithmSet algorithms, String vre, String uuid) throws IOException {
        // Build the target cluster from the dataminers registered in the VRE.
        Cluster cluster = new Cluster();
        for (Host h : new ISClient().listDataminersInVRE()) {
            cluster.addHost(h);
        }
        // Delegate the actual installation to Ansible.
        AnsibleBridge bridge = new AnsibleBridge();
        return bridge.applyAlgorithmSetToCluster(algorithms, cluster, uuid).getWorkerId();
    }

    /**
     * Downloads and parses an algorithm package descriptor from the given URL.
     */
    public Algorithm extractAlgorithm(String url) throws IOException {
        return new AlgorithmPackageParser().parsePackage(url);
    }

    /** Not implemented yet: does nothing. */
    @Override
    public void getLogId(Algorithm algo, String vre) {
        // TODO implement: should start/track a deployment and expose its log id
    }

    /** Not implemented yet: always returns {@code null}. */
    @Override
    public String getLogById(String logId) throws IOException {
        // TODO implement: should read back the worker log identified by logId
        return null;
    }
}

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysys.dataminerpoolmanager.util;
package org.gcube.dataanalysis.dataminer.poolmanager.util;
import java.io.IOException;
import java.net.Authenticator;

View File

@ -1,11 +0,0 @@
package org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.template;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Dependency;
public class CranDependencyPackage extends DependencyPackage {
public CranDependencyPackage(Dependency d) {
super(d);
}
}

View File

@ -1,11 +0,0 @@
package org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.template;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Dependency;
public class OSDependencyPackage extends DependencyPackage {
public OSDependencyPackage(Dependency d) {
super(d);
}
}

View File

@ -1,122 +0,0 @@
package org.gcube.dataanalysys.dataminerpoolmanager.service;
import java.io.IOException;
import java.util.Collection;
import java.util.Vector;
import org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.AnsibleBridge;
import org.gcube.dataanalysys.dataminerpoolmanager.clients.ISClient;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Algorithm;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.AlgorithmSet;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Cluster;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Dependency;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Host;
public class DataminerPoolManager {

    /**
     * Publish the given algorithm in the given VRE by wrapping it into a
     * single-element algorithm set and deploying that set.
     *
     * @param algorithm the algorithm to publish
     * @param vre       the target VRE
     * @throws IOException if the deployment fails
     */
    public void addAlgorithmToVRE(Algorithm algorithm, String vre) throws IOException {
        AlgorithmSet singleton = new AlgorithmSet();
        singleton.setName("fake");
        singleton.addAlgorithm(algorithm);
        this.addAlgorithmsToVRE(singleton, vre);
    }

    /**
     * Deploy the given algorithm set on all dataminer hosts of the VRE.
     *
     * @param algorithms the set of algorithms to install
     * @param vre        the target VRE
     * @throws IOException if the deployment fails
     */
    public void addAlgorithmsToVRE(AlgorithmSet algorithms, String vre) throws IOException {
        // Collect the dataminer hosts registered in the VRE into a cluster...
        Cluster cluster = new Cluster();
        ISClient client = new ISClient();
        for (Host host : client.listDataminersInVRE()) {
            cluster.addHost(host);
        }
        // ...then hand the cluster and the algorithm set over to Ansible.
        AnsibleBridge bridge = new AnsibleBridge();
        bridge.applyAlgorithmSetToCluster(algorithms, cluster);
    }
}

View File

@ -0,0 +1,13 @@
<application mode="online">
<name>dataminer-pool-manager</name>
<group>dataanalysis</group>
<version>0.0.1</version>
<!--<description>Lorem ipsum dolor sit amet...</description>-->
<!--<persistence location="/some/custom/location" />-->
<!-- <exclude>*</exclude> -->
</application>

View File

@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<!-- NOTE(review): looks like a gCube SmartGears handler configuration —
     lifecycle handlers run at service start/stop, request handlers on each
     incoming call; confirm against the SmartGears documentation. -->
<handlers>
<lifecycle>
<profile-management />
<!-- <plugin-registration-handler /> -->
</lifecycle>
<request>
<request-validation />
<request-accounting />
</request>
</handlers>

View File

@ -0,0 +1,24 @@
<!DOCTYPE web-app PUBLIC
"-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
"http://java.sun.com/dtd/web-app_2_3.dtd" >
<!-- Deployment descriptor: exposes the pool-manager REST resources through
     the Jersey servlet container under the /rest/* URL pattern. -->
<web-app>
<display-name>Archetype Created Web Application</display-name>
<servlet>
<servlet-name>REST-API</servlet-name>
<servlet-class>org.glassfish.jersey.servlet.ServletContainer</servlet-class>
<!-- Jersey scans this package for JAX-RS annotated resource classes. -->
<init-param>
<param-name>jersey.config.server.provider.packages</param-name>
<param-value>org.gcube.dataanalysis.dataminer.poolmanager.rest</param-value>
</init-param>
<!-- Instantiate the servlet at container start-up, not on first request. -->
<load-on-startup>1</load-on-startup>
</servlet>
<servlet-mapping>
<servlet-name>REST-API</servlet-name>
<url-pattern>/rest/*</url-pattern>
</servlet-mapping>
</web-app>

View File

@ -7,12 +7,11 @@ import java.net.URL;
import java.util.List;
import java.util.Vector;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Algorithm;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.AlgorithmSet;
import org.gcube.dataanalysys.dataminerpoolmanager.process.AddAlgorithmCommand;
import org.gcube.dataanalysys.dataminerpoolmanager.process.AlgorithmPackageParser;
import org.gcube.dataanalysys.dataminerpoolmanager.service.DataminerPoolManager;
import org.gcube.dataanalysys.dataminerpoolmanager.util.PropertiesBasedProxySelector;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.AlgorithmSet;
import org.gcube.dataanalysis.dataminer.poolmanager.process.AddAlgorithmCommand;
import org.gcube.dataanalysis.dataminer.poolmanager.process.AlgorithmPackageParser;
import org.gcube.dataanalysis.dataminer.poolmanager.util.PropertiesBasedProxySelector;
public class AlgorithmPackageParserTest {
@ -48,8 +47,9 @@ public class AlgorithmPackageParserTest {
algorithms.addAlgorithm(algo);
break;
}
new DataminerPoolManager().addAlgorithmsToVRE(algorithms,
"/gcube/devNext/NextNext");
//to uncomment
// new DataminerPoolManager().addAlgorithmsToVRE(algorithms,
// "/gcube/devNext/NextNext");
}
/**
@ -111,7 +111,7 @@ public class AlgorithmPackageParserTest {
public static void main(String[] args) throws Exception {
ProxySelector.setDefault(new PropertiesBasedProxySelector(
"/home/paolo/.proxy-settings"));
"/home/ngalante/.proxy-settings"));
new AlgorithmPackageParserTest().extractAllAlgorithms();
}

View File

@ -4,11 +4,11 @@ import java.io.File;
import java.io.IOException;
import java.util.UUID;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.AnsibleWorker;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Inventory;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Playbook;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Role;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.RoleFile;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.AnsibleWorker;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Inventory;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Playbook;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.Role;
import org.gcube.dataanalysis.dataminer.poolmanager.ansible.model.RoleFile;
public class AnsibleWorkerTest {

View File

@ -1,26 +1,12 @@
package org.gcube.dataanalysis.dataminerpoolmanager;
import java.net.ProxySelector;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeSet;
import java.util.Vector;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.AnsibleWorker;
import org.gcube.dataanalysys.dataminerpoolmanager.ansible.model.Role;
import org.gcube.dataanalysys.dataminerpoolmanager.ansiblebridge.AnsibleBridge;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Algorithm;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.AlgorithmSet;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Cluster;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Dependency;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Domain;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.Host;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.comparator.AlgorithmComparator;
import org.gcube.dataanalysys.dataminerpoolmanager.datamodel.comparator.DependencyComparator;
import org.gcube.dataanalysys.dataminerpoolmanager.service.DataminerPoolManager;
import org.gcube.dataanalysys.dataminerpoolmanager.util.PropertiesBasedProxySelector;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Algorithm;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.AlgorithmSet;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
import org.gcube.dataanalysis.dataminer.poolmanager.util.PropertiesBasedProxySelector;
public class DataminerPoolManagerTest {
@ -173,7 +159,7 @@ public class DataminerPoolManagerTest {
ScopeProvider.instance.set("/gcube/devNext/NextNext");
ProxySelector.setDefault(new PropertiesBasedProxySelector("/home/paolo/.proxy-settings"));
ProxySelector.setDefault(new PropertiesBasedProxySelector("/home/ngalante/.proxy-settings"));
// create the algorithm (download it, etc etc)
Algorithm algorithm = new Algorithm();
@ -225,7 +211,7 @@ public class DataminerPoolManagerTest {
ensemble.addDependency(d);
algorithms.addAlgorithm(ensemble);
new DataminerPoolManager().addAlgorithmsToVRE(algorithms, "/gcube/devNext/NextNext");
//new DataminerPoolManager().addAlgorithmsToVRE(algorithms, "/gcube/devNext/NextNext");
}

View File

@ -3,13 +3,13 @@ package org.gcube.dataanalysis.dataminerpoolmanager;
import java.net.ProxySelector;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysys.dataminerpoolmanager.clients.ISClient;
import org.gcube.dataanalysys.dataminerpoolmanager.util.PropertiesBasedProxySelector;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.ISClient;
import org.gcube.dataanalysis.dataminer.poolmanager.util.PropertiesBasedProxySelector;
public class ISClientTest {
public static void main(String[] args) {
ProxySelector.setDefault(new PropertiesBasedProxySelector("/home/paolo/.proxy-settings"));
ProxySelector.setDefault(new PropertiesBasedProxySelector("/home/ngalante/.proxy-settings"));
ScopeProvider.instance.set("/gcube/devNext/NextNext");
System.out.println(new ISClient().listDataminersInVRE());
}