This commit is contained in:
Nunzio Andrea Galante 2017-04-19 14:59:36 +00:00
parent 0002e935e8
commit 3136d4e8f7
3 changed files with 191 additions and 108 deletions

View File

@ -0,0 +1,163 @@
package org.gcube.dataanalysis.dataminer.poolmanager.clients;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
***REMOVED***
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintStream;
***REMOVED***
***REMOVED***
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedList;
***REMOVED***
import java.util.Map;
import java.util.Scanner;
***REMOVED***
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.Vector;
import org.gcube.common.resources.gcore.ServiceEndpoint;
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Domain;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.comparator.HostComparator;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.tmatesoft.svn.core.SVNDepth;
***REMOVED***
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.auth.ISVNAuthenticationManager;
import org.tmatesoft.svn.core.internal.io.dav.DAVRepositoryFactory;
import org.tmatesoft.svn.core.io.SVNRepository;
import org.tmatesoft.svn.core.io.SVNRepositoryFactory;
import org.tmatesoft.svn.core.wc.ISVNOptions;
import org.tmatesoft.svn.core.wc.SVNClientManager;
import org.tmatesoft.svn.core.wc.SVNRevision;
import org.tmatesoft.svn.core.wc.SVNUpdateClient;
import org.tmatesoft.svn.core.wc.SVNWCUtil;
import au.com.bytecode.opencsv.CSVReader;
public class HAProxy ***REMOVED***
private CSVReader reader;
public Cluster getClusterByHProxy() throws IOException ***REMOVED***
Cluster cl = new Cluster();
String HProxy = ISClient.getHProxy();
URL stockURL = new URL("http:***REMOVED***data.d4science.org/Yk4zSFF6V3JOSytNd3JkRDlnRFpDUUR5TnRJZEw2QjRHbWJQNStIS0N6Yz0");
BufferedReader in = new BufferedReader(new InputStreamReader(stockURL.openStream()));
reader = new CSVReader(in);
String [] nextLine;
while ((nextLine = reader.readNext()) != null) ***REMOVED***
***REMOVED***rules to add
if (HProxy.contains(nextLine[0]))***REMOVED***
cl.setName(nextLine[0]);
***REMOVED***
***REMOVED***
return cl;
***REMOVED***
public Cluster MapCluster() throws IOException ***REMOVED***
Cluster cl = new Cluster();
String HProxy = ISClient.getHProxy();
if (HProxy.equals("dataminer-cluster1.d4science.org")) ***REMOVED***
cl.setName("dataminer_cluster1");
***REMOVED***
if (HProxy.equals("dataminer-bigdata.d4science.org")) ***REMOVED***
cl.setName("bigdata");
***REMOVED***
if (HProxy.equals("dataminer-cluster1.d4science.org")) ***REMOVED***
cl.setName("dataminer_cluster1");
***REMOVED***
if (HProxy.equals("dataminer-cloud1.d4science.org")) ***REMOVED***
cl.setName("dataminer_cloud1");
***REMOVED***
if (HProxy.equals("dataminer-prototypes.d4science.org")) ***REMOVED***
cl.setName("prototypes");
***REMOVED***
if (HProxy.equals("dataminer.d4science.org")) ***REMOVED***
cl.setName("gcubeapps");
***REMOVED***
if (HProxy.equals("dataminer-genericworkers.d4science.org")) ***REMOVED***
cl.setName("genericworkers");
***REMOVED***
if (HProxy.equals("dataminer-genericworkers-proto.d4science.org")) ***REMOVED***
cl.setName("genericworkers_proto");
***REMOVED***
if (HProxy.equals("dataminer-d-workers.d4science.org")) ***REMOVED***
cl.setName("devnext_backend");
***REMOVED***
return cl;
***REMOVED***
public List<Host> listDataMinersByCluster() throws IOException ***REMOVED***
***REMOVED***next op to use when Cluster info available in the IS
***REMOVED***Cluster cluster = this.getClusterByHProxy();
Cluster cluster = this.MapCluster();
List<Host> out = new LinkedList<Host>();
***REMOVED***prod
URL stockURL = new URL("http:***REMOVED***data.d4science.org/Yk4zSFF6V3JOSytNd3JkRDlnRFpDUUR5TnRJZEw2QjRHbWJQNStIS0N6Yz0");
***REMOVED***dev
***REMOVED***URL stockURL = new URL("http:***REMOVED***data.d4science.org/c29KTUluTkZnRlB0WXE5NVNaZnRoR0dtYThUSmNTVlhHbWJQNStIS0N6Yz0");
BufferedReader in = new BufferedReader(new InputStreamReader(stockURL.openStream()));
reader = new CSVReader(in, ',');
String[] nextLine;
while ((nextLine = reader.readNext()) != null) ***REMOVED***
if (nextLine[1].equals("BACKEND")||(nextLine[1].equals("FRONTEND")))***REMOVED***
continue;
***REMOVED***
if (nextLine[0].equals(cluster.getName())) ***REMOVED***
Host a = new Host();
a.setName(nextLine[1]);
out.add(a);
System.out.println(a.getFullyQualifiedName());
***REMOVED***
***REMOVED***
return out;
***REMOVED***
public static void main(String[] args) throws IOException, SVNException ***REMOVED***
HAProxy a = new HAProxy();
ScopeProvider.instance.set("/gcube/devNext/NextNext");
***REMOVED***System.out.println(a.getHProxy());
***REMOVED***System.out.println(a.MapCluster());
System.out.println(a.listDataMinersByCluster());
***REMOVED***System.out.println(a.listDataMinersByCluster());
***REMOVED***List<Dependency> list = new LinkedList<Dependency>();
***REMOVED***Dependency aa = new Dependency();
***REMOVED***aa.setName("testnunzio");
***REMOVED***aa.setType("cran:");
***REMOVED***list.add(aa);
***REMOVED***a.checkSVNdep();
***REMOVED***System.out.println(a.getDataminer("dataminer1-devnext.d4science.org").getDomain());
***REMOVED***System.out.println(a.listDataminersInVRE());
***REMOVED***
***REMOVED***

View File

@ -142,7 +142,7 @@ public class ISClient ***REMOVED***
***REMOVED*** return the HProxy hostname in the VRE
public String getHProxy()***REMOVED***
public static String getHProxy()***REMOVED***
Host h = new Host();
SimpleQuery query = queryFor(ServiceEndpoint.class);
query.addCondition("$resource/Profile/Platform/Name/text() eq 'DataMiner'");
@ -154,115 +154,34 @@ public String getHProxy()***REMOVED***
***REMOVED***return the Cluster hostname from the IS
***REMOVED***to develop
// Queries the IS for the 'DataMiner' ServiceEndpoint and copies the first
// endpoint's hostedOn() into a local Cluster — but then returns null, so the
// result is discarded. NOTE(review): looks like work-in-progress; confirm
// whether this should `return cl;` instead.
public Cluster getCluster()***REMOVED***
Cluster cl = new Cluster();
String HProxy = this.getHProxy();
SimpleQuery query = queryFor(ServiceEndpoint.class);
query.addCondition("$resource/Profile/Platform/Name/text() eq 'DataMiner'");
DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
List<ServiceEndpoint> resources = client.submit(query);
cl.setName(resources.get(0).profile().runtime().hostedOn());
// cl is never returned; every caller receives null.
return null;
***REMOVED***
***REMOVED***public Cluster getCluster()***REMOVED***
***REMOVED*** Cluster cl = new Cluster();
***REMOVED*** String HProxy = this.getHProxy();
***REMOVED*** SimpleQuery query = queryFor(ServiceEndpoint.class);
***REMOVED*** query.addCondition("$resource/Profile/Platform/Name/text() eq 'DataMiner'");
***REMOVED*** DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
***REMOVED*** List<ServiceEndpoint> resources = client.submit(query);
***REMOVED*** cl.setName(resources.get(0).profile().runtime().hostedOn());
***REMOVED*** return null;
***REMOVED***
***REMOVED******REMOVED***
***REMOVED***return the dataminer hostnames from the IS
***REMOVED***to develop
// Same pattern as getCluster(): performs the IS query, sets the name on a
// local Cluster, then returns null — the computed value is thrown away.
// NOTE(review): presumably meant to return the dataminer hosts; to develop.
public List<Host> getDM()***REMOVED***
Cluster cl = new Cluster();
String HProxy = this.getHProxy();
SimpleQuery query = queryFor(ServiceEndpoint.class);
query.addCondition("$resource/Profile/Platform/Name/text() eq 'DataMiner'");
DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
List<ServiceEndpoint> resources = client.submit(query);
cl.setName(resources.get(0).profile().runtime().hostedOn());
// Result discarded; callers always receive null.
return null;
***REMOVED***
// Resolves the cluster by matching the HAProxy hostname against the first
// CSV column of the HAProxy status export. The URL below was mangled by a
// scrubbing tool ("***REMOVED***" replaced "//" after "http:") — verify.
// NOTE(review): the instance `reader` is never closed (resource leak).
public Cluster getClusterByHProxy() throws IOException ***REMOVED***
Cluster cl = new Cluster();
String HProxy = this.getHProxy();
URL stockURL = new URL("http:***REMOVED***data.d4science.org/Yk4zSFF6V3JOSytNd3JkRDlnRFpDUUR5TnRJZEw2QjRHbWJQNStIS0N6Yz0");
BufferedReader in = new BufferedReader(new InputStreamReader(stockURL.openStream()));
reader = new CSVReader(in);
String [] nextLine;
while ((nextLine = reader.readNext()) != null) ***REMOVED***
***REMOVED***rules to add
// Substring match: proxy hostname may embed the cluster token.
if (HProxy.contains(nextLine[0]))***REMOVED***
cl.setName(nextLine[0]);
***REMOVED***
***REMOVED***
// If no row matched, the returned Cluster has an unset name.
return cl;
***REMOVED***
// Hard-coded mapping from HAProxy frontend hostname to cluster name.
// NOTE(review): the "dataminer-cluster1.d4science.org" test appears twice
// (duplicate branch); an unknown proxy yields a Cluster with an unset name.
public Cluster MapCluster() throws IOException ***REMOVED***
Cluster cl = new Cluster();
String HProxy = this.getHProxy();
if (HProxy.equals("dataminer-cluster1.d4science.org")) ***REMOVED***
cl.setName("dataminer_cluster1");
***REMOVED***
if (HProxy.equals("dataminer-bigdata.d4science.org")) ***REMOVED***
cl.setName("bigdata");
***REMOVED***
// Duplicate of the first branch above — redundant.
if (HProxy.equals("dataminer-cluster1.d4science.org")) ***REMOVED***
cl.setName("dataminer_cluster1");
***REMOVED***
if (HProxy.equals("dataminer-cloud1.d4science.org")) ***REMOVED***
cl.setName("dataminer_cloud1");
***REMOVED***
if (HProxy.equals("dataminer-prototypes.d4science.org")) ***REMOVED***
cl.setName("prototypes");
***REMOVED***
if (HProxy.equals("dataminer.d4science.org")) ***REMOVED***
cl.setName("gcubeapps");
***REMOVED***
if (HProxy.equals("dataminer-genericworkers.d4science.org")) ***REMOVED***
cl.setName("genericworkers");
***REMOVED***
if (HProxy.equals("dataminer-genericworkers-proto.d4science.org")) ***REMOVED***
cl.setName("genericworkers_proto");
***REMOVED***
if (HProxy.equals("dataminer-d-workers.d4science.org")) ***REMOVED***
cl.setName("devnext_backend");
***REMOVED***
return cl;
***REMOVED***
// Lists the dataminer hosts behind the current cluster from the HAProxy CSV
// export. In this (ISClient) copy the DEV export URL is active, unlike the
// HAProxy-class copy which uses the production URL — confirm which is wanted.
// NOTE(review): `reader` is never closed; the method's closing brace is not
// visible in this scrubbed residue (likely fused into the following line).
public List<Host> listDataMinersByCluster() throws IOException ***REMOVED***
***REMOVED***next op to use when Cluster info available in the IS
***REMOVED***Cluster cluster = this.getClusterByHProxy();
Cluster cluster = this.MapCluster();
List<Host> out = new LinkedList<Host>();
***REMOVED***prod
***REMOVED***URL stockURL = new URL("http:***REMOVED***data.d4science.org/Yk4zSFF6V3JOSytNd3JkRDlnRFpDUUR5TnRJZEw2QjRHbWJQNStIS0N6Yz0");
***REMOVED***dev
URL stockURL = new URL("http:***REMOVED***data.d4science.org/c29KTUluTkZnRlB0WXE5NVNaZnRoR0dtYThUSmNTVlhHbWJQNStIS0N6Yz0");
BufferedReader in = new BufferedReader(new InputStreamReader(stockURL.openStream()));
reader = new CSVReader(in, ',');
String[] nextLine;
while ((nextLine = reader.readNext()) != null) ***REMOVED***
// Skip HAProxy aggregate rows (per-cluster totals, not real hosts).
if (nextLine[1].equals("BACKEND")||(nextLine[1].equals("FRONTEND")))***REMOVED***
continue;
***REMOVED***
// Column 0 = cluster name, column 1 = backend host name.
if (nextLine[0].equals(cluster.getName())) ***REMOVED***
Host a = new Host();
a.setName(nextLine[1]);
out.add(a);
System.out.println(a.getFullyQualifiedName());
***REMOVED***
***REMOVED***
return out;
***REMOVED***public List<Host> getDM()***REMOVED***
***REMOVED*** Cluster cl = new Cluster();
***REMOVED*** String HProxy = this.getHProxy();
***REMOVED*** SimpleQuery query = queryFor(ServiceEndpoint.class);
***REMOVED*** query.addCondition("$resource/Profile/Platform/Name/text() eq 'DataMiner'");
***REMOVED*** DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
***REMOVED*** List<ServiceEndpoint> resources = client.submit(query);
***REMOVED*** cl.setName(resources.get(0).profile().runtime().hostedOn());
***REMOVED*** return null;
***REMOVED***
***REMOVED******REMOVED***
/**
* Return the list of hosts (dataminers) in a given VRE
*
@ -310,7 +229,7 @@ public List<Host> listDataMinersByCluster() throws IOException ***REMOVED***
ScopeProvider.instance.set("/gcube/devNext/NextNext");
***REMOVED***System.out.println(a.getHProxy());
***REMOVED***System.out.println(a.MapCluster());
System.out.println(a.listDataMinersByCluster());
***REMOVED***System.out.println(a.listDataMinersByCluster());
***REMOVED***System.out.println(a.listDataMinersByCluster());

View File

@ -292,6 +292,7 @@ import org.gcube.common.resources.gcore.Resources;
import org.gcube.common.resources.gcore.Software.Profile.Dependency;
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleBridge;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.HAProxy;
import org.gcube.dataanalysis.dataminer.poolmanager.clients.ISClient;
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.AlgorithmSet;
@ -540,7 +541,7 @@ public class DataminerPoolManager implements PoolManager ***REMOVED***
***REMOVED*** create the cluster (dataminers in the vre)
Cluster cluster = new Cluster();
for (Host h : new ISClient().listDataMinersByCluster()) ***REMOVED***
for (Host h : new HAProxy().listDataMinersByCluster()) ***REMOVED***
***REMOVED***for (Host h : new ISClient().listDataminersInVRE()) ***REMOVED***
cluster.addHost(h);
***REMOVED***