git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/dataminer-pool-manager@146964 82a268e6-3cf1-43bd-a215-b396298e98cf
parent 0002e935e8
commit 3136d4e8f7
@@ -0,0 +1,163 @@
package org.gcube.dataanalysis.dataminer.poolmanager.clients;

import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;

import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintStream;
***REMOVED***
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
***REMOVED***
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.Vector;

import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Dependency;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Domain;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.comparator.HostComparator;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.tmatesoft.svn.core.SVNDepth;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.auth.ISVNAuthenticationManager;
import org.tmatesoft.svn.core.internal.io.dav.DAVRepositoryFactory;
import org.tmatesoft.svn.core.io.SVNRepository;
import org.tmatesoft.svn.core.io.SVNRepositoryFactory;
import org.tmatesoft.svn.core.wc.ISVNOptions;
import org.tmatesoft.svn.core.wc.SVNClientManager;
import org.tmatesoft.svn.core.wc.SVNRevision;
import org.tmatesoft.svn.core.wc.SVNUpdateClient;
import org.tmatesoft.svn.core.wc.SVNWCUtil;

import au.com.bytecode.opencsv.CSVReader;

public class HAProxy {

	private CSVReader reader;

	// Resolve the cluster by matching the HAProxy hostname against the first
	// column of the HAProxy stats CSV.
	public Cluster getClusterByHProxy() throws IOException {
		Cluster cl = new Cluster();
		String HProxy = ISClient.getHProxy();
		URL stockURL = new URL("http://data.d4science.org/Yk4zSFF6V3JOSytNd3JkRDlnRFpDUUR5TnRJZEw2QjRHbWJQNStIS0N6Yz0");
		BufferedReader in = new BufferedReader(new InputStreamReader(stockURL.openStream()));
		reader = new CSVReader(in);
		String[] nextLine;
		while ((nextLine = reader.readNext()) != null) {
			//rules to add
			if (HProxy.contains(nextLine[0])) {
				cl.setName(nextLine[0]);
			}
		}
		return cl;
	}

	// Static mapping from the HAProxy hostname to the cluster name, used until
	// the cluster information is available in the IS.
	public Cluster MapCluster() throws IOException {
		Cluster cl = new Cluster();
		String HProxy = ISClient.getHProxy();
		if (HProxy.equals("dataminer-cluster1.d4science.org")) {
			cl.setName("dataminer_cluster1");
		}
		if (HProxy.equals("dataminer-bigdata.d4science.org")) {
			cl.setName("bigdata");
		}
		if (HProxy.equals("dataminer-cluster1.d4science.org")) {
			cl.setName("dataminer_cluster1");
		}
		if (HProxy.equals("dataminer-cloud1.d4science.org")) {
			cl.setName("dataminer_cloud1");
		}
		if (HProxy.equals("dataminer-prototypes.d4science.org")) {
			cl.setName("prototypes");
		}
		if (HProxy.equals("dataminer.d4science.org")) {
			cl.setName("gcubeapps");
		}
		if (HProxy.equals("dataminer-genericworkers.d4science.org")) {
			cl.setName("genericworkers");
		}
		if (HProxy.equals("dataminer-genericworkers-proto.d4science.org")) {
			cl.setName("genericworkers_proto");
		}
		if (HProxy.equals("dataminer-d-workers.d4science.org")) {
			cl.setName("devnext_backend");
		}
		return cl;
	}

	// List the dataminer hosts that belong to the current cluster, skipping the
	// aggregate FRONTEND/BACKEND rows of the HAProxy stats CSV.
	public List<Host> listDataMinersByCluster() throws IOException {
		//next op to use when Cluster info available in the IS
		//Cluster cluster = this.getClusterByHProxy();
		Cluster cluster = this.MapCluster();
		List<Host> out = new LinkedList<Host>();
		//prod
		URL stockURL = new URL("http://data.d4science.org/Yk4zSFF6V3JOSytNd3JkRDlnRFpDUUR5TnRJZEw2QjRHbWJQNStIS0N6Yz0");

		//dev
		//URL stockURL = new URL("http://data.d4science.org/c29KTUluTkZnRlB0WXE5NVNaZnRoR0dtYThUSmNTVlhHbWJQNStIS0N6Yz0");
		BufferedReader in = new BufferedReader(new InputStreamReader(stockURL.openStream()));
		reader = new CSVReader(in, ',');
		String[] nextLine;
		while ((nextLine = reader.readNext()) != null) {
			if (nextLine[1].equals("BACKEND") || (nextLine[1].equals("FRONTEND"))) {
				continue;
			}
			if (nextLine[0].equals(cluster.getName())) {
				Host a = new Host();
				a.setName(nextLine[1]);
				out.add(a);
				System.out.println(a.getFullyQualifiedName());
			}
		}
		return out;
	}

	public static void main(String[] args) throws IOException, SVNException {
		HAProxy a = new HAProxy();
		ScopeProvider.instance.set("/gcube/devNext/NextNext");
		//System.out.println(a.getHProxy());
		//System.out.println(a.MapCluster());
		System.out.println(a.listDataMinersByCluster());
		//System.out.println(a.listDataMinersByCluster());

		//List<Dependency> list = new LinkedList<Dependency>();
		//Dependency aa = new Dependency();
		//aa.setName("testnunzio");
		//aa.setType("cran:");
		//list.add(aa);

		//a.checkSVNdep();
		//System.out.println(a.getDataminer("dataminer1-devnext.d4science.org").getDomain());
		//System.out.println(a.listDataminersInVRE());
	}
}
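For context, listDataMinersByCluster() relies on the HAProxy stats CSV export, where the first column carries the backend (proxy) name and the second the server name, with aggregate FRONTEND/BACKEND rows interleaved. The following self-contained sketch reproduces the same filtering rule on made-up sample rows using plain JDK classes; the class name, the sample hostnames, and any column beyond the first two are assumptions for illustration, not part of this commit.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.LinkedList;
import java.util.List;

public class HAProxyCsvSketch {

	// Hypothetical excerpt of an HAProxy stats CSV: pxname,svname,...
	private static final String SAMPLE =
			"prototypes,FRONTEND,OPEN\n" +
			"prototypes,dataminer1-p.example.org,UP\n" +
			"prototypes,dataminer2-p.example.org,UP\n" +
			"prototypes,BACKEND,UP\n" +
			"gcubeapps,dataminer3-g.example.org,UP\n";

	// Same rule as listDataMinersByCluster(): skip the aggregate rows and keep
	// the server names whose first column matches the cluster name.
	static List<String> hostsOf(String clusterName) throws IOException {
		List<String> out = new LinkedList<String>();
		BufferedReader in = new BufferedReader(new StringReader(SAMPLE));
		String line;
		while ((line = in.readLine()) != null) {
			String[] cols = line.split(",");
			if (cols[1].equals("BACKEND") || cols[1].equals("FRONTEND")) {
				continue;
			}
			if (cols[0].equals(clusterName)) {
				out.add(cols[1]);
			}
		}
		return out;
	}

	public static void main(String[] args) throws IOException {
		// Prints [dataminer1-p.example.org, dataminer2-p.example.org]
		System.out.println(hostsOf("prototypes"));
	}
}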
@@ -142,7 +142,7 @@ public class ISClient {
 
 	// return the HProxy hostname in the VRE
-	public String getHProxy(){
+	public static String getHProxy(){
 		Host h = new Host();
 		SimpleQuery query = queryFor(ServiceEndpoint.class);
 		query.addCondition("$resource/Profile/Platform/Name/text() eq 'DataMiner'");
@@ -154,114 +154,33 @@ public String getHProxy(){
 
 	//return the Cluster hostname from the IS
 	//to develop
-	public Cluster getCluster(){
-		Cluster cl = new Cluster();
-		String HProxy = this.getHProxy();
-		SimpleQuery query = queryFor(ServiceEndpoint.class);
-		query.addCondition("$resource/Profile/Platform/Name/text() eq 'DataMiner'");
-		DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
-		List<ServiceEndpoint> resources = client.submit(query);
-		cl.setName(resources.get(0).profile().runtime().hostedOn());
-		return null;
-	}
+	//public Cluster getCluster(){
+	//	Cluster cl = new Cluster();
+	//	String HProxy = this.getHProxy();
+	//	SimpleQuery query = queryFor(ServiceEndpoint.class);
+	//	query.addCondition("$resource/Profile/Platform/Name/text() eq 'DataMiner'");
+	//	DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
+	//	List<ServiceEndpoint> resources = client.submit(query);
+	//	cl.setName(resources.get(0).profile().runtime().hostedOn());
+	//	return null;
+	//}
 
 	//return the dataminer hostnames from the IS
 	//to develop
-	public List<Host> getDM(){
-		Cluster cl = new Cluster();
-		String HProxy = this.getHProxy();
-		SimpleQuery query = queryFor(ServiceEndpoint.class);
-		query.addCondition("$resource/Profile/Platform/Name/text() eq 'DataMiner'");
-		DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
-		List<ServiceEndpoint> resources = client.submit(query);
-		cl.setName(resources.get(0).profile().runtime().hostedOn());
-		return null;
-	}
+	//public List<Host> getDM(){
+	//	Cluster cl = new Cluster();
+	//	String HProxy = this.getHProxy();
+	//	SimpleQuery query = queryFor(ServiceEndpoint.class);
+	//	query.addCondition("$resource/Profile/Platform/Name/text() eq 'DataMiner'");
+	//	DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
+	//	List<ServiceEndpoint> resources = client.submit(query);
+	//	cl.setName(resources.get(0).profile().runtime().hostedOn());
+	//	return null;
+	//}
 
-	public Cluster getClusterByHProxy() throws IOException {
-		Cluster cl = new Cluster();
-		String HProxy = this.getHProxy();
-		URL stockURL = new URL("http://data.d4science.org/Yk4zSFF6V3JOSytNd3JkRDlnRFpDUUR5TnRJZEw2QjRHbWJQNStIS0N6Yz0");
-		BufferedReader in = new BufferedReader(new InputStreamReader(stockURL.openStream()));
-		reader = new CSVReader(in);
-		String[] nextLine;
-		while ((nextLine = reader.readNext()) != null) {
-			//rules to add
-			if (HProxy.contains(nextLine[0])) {
-				cl.setName(nextLine[0]);
-			}
-		}
-		return cl;
-	}
-
-	public Cluster MapCluster() throws IOException {
-		Cluster cl = new Cluster();
-		String HProxy = this.getHProxy();
-		if (HProxy.equals("dataminer-cluster1.d4science.org")) {
-			cl.setName("dataminer_cluster1");
-		}
-		if (HProxy.equals("dataminer-bigdata.d4science.org")) {
-			cl.setName("bigdata");
-		}
-		if (HProxy.equals("dataminer-cluster1.d4science.org")) {
-			cl.setName("dataminer_cluster1");
-		}
-		if (HProxy.equals("dataminer-cloud1.d4science.org")) {
-			cl.setName("dataminer_cloud1");
-		}
-		if (HProxy.equals("dataminer-prototypes.d4science.org")) {
-			cl.setName("prototypes");
-		}
-		if (HProxy.equals("dataminer.d4science.org")) {
-			cl.setName("gcubeapps");
-		}
-		if (HProxy.equals("dataminer-genericworkers.d4science.org")) {
-			cl.setName("genericworkers");
-		}
-		if (HProxy.equals("dataminer-genericworkers-proto.d4science.org")) {
-			cl.setName("genericworkers_proto");
-		}
-		if (HProxy.equals("dataminer-d-workers.d4science.org")) {
-			cl.setName("devnext_backend");
-		}
-		return cl;
-	}
-
-	public List<Host> listDataMinersByCluster() throws IOException {
-		//next op to use when Cluster info available in the IS
-		//Cluster cluster = this.getClusterByHProxy();
-		Cluster cluster = this.MapCluster();
-		List<Host> out = new LinkedList<Host>();
-		//prod
-		//URL stockURL = new URL("http://data.d4science.org/Yk4zSFF6V3JOSytNd3JkRDlnRFpDUUR5TnRJZEw2QjRHbWJQNStIS0N6Yz0");
-
-		//dev
-		URL stockURL = new URL("http://data.d4science.org/c29KTUluTkZnRlB0WXE5NVNaZnRoR0dtYThUSmNTVlhHbWJQNStIS0N6Yz0");
-		BufferedReader in = new BufferedReader(new InputStreamReader(stockURL.openStream()));
-		reader = new CSVReader(in, ',');
-		String[] nextLine;
-		while ((nextLine = reader.readNext()) != null) {
-			if (nextLine[1].equals("BACKEND") || (nextLine[1].equals("FRONTEND"))) {
-				continue;
-			}
-			if (nextLine[0].equals(cluster.getName())) {
-				Host a = new Host();
-				a.setName(nextLine[1]);
-				out.add(a);
-				System.out.println(a.getFullyQualifiedName());
-			}
-		}
-		return out;
-	}
-
 	/**
 	 * Return the list of hosts (dataminers) in a given VRE
@@ -310,7 +229,7 @@ public List<Host> listDataMinersByCluster() throws IOException {
 		ScopeProvider.instance.set("/gcube/devNext/NextNext");
 		//System.out.println(a.getHProxy());
 		//System.out.println(a.MapCluster());
-		System.out.println(a.listDataMinersByCluster());
+		//System.out.println(a.listDataMinersByCluster());
 		//System.out.println(a.listDataMinersByCluster());
 
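Making getHProxy() static lets the new HAProxy client resolve the proxy hostname without instantiating ISClient. The sketch below assembles the IS lookup pattern from the calls already shown in this diff (queryFor, addCondition, clientFor, submit, profile().runtime().hostedOn()); the class name, the main method, and the ScopeProvider import path are assumptions added for illustration.

package org.gcube.dataanalysis.dataminer.poolmanager.clients;

import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;

import java.util.List;

import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;

public class HProxyLookupSketch {

	public static void main(String[] args) {
		// The scope must be set before querying the IS, as in the mains above.
		ScopeProvider.instance.set("/gcube/devNext/NextNext");
		// Look up the DataMiner service endpoint in the current VRE.
		SimpleQuery query = queryFor(ServiceEndpoint.class);
		query.addCondition("$resource/Profile/Platform/Name/text() eq 'DataMiner'");
		DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
		List<ServiceEndpoint> resources = client.submit(query);
		// The host the endpoint runs on is the HAProxy frontend for the VRE.
		System.out.println(resources.get(0).profile().runtime().hostedOn());
	}
}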
@@ -292,6 +292,7 @@ import org.gcube.common.resources.gcore.Resources;
 import org.gcube.common.resources.gcore.Software.Profile.Dependency;
 ***REMOVED***
 import org.gcube.dataanalysis.dataminer.poolmanager.ansiblebridge.AnsibleBridge;
+import org.gcube.dataanalysis.dataminer.poolmanager.clients.HAProxy;
 import org.gcube.dataanalysis.dataminer.poolmanager.clients.ISClient;
 ***REMOVED***
 import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.AlgorithmSet;
@@ -540,7 +541,7 @@ public class DataminerPoolManager implements PoolManager {
 
 		// create the cluster (dataminers in the vre)
 		Cluster cluster = new Cluster();
-		for (Host h : new ISClient().listDataMinersByCluster()) {
+		for (Host h : new HAProxy().listDataMinersByCluster()) {
 		//for (Host h : new ISClient().listDataminersInVRE()) {
 			cluster.addHost(h);
 		}
 
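With this change the pool manager builds its cluster from the HAProxy client rather than the ISClient. A minimal usage sketch, assuming the project classes above are on the classpath and the ScopeProvider import path is as in the earlier sketch; the class name is hypothetical.

package org.gcube.dataanalysis.dataminer.poolmanager.clients;

import java.io.IOException;

import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;

public class ClusterFromHAProxyExample {

	public static void main(String[] args) throws IOException {
		// Same scope used by the mains in this commit.
		ScopeProvider.instance.set("/gcube/devNext/NextNext");
		// Build the VRE cluster from the hosts behind the HAProxy frontend.
		Cluster cluster = new Cluster();
		for (Host h : new HAProxy().listDataMinersByCluster()) {
			cluster.addHost(h);
		}
	}
}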