git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/dataminer-pool-manager@150435 82a268e6-3cf1-43bd-a215-b396298e98cf
parent 03f973a7de
commit 6704678c94
AnsibleBridge.java

@@ -39,9 +39,7 @@ public class AnsibleBridge ***REMOVED***

    private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(AnsibleBridge.class);

    /**
     * The workdir for this service
     */
    private String dpmRoot;

    public AnsibleBridge() ***REMOVED***

@@ -141,10 +139,7 @@ public class AnsibleBridge ***REMOVED***
        return worker;
    ***REMOVED***

    /**
     * Groups hosts by domain and algorithm sets
     * @param clusters
     */
    public void printInventoryByDomainAndSets(Collection<Cluster> clusters) ***REMOVED***
        Map<String, Set<Host>> inventory = new TreeMap<>();
        for(Cluster cluster:clusters) ***REMOVED***

@@ -173,10 +168,7 @@ public class AnsibleBridge ***REMOVED***
        ***REMOVED***
    ***REMOVED***

    /**
     * Groups hosts by algorithm sets only
     * @param clusters
     */
    public void printInventoryBySets(Collection<Cluster> clusters) ***REMOVED***
        Map<String, Set<Host>> inventory = new TreeMap<>();
        for (Cluster cluster : clusters) ***REMOVED***
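Note: the printInventory* methods above group dataminer hosts into a sorted inventory map. The following is a minimal, self-contained sketch of that grouping pattern, using plain String host names instead of the project's Cluster/Host classes (an assumption for illustration only).

import java.util.*;

// Sketch of the grouping pattern behind printInventoryByDomainAndSets:
// hosts are bucketed into a sorted map, one entry per inventory group.
public class InventorySketch {
    public static void main(String[] args) {
        List<String> hosts = Arrays.asList(
                "dm1.d4science.org", "dm2.d4science.org", "dm1.garr.it");

        Map<String, Set<String>> inventory = new TreeMap<>();
        for (String host : hosts) {
            String domain = host.substring(host.indexOf('.') + 1); // group key: host domain
            inventory.computeIfAbsent(domain, k -> new TreeSet<>()).add(host);
        }

        // Print one [section] per group, Ansible-inventory style.
        for (Map.Entry<String, Set<String>> e : inventory.entrySet()) {
            System.out.println("[" + e.getKey() + "]");
            e.getValue().forEach(System.out::println);
            System.out.println();
        }
    }
}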
TemplateManager.java

@@ -22,12 +22,7 @@ public class TemplateManager ***REMOVED***
        return input;
    ***REMOVED***

    /**
     * Read the given template
     * @param templateName
     * @return
     * @throws IOException
     */
    ***REMOVED*** private String readTemplate(String templateName) throws IOException ***REMOVED***
    ***REMOVED*** File templateFile = new File(this.getTemplateRoot(), templateName + ".yaml");
    ***REMOVED*** System.out.println("looking for file " + templateFile.getName());

@@ -35,12 +30,7 @@ public class TemplateManager ***REMOVED***
    ***REMOVED*** return out;
    ***REMOVED*** ***REMOVED***

    /**
     * Return the content of the given template
     * @param templateName
     * @return
     * @throws NoSuchElementException if no such template exists
     */
    ***REMOVED*** public String getTemplate(String templateName) throws NoSuchElementException ***REMOVED***
    ***REMOVED*** String template = null;
    ***REMOVED*** try ***REMOVED***
HAProxy.java

@@ -7,10 +7,12 @@ import java.io.InputStreamReader;
import java.util.LinkedList;
***REMOVED***

***REMOVED***
***REMOVED***
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Host;
import org.gcube.dataanalysis.dataminer.poolmanager.util.CheckPermission;
***REMOVED***
import au.com.bytecode.opencsv.CSVReader;

@@ -128,16 +130,20 @@ public class HAProxy ***REMOVED***

    ***REMOVED***

    public static void main(String[] args) throws IOException, SVNException ***REMOVED***
    public static void main(String[] args) throws ObjectNotFound, Exception ***REMOVED***
        HAProxy a = new HAProxy();
        ***REMOVED***ScopeProvider.instance.set("/gcube/devNext/NextNext");
        ScopeProvider.instance.set("/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab");
        ***REMOVED***SecurityTokenProvider.instance.set("***REMOVED***");

        ***REMOVED***ScopeProvider.instance.set("/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab");
        SecurityTokenProvider.instance.set("***REMOVED***");

        CheckPermission test = new CheckPermission();
        CheckPermission.apply("***REMOVED***", "/gcube/devNext/NextNext");

        ***REMOVED***ScopeProvider.instance.set("/d4science.research-infrastructures.eu/gCubeApps/RPrototypingLab");
        ***REMOVED*** System.out.println(a.getHProxy());
        ***REMOVED*** System.out.println(a.MapCluster());
        System.out.println(a.listDataMinersByCluster("***REMOVED***","/gcube/devNext/NextNext"));
        ***REMOVED***System.out.println(a.listDataMinersByCluster("***REMOVED***","/gcube/devNext/NextNext"));
        ***REMOVED*** System.out.println(a.listDataMinersByCluster());

        ***REMOVED*** List<Dependency> list = new LinkedList<Dependency>();
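Note: HAProxy.listDataMinersByCluster reads the proxy's backend list from a CSV export (see the au.com.bytecode.opencsv.CSVReader import above). The sketch below shows one plausible way to consume such a CSV; the URL handling and the assumption that the host name sits in the second column are illustrative, not the project's actual format.

import java.io.InputStreamReader;
import java.net.URL;
import java.util.LinkedList;
import java.util.List;

import au.com.bytecode.opencsv.CSVReader;

// Hedged sketch: fetch a HAProxy-style CSV export and collect backend host names.
public class HAProxyCsvSketch {

    public static List<String> listBackends(String csvUrl) throws Exception {
        List<String> hosts = new LinkedList<String>();
        CSVReader reader = new CSVReader(new InputStreamReader(new URL(csvUrl).openStream()));
        try {
            String[] row;
            while ((row = reader.readNext()) != null) {
                // Assumption: column 1 holds the server name; skip the header ("svname") and empty rows.
                if (row.length > 1 && !row[1].isEmpty() && !"svname".equals(row[1])) {
                    hosts.add(row[1]);
                }
            }
        } finally {
            reader.close();
        }
        return hosts;
    }
}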
ISClient.java

@@ -130,12 +130,7 @@ public class ISClient ***REMOVED***
        ***REMOVED***
    ***REMOVED*** ***REMOVED***

    ***REMOVED****
     * Return the list of hosts (dataminers) in a given VRE
     *
     * @param vreName
     * @return
     ***REMOVED***
    public Collection<Host> listDataminersInVRE() ***REMOVED***

        boolean remote = false;
AlgorithmPackageParser.java

@@ -34,16 +34,7 @@ public class AlgorithmPackageParser ***REMOVED***

    private static final int BUFFER_SIZE = 4096;

    /**
     * Given an URL to an algorithm package, create an Algorithm object with its
     * metadata. Metadata are extracted from the 'info.txt' file, if any, in the
     * package.
     *
     * @param url
     * @return An Algorithm object or null if no 'info.txt' is found in the
     *         package.
     * @throws IOException
     */
    public Algorithm parsePackage(String url) throws IOException ***REMOVED***
        String packageMetadata = this.getPackageMetadata(url);
        if (packageMetadata == null) ***REMOVED***

@@ -57,13 +48,7 @@ public class AlgorithmPackageParser ***REMOVED***
        ***REMOVED***
    ***REMOVED***

    /**
     * Extract the content of the metadata file from the package.
     *
     * @param url
     * @return
     * @throws IOException
     */
    private String getPackageMetadata(String url) throws IOException ***REMOVED***
        InputStream is = new URL(url).openStream();
        ZipInputStream zipIs = new ZipInputStream(is);

@@ -81,12 +66,7 @@ public class AlgorithmPackageParser ***REMOVED***
        return out;
    ***REMOVED***

    /**
     * Read the content of a zip entry and place it in a string.
     * @param zipIn
     * @return
     * @throws IOException
     */
    private String getEntryContent(ZipInputStream zipIn) throws IOException ***REMOVED***
        StringBuilder s = new StringBuilder();
        byte[] buffer = new byte[BUFFER_SIZE];

@@ -97,11 +77,7 @@ public class AlgorithmPackageParser ***REMOVED***
        return s.toString();
    ***REMOVED***

    /**
     * Parse the content of the metadata file and create a key+multivalue map.
     * @param metadata
     * @return
     */
    private Map<String, List<String>> parseMetadata(String metadata) ***REMOVED***
        Map<String, List<String>> out = new HashMap<String, List<String>>();
        String[] lines = metadata.split("\n");

@@ -145,11 +121,7 @@ public class AlgorithmPackageParser ***REMOVED***
        return out;
    ***REMOVED***

    /**
     * Create an Algorithm starting from its metadata
     * @param metadata
     * @return
     */
    ***REMOVED*** private Algorithm createAlgorithm(Map<String, List<String>> metadata) ***REMOVED***
    ***REMOVED*** Algorithm out = new Algorithm();
    ***REMOVED*** out.setName(extractSingleValue(metadata, METADATA_ALGORITHM_NAME));
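Note: parseMetadata above turns the package's info.txt content into a key-to-multivalue map (Map<String, List<String>>). A small, self-contained sketch of that kind of parsing follows; the "key: value" line format is an assumption, the project's real layout may differ.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hedged sketch (not the project's actual implementation): parse "key: value"
// lines from an info.txt-style string into a key -> multi-value map.
public class MetadataSketch {

    static Map<String, List<String>> parseMetadata(String metadata) {
        Map<String, List<String>> out = new HashMap<>();
        for (String line : metadata.split("\n")) {
            int sep = line.indexOf(':');           // assumed "key: value" layout
            if (sep < 0) continue;                 // skip lines without a separator
            String key = line.substring(0, sep).trim();
            String value = line.substring(sep + 1).trim();
            out.computeIfAbsent(key, k -> new ArrayList<>()).add(value);
        }
        return out;
    }

    public static void main(String[] args) {
        // Repeated keys (here "Author") accumulate into the same list.
        System.out.println(parseMetadata("Algorithm Name: MY_ALGO\nAuthor: Jane\nAuthor: John"));
    }
}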
DataminerPoolManager.java

@@ -28,7 +28,6 @@ public class DataminerPoolManager ***REMOVED***
    ***REMOVED***

    ***REMOVED***Algorithm algo) throws IOException, InterruptedException ***REMOVED***

        Cluster stagingCluster = ClusterBuilder.getStagingDataminerCluster();

@@ -43,7 +42,7 @@ public class DataminerPoolManager ***REMOVED***

        Cluster prodCluster = ClusterBuilder.getVRECluster(targetVREToken, targetVRE);

        DMPMJob job = new ProductionPublishingJob(this.svnUpdater, algo, prodCluster, targetVRE);
        DMPMJob job = new ProductionPublishingJob(this.svnUpdater, algo, prodCluster, targetVRE, targetVREToken);
        String id = job.start();
        return id;
    ***REMOVED***
ProductionPublishingJob.java

@@ -1,7 +1,10 @@
package org.gcube.dataanalysis.dataminer.poolmanager.service;

***REMOVED***
***REMOVED***
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.util.CheckPermission;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;

public class ProductionPublishingJob extends DMPMJob ***REMOVED***

@@ -9,21 +12,28 @@ public class ProductionPublishingJob extends DMPMJob ***REMOVED***
    private Algorithm algorithm;
    private Cluster prodCluster;
    private String targetVREName;
    private String targetVREToken;

    public ProductionPublishingJob(SVNUpdater svnUpdater, Algorithm algorithm, Cluster prodCluster, String targetVREName) ***REMOVED***
    public ProductionPublishingJob(SVNUpdater svnUpdater, Algorithm algorithm, Cluster prodCluster, String targetVREName, String targetVREToken) ***REMOVED***
        super(svnUpdater);
        this.algorithm = algorithm;
        this.prodCluster = prodCluster;
        this.targetVREName = targetVREName;
        this.targetVREToken = targetVREToken;
    ***REMOVED***

    ***REMOVED***
    protected void execute() ***REMOVED***
        try ***REMOVED***
        this.svnUpdater.updateProdDeps(this.algorithm);

        if (CheckPermission.apply(targetVREToken,targetVREName))***REMOVED***

        this.svnUpdater.updateProdDeps(this.algorithm);

        this.svnUpdater.updateSVNProdAlgorithmList(this.algorithm, this.targetVREName, "DataMiner Pool Manager", "Prod");
        this.svnUpdater.updateSVNProdAlgorithmList(this.algorithm, this.targetVREName, "DataMiner Pool Manager", "Prod");

        ***REMOVED***

        ***REMOVED*** int ret = this.executeAnsibleWorker(
        ***REMOVED*** createWorker(this.algorithm, this.prodCluster, false, "gcube"));
@@ -1,5 +1,9 @@
package org.gcube.dataanalysis.dataminer.poolmanager.service;

import static org.gcube.common.authorization.client.Constants.authorizationService;

import org.gcube.common.authorization.library.AuthorizationEntry;
***REMOVED***
***REMOVED***
import org.gcube.dataanalysis.dataminer.poolmanager.datamodel.Cluster;
import org.gcube.dataanalysis.dataminer.poolmanager.util.SVNUpdater;
CheckPermission.java (new file)

@@ -0,0 +1,27 @@
package org.gcube.dataanalysis.dataminer.poolmanager.util;

***REMOVED***
import org.gcube.common.authorization.library.AuthorizationEntry;
***REMOVED***
***REMOVED***

import static org.gcube.common.authorization.client.Constants.authorizationService;

public class CheckPermission ***REMOVED***

    public CheckPermission() ***REMOVED***
    ***REMOVED***

    public static boolean apply(String VREToken, String vre) throws ObjectNotFound, Exception ***REMOVED***

        AuthorizationEntry entry = authorizationService().get(VREToken);
        if (entry.getContext().equals(vre)) ***REMOVED***
            System.out.println("OK!");
            return true;
        ***REMOVED***

        return false;
    ***REMOVED***
***REMOVED***
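Note: CheckPermission resolves the supplied token via the gCube authorization service and accepts it only when the token's context equals the target VRE; ProductionPublishingJob now calls it before updating the production SVN lists. A hedged sketch of that gating pattern follows (publishToProduction is an illustrative placeholder, not a project method).

import org.gcube.dataanalysis.dataminer.poolmanager.util.CheckPermission;

// Sketch of the token-gated publish flow introduced by this commit.
public class GatedPublishSketch {

    public static void publishIfAllowed(String vreToken, String vreName) throws Exception {
        if (CheckPermission.apply(vreToken, vreName)) {
            // Token belongs to the target VRE: safe to touch the production algorithm lists,
            // e.g. svnUpdater.updateProdDeps(...) and updateSVNProdAlgorithmList(...).
            publishToProduction();
        } else {
            System.err.println("Token does not match VRE " + vreName + "; publication skipped");
        }
    }

    private static void publishToProduction() {
        // Placeholder for the real SVN update calls.
    }
}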
node file (YAML)

@@ -1,6 +1,6 @@
#YML node file
#STAGING_HOST: dataminer-proto-ghost.d4science.org
STAGING_HOST: dataminer1-devnext.d4science.org
STAGING_HOST: dataminer-proto-ghost.d4science.org
#STAGING_HOST: dataminer1-devnext.d4science.org
SVN_REPO: https:***REMOVED***svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/RConfiguration/RPackagesManagement/
#HAPROXY_CSV: http:***REMOVED***data.d4science.org/Yk4zSFF6V3JOSytNd3JkRDlnRFpDUUR5TnRJZEw2QjRHbWJQNStIS0N6Yz0