Initial import.
git-svn-id: http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/application-support-layer/aslvre@74379 82a268e6-3cf1-43bd-a215-b396298e98cf
parent 8af2a415cd
commit ce7a015ec8
@@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
  <classpathentry including="**/*.java" kind="src" output="target/classes" path="src/main/java">
    <attributes>
      <attribute name="optional" value="true"/>
      <attribute name="maven.pomderived" value="true"/>
    </attributes>
  </classpathentry>
  <classpathentry kind="src" output="target/test-classes" path="src/test/java">
    <attributes>
      <attribute name="optional" value="true"/>
      <attribute name="maven.pomderived" value="true"/>
    </attributes>
  </classpathentry>
  <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6">
    <attributes>
      <attribute name="maven.pomderived" value="true"/>
    </attributes>
  </classpathentry>
  <classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
    <attributes>
      <attribute name="maven.pomderived" value="true"/>
    </attributes>
  </classpathentry>
  <classpathentry kind="output" path="target/classes"/>
</classpath>
@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
  <name>applicationSupportLayerVREManagement</name>
  <comment></comment>
  <projects>
  </projects>
  <buildSpec>
    <buildCommand>
      <name>org.eclipse.jdt.core.javabuilder</name>
      <arguments>
      </arguments>
    </buildCommand>
    <buildCommand>
      <name>org.eclipse.m2e.core.maven2Builder</name>
      <arguments>
      </arguments>
    </buildCommand>
  </buildSpec>
  <natures>
    <nature>org.eclipse.m2e.core.maven2Nature</nature>
    <nature>org.eclipse.jdt.core.javanature</nature>
  </natures>
</projectDescription>
@@ -0,0 +1,4 @@
#Thu May 02 12:29:42 CEST 2013
eclipse.preferences.version=1
encoding//src/main/java=UTF-8
encoding/<project>=UTF-8
@@ -0,0 +1,6 @@
#Thu May 02 12:28:02 CEST 2013
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.source=1.6
@@ -0,0 +1,5 @@
#Thu May 02 12:23:01 CEST 2013
activeProfiles=
eclipse.preferences.version=1
resolveWorkspaceProjects=true
version=1
@@ -0,0 +1 @@
Used as a library in the gCube Framework
@@ -0,0 +1,6 @@
gCube System - License
------------------------------------------------------------

The gCube/gCore software is licensed as Free Open Source software conveying to the EUPL (http://ec.europa.eu/idabc/eupl).
The software and documentation is provided by its authors/distributors "as is" and no expressed or
implied warranty is given for its use, quality or fitness for a particular case.
@@ -0,0 +1,2 @@
Rena Tsantouli (e.tsantoylh@di.uoa.gr),
National Kapodistrian University of Athens, Department of Informatics.
@@ -0,0 +1,50 @@
The gCube System - Process Optimisation - Planner Service
------------------------------------------------------------

This work is partially funded by the European Commission in the
context of the D4Science project (www.d4science.eu), under the 1st call of FP7 IST priority.


Authors
-------

* Valia Tsagkalidou (v.tsagkalidou@di.uoa.gr),
  National Kapodistrian University of Athens, Department of Informatics.

* Rena Tsantouli (e.tsantoylh@di.uoa.gr),
  National Kapodistrian University of Athens, Department of Informatics.

* Massimiliano Assante,
  Institute of Information Science and Technologies (ISTI), CNR, Italy

Version and Release Date
------------------------

v. 3.0.0, 17/05/2010


Description
-----------
Provides functionality that helps the presentation layer to use gCube services.


Download information
--------------------
Source code is available from SVN:
https://svn.d4science.research-infrastructures.eu/gcube/trunk/application-support-layer/applicationSupportLayerVREManagement/

Binaries can be downloaded from:
http://software.d4science.research-infrastructures.eu/


Documentation
-------------
Documentation is available on-line from the Projects Documentation Wiki:

https://gcube.wiki.gcube-system.org/gcube/index.php/ASL


Licensing
---------

This software is licensed under the terms you may find in the file named "LICENSE" in this directory.
@@ -0,0 +1,6 @@
<ReleaseNotes>
  <Changeset component="org.gcube.application-support-layer.application-support-layer-vremanagement.3-2-0" date="2011-02-07">
    <Change>Added support for the new VRE Deployer portlet</Change>
    <Change>Added support for the new VRE Manager service</Change>
  </Changeset>
</ReleaseNotes>
@@ -0,0 +1,48 @@
<assembly
  xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
  <id>servicearchive</id>
  <formats>
    <format>tar.gz</format>
  </formats>
  <baseDirectory>/</baseDirectory>
  <fileSets>
    <fileSet>
      <directory>${distroDirectory}</directory>
      <outputDirectory>/</outputDirectory>
      <useDefaultExcludes>true</useDefaultExcludes>
      <includes>
        <include>README</include>
        <include>LICENSE</include>
        <include>INSTALL</include>
        <include>MAINTAINERS</include>
        <include>changelog.xml</include>
      </includes>
      <fileMode>755</fileMode>
      <filtered>true</filtered>
    </fileSet>
    <fileSet>
      <directory>target/apidocs</directory>
      <outputDirectory>/${artifactId}/doc/api</outputDirectory>
      <useDefaultExcludes>true</useDefaultExcludes>
      <fileMode>755</fileMode>
    </fileSet>
  </fileSets>
  <files>
    <file>
      <source>${distroDirectory}/profile.xml</source>
      <outputDirectory>./</outputDirectory>
      <filtered>true</filtered>
    </file>
    <file>
      <source>target/${build.finalName}.jar</source>
      <outputDirectory>/${artifactId}</outputDirectory>
    </file>
    <file>
      <source>${distroDirectory}/svnpath.txt</source>
      <outputDirectory>/${artifactId}</outputDirectory>
      <filtered>true</filtered>
    </file>
  </files>
</assembly>
@@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<Resource xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
  <ID></ID>
  <Type>Library</Type>
  <Profile>
    <Description>ASL Social Library</Description>
    <Class>ApplicationSupportLayer</Class>
    <Name>${artifactId}</Name>
    <Version>1.0.0</Version>
    <Packages>
      <Software>
        <Name>${artifactId}</Name>
        <Version>${version}</Version>
        <MavenCoordinates>
          <groupId>${groupId}</groupId>
          <artifactId>${artifactId}</artifactId>
          <version>${version}</version>
        </MavenCoordinates>
        <Files>
          <File>${build.finalName}.jar</File>
        </Files>
      </Software>
    </Packages>
  </Profile>
</Resource>
@@ -0,0 +1 @@
${scm.url}
@@ -0,0 +1,538 @@
<ehcache xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="ehcache.xsd">

    <!--
    CacheManager Configuration
    ==========================
    An ehcache.xml corresponds to a single CacheManager.

    See instructions below or the ehcache schema (ehcache.xsd) on how to configure.

    DiskStore configuration
    =======================

    Sets the path to the directory where cache files are created.

    If the path is a Java System Property it is replaced by its value in the
    running VM.

    The following properties are translated:
    * user.home - User's home directory
    * user.dir - User's current working directory
    * java.io.tmpdir - Default temp file path

    Subdirectories can be specified below the property e.g. java.io.tmpdir/one
    -->
    <diskStore path="java.io.tmpdir"/>

    <!--
    CacheManagerEventListener
    =========================
    Specifies a CacheManagerEventListenerFactory, which will be used to create a CacheManagerEventListener,
    which is notified when Caches are added or removed from the CacheManager.

    The attributes of CacheManagerEventListenerFactory are:
    * class - a fully qualified factory class name
    * properties - comma separated properties having meaning only to the factory.

    Sets the fully qualified class name to be registered as the CacheManager event listener.

    The events include:
    * adding a Cache
    * removing a Cache

    Callbacks to listener methods are synchronous and unsynchronized. It is the responsibility
    of the implementer to safely handle the potential performance and thread safety issues
    depending on what their listener is doing.

    If no class is specified, no listener is created. There is no default.
    -->
    <cacheManagerEventListenerFactory class="" properties=""/>


    <!--
    CacheManagerPeerProvider
    ========================
    (Enable for distributed operation)

    Specifies a CacheManagerPeerProviderFactory which will be used to create a
    CacheManagerPeerProvider, which discovers other CacheManagers in the cluster.

    The attributes of cacheManagerPeerProviderFactory are:
    * class - a fully qualified factory class name
    * properties - comma separated properties having meaning only to the factory.

    Ehcache comes with a built-in RMI-based distribution system with two means of discovery of
    CacheManager peers participating in the cluster:
    * automatic, using a multicast group. This one automatically discovers peers and detects
      changes such as peers entering and leaving the group
    * manual, using manual rmiURL configuration. A hardcoded list of peers is provided at
      configuration time.

    Configuring Automatic Discovery:
    Automatic discovery is configured as per the following example:
    <cacheManagerPeerProviderFactory
        class="net.sf.ehcache.distribution.RMICacheManagerPeerProviderFactory"
        properties="peerDiscovery=automatic, multicastGroupAddress=230.0.0.1,
                    multicastGroupPort=4446, timeToLive=32"/>

    Valid properties are:
    * peerDiscovery (mandatory) - specify "automatic"
    * multicastGroupAddress (mandatory) - specify a valid multicast group address
    * multicastGroupPort (mandatory) - specify a dedicated port for the multicast heartbeat
      traffic
    * timeToLive - specify a value between 0 and 255 which determines how far the packets will
      propagate.

      By convention, the restrictions are:
      0   - the same host
      1   - the same subnet
      32  - the same site
      64  - the same region
      128 - the same continent
      255 - unrestricted

    Configuring Manual Discovery:
    Manual discovery is configured as per the following example:
    <cacheManagerPeerProviderFactory class=
        "net.sf.ehcache.distribution.RMICacheManagerPeerProviderFactory"
        properties="peerDiscovery=manual,
        rmiUrls=//server1:40000/sampleCache1|//server2:40000/sampleCache1
        | //server1:40000/sampleCache2|//server2:40000/sampleCache2"
        propertySeparator="," />

    Valid properties are:
    * peerDiscovery (mandatory) - specify "manual"
    * rmiUrls (mandatory) - specify a pipe separated list of rmiUrls, in the form
      //hostname:port

    The hostname is the hostname of the remote CacheManager peer. The port is the listening
    port of the RMICacheManagerPeerListener of the remote CacheManager peer.

    -->
    <cacheManagerPeerProviderFactory
        class="net.sf.ehcache.distribution.RMICacheManagerPeerProviderFactory"
        properties="peerDiscovery=automatic,
                    multicastGroupAddress=230.0.0.1,
                    multicastGroupPort=4446, timeToLive=1"
        propertySeparator=","
        />


    <!--
    CacheManagerPeerListener
    ========================
    (Enable for distributed operation)

    Specifies a CacheManagerPeerListenerFactory which will be used to create a
    CacheManagerPeerListener, which
    listens for messages from cache replicators participating in the cluster.

    The attributes of cacheManagerPeerListenerFactory are:
    class - a fully qualified factory class name
    properties - comma separated properties having meaning only to the factory.

    Ehcache comes with a built-in RMI-based distribution system. The listener component is
    RMICacheManagerPeerListener which is configured using
    RMICacheManagerPeerListenerFactory. It is configured as per the following example:

    <cacheManagerPeerListenerFactory
        class="net.sf.ehcache.distribution.RMICacheManagerPeerListenerFactory"
        properties="hostName=fully_qualified_hostname_or_ip,
                    port=40001,
                    socketTimeoutMillis=120000"
        propertySeparator="," />

    All properties are optional. They are:
    * hostName - the hostName of the host the listener is running on. Specify
      where the host is multihomed and you want to control the interface over which cluster
      messages are received. Defaults to the host name of the default interface if not
      specified.
    * port - the port the listener listens on. This defaults to a free port if not specified.
    * socketTimeoutMillis - the number of ms client sockets will stay open when sending
      messages to the listener. This should be long enough for the slowest message.
      If not specified it defaults to 120000ms.

    -->
    <cacheManagerPeerListenerFactory
        class="net.sf.ehcache.distribution.RMICacheManagerPeerListenerFactory"/>


    <!--
    Cache configuration
    ===================

    The following attributes are required.

    name:
    Sets the name of the cache. This is used to identify the cache. It must be unique.

    maxElementsInMemory:
    Sets the maximum number of objects that will be created in memory

    maxElementsOnDisk:
    Sets the maximum number of objects that will be maintained in the DiskStore
    The default value is zero, meaning unlimited.

    eternal:
    Sets whether elements are eternal. If eternal, timeouts are ignored and the
    element is never expired.

    overflowToDisk:
    Sets whether elements can overflow to disk when the memory store
    has reached the maxInMemory limit.

    The following attributes and elements are optional.

    timeToIdleSeconds:
    Sets the time to idle for an element before it expires.
    i.e. The maximum amount of time between accesses before an element expires
    Is only used if the element is not eternal.
    Optional attribute. A value of 0 means that an Element can idle for infinity.
    The default value is 0.

    timeToLiveSeconds:
    Sets the time to live for an element before it expires.
    i.e. The maximum time between creation time and when an element expires.
    Is only used if the element is not eternal.
    Optional attribute. A value of 0 means that an Element can live for infinity.
    The default value is 0.

    diskPersistent:
    Whether the disk store persists between restarts of the Virtual Machine.
    The default value is false.

    diskExpiryThreadIntervalSeconds:
    The number of seconds between runs of the disk expiry thread. The default value
    is 120 seconds.

    diskSpoolBufferSizeMB:
    This is the size to allocate the DiskStore for a spool buffer. Writes are made
    to this area and then asynchronously written to disk. The default size is 30MB.
    Each spool buffer is used only by its cache. If you get OutOfMemory errors consider
    lowering this value. To improve DiskStore performance consider increasing it. Trace level
    logging in the DiskStore will show if put back ups are occurring.

    memoryStoreEvictionPolicy:
    Policy would be enforced upon reaching the maxElementsInMemory limit. Default
    policy is Least Recently Used (specified as LRU). Other policies available -
    First In First Out (specified as FIFO) and Less Frequently Used
    (specified as LFU)

    Cache elements can also contain sub elements which take the same format of a factory class
    and properties. Defined sub-elements are:

    * cacheEventListenerFactory - Enables registration of listeners for cache events, such as
      put, remove, update, and expire.

    * bootstrapCacheLoaderFactory - Specifies a BootstrapCacheLoader, which is called by a
      cache on initialisation to prepopulate itself.

    * cacheExtensionFactory - Specifies a CacheExtension, a generic mechanism to tie a class
      which holds a reference to a cache to the cache lifecycle.

    * cacheExceptionHandlerFactory - Specifies a CacheExceptionHandler, which is called when
      cache exceptions occur.

    * cacheLoaderFactory - Specifies a CacheLoader, which can be used both asynchronously and
      synchronously to load objects into a cache.

    RMI Cache Replication

    Each cache that will be distributed needs to set a cache event listener which replicates
    messages to the other CacheManager peers. For the built-in RMI implementation this is done
    by adding a cacheEventListenerFactory element of type RMICacheReplicatorFactory to each
    distributed cache's configuration as per the following example:

    <cacheEventListenerFactory class="net.sf.ehcache.distribution.RMICacheReplicatorFactory"
        properties="replicateAsynchronously=true,
        replicatePuts=true,
        replicateUpdates=true,
        replicateUpdatesViaCopy=true,
        replicateRemovals=true
        asynchronousReplicationIntervalMillis=<number of milliseconds>"
        propertySeparator="," />

    The RMICacheReplicatorFactory recognises the following properties:

    * replicatePuts=true|false - whether new elements placed in a cache are
      replicated to others. Defaults to true.

    * replicateUpdates=true|false - whether new elements which override an
      element already existing with the same key are replicated. Defaults to true.

    * replicateRemovals=true - whether element removals are replicated. Defaults to true.

    * replicateAsynchronously=true | false - whether replications are
      asynchronous (true) or synchronous (false). Defaults to true.

    * replicateUpdatesViaCopy=true | false - whether the new elements are
      copied to other caches (true), or whether a remove message is sent. Defaults to true.

    * asynchronousReplicationIntervalMillis=<number of milliseconds> - The asynchronous
      replicator runs at a set interval of milliseconds. The default is 1000. The minimum
      is 10. This property is only applicable if replicateAsynchronously=true


    Cluster Bootstrapping

    The RMIBootstrapCacheLoader bootstraps caches in clusters where RMICacheReplicators are
    used. It is configured as per the following example:

    <bootstrapCacheLoaderFactory
        class="net.sf.ehcache.distribution.RMIBootstrapCacheLoaderFactory"
        properties="bootstrapAsynchronously=true, maximumChunkSizeBytes=5000000"
        propertySeparator="," />

    The RMIBootstrapCacheLoaderFactory recognises the following optional properties:

    * bootstrapAsynchronously=true|false - whether the bootstrap happens in the background
      after the cache has started. If false, bootstrapping must complete before the cache is
      made available. The default value is true.

    * maximumChunkSizeBytes=<integer> - Caches can potentially be very large, larger than the
      memory limits of the VM. This property allows the bootstrapper to fetch elements in
      chunks. The default chunk size is 5000000 (5MB).


    Cache Exception Handling

    By default, most cache operations will propagate a runtime CacheException on failure. An
    interceptor, using a dynamic proxy, may be configured so that a CacheExceptionHandler can
    be configured to intercept Exceptions. Errors are not intercepted.

    It is configured as per the following example:

    <cacheExceptionHandlerFactory class="com.example.ExampleExceptionHandlerFactory"
        properties="logLevel=FINE"/>

    Caches with ExceptionHandling configured are not of type Cache, but are of type Ehcache only,
    and are not available using CacheManager.getCache(), but using CacheManager.getEhcache().


    Cache Loader

    A default CacheLoader may be set which loads objects into the cache through asynchronous and
    synchronous methods on Cache. This is different to the bootstrap cache loader, which is used
    only in distributed caching.

    It is configured as per the following example:

    <cacheLoaderFactory class="com.example.ExampleCacheLoaderFactory"
        properties="type=int,startCounter=10"/>

    Cache Extension

    CacheExtensions are a general purpose mechanism to allow generic extensions to a Cache.
    CacheExtensions are tied into the Cache lifecycle.

    CacheExtensions are created using the CacheExtensionFactory which has a
    <code>createCacheCacheExtension()</code> method which takes as a parameter a
    Cache and properties. It can thus call back into any public method on Cache, including, of
    course, the load methods.

    Extensions are added as per the following example:

    <cacheExtensionFactory class="com.example.FileWatchingCacheRefresherExtensionFactory"
        properties="refreshIntervalMillis=18000, loaderTimeout=3000,
                    flushPeriod=whatever, someOtherProperty=someValue ..."/>

    -->


    <!--
    Mandatory Default Cache configuration. These settings will be applied to caches
    created programmatically using CacheManager.add(String cacheName).

    The defaultCache has an implicit name "default" which is a reserved cache name.
    -->
    <defaultCache
        maxElementsInMemory="10000"
        eternal="false"
        timeToIdleSeconds="120"
        timeToLiveSeconds="120"
        overflowToDisk="true"
        diskSpoolBufferSizeMB="30"
        maxElementsOnDisk="10000000"
        diskPersistent="false"
        diskExpiryThreadIntervalSeconds="120"
        memoryStoreEvictionPolicy="LRU"
        />

    <!--
    Sample caches. Following are some example caches. Remove these before use.
    -->

    <!--
    Sample cache named sampleCache1
    This cache contains a maximum in memory of 10000 elements, and will expire
    an element if it is idle for more than 5 minutes and lives for more than
    10 minutes.

    If there are more than 10000 elements it will overflow to the
    disk cache, which in this configuration will go to wherever java.io.tmpdir is
    defined on your system. On a standard Linux system this will be /tmp
    -->
    <cache name="profiles"
        maxElementsInMemory="10000"
        maxElementsOnDisk="1000"
        eternal="false"
        overflowToDisk="true"
        diskSpoolBufferSizeMB="20"
        timeToIdleSeconds="300"
        timeToLiveSeconds="600"
        memoryStoreEvictionPolicy="LRU"
        />
    <cache name="genericResources"
        maxElementsInMemory="10000"
        maxElementsOnDisk="1000"
        eternal="false"
        overflowToDisk="true"
        diskSpoolBufferSizeMB="20"
        timeToIdleSeconds="300"
        timeToLiveSeconds="600"
        memoryStoreEvictionPolicy="LRU"
        />
    <cache name="searchConfiguration"
        maxElementsInMemory="10000"
        maxElementsOnDisk="1000"
        eternal="false"
        overflowToDisk="true"
        diskSpoolBufferSizeMB="20"
        timeToIdleSeconds="300"
        timeToLiveSeconds="600"
        memoryStoreEvictionPolicy="LRU"
        />
    <cache name="collections"
        maxElementsInMemory="10000"
        maxElementsOnDisk="1000"
        eternal="false"
        overflowToDisk="true"
        diskSpoolBufferSizeMB="20"
        timeToIdleSeconds="300"
        timeToLiveSeconds="600"
        memoryStoreEvictionPolicy="LRU"
        />
    <cache name="content"
        maxElementsInMemory="10000"
        maxElementsOnDisk="1000"
        eternal="false"
        overflowToDisk="true"
        diskSpoolBufferSizeMB="20"
        timeToIdleSeconds="300"
        timeToLiveSeconds="600"
        memoryStoreEvictionPolicy="LRU"
        />
    <cache name="thumbnail"
        maxElementsInMemory="10000"
        maxElementsOnDisk="1000"
        eternal="false"
        overflowToDisk="true"
        diskSpoolBufferSizeMB="20"
        timeToIdleSeconds="300"
        timeToLiveSeconds="600"
        memoryStoreEvictionPolicy="LRU"
        />
    <cache name="schemata"
        maxElementsInMemory="10000"
        maxElementsOnDisk="1000"
        eternal="false"
        overflowToDisk="true"
        diskSpoolBufferSizeMB="20"
        timeToIdleSeconds="300"
        timeToLiveSeconds="600"
        memoryStoreEvictionPolicy="LRU"
        />
    <cache name="metadata"
        maxElementsInMemory="10000"
        maxElementsOnDisk="1000"
        eternal="false"
        overflowToDisk="true"
        diskSpoolBufferSizeMB="20"
        timeToIdleSeconds="300"
        timeToLiveSeconds="600"
        memoryStoreEvictionPolicy="LRU"
        />

    <!--
    Sample cache named sampleCache2
    This cache has a maximum of 1000 elements in memory. There is no overflow to disk, so 1000
    is also the maximum cache size. Note that when a cache is eternal, timeToLive and
    timeToIdle are not used and do not need to be specified.
    -->
    <cache name="sampleCache2"
        maxElementsInMemory="1000"
        eternal="true"
        overflowToDisk="false"
        memoryStoreEvictionPolicy="FIFO"
        />


    <!--
    Sample cache named sampleCache3. This cache overflows to disk. The disk store is
    persistent between cache and VM restarts. The disk expiry thread interval is set to 10
    minutes, overriding the default of 2 minutes.
    -->
    <cache name="sampleCache3"
        maxElementsInMemory="500"
        eternal="false"
        overflowToDisk="true"
        timeToIdleSeconds="300"
        timeToLiveSeconds="600"
        diskPersistent="true"
        diskExpiryThreadIntervalSeconds="1"
        memoryStoreEvictionPolicy="LFU"
        />


    <!--
    Sample distributed cache named sampleDistributedCache1.
    This cache replicates using defaults.
    It also bootstraps from the cluster, using default properties.
    -->
    <cache name="sampleDistributedCache1"
        maxElementsInMemory="10"
        eternal="false"
        timeToIdleSeconds="100"
        timeToLiveSeconds="100"
        overflowToDisk="false">
        <cacheEventListenerFactory
            class="net.sf.ehcache.distribution.RMICacheReplicatorFactory"/>
        <bootstrapCacheLoaderFactory
            class="net.sf.ehcache.distribution.RMIBootstrapCacheLoaderFactory"/>
    </cache>


    <!--
    Sample distributed cache named sampleDistributedCache2.
    This cache replicates using specific properties.
    It only replicates updates and does so synchronously via copy
    -->
    <cache name="sampleDistributedCache2"
        maxElementsInMemory="10"
        eternal="false"
        timeToIdleSeconds="100"
        timeToLiveSeconds="100"
        overflowToDisk="false">
        <cacheEventListenerFactory
            class="net.sf.ehcache.distribution.RMICacheReplicatorFactory"
            properties="replicateAsynchronously=false, replicatePuts=false,
                        replicateUpdates=true, replicateUpdatesViaCopy=true,
                        replicateRemovals=false"/>
    </cache>

    <!--
    Sample distributed cache named sampleDistributedCache3.
    This cache replicates using defaults except that the asynchronous replication
    interval is set to 200ms.
    -->
    <cache name="sampleDistributedCache3"
        maxElementsInMemory="10"
        eternal="false"
        timeToIdleSeconds="100"
        timeToLiveSeconds="100"
        overflowToDisk="false">
        <cacheEventListenerFactory
            class="net.sf.ehcache.distribution.RMICacheReplicatorFactory"
            properties="asynchronousReplicationIntervalMillis=200"/>
    </cache>
</ehcache>
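Note: the caches declared above (e.g. "profiles", "genericResources") are meant to be obtained through the standard net.sf.ehcache API once this ehcache.xml is on the classpath. A minimal usage sketch follows; the key and value shown are illustrative only and are not taken from this commit:

    // imports: net.sf.ehcache.CacheManager, net.sf.ehcache.Cache, net.sf.ehcache.Element
    CacheManager manager = CacheManager.create();        // singleton, configured from ehcache.xml on the classpath
    Cache profiles = manager.getCache("profiles");        // one of the caches declared above
    profiles.put(new Element("some.user", userProfile));  // hypothetical key/value pair
    Element hit = profiles.get("some.user");              // null on a cache miss
    Object cached = (hit != null) ? hit.getObjectValue() : null;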
@@ -0,0 +1,67 @@
<?xml version="1.0" encoding="UTF-8"?>
<Resource xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
  <ID/>
  <Type>Library</Type>
  <Profile>
    <Description>ASLVRE</Description>
    <Class>ApplicationSupportLayer</Class>
    <Name>ASLVRE</Name>
    <Version>1.0.0</Version>
    <Packages>
      <Software>
        <Description>ASLVRE</Description>
        <Name>ASLVRE</Name>
        <Version>2.0.3</Version>
        <Dependencies>
          <Dependency>
            <Service>
              <Class>ApplicationSupportLayer</Class>
              <Name>ASLCore</Name>
              <Version>1.0.0</Version>
            </Service>
            <Package>ASLCore</Package>
            <Version>[2.0.0,3.0.0)</Version>
            <Scope level="GHN"/>
            <Optional>false</Optional>
          </Dependency>
          <Dependency>
            <Service>
              <Class>InformationSystem</Class>
              <Name>ISCache</Name>
              <Version>1.0.0</Version>
            </Service>
            <Package>ISCache</Package>
            <Version>[1.0.0,2.0.0)</Version>
            <Scope level="GHN"/>
            <Optional>false</Optional>
          </Dependency>
          <Dependency>
            <Service>
              <Class>VREManagement</Class>
              <Name>SoftwareRepository</Name>
              <Version>1.0.0</Version>
            </Service>
            <Package>SoftwareRepository-stubs</Package>
            <Version>[1.0.0,2.0.0)</Version>
            <Scope level="GHN"/>
            <Optional>false</Optional>
          </Dependency>
          <Dependency>
            <Service>
              <Class>VREManagement</Class>
              <Name>VREModeler</Name>
              <Version>1.0.0</Version>
            </Service>
            <Package>VREModeler-stubs</Package>
            <Version>[1.0.0,2.0.0)</Version>
            <Scope level="GHN"/>
            <Optional>false</Optional>
          </Dependency>
        </Dependencies>
        <Files>
          <File>ApplicationSupportLibraryVRE.jar</File>
        </Files>
      </Software>
    </Packages>
  </Profile>
</Resource>
@@ -0,0 +1 @@
sharedDir=${catalina.home}/shared/d4s
@@ -0,0 +1,18 @@
MYPROXY_HOST=grids04.eng.it
MYPROXY_PORT=7512
PROXIES_DIR=#path to dir where the proxy files will be stored#
HOST_CERT=#myproxy certificate#
HOST_KEY=#myproxy key#

NEW_SERVLET_HOST=dl14.di.uoa.gr #hostname for voms servlet#
NEW_SERVLET_PORT=8888 #port where voms servlet is listening#
NEW_SERVLET_PATH=/VOMSServlet/VOMSServlet #voms servlet path#
NEW_SERVLET_PROTOCOL=http #protocol: http/https#

KEY_STORE=#path to *.p12 key file#
KEY_STORE_TYPE=PKCS12
KEY_STORE_PWD=#password for the *.p12 key file#

TRUST_STORE=#path to trust store file#
TRUST_STORE_PWD=#password for the trust store file#
TRUST_STORE_TYPE=JKS
@@ -0,0 +1,173 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <artifactId>maven-parent</artifactId>
    <groupId>org.gcube.tools</groupId>
    <version>1.0.0</version>
    <relativePath />
  </parent>

  <groupId>org.gcube.applicationsupportlayer</groupId>
  <artifactId>aslvre</artifactId>
  <version>3.2.0-SNAPSHOT</version>
  <packaging>jar</packaging>
  <name>VRE Management ASL Extension</name>
  <description>
    VRE Management
  </description>
  <scm>
    <connection>scm:svn:http://svn.d4science.research-infrastructures.eu/gcube/trunk/application-support-layer/${project.artifactId}</connection>
    <developerConnection>scm:svn:https://svn.d4science.research-infrastructures.eu/gcube/trunk/application-support-layer/${project.artifactId}</developerConnection>
    <url>http://svn.d4science.research-infrastructures.eu/gcube/trunk/application-support-layer/${project.artifactId}</url>
  </scm>
  <properties>
    <distroDirectory>distro</distroDirectory>
    <maven.compiler.source>1.6</maven.compiler.source>
    <maven.compiler.target>1.6</maven.compiler.target>

    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
  </properties>
  <dependencies>
    <dependency>
      <groupId>org.gcube.core</groupId>
      <artifactId>gcf</artifactId>
      <version>[1.5.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.gcube.applicationsupportlayer</groupId>
      <artifactId>aslcore</artifactId>
      <version>[3.2.1-SNAPSHOT, 4.0.0-SNAPSHOT)</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.gcube.resourcemanagement</groupId>
      <artifactId>vremodeler-stubs</artifactId>
      <version>[2.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.gcube.resourcemanagement</groupId>
      <artifactId>ghnmanager-stubs</artifactId>
      <version>[1.5.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>com.liferay.portal</groupId>
      <artifactId>portal-service</artifactId>
      <version>6.0.6</version>
    </dependency>
    <dependency>
      <groupId>log4j</groupId>
      <artifactId>log4j</artifactId>
      <version>1.2.6</version>
    </dependency>
    <dependency>
      <groupId>javax.portlet</groupId>
      <artifactId>portlet-api</artifactId>
      <version>2.0</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>3.8.1</version>
      <scope>test</scope>
    </dependency>
  </dependencies>
  <build>
    <resources>
      <resource>
        <directory>src/main/java</directory>
        <includes>
          <include>**/*.*</include>
        </includes>
      </resource>
    </resources>
    <plugins>
      <plugin>
        <artifactId>maven-compiler-plugin</artifactId>
        <version>3.0</version>
        <configuration>
          <source>1.6</source>
          <target>1.6</target>
        </configuration>
      </plugin>

      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-jar-plugin</artifactId>
        <version>2.2</version>
        <executions>
          <execution>
            <goals>
              <goal>test-jar</goal>
            </goals>
          </execution>
        </executions>
      </plugin>

      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-surefire-plugin</artifactId>
        <version>2.12</version>
        <configuration>
          <skipTests>true</skipTests>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-resources-plugin</artifactId>
        <version>2.5</version>
        <executions>
          <execution>
            <id>copy-profile</id>
            <phase>install</phase>
            <goals>
              <goal>copy-resources</goal>
            </goals>
            <configuration>
              <outputDirectory>target</outputDirectory>
              <resources>
                <resource>
                  <directory>${distroDirectory}</directory>
                  <filtering>true</filtering>
                  <includes>
                    <include>profile.xml</include>
                  </includes>
                </resource>
              </resources>
            </configuration>
          </execution>
        </executions>
      </plugin>

      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-assembly-plugin</artifactId>
        <version>2.2</version>
        <configuration>
          <descriptors>
            <descriptor>${distroDirectory}/descriptor.xml</descriptor>
          </descriptors>
          <archive>
            <manifest>
              <mainClass>fully.qualified.MainClass</mainClass>
            </manifest>
          </archive>
        </configuration>
        <executions>
          <execution>
            <id>servicearchive</id>
            <phase>install</phase>
            <goals>
              <goal>single</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
</project>
@@ -0,0 +1,121 @@
package org.gcube.application.framework.vremanagement.vremanagement;

import java.io.File;
import java.util.List;

import org.gcube.common.core.informationsystem.client.RPDocument;
import org.gcube.common.core.informationsystem.client.XMLResult;
import org.gcube.common.core.resources.GCUBECollection;
import org.gcube.common.core.resources.GCUBEMCollection;
import org.gcube.common.core.scope.GCUBEScope;

/**
 * @author valia
 *
 */
public interface ISInfoI {

    /**
     * @param query an XQuery to be submitted to the IS
     * @return a list of XMLResults that represent resources from the IS (such as RIs, Generic Resources, Collections, etc.)
     */
    public List<XMLResult> queryIS(String query);

    /**
     * @param type the type of the WS resources to be retrieved
     * @return a list of RPDocuments
     */
    public List<RPDocument> getWS(String type);

    /**
     * Adds a GHN to the active VRE
     * @param url the GHN url
     */
    public void addGHNToScope(String url);

    /**
     * Adds a service to the active VRE
     * @param url the Running Instance url
     * @param className the name of the class where the RI will belong
     * @param name the name of the service it provides
     */
    public void addRIToScope(String url, String className, String name);

    /**
     * @param namePort GHN name:port
     * @return true if the GHN exists, otherwise false
     */
    public boolean existsGHN(String namePort);


    /**
     * @param id the id of the external running instance to be removed
     */
    public void removeExternalRIToVRE(String id);

    /**
     * @param file the file that contains the external running instance to be added
     */
    public void addExternalRIToVRE(File file);

    /**
     * @param url server:port of the GHN to be removed
     */
    public void removeGHNToScope(String url);

    /**
     * @param scope the scope of the collections.
     * @return a list of collections profile.
     * @throws Exception
     */
    public List<GCUBECollection> getCollections(GCUBEScope scope) throws Exception;

    /**
     * @param scope the scope of the metadata collections.
     * @param collectionID the associated collection.
     * @return a list of metadata collections profile.
     * @throws Exception
     */
    public List<GCUBEMCollection> getMCollections(GCUBEScope scope, String collectionID) throws Exception;


    /**
     * @param scope the indices scope.
     * @param mcollectionID the metadata collection id.
     * @return a list of Resource document.
     * @throws Exception
     */
    public List<RPDocument> getXMLIndices(GCUBEScope scope, String mcollectionID) throws Exception;


    /**
     * Return all indices associated with a given metadata collection.
     * @param scope the indices scope.
     * @param mcollectionID the metadata collection id.
     * @return a list of Resource document.
     * @throws Exception
     */
    public List<RPDocument> getIndices(GCUBEScope scope, String mcollectionID) throws Exception;


    /**
     * @param scope the indices scope.
     * @param collectionID the collection id.
     * @return a list of Resource document.
     * @throws Exception
     */
    public List<RPDocument> getCollectionIndices(GCUBEScope scope, String collectionID) throws Exception;


    /**
     * @param scope the collection scope.
     * @param collectionID the collection id.
     * @return a collection profile.
     * @throws Exception
     */
    public GCUBECollection getCollection(GCUBEScope scope, String collectionID) throws Exception;
}
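A minimal sketch of how this interface is intended to be consumed from a portlet or servlet, assuming an ASLSession has already been obtained from the ASL session machinery (the session retrieval, the XQuery text and the GHN name:port below are illustrative and not part of this commit):

    ASLSession session = /* obtained from the ASL session manager */ null;
    ISInfoI info = new ISInfo(session);   // ISInfo is the implementation added later in this commit
    // submit an XQuery to the IS in the session scope (query text is just an example)
    List<XMLResult> results = info.queryIS(
            "for $r in collection('/db/Profiles/GenericResource')//Resource return $r");
    boolean known = info.existsGHN("node1.d4science.org:8080");   // hypothetical GHN name:port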
@@ -0,0 +1,97 @@
package org.gcube.application.framework.vremanagement.vremanagement;

import java.rmi.RemoteException;
import java.util.List;

import org.gcube.vremanagement.vremodeler.stubs.FunctionalityList;
import org.gcube.vremanagement.vremodeler.stubs.FunctionalityNodes;
import org.gcube.vremanagement.vremodeler.stubs.GHNType;
import org.gcube.vremanagement.vremodeler.stubs.GHNsPerFunctionality;
import org.gcube.vremanagement.vremodeler.stubs.SelectedResourceDescriptionType;
import org.gcube.vremanagement.vremodeler.stubs.VREDescription;

public interface VREGeneratorInterface {


    /**
     * @return what the VRE modeler returns
     * @throws RemoteException
     */
    String checkVREStatus() throws RemoteException;

    /**
     * @throws RemoteException
     */
    void deployVRE() throws RemoteException;

    /**
     * @return what the VRE modeler returns
     * @throws RemoteException
     */
    List<GHNType> getGHNs() throws RemoteException;

    /**
     * @return what the VRE modeler returns
     * @throws RemoteException
     */
    VREDescription getVREModel() throws RemoteException;

    /**
     * @return what the VRE modeler returns
     * @throws RemoteException
     */
    FunctionalityList getFunctionality() throws Exception;


    FunctionalityNodes getSelectedFunctionality() throws Exception;


    /**
     * @param selectedGHNIds the GHNs selected
     * @throws RemoteException
     */
    void setGHNs(String[] selectedGHNIds) throws RemoteException;

    /**
     *
     * @return GHNsPerFunctionality
     */
    GHNsPerFunctionality[] getGHNsPerFunctionality() throws RemoteException;

    /**
     * @param VREName the VRE name
     * @param VREDescription a description for the VRE
     * @param VREDesigner the VRE designer
     * @param VREManager the VRE manager
     * @param startTime start time
     * @param endTime end time
     * @throws RemoteException
     */
    void setVREModel(String VREName, String VREDescription, String VREDesigner, String VREManager, long startTime, long endTime) throws RemoteException;

    /**
     * @throws RemoteException
     */
    void setVREtoPendingState() throws RemoteException;

    /**
     * @param funcIds the ids of the selected functionalities
     * @param selResDesc the descriptions of the selected resources
     * @throws RemoteException
     */
    void setFunctionality(int[] funcIds, SelectedResourceDescriptionType[] selResDesc) throws RemoteException;

    /**
     * @return the VRE EPR
     */
    String getVREepr();

    boolean isCloudAvailable();

    boolean setCloudDeploy(int virtualMachines);

    boolean isCloudSelected();

    int getCloudVMSelected();
}
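The interface above implies a define-then-deploy flow against the VRE Modeler service. A hedged sketch of the expected call sequence, assuming some implementation of VREGeneratorInterface is available (the concrete class is not shown in this excerpt) and using illustrative argument values:

    VREGeneratorInterface gen = /* an implementation backed by the VREModeler stubs */ null;
    gen.setVREModel("MyVRE", "A demo VRE", "designer.name", "manager.name",
            System.currentTimeMillis(), System.currentTimeMillis() + 86400000L); // illustrative start/end times
    gen.setGHNs(new String[] { "ghn-id-1", "ghn-id-2" });  // hypothetical GHN identifiers
    gen.deployVRE();                                       // triggers the actual deployment
    String status = gen.checkVREStatus();                  // poll the modeler for progress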
@ -0,0 +1,413 @@
|
|||
package org.gcube.application.framework.vremanagement.vremanagement.impl;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileReader;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.axis.message.addressing.Address;
|
||||
import org.apache.axis.message.addressing.EndpointReferenceType;
|
||||
import org.gcube.application.framework.core.security.PortalSecurityManager;
|
||||
import org.gcube.application.framework.core.security.ServiceContextManager;
|
||||
import org.gcube.application.framework.core.session.ASLSession;
|
||||
import org.gcube.application.framework.vremanagement.vremanagement.ISInfoI;
|
||||
import org.gcube.common.core.contexts.GHNContext;
|
||||
import org.gcube.common.core.informationsystem.client.AtomicCondition;
|
||||
import org.gcube.common.core.informationsystem.client.ISClient;
|
||||
import org.gcube.common.core.informationsystem.client.RPDocument;
|
||||
import org.gcube.common.core.informationsystem.client.XMLResult;
|
||||
import org.gcube.common.core.informationsystem.client.queries.GCUBECollectionQuery;
|
||||
import org.gcube.common.core.informationsystem.client.queries.GCUBEGenericQuery;
|
||||
import org.gcube.common.core.informationsystem.client.queries.GCUBEMCollectionQuery;
|
||||
import org.gcube.common.core.informationsystem.client.queries.WSResourceQuery;
|
||||
import org.gcube.common.core.informationsystem.publisher.ISPublisher;
|
||||
import org.gcube.common.core.informationsystem.publisher.ISPublisherException;
|
||||
import org.gcube.common.core.resources.GCUBECollection;
|
||||
import org.gcube.common.core.resources.GCUBEExternalRunningInstance;
|
||||
import org.gcube.common.core.resources.GCUBEMCollection;
|
||||
import org.gcube.common.core.scope.GCUBEScope;
|
||||
import org.gcube.common.vremanagement.ghnmanager.stubs.AddScopeInputParams;
|
||||
import org.gcube.common.vremanagement.ghnmanager.stubs.GHNManagerPortType;
|
||||
import org.gcube.common.vremanagement.ghnmanager.stubs.ScopeRIParams;
|
||||
import org.gcube.common.vremanagement.ghnmanager.stubs.service.GHNManagerServiceAddressingLocator;
|
||||
|
||||
/**
|
||||
* @author Valia Tsaqgkalidou (NKUA)
|
||||
*/
|
||||
public class ISInfo implements ISInfoI {
|
||||
|
||||
/**
|
||||
* ASLSession to be used
|
||||
*/
|
||||
protected ASLSession session;
|
||||
/**
|
||||
* is client in order to query IS
|
||||
*/
|
||||
protected static ISClient client = null;
|
||||
|
||||
protected static ISPublisher publisher = null;
|
||||
|
||||
/**
|
||||
* Constructs a ISInfo object
|
||||
* @param session the D4Science session to be used for retrieving information needed
|
||||
*/
|
||||
public ISInfo(ASLSession session)
|
||||
{
|
||||
this.session = session;
|
||||
try {
|
||||
publisher = GHNContext.getImplementation(ISPublisher.class);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
if(client == null)
|
||||
{
|
||||
try {
|
||||
client = GHNContext.getImplementation(ISClient.class);
|
||||
} catch (Exception e) {
|
||||
// TODO Auto-generated catch block
|
||||
e.printStackTrace();
|
||||
client = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** {@inheritDoc}*/
|
||||
public List<XMLResult> queryIS(String query) {
|
||||
try {
|
||||
GCUBEGenericQuery queryMan = client.getQuery(GCUBEGenericQuery.class);
|
||||
|
||||
queryMan.setExpression(query);
|
||||
System.out.println(session.getOriginalScopeName());
|
||||
return client.execute(queryMan, session.getScope());
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
return new ArrayList<XMLResult>();
|
||||
}
|
||||
|
||||
/** {@inheritDoc}*/
|
||||
public List<RPDocument> getWS(String type)
|
||||
{
|
||||
WSResourceQuery query = null;
|
||||
try {
|
||||
query = client.getQuery(WSResourceQuery.class);
|
||||
query.addAtomicConditions(new AtomicCondition("/gc:ServiceClass", type));
|
||||
return client.execute(query, session.getScope());
|
||||
} catch (Exception e) {
|
||||
// TODO Auto-generated catch block
|
||||
e.printStackTrace();
|
||||
}
|
||||
return new ArrayList<RPDocument>();
|
||||
|
||||
}
|
||||
|
||||
|
||||
/** {@inheritDoc}*/
|
||||
public void addGHNToScope(String url) {
|
||||
//TODO: this will probably change...
|
||||
|
||||
EndpointReferenceType endpoint = new EndpointReferenceType();
|
||||
try {
|
||||
url = "http://" + url + "/wsrf/services/gcube/common/vremanagement/GHNManager";
|
||||
String[] vos = session.getOriginalScopeName().split("/");
|
||||
System.out.println("querying for vo: /" + vos[1]);
|
||||
|
||||
endpoint.setAddress(new Address(url));
|
||||
GHNManagerServiceAddressingLocator locator = new GHNManagerServiceAddressingLocator();
|
||||
GHNManagerPortType pt = locator.getGHNManagerPortTypePort(endpoint);
|
||||
pt = (GHNManagerPortType) ServiceContextManager.applySecurity(pt, GCUBEScope.getScope("/" + vos[1]), session.getCredential());
|
||||
AddScopeInputParams params = new AddScopeInputParams();
|
||||
params.setScope(session.getOriginalScopeName());
|
||||
params.setMap(""); //eventually, set here the new Service Map
|
||||
pt.addScope(params);
|
||||
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
/** {@inheritDoc}*/
|
||||
public void removeGHNToScope(String url) {
|
||||
//TODO: this will probably change...
|
||||
|
||||
EndpointReferenceType endpoint = new EndpointReferenceType();
|
||||
try {
|
||||
url = "http://" + url + "/wsrf/services/gcube/common/vremanagement/GHNManager";
|
||||
|
||||
String[] vos = session.getOriginalScopeName().split("/");
|
||||
System.out.println("querying for vo: /" + vos[1]);
|
||||
|
||||
endpoint.setAddress(new Address(url));
|
||||
GHNManagerServiceAddressingLocator locator = new GHNManagerServiceAddressingLocator();
|
||||
GHNManagerPortType pt = locator.getGHNManagerPortTypePort(endpoint);
|
||||
pt = (GHNManagerPortType) ServiceContextManager.applySecurity(pt, GCUBEScope.getScope("/" + vos[1]), session.getCredential());
|
||||
pt.removeScope(session.getOriginalScopeName());
|
||||
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
/** {@inheritDoc}*/
|
||||
public void addRIToScope(String url, String className, String name) {
|
||||
//TODO: this will probably change...
|
||||
|
||||
EndpointReferenceType endpoint = new EndpointReferenceType();
|
||||
try {
|
||||
endpoint.setAddress(new Address(url));
|
||||
GHNManagerServiceAddressingLocator locator = new GHNManagerServiceAddressingLocator();
|
||||
GHNManagerPortType pt = locator.getGHNManagerPortTypePort(endpoint);
|
||||
pt = (GHNManagerPortType) ServiceContextManager.applySecurity(pt, session);
|
||||
ScopeRIParams params = new ScopeRIParams();
|
||||
params.setClazz(className);
|
||||
params.setName(name);
|
||||
params.setScope(session.getOriginalScopeName());
|
||||
pt.addRIToScope(params);
|
||||
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
/** {@inheritDoc}*/
|
||||
public void addExternalRIToVRE(File file)
|
||||
{
|
||||
GCUBEExternalRunningInstance ri = null;
|
||||
try {
|
||||
ri = GHNContext.getImplementation(GCUBEExternalRunningInstance.class);
|
||||
} catch (Exception e1) {
|
||||
// TODO Auto-generated catch block
|
||||
e1.printStackTrace();
|
||||
}
|
||||
|
||||
String ret = "";
|
||||
if(file.exists()){
|
||||
try {
|
||||
ri.load(new FileReader(file));
|
||||
} catch (Exception e) {
|
||||
String message = "Error while loading profile for the External Running Instance with id=" + ri.getID() + "\n";
|
||||
System.out.println(message);
|
||||
e.printStackTrace();
|
||||
ret += message;
|
||||
}
|
||||
}else{
|
||||
String message = "An error occur during the approval of the External Running Instance with id=" + ri.getID() + "\n";
|
||||
System.out.println(message);
|
||||
ret += message;
|
||||
}
|
||||
|
||||
|
||||
|
||||
try {
|
||||
// TODO Change this
|
||||
|
||||
publisher.registerGCUBEResource(ri, session.getScope(), new PortalSecurityManager(session));
|
||||
} catch (ISPublisherException e) {
|
||||
String message = "Registration error for the External Running Instance with id=" + ri.getID() + "\n";
|
||||
System.out.println(message);
|
||||
e.printStackTrace();
|
||||
ret += message;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/** {@inheritDoc}*/
|
||||
public void removeExternalRIToVRE(String id)
|
||||
{
|
||||
try {
|
||||
// TODO Change this
|
||||
|
||||
publisher.removeGCUBEResource(id, GCUBEExternalRunningInstance.TYPE, session.getScope(), new PortalSecurityManager(session));
|
||||
} catch (ISPublisherException e) {
|
||||
String message = "Registration error for the External Running Instance with id=" +id + "\n";
|
||||
System.out.println(message);
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/** {@inheritDoc}*/
|
||||
public boolean existsGHN(String namePort)
|
||||
{
|
||||
GCUBEGenericQuery query;
|
||||
try {
|
||||
System.out.println("GHN: " + namePort);
|
||||
query = client.getQuery(GCUBEGenericQuery.class);query.setExpression("for $ghn in collection(\"/db/Profiles/GHN\")//Document/Data/child::*[local-name()='Profile']/Resource where $ghn/Profile/GHNDescription/Name/string() eq '" + namePort + "' return $ghn");
|
||||
String[] vos = session.getOriginalScopeName().split("/");
|
||||
System.out.println("querying for vo: /" + vos[1]);
|
||||
List<XMLResult> resources = client.execute(query, GCUBEScope.getScope("/" + vos[1]));
|
||||
if(resources == null || resources.size() == 0)
|
||||
{
|
||||
System.out.println("Resources are null or empty");
|
||||
return false;
|
||||
}
|
||||
else
|
||||
{
|
||||
System.out.println("Found resources!!!!");
|
||||
return true;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
// TODO Auto-generated catch block
|
||||
e.printStackTrace();
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns all profiles of collections present in the given scope.
|
||||
* @param scope the scope of the collections.
|
||||
* @return a list of collections profile.
|
||||
* @throws Exception when an error occurs.
|
||||
*/
|
||||
public List<GCUBECollection> getCollections(GCUBEScope scope) throws Exception
|
||||
{
|
||||
|
||||
|
||||
try {
|
||||
GCUBECollectionQuery collectionquery = client.getQuery(GCUBECollectionQuery.class);
|
||||
|
||||
return client.execute(collectionquery,scope);
|
||||
|
||||
} catch (Exception e) {
|
||||
System.out.println("Error during Collections retrieving in scope "+scope.getName());
|
||||
e.printStackTrace();
|
||||
throw new Exception("Error during Collections retrieving in scope "+scope.getName(), e);
|
||||
}
|
||||
}

    /**
     * Returns all metadata collections associated with the given collection.
     * @param scope the scope of the metadata collections.
     * @param collectionID the id of the associated collection.
     * @return a list of metadata collection profiles.
     * @throws Exception when an error occurs.
     */
    public List<GCUBEMCollection> getMCollections(GCUBEScope scope, String collectionID) throws Exception
    {
        try {
            GCUBEMCollectionQuery mcolQuery = client.getQuery(GCUBEMCollectionQuery.class);
            mcolQuery.addGenericCondition("$result/child::*[local-name()='Profile']/RelatedCollection/CollectionID/string() eq '" + collectionID + "'");
            return client.execute(mcolQuery, scope);
        } catch (Exception e) {
            System.out.println("Error while retrieving MCollections in scope " + scope.getName());
            e.printStackTrace();
            throw new Exception("Error while retrieving MCollections in scope " + scope.getName(), e);
        }
    }

    /**
     * Returns all XML indices associated with the given metadata collection.
     * @param scope the scope of the indices.
     * @param mcollectionID the metadata collection id.
     * @return a list of resource documents.
     * @throws Exception when an error occurs.
     */
    public List<RPDocument> getXMLIndices(GCUBEScope scope, String mcollectionID) throws Exception
    {
        try {
            WSResourceQuery queryXMLIndices = client.getQuery(WSResourceQuery.class);
            queryXMLIndices.addAtomicConditions(new AtomicCondition("//gc:ServiceClass", "MetadataManagement"),
                    new AtomicCondition("//gc:ServiceName", "XMLIndexer"),
                    new AtomicCondition("/child::*[local-name()='Id']", mcollectionID),
                    new AtomicCondition("/child::*[local-name()='AccessType']", "GCUBEDaix"));
            return client.execute(queryXMLIndices, scope);
        } catch (Exception e) {
            System.out.println("Error while retrieving XML Indices in scope " + scope.getName());
            e.printStackTrace();
            throw new Exception("Error while retrieving XML Indices in scope " + scope.getName(), e);
        }
    }

    /**
     * Returns all indices associated with a given metadata collection.
     * @param scope the scope of the indices.
     * @param mcollectionID the metadata collection id.
     * @return a list of resource documents.
     * @throws Exception when an error occurs.
     */
    public List<RPDocument> getIndices(GCUBEScope scope, String mcollectionID) throws Exception
    {
        try {
            WSResourceQuery queryIndices = client.getQuery(WSResourceQuery.class);
            queryIndices.addAtomicConditions(new AtomicCondition("//gc:ServiceClass", "Index"),
                    new AtomicCondition("/child::*[local-name()='CollectionID']", mcollectionID));
            return client.execute(queryIndices, scope);
        } catch (Exception e) {
            System.out.println("Error while retrieving Indices in scope " + scope.getName());
            e.printStackTrace();
            throw new Exception("Error while retrieving Indices in scope " + scope.getName(), e);
        }
    }

    /**
     * Returns all indices associated with a given collection.
     * @param scope the scope of the indices.
     * @param collectionID the collection id.
     * @return a list of resource documents.
     * @throws Exception when an error occurs.
     */
    public List<RPDocument> getCollectionIndices(GCUBEScope scope, String collectionID) throws Exception
    {
        try {
            WSResourceQuery queryIndices = client.getQuery(WSResourceQuery.class);
            queryIndices.addAtomicConditions(new AtomicCondition("//gc:ServiceClass", "Index"),
                    new AtomicCondition("/child::*[local-name()='CollectionID']", collectionID));
            return client.execute(queryIndices, scope);
        } catch (Exception e) {
            System.out.println("Error while retrieving collection Indices in scope " + scope.getName());
            e.printStackTrace();
            throw new Exception("Error while retrieving collection Indices in scope " + scope.getName(), e);
        }
    }

    /**
     * Returns a collection profile.
     * @param scope the collection scope.
     * @param collectionID the collection id.
     * @return the collection profile.
     * @throws Exception when an error occurs.
     */
    public GCUBECollection getCollection(GCUBEScope scope, String collectionID) throws Exception
    {
        try {
            GCUBECollectionQuery collectionQuery = client.getQuery(GCUBECollectionQuery.class);
            collectionQuery.addAtomicConditions(new AtomicCondition("//ID", collectionID));
            return client.execute(collectionQuery, scope).get(0);
        } catch (Exception e) {
            System.out.println("Error while retrieving the Collection with ID " + collectionID + " in scope " + scope.getName());
            e.printStackTrace();
            throw new Exception("Error while retrieving the Collection with ID " + collectionID + " in scope " + scope.getName(), e);
        }
    }
}
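
A minimal usage sketch of the discovery helpers above, added for illustration and not part of the imported source. It assumes an already constructed instance of the helper class (referred to here as isHelper; the class name and its construction fall outside this excerpt) and a known collection id; only the method signatures come from the code above.

    // illustrative fragment: "isHelper" and "collectionId" are placeholder names
    GCUBEScope scope = GCUBEScope.getScope("/gcube");                                  // assumed scope string
    List<GCUBECollection> collections = isHelper.getCollections(scope);                // all collection profiles in scope
    List<GCUBEMCollection> mcols = isHelper.getMCollections(scope, collectionId);      // metadata collections of one collection
    List<RPDocument> indices = isHelper.getCollectionIndices(scope, collectionId);     // indices built over that collection
    System.out.println(collections.size() + " collections, " + mcols.size() + " metadata collections, " + indices.size() + " indices");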
@@ -0,0 +1,452 @@
package org.gcube.application.framework.vremanagement.vremanagement.impl;

import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger;

import javax.xml.rpc.ServiceException;

import org.apache.axis.message.addressing.Address;
import org.apache.axis.message.addressing.EndpointReference;
import org.apache.axis.message.addressing.EndpointReferenceType;
import org.gcube.application.framework.core.cache.RIsManager;
import org.gcube.application.framework.core.security.ServiceContextManager;
import org.gcube.application.framework.core.session.ASLSession;
import org.gcube.application.framework.vremanagement.vremanagement.VREGeneratorInterface;
import org.gcube.common.core.scope.GCUBEScope;
import org.gcube.common.core.types.VOID;
import org.gcube.common.core.utils.logging.GCUBEClientLog;
import org.gcube.informationsystem.cache.SrvType;
import org.gcube.vremanagement.vremodeler.stubs.FunctionalityList;
import org.gcube.vremanagement.vremodeler.stubs.FunctionalityNodes;
import org.gcube.vremanagement.vremodeler.stubs.GHNArray;
import org.gcube.vremanagement.vremodeler.stubs.GHNList;
import org.gcube.vremanagement.vremodeler.stubs.GHNType;
import org.gcube.vremanagement.vremodeler.stubs.GHNsPerFunctionality;
import org.gcube.vremanagement.vremodeler.stubs.ModelerFactoryPortType;
import org.gcube.vremanagement.vremodeler.stubs.ModelerServicePortType;
import org.gcube.vremanagement.vremodeler.stubs.ReportList;
import org.gcube.vremanagement.vremodeler.stubs.RunningInstanceMessage;
import org.gcube.vremanagement.vremodeler.stubs.SelectedResourceDescriptionType;
import org.gcube.vremanagement.vremodeler.stubs.SetFunctionality;
import org.gcube.vremanagement.vremodeler.stubs.VREDescription;
import org.gcube.vremanagement.vremodeler.stubs.service.ModelerFactoryServiceAddressingLocator;
import org.gcube.vremanagement.vremodeler.stubs.service.ModelerServiceAddressingLocator;

/**
 * Client-side helper, used by the VRE definition wizard, that interacts with the
 * VREModeler service to model and deploy Virtual Research Environments.
 *
 * @author Massimiliano Assante - ISTI-CNR
 */
public class VREGeneratorEvo implements VREGeneratorInterface {

    GCUBEClientLog log = new GCUBEClientLog("ASL_VRE");

    String scope;
    ASLSession session;
    ModelerServicePortType modelPortType;

    private final static String MODELERS_NO = "MODELERS_NO";

    protected static AtomicInteger vreId = new AtomicInteger(0);

    /**
     * @param session the d4s session
     * @param id the id of the VRE definition to bind to
     */
    public VREGeneratorEvo(ASLSession session, String id) {
        this(session);
        EndpointReferenceType epr = getEprGivenID(id);
        log.info("VREGeneratorEvo called on VRE id (epr) " + epr + " scope: " + session.getScope().toString());
        this.scope = session.getScopeName();
        this.session = session;
        modelPortType = applySecurityEPR(epr);
    }

    public boolean isVreModelerServiceUp() {
        return Integer.parseInt(session.getAttribute(MODELERS_NO).toString()) > 0;
    }

    /**
     * @param session the d4s session
     */
    public VREGeneratorEvo(ASLSession session) {
        super();
        log.info("VREGeneratorEvo scope: " + session.getScope().toString());
        this.scope = session.getScopeName();
        this.session = session;
        modelPortType = null;
        getModelPortType();
    }

    /**
     * @param session the d4s session
     * @return the reports describing the existing VRE definitions
     */
    public ReportList getAllVREs(ASLSession session) {
        EndpointReferenceType serviceEPR = new EndpointReferenceType();
        ModelerFactoryPortType mFPType = null;
        ModelerFactoryServiceAddressingLocator mFSLocator = new ModelerFactoryServiceAddressingLocator();
        EndpointReference[] modelerURIs;
        try {
            modelerURIs = RIsManager.getInstance().getISCache(GCUBEScope.getScope(scope)).getEPRsFor("VREManagement", "VREModeler", SrvType.FACTORY.name());
        } catch (Exception e1) {
            e1.printStackTrace();
            return null;
        }
        for (int i = 0; i < modelerURIs.length; i++) {
            try {
                System.out.println("getModelFactoryPortTypePort(epr)");
                session.setScope(scope);
                serviceEPR.setAddress(new Address(modelerURIs[vreId.getAndIncrement() % modelerURIs.length].getAddress().toString()));
                mFPType = (ModelerFactoryPortType) ServiceContextManager.applySecurity(mFSLocator.getModelerFactoryPortTypePort(serviceEPR), session);
                return mFPType.getAllVREs(new VOID());
            } catch (ServiceException e) {
                e.printStackTrace();
            } catch (RemoteException e) {
                e.printStackTrace();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        return null;
    }

    /**
     * @param session the d4s session
     * @param id the id of the VRE to be removed
     */
    public void removeVRE(ASLSession session, String id) {
        EndpointReferenceType serviceEPR = new EndpointReferenceType();
        ModelerFactoryPortType mFPType = null;
        ModelerFactoryServiceAddressingLocator mFSLocator = new ModelerFactoryServiceAddressingLocator();
        EndpointReference[] modelerURIs;
        try {
            modelerURIs = RIsManager.getInstance().getISCache(GCUBEScope.getScope(scope)).getEPRsFor("VREManagement", "VREModeler", SrvType.FACTORY.name());
        } catch (Exception e1) {
            e1.printStackTrace();
            return;
        }
        for (int i = 0; i < modelerURIs.length; i++) {
            try {
                System.out.println("getModelFactoryPortTypePort(epr)");
                serviceEPR.setAddress(new Address(modelerURIs[vreId.getAndIncrement() % modelerURIs.length].getAddress().toString()));
                session.setScope(scope);
                mFPType = (ModelerFactoryPortType) ServiceContextManager.applySecurity(mFSLocator.getModelerFactoryPortTypePort(serviceEPR), session);
                System.out.println("ID RECEIVED TO REMOVE:" + id);
                mFPType.removeVRE(id);
                break;
            } catch (ServiceException e) {
                e.printStackTrace();
            } catch (RemoteException e) {
                e.printStackTrace();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
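
    // Hedged usage note (added for illustration; not part of the original source): a portal-side
    // caller would typically list the existing VRE definitions and remove one by id, roughly:
    //
    //     VREGeneratorEvo gen = new VREGeneratorEvo(aslSession);
    //     ReportList reports = gen.getAllVREs(aslSession);
    //     gen.removeVRE(aslSession, someVreId);   // "aslSession" and "someVreId" are placeholders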

    /**
     * Applies the portal security settings to the modeler port type bound to the given EPR.
     * @param epr the EPR of the modeler WS-Resource
     * @return the secured modeler port type, or null if it could not be created
     */
    protected ModelerServicePortType applySecurityEPR(EndpointReferenceType epr) {
        try {
            ModelerServiceAddressingLocator mSALocator = new ModelerServiceAddressingLocator();
            session.setScope(scope);
            modelPortType = ServiceContextManager.applySecurity(mSALocator.getModelerServicePortTypePort(epr), session);
            return modelPortType;
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }

    @Override
    public String checkVREStatus() throws RemoteException {
        return modelPortType.checkStatus(new VOID());
    }

    /** {@inheritDoc} */
    @Override
    public void deployVRE() throws RemoteException {
        modelPortType.deployVRE(new VOID());
    }

    public String[] getExistingNamesVREs() {
        // TODO Auto-generated method stub
        return null;
    }

    public FunctionalityNodes getSelectedFunctionality() throws Exception {
        FunctionalityNodes list = modelPortType.getFunctionalityNodes(new VOID());
        return list;
    }

    @Override
    public List<GHNType> getGHNs() throws RemoteException {
        log.debug("Asking gHN list to service");

        List<GHNType> toReturn = new ArrayList<GHNType>();
        FunctionalityNodes list = modelPortType.getFunctionalityNodes(new VOID());

        GHNList ghns = list.getSelectableGHNs(); // gHNs selectable for the missing functionalities
        GHNType[] types = ghns.getList();

        for (int i = 0; i < types.length; i++) {
            try {
                log.debug("returned GHN: " + types[i].getHost());
                toReturn.add(new GHNType(types[i].getHost(), types[i].getId(), types[i].getMemory(), types[i].getRelatedRIs(), types[i].isSecurityEnabled(),
                        types[i].isSelected(), types[i].getSite()));
            } catch (NullPointerException e) {
                e.printStackTrace();
                return toReturn;
            }
        }
        return toReturn;
    }

    @Override
    public GHNsPerFunctionality[] getGHNsPerFunctionality() throws RemoteException {
        FunctionalityNodes list = modelPortType.getFunctionalityNodes(new VOID());
        return list.getFunctionalities();
    }
    // RunningInstanceMessage[] ris = list.getFunctionalities()[1].getMissingServices(); // there are n missing RunningInstances (Services)
    // ris[0].
    // list.getFunctionalities()[1].getGhns(); // they will be added for the functionality

    @Override
    public void setFunctionality(int[] funcIds, SelectedResourceDescriptionType[] selResDesc) throws RemoteException {
        SetFunctionality sf = new SetFunctionality(funcIds, selResDesc);
        modelPortType.setFunctionality(sf);
    }

    @Override
    public FunctionalityList getFunctionality() throws Exception {
        FunctionalityList list = modelPortType.getFunctionality(new VOID());
        return list;
    }

    public String getMetadataRelatedToCollection() throws RemoteException {
        return null;
    }

    public String getQuality() throws RemoteException {
        // TODO Auto-generated method stub
        return null;
    }

    /**
     * First call of the VRE definition wizard: retrieves the current VRE description.
     */
    @Override
    public VREDescription getVREModel() throws RemoteException {
        VREDescription desc = modelPortType.getDescription(new VOID());
        return desc;
    }

    @Override
    public void setGHNs(String[] selectedGHNIds) throws RemoteException {
        modelPortType.setUseCloud(false);
        GHNArray ghnArray = new GHNArray(selectedGHNIds);
        modelPortType.setGHNs(ghnArray);
    }

    @Override
    public void setVREModel(String VREName, String VREDescription,
            String VREDesigner, String VREManager, long startTime, long endTime)
            throws RemoteException {
        VREDescription vreDesc = new VREDescription();
        vreDesc.setDescription(VREDescription);
        vreDesc.setDesigner(VREDesigner);
        vreDesc.setManager(VREManager);
        vreDesc.setName(VREName);
        Calendar start = Calendar.getInstance();
        start.setTimeInMillis(startTime);
        vreDesc.setStartTime(start);
        Calendar end = Calendar.getInstance();
        end.setTimeInMillis(endTime);
        vreDesc.setEndTime(end);

        log.debug("StartTime = " + start.getTime());
        log.debug("EndTime = " + end.getTime());

        modelPortType.setDescription(vreDesc);
    }

    public void setVREtoPendingState() throws RemoteException {
        modelPortType.setVREtoPendingState(new VOID());
    }

    /**
     * Locates a VREModeler factory through the IS, creates a new modeler resource and
     * binds the secured modeler port type to it.
     */
    private synchronized void getModelPortType() {
        EndpointReferenceType serviceEPR = new EndpointReferenceType();
        ModelerFactoryPortType mFPType = null;

        EndpointReferenceType VREEndpointReferenceType;
        if (modelPortType == null) {
            log.warn("modelPortType is null, creating a new modeler resource");
            ModelerFactoryServiceAddressingLocator mFSLocator = new ModelerFactoryServiceAddressingLocator();

            EndpointReference[] modelerURIs;
            try {
                modelerURIs = RIsManager.getInstance().getISCache(GCUBEScope.getScope(scope)).getEPRsFor("VREManagement", "VREModeler", SrvType.FACTORY.name());
            } catch (Exception e1) {
                e1.printStackTrace();
                return;
            }
            log.debug("vre modelers: " + modelerURIs.length);
            session.setAttribute(MODELERS_NO, modelerURIs.length);
            for (int i = 0; i < modelerURIs.length; i++) {
                try {
                    System.out.println("getModelFactoryPortTypePort(epr)");
                    serviceEPR.setAddress(new Address(modelerURIs[vreId.getAndIncrement() % modelerURIs.length].getAddress().toString()));
                    session.setScope(scope);
                    mFPType = ServiceContextManager.applySecurity(mFSLocator.getModelerFactoryPortTypePort(serviceEPR), session);
                    VREEndpointReferenceType = mFPType.createResource(new VOID());
                    mFPType.getAllVREs(new VOID());
                    ModelerServiceAddressingLocator mSALocator = new ModelerServiceAddressingLocator();
                    session.setScope(scope);
                    modelPortType = ServiceContextManager.applySecurity(mSALocator.getModelerServicePortTypePort(VREEndpointReferenceType), session);
                    // Attaching credentials to the port type
                    System.out.println("Attaching Credential to port type");
                    break;
                } catch (ServiceException e) {
                    e.printStackTrace();
                } catch (RemoteException e) {
                    e.printStackTrace();
                } catch (Exception e) {
                    e.printStackTrace();
                    new Random(System.currentTimeMillis()).nextInt(modelerURIs.length);
                }
            }
        } else
            log.debug("modelPortType != null");
    }

    /** {@inheritDoc} */
    public String getVREepr() {
        return modelPortType.toString();
    }

    /**
     * Returns the EPR of the modeler resource with the given id.
     * @param id the VRE definition id
     * @return the EPR of the corresponding modeler resource, or null if it cannot be resolved
     */
    private EndpointReferenceType getEprGivenID(String id) {
        EndpointReferenceType serviceEPR = new EndpointReferenceType();
        ModelerFactoryPortType mFPType = null;
        ModelerFactoryServiceAddressingLocator mFSLocator = new ModelerFactoryServiceAddressingLocator();
        EndpointReference[] modelerURIs;
        try {
            session.setScope(scope);
            modelerURIs = RIsManager.getInstance().getISCache(session.getScope()).getEPRsFor("VREManagement", "VREModeler", SrvType.FACTORY.name());
        } catch (Exception e1) {
            e1.printStackTrace();
            return null;
        }
        for (int i = 0; i < modelerURIs.length; i++) {
            try {
                log.debug("getModelFactoryPortTypePort(epr)");
                serviceEPR.setAddress(new Address(modelerURIs[vreId.getAndIncrement() % modelerURIs.length].getAddress().toString()));
                session.setScope(scope);
                mFPType = (ModelerFactoryPortType) ServiceContextManager.applySecurity(mFSLocator.getModelerFactoryPortTypePort(serviceEPR), session);
                return mFPType.getEPRbyId(id);
            } catch (ServiceException e) {
                e.printStackTrace();
            } catch (RemoteException e) {
                e.printStackTrace();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        return null;
    }

    public boolean isCloudAvailable() {
        // TODO: check the actual availability
        return true;
    }

    /**
     * @return true if cloud deployment has been selected for this VRE definition, false otherwise or on error
     */
    public boolean isCloudSelected() {
        System.out.println("isCloudSelected()");
        boolean toReturn = false;
        try {
            toReturn = modelPortType.isUseCloud(new VOID());
        } catch (RemoteException e) {
            e.printStackTrace();
            return false;
        }
        return toReturn;
    }

    public boolean setCloudDeploy(int virtualMachines) {
        try {
            log.debug("setUseCloud(true)");
            modelPortType.setUseCloud(true);
            log.debug("setCloudVMs #: " + virtualMachines);
            modelPortType.setCloudVMs(virtualMachines);
        } catch (RemoteException e) {
            e.printStackTrace();
            return false;
        }
        return true;
    }

    /**
     * @return the number of cloud virtual machines selected, or -1 if the value cannot be retrieved
     */
    public int getCloudVMSelected() {
        int toReturn = -1;
        try {
            toReturn = modelPortType.getCloudVMs(new VOID());
        } catch (RemoteException e) {
            e.printStackTrace();
            return toReturn;
        }
        return toReturn;
    }
}
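
A hedged end-to-end sketch of how the wizard methods of VREGeneratorEvo are meant to be chained, added for illustration only: the ASLSession, the functionality ids, the gHN ids and the VRE metadata below are placeholders, and only the VREGeneratorEvo calls are taken from the class above.

    // illustrative fragment -- all literal values are placeholders
    static void defineAndDeployVre(ASLSession session) throws Exception {
        VREGeneratorEvo generator = new VREGeneratorEvo(session);        // creates and binds a modeler resource

        long now = System.currentTimeMillis();
        generator.setVREModel("MyVRE", "A sample VRE", "designer.name", "manager.name",
                now, now + 30L * 24 * 60 * 60 * 1000);                   // roughly 30 days of lifetime

        generator.setFunctionality(new int[] {1, 2}, new SelectedResourceDescriptionType[0]);

        if (generator.isCloudAvailable())
            generator.setCloudDeploy(2);                                  // deploy on 2 cloud virtual machines
        else
            generator.setGHNs(new String[] {"ghn-id-1", "ghn-id-2"});     // placeholder gHN identifiers

        generator.deployVRE();
        System.out.println("deployment status: " + generator.checkVREStatus());
    }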