Initial import.

git-svn-id: http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/application-support-layer/applicationSupportLayerCore@11340 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
Rena Tsantouli 2009-04-14 07:55:38 +00:00
parent 784e23181b
commit 06d753a43a
27 changed files with 0 additions and 2237 deletions

View File

@@ -1,11 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="con" path="org.eclipse.jdt.USER_LIBRARY/ehcache"/>
<classpathentry kind="con" path="org.eclipse.jdt.USER_LIBRARY/gCore"/>
<classpathentry kind="con" path="org.eclipse.jdt.USER_LIBRARY/dependenciesD4S"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/jre1.5.0_16"/>
<classpathentry kind="lib" path="C:/Documents and Settings/rena/Desktop/ISCache.jar"/>
<classpathentry kind="lib" path="C:/Documents and Settings/rena/Desktop/lo/lib/org.gcube.thumbnailer.stubs.jar"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View File

@@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>applicationSupportLayerCore</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>

View File

@@ -1,3 +0,0 @@
#Mon Sep 22 12:37:04 EEST 2008
eclipse.preferences.version=1
org.eclipse.jdt.ui.text.custom_code_templates=<?xml version\="1.0" encoding\="UTF-8" standalone\="no"?><templates/>

View File

@@ -1,71 +0,0 @@
<project default="compile" basedir=".">
<property environment="env" />
<property name="globus.location" location="${env.GLOBUS_LOCATION}"/>
<property name="project.build" value="build"/>
<property name="build.classes" value="${project.build}/classes"/>
<target name="compile" depends="clean ,setenv" description="Compile project source code">
<echo>Compiling project source code</echo>
<javac srcdir="."
destdir="${build.classes}"
classpathref="classpath"
debug="on"
optimize="off"
deprecation="on"/>
<!--<copy file="etc/**" toDir="${build.classes}"/> -->
</target>
<target name="jar" depends="compile">
<jar destfile="ASL-Core.jar" >
<fileset dir="${build.classes}">
<include name="**/*.class"/>
<include name="**/*.xml"/>
<include name="**/*.properties"/>
</fileset>
<fileset dir=".">
<include name="**/etc/*.xml"/>
<include name="**/etc/*.properties"/>
</fileset>
</jar>
</target>
<target name="setenv" description="Check for libraries and print out config information">
<mkdir dir="${project.build}"/>
<mkdir dir="${build.classes}"/>
<path id="classpath">
<pathelement location="${build.classes}"/>
<pathelement path="${java.class.path}"/>
<fileset dir="${globus.location}/lib">
<include name="*.jar"/>
</fileset>
<fileset dir="${ehcache}/">
<include name="*.jar"/>
</fileset>
</path>
</target>
<target name="javadoc">
<javadoc access="public"
author="true"
destdir="doc/api"
nodeprecated="false"
nodeprecatedlist="false"
noindex="false"
nonavbar="false"
notree="false"
source="1.5"
sourcepath="src"
packagenames="org.gcube.application.framework.*"
splitindex="true"
use="true"
version="true"
failonerror="false">
<classpath>
<fileset dir="${globus.location}/lib">
<include name="*.jar"/>
</fileset>
</classpath>
</javadoc>
</target>
<target name="clean" description="Delete classes and existing library">
<delete quiet="true" dir="${project.build}"/>
</target>
</project>

View File

@@ -1,538 +0,0 @@
<ehcache xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="ehcache.xsd">
<!--
CacheManager Configuration
==========================
An ehcache.xml corresponds to a single CacheManager.
See instructions below or the ehcache schema (ehcache.xsd) on how to configure.
DiskStore configuration
=======================
Sets the path to the directory where cache files are created.
If the path is a Java System Property it is replaced by its value in the
running VM.
The following properties are translated:
* user.home - User's home directory
* user.dir - User's current working directory
* java.io.tmpdir - Default temp file path
Subdirectories can be specified below the property e.g. java.io.tmpdir/one
-->
<diskStore path="java.io.tmpdir"/>
<!--
CacheManagerEventListener
=========================
Specifies a CacheManagerEventListenerFactory which will be used to create a
CacheManagerEventListener, which is notified when Caches are added or removed from the CacheManager.
The attributes of CacheManagerEventListenerFactory are:
* class - a fully qualified factory class name
* properties - comma separated properties having meaning only to the factory.
Sets the fully qualified class name to be registered as the CacheManager event listener.
The events include:
* adding a Cache
* removing a Cache
Callbacks to listener methods are synchronous and unsynchronized. It is the responsibility
of the implementer to safely handle the potential performance and thread safety issues
depending on what their listener is doing.
If no class is specified, no listener is created. There is no default.
-->
<cacheManagerEventListenerFactory class="" properties=""/>
<!--
CacheManagerPeerProvider
========================
(Enable for distributed operation)
Specifies a CacheManagerPeerProviderFactory which will be used to create a
CacheManagerPeerProvider, which discovers other CacheManagers in the cluster.
The attributes of cacheManagerPeerProviderFactory are:
* class - a fully qualified factory class name
* properties - comma separated properties having meaning only to the factory.
Ehcache comes with a built-in RMI-based distribution system with two means of discovery of
CacheManager peers participating in the cluster:
* automatic, using a multicast group. This one automatically discovers peers and detects
changes such as peers entering and leaving the group
* manual, using manual rmiURL configuration. A hardcoded list of peers is provided at
configuration time.
Configuring Automatic Discovery:
Automatic discovery is configured as per the following example:
<cacheManagerPeerProviderFactory
class="net.sf.ehcache.distribution.RMICacheManagerPeerProviderFactory"
properties="peerDiscovery=automatic, multicastGroupAddress=230.0.0.1,
multicastGroupPort=4446, timeToLive=32"/>
Valid properties are:
* peerDiscovery (mandatory) - specify "automatic"
* multicastGroupAddress (mandatory) - specify a valid multicast group address
* multicastGroupPort (mandatory) - specify a dedicated port for the multicast heartbeat
traffic
* timeToLive - specify a value between 0 and 255 which determines how far the packets will
propagate.
By convention, the restrictions are:
0 - the same host
1 - the same subnet
32 - the same site
64 - the same region
128 - the same continent
255 - unrestricted
Configuring Manual Discovery:
Manual discovery is configured as per the following example:
<cacheManagerPeerProviderFactory class=
"net.sf.ehcache.distribution.RMICacheManagerPeerProviderFactory"
properties="peerDiscovery=manual,
rmiUrls=//server1:40000/sampleCache1|//server2:40000/sampleCache1
| //server1:40000/sampleCache2|//server2:40000/sampleCache2"
propertySeparator="," />
Valid properties are:
* peerDiscovery (mandatory) - specify "manual"
* rmiUrls (mandatory) - specify a pipe separated list of rmiUrls, in the form
//hostname:port
The hostname is the hostname of the remote CacheManager peer. The port is the listening
port of the RMICacheManagerPeerListener of the remote CacheManager peer.
-->
<cacheManagerPeerProviderFactory
class="net.sf.ehcache.distribution.RMICacheManagerPeerProviderFactory"
properties="peerDiscovery=automatic,
multicastGroupAddress=230.0.0.1,
multicastGroupPort=4446, timeToLive=1"
propertySeparator=","
/>
<!--
CacheManagerPeerListener
========================
(Enable for distributed operation)
Specifies a CacheManagerPeerListenerFactory which will be used to create a
CacheManagerPeerListener, which
listens for messages from cache replicators participating in the cluster.
The attributes of cacheManagerPeerListenerFactory are:
class - a fully qualified factory class name
properties - comma separated properties having meaning only to the factory.
Ehcache comes with a built-in RMI-based distribution system. The listener component is
RMICacheManagerPeerListener which is configured using
RMICacheManagerPeerListenerFactory. It is configured as per the following example:
<cacheManagerPeerListenerFactory
class="net.sf.ehcache.distribution.RMICacheManagerPeerListenerFactory"
properties="hostName=fully_qualified_hostname_or_ip,
port=40001,
socketTimeoutMillis=120000"
propertySeparator="," />
All properties are optional. They are:
* hostName - the hostName of the host the listener is running on. Specify
where the host is multihomed and you want to control the interface over which cluster
messages are received. Defaults to the host name of the default interface if not
specified.
* port - the port the listener listens on. This defaults to a free port if not specified.
* socketTimeoutMillis - the number of ms client sockets will stay open when sending
messages to the listener. This should be long enough for the slowest message.
If not specified it defaults to 120000ms.
-->
<cacheManagerPeerListenerFactory
class="net.sf.ehcache.distribution.RMICacheManagerPeerListenerFactory"/>
<!--
Cache configuration
===================
The following attributes are required.
name:
Sets the name of the cache. This is used to identify the cache. It must be unique.
maxElementsInMemory:
Sets the maximum number of objects that will be created in memory
maxElementsOnDisk:
Sets the maximum number of objects that will be maintained in the DiskStore
The default value is zero, meaning unlimited.
eternal:
Sets whether elements are eternal. If eternal, timeouts are ignored and the
element is never expired.
overflowToDisk:
Sets whether elements can overflow to disk when the memory store
has reached the maxInMemory limit.
The following attributes and elements are optional.
timeToIdleSeconds:
Sets the time to idle for an element before it expires.
i.e. The maximum amount of time between accesses before an element expires
Is only used if the element is not eternal.
Optional attribute. A value of 0 means that an Element can idle for infinity.
The default value is 0.
timeToLiveSeconds:
Sets the time to live for an element before it expires.
i.e. The maximum time between creation time and when an element expires.
Is only used if the element is not eternal.
Optional attribute. A value of 0 means that an Element can live for infinity.
The default value is 0.
diskPersistent:
Whether the disk store persists between restarts of the Virtual Machine.
The default value is false.
diskExpiryThreadIntervalSeconds:
The number of seconds between runs of the disk expiry thread. The default value
is 120 seconds.
diskSpoolBufferSizeMB:
This is the size to allocate the DiskStore for a spool buffer. Writes are made
to this area and then asynchronously written to disk. The default size is 30MB.
Each spool buffer is used only by its cache. If you get OutOfMemory errors consider
lowering this value. To improve DiskStore performance consider increasing it. Trace level
logging in the DiskStore will show if put back ups are occurring.
memoryStoreEvictionPolicy:
The eviction policy enforced when the maxElementsInMemory limit is reached. The default
policy is Least Recently Used (specified as LRU). Other available policies are
First In First Out (specified as FIFO) and Less Frequently Used
(specified as LFU).
Cache elements can also contain sub elements which take the same format of a factory class
and properties. Defined sub-elements are:
* cacheEventListenerFactory - Enables registration of listeners for cache events, such as
put, remove, update, and expire.
* bootstrapCacheLoaderFactory - Specifies a BootstrapCacheLoader, which is called by a
cache on initialisation to prepopulate itself.
* cacheExtensionFactory - Specifies a CacheExtension, a generic mechanism to tie a class
which holds a reference to a cache to the cache lifecycle.
* cacheExceptionHandlerFactory - Specifies a CacheExceptionHandler, which is called when
cache exceptions occur.
* cacheLoaderFactory - Specifies a CacheLoader, which can be used both asynchronously and
synchronously to load objects into a cache.
RMI Cache Replication
Each cache that will be distributed needs to set a cache event listener which replicates
messages to the other CacheManager peers. For the built-in RMI implementation this is done
by adding a cacheEventListenerFactory element of type RMICacheReplicatorFactory to each
distributed cache's configuration as per the following example:
<cacheEventListenerFactory class="net.sf.ehcache.distribution.RMICacheReplicatorFactory"
properties="replicateAsynchronously=true,
replicatePuts=true,
replicateUpdates=true,
replicateUpdatesViaCopy=true,
replicateRemovals=true
asynchronousReplicationIntervalMillis=<number of milliseconds>"
propertySeparator="," />
The RMICacheReplicatorFactory recognises the following properties:
* replicatePuts=true|false - whether new elements placed in a cache are
replicated to others. Defaults to true.
* replicateUpdates=true|false - whether new elements which override an
element already existing with the same key are replicated. Defaults to true.
* replicateRemovals=true - whether element removals are replicated. Defaults to true.
* replicateAsynchronously=true | false - whether replications are
asynchronous (true) or synchronous (false). Defaults to true.
* replicateUpdatesViaCopy=true | false - whether the new elements are
copied to other caches (true), or whether a remove message is sent. Defaults to true.
* asynchronousReplicationIntervalMillis=<number of milliseconds> - The asynchronous
replicator runs at a set interval of milliseconds. The default is 1000. The minimum
is 10. This property is only applicable if replicateAsynchronously=true
Cluster Bootstrapping
The RMIBootstrapCacheLoader bootstraps caches in clusters where RMICacheReplicators are
used. It is configured as per the following example:
<bootstrapCacheLoaderFactory
class="net.sf.ehcache.distribution.RMIBootstrapCacheLoaderFactory"
properties="bootstrapAsynchronously=true, maximumChunkSizeBytes=5000000"
propertySeparator="," />
The RMIBootstrapCacheLoaderFactory recognises the following optional properties:
* bootstrapAsynchronously=true|false - whether the bootstrap happens in the background
after the cache has started. If false, bootstrapping must complete before the cache is
made available. The default value is true.
* maximumChunkSizeBytes=<integer> - Caches can potentially be very large, larger than the
memory limits of the VM. This property allows the bootstrapper to fetch elements in
chunks. The default chunk size is 5000000 (5MB).
Cache Exception Handling
By default, most cache operations will propagate a runtime CacheException on failure. An
interceptor, using a dynamic proxy, may be configured so that a CacheExceptionHandler can
be configured to intercept Exceptions. Errors are not intercepted.
It is configured as per the following example:
<cacheExceptionHandlerFactory class="com.example.ExampleExceptionHandlerFactory"
properties="logLevel=FINE"/>
Caches with ExceptionHandling configured are not of type Cache, but are of type Ehcache only,
and are not available using CacheManager.getCache(), but using CacheManager.getEhcache().
Cache Loader
A default CacheLoader may be set which loads objects into the cache through asynchronous and
synchronous methods on Cache. This is different to the bootstrap cache loader, which is used
only in distributed caching.
It is configured as per the following example:
<cacheLoaderFactory class="com.example.ExampleCacheLoaderFactory"
properties="type=int,startCounter=10"/>
Cache Extension
CacheExtensions are a general purpose mechanism to allow generic extensions to a Cache.
CacheExtensions are tied into the Cache lifecycle.
CacheExtensions are created using the CacheExtensionFactory which has a
<code>createCacheExtension()</code> method which takes as a parameter a
Cache and properties. It can thus call back into any public method on Cache, including, of
course, the load methods.
Extensions are added as per the following example:
<cacheExtensionFactory class="com.example.FileWatchingCacheRefresherExtensionFactory"
properties="refreshIntervalMillis=18000, loaderTimeout=3000,
flushPeriod=whatever, someOtherProperty=someValue ..."/>
-->
<!--
Mandatory Default Cache configuration. These settings will be applied to caches
created programmatically using CacheManager.addCache(String cacheName).
The defaultCache has an implicit name "default" which is a reserved cache name.
-->
<defaultCache
maxElementsInMemory="10000"
eternal="false"
timeToIdleSeconds="120"
timeToLiveSeconds="120"
overflowToDisk="true"
diskSpoolBufferSizeMB="30"
maxElementsOnDisk="10000000"
diskPersistent="false"
diskExpiryThreadIntervalSeconds="120"
memoryStoreEvictionPolicy="LRU"
/>
<!--
Application caches used by the Application Support Layer, followed by sample caches
retained from the Ehcache example configuration.
-->
<!--
Application caches (profiles, genericResources, searchConfiguration, collections,
content, thumbnail, schemata, metadata).
Each cache holds a maximum of 10000 elements in memory, and expires an element once it
has been idle for more than 5 minutes or has lived for more than 10 minutes.
When the in-memory limit is exceeded, elements overflow to the disk store, which in this
configuration goes to wherever java.io.tmpdir is defined on your system. On a standard
Linux system this will be /tmp.
-->
<cache name="profiles"
maxElementsInMemory="10000"
maxElementsOnDisk="1000"
eternal="false"
overflowToDisk="true"
diskSpoolBufferSizeMB="20"
timeToIdleSeconds="300"
timeToLiveSeconds="600"
memoryStoreEvictionPolicy="LRU"
/>
<cache name="genericResources"
maxElementsInMemory="10000"
maxElementsOnDisk="1000"
eternal="false"
overflowToDisk="true"
diskSpoolBufferSizeMB="20"
timeToIdleSeconds="300"
timeToLiveSeconds="600"
memoryStoreEvictionPolicy="LRU"
/>
<cache name="searchConfiguration"
maxElementsInMemory="10000"
maxElementsOnDisk="1000"
eternal="false"
overflowToDisk="true"
diskSpoolBufferSizeMB="20"
timeToIdleSeconds="300"
timeToLiveSeconds="600"
memoryStoreEvictionPolicy="LRU"
/>
<cache name="collections"
maxElementsInMemory="10000"
maxElementsOnDisk="1000"
eternal="false"
overflowToDisk="true"
diskSpoolBufferSizeMB="20"
timeToIdleSeconds="300"
timeToLiveSeconds="600"
memoryStoreEvictionPolicy="LRU"
/>
<cache name="content"
maxElementsInMemory="10000"
maxElementsOnDisk="1000"
eternal="false"
overflowToDisk="true"
diskSpoolBufferSizeMB="20"
timeToIdleSeconds="300"
timeToLiveSeconds="600"
memoryStoreEvictionPolicy="LRU"
/>
<cache name="thumbnail"
maxElementsInMemory="10000"
maxElementsOnDisk="1000"
eternal="false"
overflowToDisk="true"
diskSpoolBufferSizeMB="20"
timeToIdleSeconds="300"
timeToLiveSeconds="600"
memoryStoreEvictionPolicy="LRU"
/>
<cache name="schemata"
maxElementsInMemory="10000"
maxElementsOnDisk="1000"
eternal="false"
overflowToDisk="true"
diskSpoolBufferSizeMB="20"
timeToIdleSeconds="300"
timeToLiveSeconds="600"
memoryStoreEvictionPolicy="LRU"
/>
<cache name="metadata"
maxElementsInMemory="10000"
maxElementsOnDisk="1000"
eternal="false"
overflowToDisk="true"
diskSpoolBufferSizeMB="20"
timeToIdleSeconds="300"
timeToLiveSeconds="600"
memoryStoreEvictionPolicy="LRU"
/>
<!--
Sample cache named sampleCache2
This cache has a maximum of 1000 elements in memory. There is no overflow to disk, so 1000
is also the maximum cache size. Note that when a cache is eternal, timeToLive and
timeToIdle are not used and do not need to be specified.
-->
<cache name="sampleCache2"
maxElementsInMemory="1000"
eternal="true"
overflowToDisk="false"
memoryStoreEvictionPolicy="FIFO"
/>
<!--
Sample cache named sampleCache3. This cache overflows to disk. The disk store is
persistent between cache and VM restarts. The disk expiry thread interval is set to
1 second, overriding the default of 2 minutes.
-->
<cache name="sampleCache3"
maxElementsInMemory="500"
eternal="false"
overflowToDisk="true"
timeToIdleSeconds="300"
timeToLiveSeconds="600"
diskPersistent="true"
diskExpiryThreadIntervalSeconds="1"
memoryStoreEvictionPolicy="LFU"
/>
<!--
Sample distributed cache named sampleDistributedCache1.
This cache replicates using defaults.
It also bootstraps from the cluster, using default properties.
-->
<cache name="sampleDistributedCache1"
maxElementsInMemory="10"
eternal="false"
timeToIdleSeconds="100"
timeToLiveSeconds="100"
overflowToDisk="false">
<cacheEventListenerFactory
class="net.sf.ehcache.distribution.RMICacheReplicatorFactory"/>
<bootstrapCacheLoaderFactory
class="net.sf.ehcache.distribution.RMIBootstrapCacheLoaderFactory"/>
</cache>
<!--
Sample distributed cache named sampleDistributedCache2.
This cache replicates using specific properties.
It only replicates updates and does so synchronously via copy
-->
<cache name="sampleDistributedCache2"
maxElementsInMemory="10"
eternal="false"
timeToIdleSeconds="100"
timeToLiveSeconds="100"
overflowToDisk="false">
<cacheEventListenerFactory
class="net.sf.ehcache.distribution.RMICacheReplicatorFactory"
properties="replicateAsynchronously=false, replicatePuts=false,
replicateUpdates=true, replicateUpdatesViaCopy=true,
replicateRemovals=false"/>
</cache>
<!--
Sample distributed cache named sampleDistributedCache3.
This cache replicates using defaults except that the asynchronous replication
interval is set to 200ms.
-->
<cache name="sampleDistributedCache3"
maxElementsInMemory="10"
eternal="false"
timeToIdleSeconds="100"
timeToLiveSeconds="100"
overflowToDisk="false">
<cacheEventListenerFactory
class="net.sf.ehcache.distribution.RMICacheReplicatorFactory"
properties="asynchronousReplicationIntervalMillis=200"/>
</cache>
</ehcache>
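A minimal Java sketch of how one of the caches above can be reached through the Ehcache API; the classpath location of the file and the key/value pair are illustrative assumptions:
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Ehcache;
import net.sf.ehcache.Element;

public class ProfilesCacheSketch {
    public static void main(String[] args) {
        // Assumes this ehcache.xml is packaged under /etc on the classpath, as CachesManager does below.
        CacheManager manager = CacheManager.create(ProfilesCacheSketch.class.getResource("/etc/ehcache.xml"));
        Ehcache profiles = manager.getEhcache("profiles");
        // Values must be Serializable because overflowToDisk is enabled for this cache.
        profiles.put(new Element("some.user", "serialized profile goes here"));
        Element hit = profiles.get("some.user");
        System.out.println(hit == null ? "expired or missing" : hit.getValue());
        manager.shutdown();
    }
}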

View File

@@ -1 +0,0 @@
sharedDir=${catalina.home}/shared/d4s

View File

@@ -1,18 +0,0 @@
MYPROXY_HOST=grids04.eng.it
MYPROXY_PORT=7512
PROXIES_DIR=#path to dir where the proxy files will be stored#
HOST_CERT=#myproxy certificate#
HOST_KEY=#myproxy key#
NEW_SERVLET_HOST=dl14.di.uoa.gr #hostname for voms servlet#
NEW_SERVLET_PORT=8888 #port where voms servlet is listening#
NEW_SERVLET_PATH=/VOMSServlet/VOMSServlet #voms servlet path#
NEW_SERVLET_PROTOCOL=http # protocol: http/https#
KEY_STORE=#path to *.p12 key file#
KEY_STORE_TYPE=PKCS12
KEY_STORE_PWD=#password for the *.p12 key file#
TRUST_STORE=#path to trust store file#
TRUST_STORE_PWD=#password for the trust store file#
TRUST_STORE_TYPE=JKS

View File

@@ -1,137 +0,0 @@
package org.gcube.application.framework.core.cache;
import java.util.HashMap;
import org.gcube.application.framework.core.cache.factories.GenericResourceCacheEntryFactory;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Ehcache;
import net.sf.ehcache.constructs.blocking.CacheEntryFactory;
import net.sf.ehcache.constructs.blocking.SelfPopulatingCache;
/**
* @author Valia Tsagkalidou (KNUA)
*
* This class is a singleton that manages the available caches
*
*/
public class CachesManager {
protected static CachesManager cacheManager = new CachesManager();
protected CacheManager manager;
// protected Ehcache profileCache;
// protected Ehcache genericResourceCache;
// protected Ehcache searchConfigCache;
// protected Ehcache collectionCache;
// protected Ehcache contentCache;
// protected Ehcache thumbnailCache;
// protected Ehcache schemataCache;
// protected Ehcache metadataCache;
protected HashMap<String, Ehcache> caches;
/**
* The constructor
*/
protected CachesManager() {
manager = CacheManager.create(CachesManager.class.getResource("/etc/ehcache.xml"));
caches = new HashMap<String, Ehcache>();
// profileCache = manager.getEhcache("profiles");
Ehcache genericResourceCache = manager.getEhcache("genericResources");
// searchConfigCache = manager.getEhcache("searchConfiguration");
// collectionCache = manager.getEhcache("collections");
// contentCache = manager.getEhcache("content");
// thumbnailCache = manager.getEhcache("thumbnail");
// schemataCache = manager.getEhcache("schemata");
// metadataCache = manager.getEhcache("metadata");
// profileCache = new SelfPopulatingCache(profileCache, new ProfileCacheEntryFactory());
genericResourceCache = new SelfPopulatingCache(genericResourceCache, new GenericResourceCacheEntryFactory());
caches.put("genericResourceCache", genericResourceCache);
// searchConfigCache = new SelfPopulatingCache(searchConfigCache, new SearchConfigCacheEntryFactory());
// collectionCache = new SelfPopulatingCache(collectionCache, new CollectionCacheEntryFactory());
// contentCache = new SelfPopulatingCache(contentCache, new ContentInfoCacheEntryFactory());
// thumbnailCache = new SelfPopulatingCache(thumbnailCache, new ThumbnailCacheEntryFactory());
// schemataCache = new SelfPopulatingCache(schemataCache, new SchemaInfoCacheEntryFactory());
// metadataCache = new SelfPopulatingCache(metadataCache, new MetadataCacheEntryFactory());
}
/**
* @return the singleton of CachesManager
*/
public static CachesManager getInstance() {
return cacheManager;
}
public Ehcache getEhcache (String cacheName, CacheEntryFactory cacheFactory) {
if (caches.get(cacheName) == null) {
Ehcache newCache = manager.getEhcache(cacheName);
newCache = new SelfPopulatingCache(newCache, cacheFactory);
caches.put(cacheName, newCache);
}
return caches.get(cacheName);
}
/**
* @return the cache that contains the user profiles
*/
// public Ehcache getProfileCache() {
// return profileCache;
// }
/**
* @return the cache that contains the generic resources
*/
public Ehcache getGenericResourceCache() {
return caches.get("genericResourceCache");
}
/**
* @return the cache that contains the search configurations
*/
// public Ehcache getSearchConfigCache() {
// return searchConfigCache;
// }
/**
* @return the cache that contains the collections per VRE
*/
// public Ehcache getCollectionCache() {
// return collectionCache;
// }
/**
* @return the cache that contains information about digital objects
*/
// public Ehcache getContentCache() {
// return contentCache;
// }
/**
* @return the cache that contains thumbnails
*/
// public Ehcache getThumbnailCache() {
// return thumbnailCache;
// }
/**
* @return the cache that contains the searchable fields for each metadata schema
*/
// public Ehcache getSchemataCache() {
// return schemataCache;
// }
/**
* @return the cache that contains the metadata objects
*/
// public Ehcache getMetadataCache() {
// return metadataCache;
// }
}
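A brief usage sketch of the singleton; it assumes the caches declared in the ehcache.xml above are available on the classpath:
import net.sf.ehcache.Ehcache;
import org.gcube.application.framework.core.cache.CachesManager;

public class CachesManagerSketch {
    public static void main(String[] args) {
        // The generic resource cache is pre-wrapped in a SelfPopulatingCache by the constructor.
        Ehcache cache = CachesManager.getInstance().getGenericResourceCache();
        System.out.println(cache.getName() + " currently holds " + cache.getSize() + " elements");
    }
}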

View File

@@ -1,77 +0,0 @@
package org.gcube.application.framework.core.cache;
import java.util.HashMap;
import org.gcube.common.core.scope.GCUBEScope;
import org.gcube.common.core.utils.logging.GCUBELog;
import org.gcube.informationsystem.cache.ISCache;
import org.gcube.informationsystem.cache.ISCacheManager;
/**
* This class manages the Information System caches (one ISCache per scope).
*
* @author Valia Tsagkalidou (KNUA)
*
*/
public class RIsManager {
/**
* Defines the manager's instance
*/
private static RIsManager instance = null;
/**
* keeps the ISCache per scope
*/
protected HashMap<GCUBEScope, ISCache> isCaches;
/** Object logger. */
protected final GCUBELog logger = new GCUBELog(this);
/**
* Initializes RIsManager
*/
private RIsManager() {
isCaches = new HashMap<GCUBEScope, ISCache>();
}
/**
* Retrieves the singleton
* @return the only instance of RIsManager
*/
synchronized public static RIsManager getInstance() {
if (instance == null)
instance = new RIsManager();
return instance;
}
/**
* @param scope the GCUBEScope for which the RIs are requested
* @return the ISCache for this specific scope
*/
public synchronized ISCache getISCache(GCUBEScope scope)
{
ISCache isInfo = isCaches.get(scope);
if(isInfo == null)
{
// If the ISCache is not already created, a new instance is created and added to the HashMap
try {
ISCacheManager.addManager(scope);
isInfo = ISCacheManager.getManager(scope).getCache();
isCaches.put(scope, isInfo);
try {
Thread.sleep(5000);
} catch (InterruptedException e1) {
logger.error("", e1);
}
} catch (Exception e) {
logger.error("", e);
}
}
return isInfo;
}
}
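A short usage sketch; the scope string is an illustrative assumption, and the first call for a scope pauses about five seconds while the underlying ISCacheManager populates:
import org.gcube.application.framework.core.cache.RIsManager;
import org.gcube.common.core.scope.GCUBEScope;
import org.gcube.informationsystem.cache.ISCache;

public class RIsManagerSketch {
    public static void main(String[] args) {
        GCUBEScope scope = GCUBEScope.getScope("/gcube/devsec");   // hypothetical scope
        ISCache runningInstances = RIsManager.getInstance().getISCache(scope);
        System.out.println("ISCache available for " + scope + ": " + (runningInstances != null));
    }
}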

View File

@@ -1,96 +0,0 @@
package org.gcube.application.framework.core.cache.factories;
import java.util.HashMap;
import org.gcube.application.framework.core.security.PortalSecurityManager;
import org.gcube.application.framework.core.security.VOMSAdminManager;
import org.gcube.application.framework.core.util.UserCredential;
import org.gcube.common.core.scope.GCUBEScope;
import org.gcube.common.core.security.utils.VOMSAttributesReader;
import org.gcube.common.core.utils.logging.GCUBELog;
import org.gcube.vomanagement.vomsAdmin.impl.VOMSAdminImpl;
import org.gridforum.jgss.ExtendedGSSCredential;
/**
* This class is used for retrieving and updating the portal credential that is needed by the caches in order to retrieve information from gCube services
* @author Valia Tsagkalidou (NKUA)
*
*/
public class ApplicationCredentials {
private static ApplicationCredentials applCredentials = new ApplicationCredentials();
private HashMap<String, ExtendedGSSCredential> creds;
protected static String userDN = "/O=Grid/OU=GlobusTest/OU=simpleCA-gauss.eng.it/OU=eng.it/CN=";
protected static String userCA = "/O=Grid/OU=GlobusTest/OU=simpleCA-gauss.eng.it/CN=Globus Simple CA";
/** Object logger. */
protected final GCUBELog logger = new GCUBELog(this);
/**
* The basic constructor
*/
protected ApplicationCredentials()
{
creds = new HashMap<String, ExtendedGSSCredential>();
}
/**
* @return the singleton of ApplicationCredentials
*/
public static ApplicationCredentials getInstance()
{
return applCredentials;
}
/**
* @param VREname the name of the VRE for which you want to get the "portal" credential
* @return the grid credential
*/
public ExtendedGSSCredential getCredential(String VREname)
{
PortalSecurityManager secMan = new PortalSecurityManager(GCUBEScope.getScope(VREname));
if(!secMan.isSecurityEnabled())
return null;
ExtendedGSSCredential cred = creds.get(VREname);
if(cred == null)
{
// If the credential is not available, it is retrieved from MyProxy
cred = UserCredential.getCredential("application", VREname);
if(cred == null)
{
//user "application" does not exist on this VRE, so we add him and try to get credential again
VOMSAdminImpl vomsA;
try {
vomsA = VOMSAdminManager.getVOMSAdmin();
String[] roles = vomsA.listRoles();
vomsA.createUser("application", userDN+"application", userCA, "application@gcube.org");
vomsA.addMember(VREname, userDN+"application", userCA);
vomsA.assignRole(VREname, roles[0], userDN+"application", userCA);
}
catch (Exception e) {
vomsA = null;
logger.error("", e);
}
cred = UserCredential.getCredential("application", VREname);
}
creds.put(VREname, cred);
}
else
{
// credential already available
VOMSAttributesReader vomsReader = null;
try {
vomsReader = new VOMSAttributesReader(cred);
//Check whether it is going to expire within the next minute, and refresh it if so
if(vomsReader.getRefreshPeriod() < 60000)
{
cred = UserCredential.getCredential("application", VREname);
creds.put(VREname, cred);
}
} catch (Exception e1) {
logger.error("", e1);
}
}
return cred;
}
}
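A usage sketch; the VRE name is illustrative, and null is returned when security is disabled in that scope:
import org.gcube.application.framework.core.cache.factories.ApplicationCredentials;
import org.gridforum.jgss.ExtendedGSSCredential;

public class ApplicationCredentialsSketch {
    public static void main(String[] args) {
        ExtendedGSSCredential cred = ApplicationCredentials.getInstance().getCredential("/gcube/devsec");
        System.out.println(cred == null ? "security disabled or credential unavailable" : "portal credential obtained");
    }
}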

View File

@@ -1,117 +0,0 @@
package org.gcube.application.framework.core.cache.factories;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.gcube.application.framework.core.genericresources.model.ISGenericResource;
import org.gcube.application.framework.core.util.CacheEntryConstants;
import org.gcube.application.framework.core.util.QueryString;
import org.gcube.application.framework.core.util.SessionConstants;
import org.gcube.common.core.contexts.GCUBEContext;
import org.gcube.common.core.contexts.GHNContext;
import org.gcube.common.core.informationsystem.client.AtomicCondition;
import org.gcube.common.core.informationsystem.client.ISClient;
import org.gcube.common.core.informationsystem.client.queries.GCUBEGenericResourceQuery;
import org.gcube.common.core.resources.GCUBEGenericResource;
import org.gcube.common.core.scope.GCUBEScope;
import org.gcube.common.core.utils.logging.GCUBELog;
import net.sf.ehcache.constructs.blocking.CacheEntryFactory;
/**
* @author Valia Tsagkalidou
*
*/
public class GenericResourceCacheEntryFactory implements CacheEntryFactory {
static ISClient client = null;
/** Object logger. */
protected final GCUBELog logger = new GCUBELog(this);
/**
* Constructor to initialize the ISClient
*/
public GenericResourceCacheEntryFactory() {
super();
if(client == null)
{
try {
client = GHNContext.getImplementation(ISClient.class);
} catch (Exception e) {
logger.error("",e);
}
}
}
/**
* @param key a QueryString representing pairs of keys and values: needed keys are "vre" and one of "id" or "name"
* @return a list containing the Generic Resources that correspond to the query
*/
public List<ISGenericResource> createEntry(Object key) throws Exception {
QueryString querySt = (QueryString) key;
logger.info("query: " + querySt.toString());
GCUBEScope scope = GCUBEScope.getScope(querySt.get(CacheEntryConstants.vre));
GCUBEGenericResourceQuery query = client.getQuery(GCUBEGenericResourceQuery.class);
logger.info("In generic resources cache entry factory");
if(querySt.containsKey(CacheEntryConstants.id))
{ //Retrieving generic resources based on their ID
query.addAtomicConditions(new AtomicCondition("/ID", querySt.get(CacheEntryConstants.id)));
}
// else if(querySt.containsKey(CacheEntryConstants.name) && querySt.get(CacheEntryConstants.name).equals(SessionConstants.ScenarioSchemaInfo))
// { //Retrieving the generic resource that represents the static search configuration
// query.addAtomicConditions(new AtomicCondition("/Profile/Name", querySt.get(CacheEntryConstants.name)));
// query.addAtomicConditions(new AtomicCondition("/Profile/Body/DL/attribute::name", querySt.get(CacheEntryConstants.vre)));
// }
else if(querySt.containsKey(CacheEntryConstants.name))
{ //Retrieving generic resources based on their name
query.addAtomicConditions(new AtomicCondition("/Profile/Name", querySt.get(CacheEntryConstants.name)));
}
else if(querySt.containsKey(CacheEntryConstants.vreResource))
{
query.addAtomicConditions(new AtomicCondition("//SecondaryType", GCUBEGenericResource.SECONDARYTYPE_VRE), new AtomicCondition("//Body/Scope", scope.toString()));
}
try{
List<GCUBEGenericResource> result = client.execute(query, scope);
List<ISGenericResource> res = new ArrayList<ISGenericResource>();
if (querySt.containsKey(CacheEntryConstants.name) && querySt.get(CacheEntryConstants.name).equals(SessionConstants.ScenarioSchemaInfo)) {
System.out.println("The number of generic Resources for ScenarioSchemaInfo returned is: " + result.size());
List<GCUBEGenericResource> newResult = new ArrayList<GCUBEGenericResource>();
for (GCUBEGenericResource resource : result) {
Map<String, GCUBEScope> scopes = resource.getScopes();
System.out.println("Number of scopes for ScenarioSchemaInfo: " + scopes.values().size());
System.out.println(scopes.values().toString());
if (scopes.containsValue(scope)) {
newResult.add(resource);
}
}
System.out.println("Number of genericResources for ScenarioSchemaInfo left after the pruning" + newResult.size());
result = newResult;
}
logger.debug("size of results: " + result.size());
for(GCUBEGenericResource resource : result)
{
ISGenericResource genResource = new ISGenericResource(resource.getID(), resource.getName(), resource.getDescription(), resource.getBody(), resource.getSecondaryType());
res.add(genResource);
}
return res;
}catch (Exception e) {
logger.error("",e);
return null;
}
}
}
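The factory is normally driven through the self-populating cache rather than called directly. A sketch of a name-based lookup (the scope and resource name are illustrative), following the same pattern PortalSecurityManager uses below:
import java.util.List;
import org.gcube.application.framework.core.cache.CachesManager;
import org.gcube.application.framework.core.genericresources.model.ISGenericResource;
import org.gcube.application.framework.core.util.CacheEntryConstants;
import org.gcube.application.framework.core.util.QueryString;

public class GenericResourceLookupSketch {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        QueryString key = new QueryString();
        key.addParameter(CacheEntryConstants.vre, "/gcube/devsec");        // hypothetical scope
        key.addParameter(CacheEntryConstants.name, "SomeGenericResource"); // hypothetical resource name
        // A miss on the SelfPopulatingCache triggers createEntry(key) above.
        List<ISGenericResource> result = (List<ISGenericResource>)
                CachesManager.getInstance().getGenericResourceCache().get(key).getValue();
        for (ISGenericResource r : result) {
            System.out.println(r.getId() + " - " + r.getName());
        }
    }
}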

View File

@@ -1,114 +0,0 @@
package org.gcube.application.framework.core.genericresources.model;
/**
* @author Valia Tsagkalidou (NKUA)
*
*/
public class ISGenericResource {
protected String id;
protected String name;
protected String description;
protected String body;
protected String secondaryType;
/**
* @return the secondary type of the generic resource
*/
public String getSecondaryType() {
return secondaryType;
}
/**
* @param secondaryType the secondary type of the generic resource to be set
*/
public void setSecondaryType(String secondaryType) {
this.secondaryType = secondaryType;
}
/**
* Generic Constructor
*/
public ISGenericResource() {
super();
this.id = "";
this.name = "";
this.description = "";
this.body = "";
this.secondaryType = "";
}
/**
* @param id the generic resource ID
* @param name the generic resource name
* @param description the generic resource description
* @param body the generic resource body
* @param sType the generic resource secondary type
*/
public ISGenericResource(String id, String name, String description,
String body, String sType) {
super();
this.id = id;
this.name = name;
this.description = description;
this.body = body;
this.secondaryType = sType;
}
/**
* @return the id
*/
public String getId() {
return id;
}
/**
* @param id the id to set
*/
public void setId(String id) {
this.id = id;
}
/**
* @return the name
*/
public String getName() {
return name;
}
/**
* @param name the name to set
*/
public void setName(String name) {
this.name = name;
}
/**
* @return the description
*/
public String getDescription() {
return description;
}
/**
* @param description the description to set
*/
public void setDescription(String description) {
this.description = description;
}
/**
* @return the body
*/
public String getBody() {
return body;
}
/**
* @param body the body to set
*/
public void setBody(String body) {
this.body = body;
}
}

View File

@@ -1,43 +0,0 @@
package org.gcube.application.framework.core.security;
import java.io.IOException;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public class JaasCallbackHandler implements CallbackHandler{
protected String username = null;
protected String password = null;
private Log log = LogFactory.getLog(JaasCallbackHandler.class);
public JaasCallbackHandler(String username, String password) {
super();
this.username = username;
this.password = password;
}
public void handle(Callback callbacks[])
throws IOException, UnsupportedCallbackException {
for (int i = 0; i < callbacks.length; i++) {
if (callbacks[i] instanceof NameCallback) {
if (log.isDebugEnabled()) log.debug("responding to NameCallback");
((NameCallback) callbacks[i]).setName(username);
} else if (callbacks[i] instanceof PasswordCallback) {
if (log.isDebugEnabled()) log.debug("responding to PasswordCallback");
((PasswordCallback) callbacks[i]).setPassword(password != null ? password.toCharArray() : new char[0]);
} else {
if (log.isDebugEnabled()) log.debug("unsupported callback: " + callbacks[i].getClass());
throw new UnsupportedCallbackException(callbacks[i]);
}
}
}
}

View File

@@ -1,72 +0,0 @@
package org.gcube.application.framework.core.security;
import javax.security.auth.login.AccountExpiredException;
import javax.security.auth.login.CredentialExpiredException;
import javax.security.auth.login.FailedLoginException;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
public class LDAPAuthenticationModule {
public static String LDAP_HOST = "LDAP_HOST";
public static String BASE_DN = "BASE_DN";
private String contextName = "Gridsphere";
//private String contextName = "AslLoginConf";
public LDAPAuthenticationModule () {
return;
}
public boolean checkAuthentication(String username, String password) throws Exception {
System.out.println("beginning authentication for " + username);
System.out.println("I am in ASL");
LoginContext loginContext;
// Create the LoginContext
try {
loginContext = new LoginContext(contextName, new JaasCallbackHandler(username, password));
} catch (SecurityException e) {
e.printStackTrace();
throw new Exception("key4", e);
} catch (LoginException e) {
e.printStackTrace();
throw new Exception("key4", e);
}
// Attempt login
try {
loginContext.login();
} catch (FailedLoginException e) {
// throw new Exception("key4", e);
e.printStackTrace();
return false;
} catch (AccountExpiredException e) {
//throw new Exception("key1");
e.printStackTrace();
return false;
} catch (CredentialExpiredException e) {
// throw new Exception("key2", e);
e.printStackTrace();
return false;
} catch (Exception e) {
// throw new Exception("key3", e);
return false;
}
System.out.println("ASL returning value true");
return true;
}
}
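A usage sketch; it assumes a JAAS configuration that defines the "Gridsphere" application context, and the credentials shown are illustrative:
import org.gcube.application.framework.core.security.LDAPAuthenticationModule;

public class LoginSketch {
    public static void main(String[] args) throws Exception {
        LDAPAuthenticationModule auth = new LDAPAuthenticationModule();
        boolean ok = auth.checkAuthentication("some.user", "secret");
        System.out.println("authenticated: " + ok);
    }
}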

View File

@@ -1,89 +0,0 @@
package org.gcube.application.framework.core.security;
import java.io.BufferedReader;
import java.io.StringReader;
import java.util.List;
import org.gcube.application.framework.core.cache.CachesManager;
import org.gcube.application.framework.core.genericresources.model.ISGenericResource;
import org.gcube.application.framework.core.session.ASLSession;
import org.gcube.application.framework.core.util.CacheEntryConstants;
import org.gcube.application.framework.core.util.QueryString;
import org.gcube.application.framework.core.util.SessionConstants;
import org.gcube.common.core.scope.GCUBEScope;
import org.gcube.common.core.security.GCUBESecurityManagerImpl;
import org.gcube.common.core.utils.logging.GCUBELog;
import org.kxml2.io.KXmlParser;
/**
* @author Valia Tsagkalidou (KNUA)
*
*/
public class PortalSecurityManager extends GCUBESecurityManagerImpl {
/** Object logger. */
protected final GCUBELog logger = new GCUBELog(this);
public PortalSecurityManager(GCUBEScope scope) {
super();
this.scope = scope;
}
public PortalSecurityManager(ASLSession session) {
super();
this.scope = session.getScope();
}
GCUBEScope scope;
@Override
public boolean isSecurityEnabled() {
QueryString query = new QueryString();
query.put(CacheEntryConstants.vreResource, "true");
query.put(CacheEntryConstants.vre, scope.toString());
List<ISGenericResource> res = (List<ISGenericResource>)CachesManager.getInstance().getGenericResourceCache().get(query).getValue();
if(res == null || res.size() == 0)
return false;
else
{
try {
return parseBody(res.get(0).getBody());
} catch (Exception e) {
logger.error("",e);
return false;
}
}
}
/**
* Parses the <em>Body</em> element of the VRE generic resource
* @param body the <em>Body</em> of the generic resource
* @return the value of the <em>SecurityEnabled</em> element, or false if it is not present
* @throws Exception if the element is not valid or well-formed
*/
private boolean parseBody(String body) throws Exception {
KXmlParser parser = new KXmlParser();
parser.setInput(new BufferedReader(new StringReader(body)));
loop: while (true) {
try {
switch (parser.next()) {
case KXmlParser.START_TAG:
if (parser.getName().equals("SecurityEnabled"))
{
boolean res = Boolean.valueOf(parser.nextText()).booleanValue();
logger.debug("Found value:" + res);
return res;
}
else parser.nextText();//just skip the text
break;
case KXmlParser.END_DOCUMENT: break loop;
}
} catch (Exception e) {
logger.error("",e);
throw new Exception ("Unable to parse the ScopeResource body");
}
}
return false;
}
}

View File

@@ -1,37 +0,0 @@
package org.gcube.application.framework.core.security;
import java.rmi.Remote;
import org.gcube.application.framework.core.session.ASLSession;
import org.gcube.common.core.contexts.GCUBERemotePortTypeContext;
import org.gcube.common.core.scope.GCUBEScope;
import org.gcube.common.core.scope.GCUBEScope.MalformedScopeExpressionException;
import org.gcube.common.core.security.GCUBESecurityManager;
import org.ietf.jgss.GSSCredential;
public class ServiceContextManager<PORTTYPE extends Remote> {
public static <PORTTYPE extends Remote> PORTTYPE applySecurity(PORTTYPE stub, ASLSession session) throws MalformedScopeExpressionException, Exception
{
GCUBESecurityManager secManager = new PortalSecurityManager(session);
if(secManager.isSecurityEnabled())
{
secManager.useCredentials(session.getCredential());
}
return GCUBERemotePortTypeContext.getProxy(stub , session.getScope(), secManager);
}
public static <PORTTYPE extends Remote> PORTTYPE applySecurity(PORTTYPE stub, GCUBEScope scope, GSSCredential cred) throws MalformedScopeExpressionException, Exception
{
GCUBESecurityManager secManager = new PortalSecurityManager(scope);
if(secManager.isSecurityEnabled())
{
secManager.useCredentials(cred);
}
return GCUBERemotePortTypeContext.getProxy(stub , scope, secManager);
}
}
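A sketch of how a generated service stub would typically be wrapped; the stub type is kept generic because the concrete port types are not part of this module:
import java.rmi.Remote;
import org.gcube.application.framework.core.security.ServiceContextManager;
import org.gcube.application.framework.core.session.ASLSession;

public class SecuredStubSketch {
    // 'stub' stands for any generated service port type supplied by the caller (hypothetical here).
    public static <T extends Remote> T prepare(T stub, ASLSession session) throws Exception {
        // Binds the stub to the session scope and, when security is enabled, attaches the session credential.
        return ServiceContextManager.applySecurity(stub, session);
    }
}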

View File

@@ -1,35 +0,0 @@
package org.gcube.application.framework.core.security;
import java.io.File;
import org.gcube.application.framework.core.util.Settings;
import org.gcube.vomanagement.vomsAdmin.impl.VOMSAdminImpl;
/**
* @author Valia Tsagkalidou (NKUA)
*
*/
public class VOMSAdminManager {
/**
*
*/
protected static VOMSAdminImpl vomsAdmin= null;
/**
* @return the shared VOMSAdminImpl instance, created lazily from the vomsAPI.properties file in the shared directory
*/
public static VOMSAdminImpl getVOMSAdmin()
{
if(vomsAdmin == null)
{
try {
vomsAdmin = new VOMSAdminImpl(Settings.getInstance().getProperty("sharedDir")+ File.separator + "vomsAPI.properties");
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return vomsAdmin;
}
}

View File

@@ -1,222 +0,0 @@
package org.gcube.application.framework.core.session;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import org.gcube.application.framework.core.cache.CachesManager;
import org.gcube.application.framework.core.security.PortalSecurityManager;
import org.gcube.application.framework.core.util.SessionConstants;
import org.gcube.application.framework.core.util.UserCredential;
import org.gcube.common.core.scope.GCUBEScope;
import org.gridforum.jgss.ExtendedGSSCredential;
/**
* @author Valia Tsagkalidou (NKUA)
*
*/
public class ASLSession{
/**
*
*/
private static final long serialVersionUID = 1L;
private HashMap<String, Object> innerSession;
private long lastUsedTime;
private String externalSessionID;
private String username;
private ExtendedGSSCredential credential;
private GCUBEScope scope;
private HashMap<String, Notifier> notifiers;
/**
* A constructor based on the user and an external ID
* @param externalSessionId the external id
* @param user the username
*/
ASLSession(String externalSessionId, String user)
{
innerSession = new HashMap<String, Object>();
notifiers = new HashMap<String, Notifier>();
lastUsedTime = System.currentTimeMillis();
username = user;
externalSessionID = externalSessionId;
}
private void initializeAttributes() {
}
/**
* @return whether the session is still valid or not
*/
public boolean isValid()
{
if((System.currentTimeMillis() - lastUsedTime) > 1800000) // 30 minutes
return false;
return true;
}
/**
* @return whether the session is empty or not
*/
public boolean isEmpty()
{
lastUsedTime = System.currentTimeMillis();
return innerSession.isEmpty();
}
/**
* @param name the name of the attribute
* @return whether the name attribute exists in the session
*/
public boolean hasAttribute(String name)
{
lastUsedTime = System.currentTimeMillis();
return innerSession.containsKey(name);
}
/**
* @return a set of all the attributes in the session
*/
public Set<String> getAttributeNames()
{
lastUsedTime = System.currentTimeMillis();
return innerSession.keySet();
}
/**
* @param name the name of the attribute
* @return the value of the named attribute
*/
public Object getAttribute(String name)
{
lastUsedTime = System.currentTimeMillis();
return innerSession.get(name);
}
/**
* @param name the name of the attribute
* @param value the value of the attribute
*/
public void setAttribute(String name, Object value)
{
lastUsedTime = System.currentTimeMillis();
innerSession.put(name, value);
}
/**
* Removes the named attribute from the session
* @param name the name of the attribute
* @return the removed object
*/
public Object removeAttribute(String name)
{
lastUsedTime = System.currentTimeMillis();
return innerSession.remove(name);
}
/**
* Removes all the attributes from the session
*/
public void removeAll()
{
lastUsedTime = System.currentTimeMillis();
innerSession.clear();
}
/**
* invalidates the session
*/
public void invalidate()
{
lastUsedTime = System.currentTimeMillis() - 2000000; //more than 30 minutes
}
/**
* @return the credential
*/
public ExtendedGSSCredential getCredential() {
return credential;
}
/**
* @return the external session id (passed to the constructor)
*/
public String getExternalSessionID() {
return externalSessionID;
}
/**
* @return the username
*/
public String getUsername() {
return username;
}
/**
* @return the scope
*/
public GCUBEScope getScope() {
return scope;
}
/**
* @return the name of the scope (VRE)
*/
public String getScopeName()
{
return scope.toString();
}
/**
* @param scope the scope name (VRE)
*/
public void setScope(String scope) {
lastUsedTime = System.currentTimeMillis();
String[] split = scope.trim().substring(1).split("/",2);
String vo = "/" + split[0].toLowerCase();
if(split.length > 1)
vo += "/" + split[1];
System.out.println("*** VRE to be set:" + vo + " ***");
this.scope = GCUBEScope.getScope(vo);
if(new PortalSecurityManager(this.scope).isSecurityEnabled())
this.credential = UserCredential.getCredential(username, scope);
innerSession.clear();
initializeAttributes();
}
/**
* @param notification the name of the notification to wait for
* @throws InterruptedException when the thread is interrupted
*/
public void waitNotification(String notification) throws InterruptedException
{
Notifier notifier = notifiers.get(notification);
if(notifier == null)
{
notifier = new Notifier();
notifiers.put(notification, notifier);
}
lastUsedTime = System.currentTimeMillis();
notifier.waitNotification();
}
/**
* @param notification the name of the notification to send
* @throws InterruptedException when the thread is interrupted
*/
public void notifyAllWaiting(String notification) throws InterruptedException
{
Notifier notifier = notifiers.get(notification);
if(notifier == null)
{
notifier = new Notifier();
notifiers.put(notification, notifier);
}
lastUsedTime = System.currentTimeMillis();
notifier.notifyAllWaiting();
}
}
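A sketch of the notification mechanism; the session id, user name and timing are illustrative:
import org.gcube.application.framework.core.session.ASLSession;
import org.gcube.application.framework.core.session.SessionManager;
import org.gcube.application.framework.core.util.NotificationConstants;

public class NotificationSketch {
    public static void main(String[] args) throws InterruptedException {
        final ASLSession session =
                SessionManager.getInstance().getD4ScienceSession("jsessionid-1234", "some.user");
        Thread waiter = new Thread(new Runnable() {
            public void run() {
                try {
                    // Blocks on the per-notification semaphore until notifyAllWaiting() is called.
                    session.waitNotification(NotificationConstants.CollectionsStatusChange);
                    System.out.println("collections status changed");
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            }
        });
        waiter.start();
        Thread.sleep(1000); // give the waiter time to queue on the semaphore
        session.notifyAllWaiting(NotificationConstants.CollectionsStatusChange);
        waiter.join();
    }
}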

View File

@@ -1,25 +0,0 @@
package org.gcube.application.framework.core.session;
import java.util.concurrent.Semaphore;
class Notifier {
Semaphore sem;
Notifier() {
sem = new Semaphore(0, true);
}
public void waitNotification() throws InterruptedException
{
sem.acquire();
System.out.println("\n\njust woke up!!!\n\n");
}
public void notifyAllWaiting() throws InterruptedException
{
System.out.println("Sending wake up signal to " + sem.getQueueLength() + " receivers...");
sem.release(sem.getQueueLength());
}
}

View File

@@ -1,75 +0,0 @@
package org.gcube.application.framework.core.session;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Set;
public class SessionManager {
protected static Thread thread = new CleanSessionThread();
protected static SessionManager sessionManager = new SessionManager();
protected HashMap<String, ASLSession> sessions;
protected SessionManager() {
sessions = new HashMap<String, ASLSession>();
thread.setDaemon(true);
thread.start();
}
public static SessionManager getInstance() {
return sessionManager;
}
public ASLSession getD4ScienceSession(String externalSessionID, String username)
{
ASLSession session = sessions.get(externalSessionID + "_" + username);
if(session == null || !session.isValid() || !session.getUsername().equals(username))
{
session = new ASLSession(externalSessionID, username);
sessions.put(externalSessionID + "_" + username, session);
}
return session;
}
@Override
protected void finalize() throws Throwable {
thread.interrupt();
System.out.println(new Date(System.currentTimeMillis()) + " clean thread was interrupted");
thread.join();
System.out.println(new Date(System.currentTimeMillis()) + " clean thread was joint");
super.finalize();
}
protected static class CleanSessionThread extends Thread
{
public void run()
{
while(true)
{
try {
Thread.sleep(600000);
} catch (InterruptedException e) {
e.printStackTrace();
System.out.println(new Date(System.currentTimeMillis()) + " clean thread was interrupted (in clean thread)");
break;
}
//TODO: cleanup invalid sessions: add locks...
Set<String> keys = sessionManager.sessions.keySet();
Iterator<String> iter = keys.iterator();
while(iter.hasNext())
{
String extSessionID = iter.next();
if(!sessionManager.sessions.get(extSessionID).isValid())
{
sessionManager.sessions.remove(extSessionID);
}
}
}
System.out.println(new Date(System.currentTimeMillis()) + " clean thread was terminated");
}
}
}
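A usage sketch; the external id would normally be the portal's HTTP session id, and all values are illustrative. Note that setScope() contacts the IS to check whether security is enabled for the scope:
import org.gcube.application.framework.core.session.ASLSession;
import org.gcube.application.framework.core.session.SessionManager;
import org.gcube.application.framework.core.util.SessionConstants;

public class SessionSketch {
    public static void main(String[] args) {
        ASLSession session = SessionManager.getInstance().getD4ScienceSession("jsessionid-1234", "some.user");
        session.setScope("/gcube/devsec");                                  // hypothetical VRE scope
        session.setAttribute(SessionConstants.page_no, Integer.valueOf(1)); // any attribute/value pair
        System.out.println("page: " + session.getAttribute(SessionConstants.page_no));
    }
}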

View File

@@ -1,59 +0,0 @@
package org.gcube.application.framework.core.util;
/**
* @author Valia Tsagkalidou (KNUA)
*
*/
public class CacheEntryConstants {
/**
* Cache constant for VRE
*/
public static final String vre = "vre";
/**
* Cache constant for oid
*/
public static final String oid = "oid";
/**
* Cache constant for metadataColID
*/
public static final String metadataColID = "metadataColID";
/**
* Cache constant for name
*/
public static final String name = "name";
/**
* Cache constant for id
*/
public static final String id = "id";
/**
* Cache constant for username
*/
public static final String username = "username";
/**
* Cache constant for width
*/
public static final String width = "width";
/**
* Cache constant for height
*/
public static final String height = "height";
/**
* thumbOptions
*/
public static final String thumbOptions = "thumbOptions";
/**
* vreResource
*/
public static final String vreResource = "vreResource";
}

View File

@@ -1,11 +0,0 @@
package org.gcube.application.framework.core.util;
/**
* @author Valia Tsagkalidou (KNUA)
*
*/
public class NotificationConstants {
public static final String CollectionsStatusChange = "CollectionsStatusChange";
}

View File

@@ -1,30 +0,0 @@
package org.gcube.application.framework.core.util;
import java.util.HashMap;
/**
* @author Valia Tsagkalidou (KNUA)
*
*/
public class QueryString extends HashMap<String, String>{
/**
*
*/
private static final long serialVersionUID = 1L;
public QueryString() {
super();
}
public void addParameter(String name, String value)
{
this.put(name, value);
}
public void removeParameter(String name)
{
this.remove(name);
}
}

View File

@@ -1,119 +0,0 @@
package org.gcube.application.framework.core.util;
/**
* @author Valia Tsagkalidou (KNUA)
*
*/
public class SessionConstants {
/**
* Used for generic resource name which contains the static search configuration
*/
public static final String ScenarioSchemaInfo = "ScenarioCollectionInfo";
/**
* Used for a variable in the session representing a HashMap<String, List<SearchField>>:
* pairs: (schema, list of searchable fields for this schema)
*/
public static final String SchemataInfo = "SchemataInfo";
/**
* Used for generic resource name which contains info about searchable fields per schema...
*/
public static final String MetadataSchemaInfo = "MetadataSchemaInfo";
/**
* Used for a variable in the session representing the available collections
*/
public static final String Collections = "Collections";
/**
* Used for a variable in the session representing the available queries (queries that the user has created)
*/
public static final String Queries = "Queries";
/**
* Used for a variable in the session that contains info for the geospatial search (bounding box, time interval, etc)
*/
public static final String Geospatial = "Geospatial";
/**
* Used for a variable in the session representing the number of the current page number in the results.
*/
public static final String page_no = "page_no";
/**
* Used for a variable in the session representing how many results were actually read from resultset last time
*/
public static final String resNo = "resNo";
/**
* Used for a variable in the session representing the total number of result pages...
*/
public static final String page_total = "page_total";
/**
* Used for a variable in the session representing the index in the current resultset part (where we stopped reading results last time)
*/
public static final String lastRes = "lastRes";
/**
* Used for a variable in the session representing whether we have reached the last page of results
*/
public static final String isLast = "isLast";
/**
* Used for a variable in the session that contains info about whether we have gone beyond the edges of results... (out of index)
*/
public static final String out_of_end = "out_of_end";
/**
* the current resultset client
*/
public static final String rsClient = "rsClient";
/**
* the results objects already processed from the resultset
*/
public static final String theResultObjects = "theResultObjects";
/**
* the thumbnail urls for each result object (already processed from the resultset )
*/
public static final String theThumbnails = "theThumbnails";
/**
* the index in the result objects from where we should start reading
*/
public static final String startingPoint = "startingPoint";
/**
* what type of search was applied (Simple, Advanced, Browse, Quick, Google, etc)
*/
public static final String sourcePortlet = "sourcePortlet";
/**
* the resultset EPR (not used any more)
*/
public static final String rsEPR = "rsEPR"; /* the Result Set EPR */
/**
* whether to show result rank or not
*/
public static final String showRank = "showRank";
/**
* an exception occurred during search: message to the users
*/
public static final String searchException = "searchException";
/**
* the id of the active query
*/
public static final String activeQueryNo = "activeQueryNo";
/**
* the id of the active query to be presented
*/
public static final String activePresentationQueryNo = "activePresentationQueryNo";
}

View File

@@ -1,58 +0,0 @@
package org.gcube.application.framework.core.util;
import java.io.*;
import java.util.*;
import java.net.URISyntaxException;
public class Settings {
static Properties props = new Properties();
static Settings settings = null;
Settings()
{
try
{
props.load(Settings.class.getResourceAsStream("/etc/settings.properties"));
}
catch (FileNotFoundException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
catch (IOException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
}
public static Settings getInstance()
{
if (settings == null)
settings = new Settings();
return settings;
}
/**
* @param key the property name
* @return the property value, with ${...} placeholders resolved against system properties
*/
public String getProperty(String key) {
String value = props.getProperty(key);
if(value.contains("${"))
{
int start = 0;
int i;
while((i= value.indexOf("${", start)) != -1)
{
start = value.indexOf("}", i) +1;
String reg = value.substring(i, start);
System.out.println(reg);
System.out.println(reg.substring(2, reg.length() -1));
value = value.replace(reg, (System.getProperty(reg.substring(2, reg.length() -1)) != null)?System.getProperty(reg.substring(2, reg.length() -1)):"");
}
}
return value;
}
}
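A worked sketch of the placeholder substitution, using the settings.properties entry shown earlier; the catalina.home value is an illustrative assumption:
import org.gcube.application.framework.core.util.Settings;

public class SettingsSketch {
    public static void main(String[] args) {
        // settings.properties contains: sharedDir=${catalina.home}/shared/d4s
        // With the JVM started with -Dcatalina.home=/opt/tomcat, getProperty resolves this
        // to /opt/tomcat/shared/d4s; an undefined system property is replaced by "".
        String sharedDir = Settings.getInstance().getProperty("sharedDir");
        System.out.println("sharedDir = " + sharedDir);
    }
}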

View File

@@ -1,120 +0,0 @@
package org.gcube.application.framework.core.util;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
import java.io.StringWriter;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import org.apache.xml.serialize.OutputFormat;
import org.apache.xml.serialize.XMLSerializer;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;
/**
* @author Valia Tsagkalidou (KNUA)
*
*/
public class TransformXSLT {
/**
* Transforms an xml document based on the given xslt
* @param xslt the xslt for transforming the xml
* @param xml the xml to be transformed
* @return a string containing the transformed xml (output of the transformation)
*/
public static String transform(String xslt, String xml)
{
Transformer transformer;
try
{//Create the transformer from the given XSLT
ByteArrayInputStream xsltStream = new ByteArrayInputStream(xslt.getBytes());
TransformerFactory tFactory = TransformerFactory.newInstance();
transformer = tFactory.newTransformer(new StreamSource(xsltStream));
DocumentBuilderFactory dfactory = DocumentBuilderFactory.newInstance();
Document doc = null;
doc = dfactory.newDocumentBuilder().parse(new InputSource(new StringReader(xml))); // parse the XML content itself, not a URI
// Apply the transformation
ByteArrayOutputStream ba_stream = new ByteArrayOutputStream();
OutputFormat format = new OutputFormat(doc);
format.setIndenting(false);
format.setOmitDocumentType(true);
format.setOmitXMLDeclaration(true);
StringWriter writer = new StringWriter();
XMLSerializer serial = new XMLSerializer(writer,format);
serial.serialize(doc);
transformer.transform(new StreamSource(new ByteArrayInputStream(writer.toString().getBytes())), new StreamResult(ba_stream));
//Prepares the object to be returned
StringBuffer buffer = new StringBuffer();
try {
InputStreamReader isr = new InputStreamReader( new ByteArrayInputStream(ba_stream.toByteArray()),
"UTF8");
Reader in2 = new BufferedReader(isr);
int ch;
while ((ch = in2.read()) > -1) {
buffer.append((char)ch);
}
in2.close();
return buffer.toString();
} catch (Exception e) {
e.printStackTrace();
}
}
catch (Exception e) {
e.printStackTrace();
}
return null;
}
/**
* Transforms an xml document based on the given transformer
* @param transformer the transformer based on which the transformation will be applied
* @param xml the xml document to be transformed
* @return a string containing the transformed xml (output of the transformation)
*/
public static String transform(Transformer transformer, String xml)
{
DocumentBuilderFactory dfactory = DocumentBuilderFactory.newInstance();
Document doc = null;
try
{
doc = dfactory.newDocumentBuilder().parse(new InputSource(new StringReader(xml))); // parse the XML content itself, not a URI
ByteArrayOutputStream ba_stream = new ByteArrayOutputStream();
OutputFormat format = new OutputFormat(doc);
format.setIndenting(false);
format.setOmitDocumentType(true);
format.setOmitXMLDeclaration(true);
StringWriter writer = new StringWriter();
XMLSerializer serial = new XMLSerializer(writer,format);
serial.serialize(doc);
transformer.transform(new StreamSource(new ByteArrayInputStream(writer.toString().getBytes())), new StreamResult(ba_stream));
//Prepares the object to be returned
StringBuffer buffer = new StringBuffer();
try {
InputStreamReader isr = new InputStreamReader( new ByteArrayInputStream(ba_stream.toByteArray()),
"UTF8");
Reader in2 = new BufferedReader(isr);
int ch;
while ((ch = in2.read()) > -1) {
buffer.append((char)ch);
}
in2.close();
return buffer.toString();
} catch (Exception e) {
e.printStackTrace();
}
}
catch (Exception e) {
e.printStackTrace();
}
return null;
}
}
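A minimal sketch; the stylesheet and the input document are illustrative:
import org.gcube.application.framework.core.util.TransformXSLT;

public class TransformSketch {
    public static void main(String[] args) {
        // A stylesheet that copies only the title element of the input record.
        String xslt =
              "<xsl:stylesheet version=\"1.0\" xmlns:xsl=\"http://www.w3.org/1999/XSL/Transform\">"
            + "  <xsl:template match=\"/\"><xsl:copy-of select=\"//title\"/></xsl:template>"
            + "</xsl:stylesheet>";
        String xml = "<record><title>Some title</title></record>";
        System.out.println(TransformXSLT.transform(xslt, xml)); // prints the copied <title> element
    }
}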

View File

@@ -1,42 +0,0 @@
package org.gcube.application.framework.core.util;
import java.io.File;
import org.gcube.vomanagement.vomsAdmin.impl.VOMSAdminImpl;
import org.gcube.vomanagement.vomsClient.impl.CredentialsManagerImpl;
import org.gridforum.jgss.ExtendedGSSCredential;
/**
* @author Valia Tsagkalidou (NKUA)
*
*/
public class UserCredential {
/**
* Retrieves the grid credential for a user
* @param username the user name for which the credential will be retrieved
* @param DLname the name of the DL (VRE)
* @return the GSS credential
*/
public static ExtendedGSSCredential getCredential(String username, String DLname)
{
CredentialsManagerImpl man = null;
try {
String sharedDir = Settings.getInstance().getProperty("sharedDir");
System.out.println("file " + sharedDir + "/vomsAPI.properties exists: "+ new File(sharedDir + "/vomsAPI.properties").exists());
// man = new CredentialsManagerImpl(sharedDir + "/vomsAPI.properties");
VOMSAdminImpl vomsAdm = new VOMSAdminImpl(sharedDir + "/vomsAPI.properties");
man = new CredentialsManagerImpl(vomsAdm);
} catch (Exception e1) {
e1.printStackTrace();
}
ExtendedGSSCredential cred = null;
try {
//TODO: put a real password there...
cred = man.getCredentials(username, "", DLname);
} catch (Exception e) {
e.printStackTrace();
}
return cred;
}
}