Data in the tree panel is now cached with the ehcache mechanism

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/portlets/user/databases-manager-portlet@99673 82a268e6-3cf1-43bd-a215-b396298e98cf
Loredana Liccardo 2014-09-10 12:31:20 +00:00
parent af9eb785ef
commit 60421055b7
3 changed files with 519 additions and 89 deletions

pom.xml (47 changed lines)

@@ -112,7 +112,7 @@
<version>[1.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.gcube.applicationsupportlayer</groupId>
<artifactId>aslcore</artifactId>
@@ -151,6 +151,43 @@
<!-- <scope>provided</scope> -->
<!-- </dependency> -->
<dependency>
<groupId>net.sf.ehcache</groupId>
<artifactId>ehcache</artifactId>
<version>2.8.0</version>
</dependency>
<!-- <dependency> -->
<!-- <groupId>jcs</groupId> -->
<!-- <artifactId>jcs</artifactId> -->
<!-- <version>1.3</version> -->
<!-- </dependency> -->
<!-- <dependency> -->
<!-- <groupId>javax.transaction</groupId> -->
<!-- <artifactId>jta</artifactId> -->
<!-- <version>1.1</version> -->
<!-- </dependency> -->
<!-- <dependency> -->
<!-- <groupId>javax.sql</groupId> -->
<!-- <artifactId>jdbc-stdext</artifactId> -->
<!-- <version>2.0</version> -->
<!-- <classifier>sources</classifier> -->
<!-- </dependency> -->
<!-- <dependency> -->
<!-- <groupId>apache</groupId> -->
<!-- <artifactId>commons-logging</artifactId> -->
<!-- <version>1.1.1</version> -->
<!-- </dependency> -->
<!-- <dependency> -->
<!-- <groupId>concurrent</groupId> -->
<!-- <artifactId>concurrent</artifactId> -->
<!-- <version>1.3.4</version> -->
<!-- </dependency> -->
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
@@ -206,6 +243,14 @@
</dependencies>
</dependencyManagement>
<repositories>
<repository>
<id>dnet-deps</id>
<name>dnet-deps</name>
<url>http://maven.research-infrastructures.eu/nexus/content/repositories/dnet-deps//</url>
</repository>
</repositories>
<build>
<!-- Generate compiled stuff in the folder used for developing mode -->
<outputDirectory>${webappDirectory}/WEB-INF/classes</outputDirectory>

GWTdbManagerServiceImpl.java

@@ -5,13 +5,20 @@ import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLStreamHandler;
import javax.servlet.ServletException;
import javax.servlet.http.HttpSession;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Ehcache;
import net.sf.ehcache.config.CacheConfiguration;
import org.apache.regexp.RE;
import org.gcube.application.framework.core.session.ASLSession;
import org.gcube.common.scope.api.ScopeProvider;
@@ -51,9 +58,13 @@ import org.gcube.portlets.user.databasesmanager.server.util.WsUtil;
import com.extjs.gxt.ui.client.data.BasePagingLoadResult;
import com.extjs.gxt.ui.client.data.PagingLoadConfig;
import com.extjs.gxt.ui.client.data.PagingLoadResult;
import com.google.gwt.dom.client.Element;
import com.google.gwt.user.server.rpc.RemoteServiceServlet;
import com.thoughtworks.xstream.XStream;
import org.apache.log4j.Logger;
//import org.apache.jcs.JCS;
//import org.apache.jcs.access.CacheAccess;
//import org.apache.jcs.utils.props.PropertyLoader;
public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements
GWTdbManagerService {
@@ -65,10 +76,85 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements
private static Logger logger = Logger
.getLogger(GWTdbManagerServiceImpl.class);
public GWTdbManagerServiceImpl() {
// private CacheManager cacheManager;
private static Ehcache employeeCache;
public GWTdbManagerServiceImpl() throws Exception {
// create the cache
// CacheManager cacheManager;
// ClassLoader contextClassLoader =
// Thread.currentThread().getContextClassLoader();
// InputStream resourceAsStream =
// contextClassLoader.getResourceAsStream("ehcache.xml");
// cacheManager = CacheManager.create(resourceAsStream);
// TODO: CHANGE THE WAY THE FILE IS RETRIEVED
try {
// CacheManager cacheManager = CacheManager
// .newInstance("/home/loredana/workspace/databases-manager-portlet-TRUNK/configCache/encache.xml");
// InputStream is =
// ClassLoader.getSystemResourceAsStream("encache.xml");
// CacheManager cacheManager = CacheManager.newInstance(is);
// is.close();
URL url = getClass().getResource("/encache.xml");
CacheManager cacheManager = CacheManager.newInstance(url);
// get the cache
employeeCache = cacheManager.getEhcache("DBCache");
// set the disk store path in the configuration file encache.xml
// CacheConfiguration config = employeeCache.getCacheConfiguration();
// String DiskCacheFolderName="DBManagerDisk";
//// config.setDiskStorePath(this.getServletContext().getRealPath("")+DiskCacheFolderName);
//
// config.setDiskStorePath(this.getServletContext().getRealPath("")+DiskCacheFolderName);
} catch (Exception e) {
// TODO: handle exception
e.printStackTrace();
throw new Exception("Failed to get cache. " + e);
}
}
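The constructor builds the manager with CacheManager.newInstance(URL), which creates a fresh CacheManager on every call. A minimal alternative sketch, assuming ehcache 2.x singleton semantics (not part of this commit):

// CacheManager.create(URL) returns the JVM-wide singleton for this
// configuration, so repeated servlet construction or webapp redeploys
// do not accumulate extra managers the way newInstance(URL) can.
URL url = getClass().getResource("/encache.xml");
CacheManager cacheManager = CacheManager.create(url);
Ehcache cache = cacheManager.getEhcache("DBCache");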
@Override
public void init() throws ServletException {
super.init();
// TODO: set the disk cache path in code rather than in the encache.xml file
// String path = System.getenv("CATALINA_TMPDIR");
// System.out.println("Path: " + this.getServletContext().getRealPath(""));
// System.out.println("Path: " + path);
//create folder for caching data
// String DiskCacheFolderName="DBManagerDisk";
// File f = new File(this.getServletContext().getRealPath("")+DiskCacheFolderName);
// CacheConfiguration config = employeeCache.getCacheConfiguration();
// String DiskCacheFolderName="DBManagerDisk";
// config.setDiskStorePath(this.getServletContext().getRealPath("")+DiskCacheFolderName);
// config.setDiskStorePath(path+"/"+DiskCacheFolderName);
// File f = new File(path+"/"+DiskCacheFolderName);
//
// if (!f.exists()){
// f.mkdir();
// }
}
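The TODO above wants the disk store path set in code rather than in encache.xml. A hedged sketch using ehcache 2.x's programmatic configuration; the CATALINA_TMPDIR lookup and folder name are assumptions carried over from the commented-out lines:

import net.sf.ehcache.config.Configuration;
import net.sf.ehcache.config.DiskStoreConfiguration;

// Build a manager whose disk store path is chosen at runtime; the caches
// themselves would still need to be registered (e.g. via addCache) or
// merged with the XML configuration.
String path = System.getenv("CATALINA_TMPDIR");
Configuration configuration = new Configuration()
        .diskStore(new DiskStoreConfiguration().path(path + "/DBManagerDisk"));
CacheManager cacheManager = CacheManager.create(configuration);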
private void initVariables() {
ASLSession session = WsUtil.getAslSession(this.getThreadLocalRequest()
.getSession());
@@ -138,31 +224,101 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements
if (inputParameters.size() != 0) {
// print check
logger.info("dbmanager-> algorithm input parameters retrieved");
}
// // print check input parameters
// for (int i = 0; i < inputParameters.size(); i++) {
// logger.info(inputParameters.get(i).getName());
// }
// // print check input parameters
// for (int i = 0; i < inputParameters.size(); i++) {
// logger.info(inputParameters.get(i).getName());
// }
// create data structure for data output
ComputationOutput outputData = new ComputationOutput();
// computationId
String computationId = startComputation(algorithmId, inputParameters,
outputData);
// check whether the value is in the cache: if the data is not
// cached the computation is started, otherwise the data is
// retrieved from the cache.
// print check
// retrieve data
// logger.info("output data retrieved");
try {
// //get data from cache
// Ehcache employeeCache;
// employeeCache = cacheManager.getEhcache("myCache");
// data output
LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
mapValues = outputData.getMapValues();
// get data from cache
// check whether data exists, using the input parameters as the key
String key = inputParameters.get(0).getDefaultValue();
// System.out.println("***KEY: " + key);
// net.sf.ehcache.Element dataFromCache =
// employeeCache.get(key);
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
Object value = null;
if (dataFromCache != null) {
value = dataFromCache.getObjectValue();
// System.out.println("***GETTING DATA FROM CACHE");
}
if (value != null) {
outputParameters = (List<FileModel>) value;
} else {
// start the computation
// System.out.println("***STARTING THE COMPUTATION");
// create data structure for data output
ComputationOutput outputData = new ComputationOutput();
// computationId
String computationId = startComputation(algorithmId,
inputParameters, outputData);
// print check
// retrieve data
// logger.info("output data retrieved");
// data output
LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
mapValues = outputData.getMapValues();
for (int i = 0; i < mapValues.size(); i++) {
FileModel obj = new FileModel(mapValues.get(String
.valueOf(i)));
// obj.setIsLoaded(true);
outputParameters.add(obj);
}
if (outputParameters != null
&& outputParameters.size() != 0) {
// put data in cache
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
inputParameters.get(0).getDefaultValue(),
outputParameters);
insertDataIntoCache(dataToCache);
// employeeCache.put(dataToCache);
}
}
} catch (Exception e) {
// TODO: handle exception
// e.printStackTrace();
throw new Exception("Failed to load data. " + e);
}
// // create data structure for data output
// ComputationOutput outputData = new ComputationOutput();
// // computationId
// String computationId = startComputation(algorithmId,
// inputParameters,
// outputData);
//
// // print check
// // retrieve data
// // logger.info("output data retrieved");
//
// // data output
// LinkedHashMap<String, String> mapValues = new
// LinkedHashMap<String, String>();
// mapValues = outputData.getMapValues();
//
// for (int i = 0; i < mapValues.size(); i++) {
// FileModel obj = new FileModel(mapValues.get(String.valueOf(i)));
// // obj.setIsLoaded(true);
// outputParameters.add(obj);
// }
for (int i = 0; i < mapValues.size(); i++) {
FileModel obj = new FileModel(mapValues.get(String.valueOf(i)));
// obj.setIsLoaded(true);
outputParameters.add(obj);
}
return outputParameters;
}
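The method above is a cache-aside pattern: look the key up, start the computation only on a miss, then store the result for the next caller. A distilled sketch of the same flow; the getOrCompute helper and the Parameter type are illustrative, not part of the source:

// Illustrative helper: one cache-aside round trip.
@SuppressWarnings("unchecked")
private List<FileModel> getOrCompute(String key, String algorithmId,
        List<Parameter> inputParameters) throws Exception {
    // 1. cache lookup
    net.sf.ehcache.Element cached = getDataFromCache(key);
    if (cached != null && cached.getObjectValue() != null) {
        return (List<FileModel>) cached.getObjectValue();
    }
    // 2. miss: run the computation
    ComputationOutput outputData = new ComputationOutput();
    startComputation(algorithmId, inputParameters, outputData);
    LinkedHashMap<String, String> mapValues = outputData.getMapValues();
    List<FileModel> result = new ArrayList<FileModel>();
    for (int i = 0; i < mapValues.size(); i++) {
        result.add(new FileModel(mapValues.get(String.valueOf(i))));
    }
    // 3. populate the cache for subsequent calls
    if (!result.isEmpty()) {
        insertDataIntoCache(new net.sf.ehcache.Element(key, result));
    }
    return result;
}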
@@ -197,40 +353,125 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements
if (inputParameters.size() != 0) {
// print check
logger.info("dbmanager-> algorithm input parameters retrieved");
}
inputParameters.get(0).setValue(resourceName);
// print check
// logger.info(inputParameters.get(0).getName());
// create data structure
ComputationOutput outputData = new ComputationOutput();
// computation id
String computationId = startComputation(algorithmId, inputParameters,
outputData);
// print check
// retrieve data
// logger.info("output data retrieved");
// data output values
LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
// data output keys
LinkedHashMap<String, String> mapKeys = new LinkedHashMap<String, String>();
mapValues = outputData.getMapValues();
mapKeys = outputData.getmapKeys();
for (int i = 0; i < mapValues.size(); i++) {
FileModel obj = new FileModel(mapValues.get(String.valueOf(i)));
// obj.setIsLoaded(true);
inputParameters.get(0).setValue(resourceName);
// print check
// logger.info("value: " + mapValues.get(String.valueOf(i)));
// logger.info("key: " + mapKeys.get(String.valueOf(i)));
outputParameters.put(mapKeys.get(String.valueOf(i)), obj);
// logger.info(inputParameters.get(0).getName());
try {
// get data from cache
// check whether data exists, using the input parameters as the key
String key = inputParameters.get(0).getValue();
// System.out.println("***KEY: " + key);
// net.sf.ehcache.Element dataFromCache =
// employeeCache.get(key);
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
Object value = null;
if (dataFromCache != null) {
value = dataFromCache.getObjectValue();
}
if (value != null) {
outputParameters = (LinkedHashMap<String, FileModel>) value;
// System.out.println("***GETTING DATA FROM CACHE");
} else {
// start the computation
// System.out.println("***STARTING THE COMPUTATION");
// create data structure
ComputationOutput outputData = new ComputationOutput();
// computation id
String computationId = startComputation(algorithmId,
inputParameters, outputData);
// print check
// retrieve data
// logger.info("output data retrieved");
// data output values
LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
// data output keys
LinkedHashMap<String, String> mapKeys = new LinkedHashMap<String, String>();
mapValues = outputData.getMapValues();
mapKeys = outputData.getmapKeys();
for (int i = 0; i < mapValues.size(); i++) {
FileModel obj = new FileModel(mapValues.get(String
.valueOf(i)));
// obj.setIsLoaded(true);
// print check
// logger.info("value: " +
// mapValues.get(String.valueOf(i)));
// logger.info("key: " +
// mapKeys.get(String.valueOf(i)));
outputParameters.put(mapKeys.get(String.valueOf(i)),
obj);
}
// write data in cache
if (outputParameters != null
&& outputParameters.size() != 0) {
// put data in cache
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
inputParameters.get(0).getValue(),
outputParameters);
insertDataIntoCache(dataToCache);
// employeeCache.put(dataToCache);
}
}
} catch (Exception e) {
// TODO: handle exception
e.printStackTrace();
throw new Exception("Failed to load data. " + e);
}
}
// // create data structure
// ComputationOutput outputData = new ComputationOutput();
// // computation id
// String computationId = startComputation(algorithmId, inputParameters,
// outputData);
//
// // print check
// // retrieve data
// // logger.info("output data retrieved");
//
// // data output values
// LinkedHashMap<String, String> mapValues = new LinkedHashMap<String,
// String>();
// // data output keys
// LinkedHashMap<String, String> mapKeys = new LinkedHashMap<String,
// String>();
//
// mapValues = outputData.getMapValues();
// mapKeys = outputData.getmapKeys();
//
// for (int i = 0; i < mapValues.size(); i++) {
// FileModel obj = new FileModel(mapValues.get(String.valueOf(i)));
// // obj.setIsLoaded(true);
//
// // print check
// // logger.info("value: " + mapValues.get(String.valueOf(i)));
// // logger.info("key: " + mapKeys.get(String.valueOf(i)));
// outputParameters.put(mapKeys.get(String.valueOf(i)), obj);
// }
//
// // write data in cache
// if (outputParameters != null && outputParameters.size() != 0) {
// // put data in cache
// net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
// inputParameters.get(0).getDefaultValue(), outputParameters);
// employeeCache.put(dataToCache);
// }
return outputParameters;
}
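net.sf.ehcache.Element stores its value as Object, which is why each read above needs an unchecked cast to List<FileModel> or LinkedHashMap<String, FileModel>. A hypothetical helper that keeps the cast (and its @SuppressWarnings) in one place:

// Hypothetical typed read; T is inferred from the assignment target.
@SuppressWarnings("unchecked")
private <T> T readFromCache(String key) {
    net.sf.ehcache.Element element = getDataFromCache(key);
    return element == null ? null : (T) element.getObjectValue();
}

A caller would then write, for example: LinkedHashMap<String, FileModel> outputParameters = readFromCache(key);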
@@ -262,49 +503,131 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements
// get input parameters of the algorithm specified by id
inputParameters = getParameters(algorithmId);
if (inputParameters.size() != 0) {
// print check
logger.info("dbmanager-> algorithm input parameters retrieved");
}
// print check algorithm input parameters
// for (int i = 0; i < inputParameters.size(); i++) {
// logger.info(inputParameters.get(i).getName());
// }
inputParameters.get(0).setValue(dataInput.get("ResourceName"));
inputParameters.get(1).setValue(dataInput.get("DatabaseName"));
// print check
logger.info("dbmanager-> ResourceName: "
+ dataInput.get("ResourceName"));
logger.info("dbmanager-> DatabaseName: "
+ dataInput.get("DatabaseName"));
// create data structure
ComputationOutput outputData = new ComputationOutput();
// computation id
String computationId = startComputation(algorithmId, inputParameters,
outputData);
if (inputParameters.size() != 0) {
// print check
logger.info("dbmanager-> algorithm input parameters retrieved");
// print check
// retrieve data
// logger.info("dbmanager-> output data retrieved");
inputParameters.get(0).setValue(dataInput.get("ResourceName"));
inputParameters.get(1).setValue(dataInput.get("DatabaseName"));
// data output values
LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
// data output keys
LinkedHashMap<String, String> mapKeys = new LinkedHashMap<String, String>();
// print check algorithm input parameters
// for (int i = 0; i < inputParameters.size(); i++) {
// logger.info(inputParameters.get(i).getName());
// }
mapValues = outputData.getMapValues();
mapKeys = outputData.getmapKeys();
try {
// get data from cache
// check whether data exists, using the input parameters as the key
String key = inputParameters.get(0).getValue()
+ inputParameters.get(1).getValue();
// System.out.println("key: " + key);
// net.sf.ehcache.Element dataFromCache =
// employeeCache.get(key);
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
Object value = null;
if (dataFromCache != null) {
value = dataFromCache.getObjectValue();
}
if (value != null) {
outputParameters = (List<FileModel>) value;
} else {
// start the computation
// create data structure
ComputationOutput outputData = new ComputationOutput();
// computation id
String computationId = startComputation(algorithmId,
inputParameters, outputData);
// print check
// retrieve data
// logger.info("dbmanager-> output data retrieved");
// data output values
LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
// data output keys
LinkedHashMap<String, String> mapKeys = new LinkedHashMap<String, String>();
mapValues = outputData.getMapValues();
mapKeys = outputData.getmapKeys();
for (int i = 0; i < mapValues.size(); i++) {
FileModel obj = new FileModel(mapValues.get(String
.valueOf(i)));
// obj.setIsSchema(true);
// obj.setIsLoaded(true);
outputParameters.add(obj);
}
// write data in cache
if (outputParameters != null
&& outputParameters.size() != 0) {
// put data in cache
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
inputParameters.get(0).getValue()
+ inputParameters.get(1).getValue(),
outputParameters);
insertDataIntoCache(dataToCache);
// employeeCache.put(dataToCache);
}
}
} catch (Exception e) {
// TODO: handle exception
e.printStackTrace();
throw new Exception("Failed to load data. " + e);
}
for (int i = 0; i < mapValues.size(); i++) {
FileModel obj = new FileModel(mapValues.get(String.valueOf(i)));
// obj.setIsSchema(true);
// obj.setIsLoaded(true);
outputParameters.add(obj);
}
// // create data structure
// ComputationOutput outputData = new ComputationOutput();
// // computation id
// String computationId = startComputation(algorithmId, inputParameters,
// outputData);
//
// // print check
// // retrieve data
// // logger.info("dbmanager-> output data retrieved");
//
// // data output values
// LinkedHashMap<String, String> mapValues = new LinkedHashMap<String,
// String>();
// // data output keys
// LinkedHashMap<String, String> mapKeys = new LinkedHashMap<String,
// String>();
//
// mapValues = outputData.getMapValues();
// mapKeys = outputData.getmapKeys();
//
// for (int i = 0; i < mapValues.size(); i++) {
// FileModel obj = new FileModel(mapValues.get(String.valueOf(i)));
// // obj.setIsSchema(true);
// // obj.setIsLoaded(true);
// outputParameters.add(obj);
// }
//
// // write data in cache
// if (outputParameters != null
// && outputParameters.size() != 0) {
// // put data in cache
// net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
// inputParameters.get(0).getDefaultValue(),
// outputParameters);
// employeeCache.put(dataToCache);
// }
return outputParameters;
}
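Here the cache key is the bare concatenation of two parameter values, so distinct pairs such as ("ab", "c") and ("a", "bc") would collide on one entry. A hedged sketch of a key builder with an explicit separator (an assumption, not in this commit):

// Hypothetical key builder: the separator keeps ("ab","c") and ("a","bc")
// from producing the same key.
private static String cacheKey(String... parts) {
    StringBuilder sb = new StringBuilder();
    for (String part : parts) {
        if (sb.length() > 0) {
            sb.append('|'); // assumed not to occur in resource/database names
        }
        sb.append(part);
    }
    return sb.toString();
}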
@@ -733,7 +1056,7 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements
outputData);
// print check on retrieving data
// logger.info("dbmanager-> output data retrieved");
// logger.info("dbmanager-> output data retrieved");
// data output values
LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
@@ -808,7 +1131,7 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements
outputData);
// print check on retrieving data
// logger.info("dbmanager-> output data retrieved");
// logger.info("dbmanager-> output data retrieved");
// data output values
LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
@@ -998,10 +1321,10 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements
private List<String> parse(String row) throws Exception {
String delimiter = ",";
// print check
// logger.info("row: " + row);
List<String> elements = new ArrayList<String>();
String phrase = row;
int idxdelim = -1;
@@ -1507,7 +1830,7 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements
// System.out.println("computation removed");
} catch (Exception e) {
e.printStackTrace();
logger.info("dbmanager-> Could not remove the computation ID "
+ computationId + " corresponding to jobID "
+ uidSubmitQuery);
@@ -1525,4 +1848,28 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements
removeResult(submitQueryUID);
}
}
private synchronized void insertDataIntoCache(net.sf.ehcache.Element data) {
employeeCache.put(data);
}
private synchronized net.sf.ehcache.Element getDataFromCache(String key) {
net.sf.ehcache.Element data = employeeCache.get(key);
return data;
}
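Both helpers serialise all cache access through method-level locks, although Ehcache's element operations are themselves thread-safe. If the locking ever proved a bottleneck, a sketch using putIfAbsent (present on net.sf.ehcache.Ehcache in the 2.x line) could provide an atomic get-or-insert without it:

// Sketch: rely on the cache's own thread safety. putIfAbsent returns the
// element already mapped to the key, or null if the new one was inserted.
private net.sf.ehcache.Element getOrInsert(net.sf.ehcache.Element fresh) {
    net.sf.ehcache.Element existing = employeeCache.putIfAbsent(fresh);
    return existing != null ? existing : fresh;
}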
// clear the cache on user request
public void refreshDataTree() throws Exception {
refreshCache();
}
private synchronized void refreshCache() throws Exception {
try {
employeeCache.removeAll();
} catch (Exception e) {
// TODO: handle exception
throw new Exception("Failure to clear the cache. " + e);
}
}
}

encache.xml (new file)

@@ -0,0 +1,38 @@
<?xml version="1.0" encoding="UTF-8"?>
<ehcache xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="ehcache.xsd"
updateCheck="true" monitoring="autodetect"
dynamicConfig="true">
<diskStore path="/home/loredana/Desktop/DBManagerDisk"/>
<defaultCache
maxEntriesLocalHeap="10000"
eternal="false"
timeToIdleSeconds="120"
timeToLiveSeconds="120"
diskSpoolBufferSizeMB="30"
maxEntriesLocalDisk="10000000"
diskExpiryThreadIntervalSeconds="120"
memoryStoreEvictionPolicy="LRU"
statistics="false">
<persistence strategy="localTempSwap"/>
</defaultCache>
<cache name="DBCache"
maxBytesLocalHeap="200m"
maxBytesLocalDisk="2g"
eternal="false"
diskSpoolBufferSizeMB="30"
timeToIdleSeconds="0"
timeToLiveSeconds="172800"
memoryStoreEvictionPolicy="LRU"
transactionalMode="off"
overflowToDisk="true"
diskPersistent="true">
<!-- <persistence strategy="localTempSwap"/> -->
</cache>
</ehcache>
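For DBCache, timeToLiveSeconds="172800" expires entries after 48 hours, the 200m heap limit overflows to a 2g disk store, and diskPersistent="true" keeps that store across restarts. A small sketch for logging the values the CacheManager actually parsed (logger being the servlet's existing log4j logger):

// Read back the DBCache settings loaded from encache.xml.
CacheConfiguration cfg = employeeCache.getCacheConfiguration();
logger.info("DBCache TTL (s): " + cfg.getTimeToLiveSeconds());    // 172800 = 48 h
logger.info("DBCache heap bytes: " + cfg.getMaxBytesLocalHeap()); // 200m
logger.info("DBCache disk bytes: " + cfg.getMaxBytesLocalDisk()); // 2g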