// databases-manager-portlet/src/main/java/org/gcube/portlets/user/databasesmanager/server/GWTdbManagerServiceImpl.java

package org.gcube.portlets.user.databasesmanager.server;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLStreamHandler;
import java.io.OutputStreamWriter;
import java.io.FileOutputStream;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Status;
import net.sf.ehcache.config.CacheConfiguration;
import net.sf.ehcache.config.MemoryUnit;
import net.sf.ehcache.config.SizeOfPolicyConfiguration;
import net.sf.ehcache.config.SizeOfPolicyConfiguration.MaxDepthExceededBehavior;
import net.sf.ehcache.store.MemoryStoreEvictionPolicy;
import org.apache.regexp.RE;
import org.gcube.application.framework.core.session.ASLSession;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanager.storageclient.model.protocol.smp.SMPURLConnection;
import org.gcube.data.analysis.statisticalmanager.proxies.StatisticalManagerDSL;
import org.gcube.data.analysis.statisticalmanager.proxies.StatisticalManagerFactory;
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMAlgorithm;
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMComputationConfig;
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMComputationRequest;
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMGroupedAlgorithms;
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMListGroupedAlgorithms;
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMOperationStatus;
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMParameter;
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMParameters;
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMResourceType;
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMTypeParameter;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMAbstractResource;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMComputation;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMEntries;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMFile;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMInputEntry;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMObject;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMOperationInfo;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMResource;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMTable;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.StatisticalServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.portlets.user.databasesmanager.client.GWTdbManagerService;
import org.gcube.portlets.user.databasesmanager.client.datamodel.ComputationOutput;
import org.gcube.portlets.user.databasesmanager.client.datamodel.FileModel;
import org.gcube.portlets.user.databasesmanager.client.datamodel.GeneralOutputFromServlet;
import org.gcube.portlets.user.databasesmanager.client.datamodel.Parameter;
import org.gcube.portlets.user.databasesmanager.client.datamodel.Result;
import org.gcube.portlets.user.databasesmanager.client.datamodel.SamplingResultWithFileFromServlet;
import org.gcube.portlets.user.databasesmanager.client.datamodel.Row;
import org.gcube.portlets.user.databasesmanager.client.datamodel.SubmitQueryResultWithFileFromServlet;
import org.gcube.portlets.user.databasesmanager.server.util.DataExchangedThroughQueue;
import org.gcube.portlets.user.databasesmanager.server.util.SessionUtil;
import org.gcube.portlets.user.databasesmanager.shared.ConstantsPortlet;
import org.gcube.portlets.user.databasesmanager.shared.SessionExpiredException;
import org.gcube.portlets.user.databasesmanager.shared.StatisticalManagerException;
import com.extjs.gxt.ui.client.data.BasePagingLoadResult;
import com.extjs.gxt.ui.client.data.PagingLoadConfig;
import com.extjs.gxt.ui.client.data.PagingLoadResult;
import com.google.gwt.user.server.rpc.RemoteServiceServlet;
import com.thoughtworks.xstream.XStream;
import org.apache.log4j.Logger;
public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements
GWTdbManagerService {
// logger
private static Logger logger = Logger
.getLogger(GWTdbManagerServiceImpl.class);
private static Cache DBCache;
private static CacheManager cacheManager;
public static List<String> listAlgorithms;
// variables to check thread execution
private static HashMap<String, Boolean> threadsStarted = new HashMap<String, Boolean>();
private static boolean endThread = false;
private static boolean threadExecutionFinished = false;
private static ConcurrentLinkedQueue<DataExchangedThroughQueue> queue = new ConcurrentLinkedQueue<DataExchangedThroughQueue>();
private static ThreadDataLoader dataLoader;
private static int smComputationNumber;
private static int cacheHitsNumber;
private static int smComputationQuerySamplingNumber;
private static int cacheQuerySamplingHitsNumber;
public GWTdbManagerServiceImpl() throws Exception {
}
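// servlet start-up: create the ehcache instance used to cache
// Statistical Manager results (removing any stale cache or disk store
// left by a previous deployment), create the computationResult folder
// that exposes the generated csv files to the client, and instantiate
// the thread that preloads the data tree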
@Override
public void init() throws ServletException {
super.init();
try {
// cache folder
String cachePath = System.getenv("CATALINA_TMPDIR") + "/DBManager";
logger.info("dbmanager-> Creating cache in folder: " + cachePath);
cacheManager = CacheManager.create();
if (cacheManager == null) {
logger.error("dbmanager-> Error while starting the servlet. Failed to get the cacheManager. cacheManager null");
throw new ServletException(
"Error while starting the servlet. Failed to get the cacheManager. cacheManager null");
}
// the check above guarantees a non-null cacheManager from here on
if (cacheManager.cacheExists("DBCache")) {
cacheManager.removeCache("DBCache");
logger.info("dbmanager-> cache removed");
} else {
// remove a stale disk store left over by a previous run
File f = new File(cachePath + "/" + "DBCache.data");
if (f.exists()) {
logger.info("dbmanager-> File DBCache.data removed: "
+ f.delete());
}
}
createCache(cachePath);
if (DBCache == null) {
logger.error("dbmanager-> Error while starting the servlet. Failed to get the cache. cache null");
throw new ServletException(
"Error while starting the servlet. Failed to get the cache. cache null");
} else {
cacheManager.addCache(DBCache);
logger.info("dbmanager-> cache added to the cacheManager");
}
// create folder that will contain file samplings and submitquery
// result
// in the /webapps/folder_portlet
String path = this.getServletContext().getRealPath("") + "/"
+ "computationResult";
File computationResult = new File(path);
if (!computationResult.exists()) {
computationResult.mkdir();
logger.info("dbmanager-> Folder computationResult created in : "
+ this.getServletContext().getRealPath(""));
}
// create the thread DataLoader
dataLoader = new ThreadDataLoader();
logger.info("dbmanager-> Thread Dataloader created");
smComputationNumber = 0;
cacheHitsNumber = 0;
smComputationQuerySamplingNumber = 0;
cacheQuerySamplingHitsNumber = 0;
} catch (Exception e) {
logger.error("dbmanager-> ", e);
throw new ServletException(
"Error while starting the servlet. Exception: " + e);
}
}
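// programmatic cache configuration: LRU eviction, entries live at most
// two days (172800 s), up to 200 MB on heap with overflow to a 2 GB,
// non-persistent disk store located in cachePath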
public void createCache(String cachePath) throws Exception {
try {
CacheConfiguration config = new CacheConfiguration();
config.setName("DBCache");
config.memoryStoreEvictionPolicy(MemoryStoreEvictionPolicy.LRU);
config.eternal(false);
config.timeToLiveSeconds(172800);
config.timeToIdleSeconds(0);
// config.maxEntriesLocalHeap(10000);
config.diskExpiryThreadIntervalSeconds(120);
config.maxBytesLocalDisk(2, MemoryUnit.GIGABYTES);
config.maxBytesLocalHeap(200, MemoryUnit.MEGABYTES);
config.diskSpoolBufferSizeMB(30);
config.overflowToDisk(true);
config.diskPersistent(false);
config.diskStorePath(cachePath);
// SizeOfPolicyConfiguration size = new SizeOfPolicyConfiguration();
// size.setMaxDepth(1000);
// size.maxDepthExceededBehavior(MaxDepthExceededBehavior.ABORT);
// config.sizeOfPolicy(size);
DBCache = new Cache(config);
} catch (Exception e) {
// logger.error("dbmanager-> Error while starting the servlet. Failed to create the cache",
// e);
throw new Exception(
"Error while starting the servlet. Failed to create the cache. Exception: "
+ e);
}
}
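// servlet shutdown: signal the data-loader thread to stop, remove the
// cache and shut the cacheManager down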
@Override
public void destroy() {
super.destroy();
// set endThread variable
setEndThreadvariable(true);
try {
CacheManager cacheManager = CacheManager.getInstance();
if (cacheManager != null) {
if (cacheManager.cacheExists("DBCache")) {
// System.out.println("*** cache exist");
cacheManager.removeCache("DBCache");
// cacheManager.removalAll();
// System.out.println("*** cache removed");
logger.info("dbmanager-> DBCache removed");
}
cacheManager.shutdown();
} else {
logger.error("dbmanager-> Error while destroying the servlet. Failed to get the cacheManager. cacheManager null");
throw new Exception(
"Error while destroying the servlet. Failed to get the cacheManager. cacheManager null");
}
} catch (Exception e) {
logger.error(
"dbmanager-> Error while destroying the servlet. Exception:",
e);
// e.printStackTrace();
}
}
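// (re)initialize the per-session maps used to track computation ids,
// job statuses and cached submitQuery results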
private void initVariables(ASLSession session) {
// Hashmap that contains computationId with a uid key
HashMap<String, String> computationIDMap = new HashMap<String, String>();
session.setAttribute("ComputationIDList", computationIDMap);
// Hashmap that contains the job status with a uid key
HashMap<String, String> JobStatusMap = new HashMap<String, String>();
session.setAttribute("JobStatusList", JobStatusMap);
// map that records, for each uid submitQuery request, whether cached
// data were used (in which case no computation was started)
HashMap<String, Boolean> listSubmitQueryUIDCachedData = new HashMap<String, Boolean>();
session.setAttribute("listSubmitQueryUIDCachedData",
listSubmitQueryUIDCachedData);
// map that stores, for each uid submitQuery request, the cache key
// under which the query result is kept for the loadSubmitResult rpc
HashMap<String, String> listKeySubmitQueryResult = new HashMap<String, String>();
session.setAttribute("listKeySubmitQueryResult",
listKeySubmitQueryResult);
// print cache and computation usage statistics
logger.info("dbmanager-> CheckInformation: cache hits number " + cacheHitsNumber);
logger.info("dbmanager-> CheckInformation: SM computation number " + smComputationNumber);
logger.info("dbmanager-> CheckInformation: cache Query Sampling hits number " + cacheQuerySamplingHitsNumber);
logger.info("dbmanager-> CheckInformation: SM Query Sampling computation number " + smComputationQuerySamplingNumber);
}
// to get resources from IS
@Override
public List<FileModel> getResource() throws Exception {
// session check
if (isSessionExpired())
throw new SessionExpiredException();
try {
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
// get scope
String scope = session.getScope();
// check if the thread is already started
Boolean value = getThreadStarted(scope);
if ((value == null) || (value.booleanValue() == false)) {
DataExchangedThroughQueue dataqueue = new DataExchangedThroughQueue(
scope);
queue.offer(dataqueue);
Thread t = new Thread(dataLoader);
t.start();
logger.info("dbmanager-> Thread DataLoader started in order to load data tree");
}
// initialize variables with application startup
initVariables(session);
return recoverResources(scope);
} catch (Exception e) {
logger.error("dbmanager-> ", e);
throw e;
}
}
// to get information about databases of a resource
@Override
public LinkedHashMap<String, FileModel> getDBInfo(String resourceName)
throws Exception {
// session check
if (isSessionExpired())
throw new SessionExpiredException();
try {
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
// get scope
String scope = session.getScope();
return recoverDatabases(scope, resourceName);
} catch (Exception e) {
logger.error("dbmanager-> ", e);
throw e;
}
}
// to get schema for a database
@Override
public List<FileModel> getDBSchema(LinkedHashMap<String, String> dataInput)
throws Exception {
// session check
if (isSessionExpired())
throw new SessionExpiredException();
try {
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
// get scope
String scope = session.getScope();
return recoverSchema(scope, dataInput);
} catch (Exception e) {
logger.error("dbmanager-> ", e);
throw e;
}
}
// to get tables
private List<Result> getTables(LinkedHashMap<String, String> dataInput,
String elementType) throws Exception {
try {
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
// get scope
String scope = session.getScope();
return recoverTables(scope, dataInput, elementType);
} catch (Exception e) {
logger.error("dbmanager-> ", e);
throw e;
}
}
// to load tables
@Override
public PagingLoadResult<Result> LoadTables(PagingLoadConfig config,
LinkedHashMap<String, String> dataInput, String elementType,
boolean SearchTable, String keyword) throws Exception {
// session check
if (isSessionExpired())
throw new SessionExpiredException();
try {
// get tables
List<Result> result = getTables(dataInput, elementType);
// Create a sublist and add data to list according
// to the limit and offset value of the config
List<Result> sublist = new ArrayList<Result>();
BasePagingLoadResult loadResult = null;
// print check on the search
// logger.info("Searching in the table: " + SearchTable);
// logger.info("Keyword to search: " + keyword);
int start = config.getOffset();
int limit = result.size();
if (config.getLimit() > 0) {
limit = Math.min(start + config.getLimit(), limit);
}
int totalNumber = result.size();
if ((SearchTable == false) || keyword == null
|| keyword.length() == 0) {
sublist = new ArrayList<Result>(result.subList(start, limit));
} else {
// filter the tables whose name starts with the keyword
// (case-insensitive)
for (int i = 0; i < result.size(); i++) {
if ((result.get(i).getValue().toLowerCase())
.startsWith(keyword.toLowerCase())) {
sublist.add(result.get(i));
}
}
int sublen = sublist.size();
totalNumber = sublen;
// apply the offset and the page limit to the filtered list
limit = sublen;
if (config.getLimit() > 0) {
limit = Math.min(start + config.getLimit(), sublen);
}
sublist = (start < sublen) ? new ArrayList<Result>(
sublist.subList(start, limit)) : new ArrayList<Result>();
}
// print check
// logger.info("result size: " + totalNumber);
// logger.info("limit: " + limit);
// logger.info("offset: " + config.getOffset());
// logger.info("start: " + start);
loadResult = new BasePagingLoadResult<Result>(sublist,
config.getOffset(), totalNumber);
return loadResult;
} catch (Exception e) {
logger.error("dbmanager-> ", e);
if (!(e instanceof StatisticalManagerException)) {
// GWT can't serialize all exceptions
throw new Exception(
"Error in server while loading data. Exception: " + e);
}
throw e;
}
}
// to submit a query: the result is cached under a key built from the
// scope, the algorithm id, the input parameter values and the query
// parsed to remove extra spaces
@Override
public SubmitQueryResultWithFileFromServlet submitQuery(
LinkedHashMap<String, String> dataDB, String query,
boolean valueReadOnlyQuery, boolean smartCorrectionQuery,
String language, String UID) throws Exception {
// session check
if (isSessionExpired())
throw new SessionExpiredException();
try {
logger.info("Submit Query Request received. Starting to manage the request.");
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
// get scope
String scope = session.getScope();
logger.info("dbmanager-> Dialect used for smart correction: "
+ language);
// data input
List<Parameter> inputParameters = new ArrayList<Parameter>();
// data output
List<Result> output = null;
SubmitQueryResultWithFileFromServlet result = null;
// list that contains table attributes
List<String> listAttributes = null;
// converted query
String convertedQuery = "";
String algorithmId = ConstantsPortlet.ALGID_SUBMITQUERY;
// print check
String rs = dataDB.get("ResourceName");
String db = dataDB.get("DatabaseName");
// print check
logger.info("dbmanager-> ResourceName: " + rs);
logger.info("dbmanager-> DatabaseName: " + db);
logger.info("dbmanager-> Query: " + query);
logger.info("dbmanager-> SmartCorrections check: "
+ smartCorrectionQuery);
if ((rs == null) || (rs.equals(""))) {
throw new Exception("Unable to load data");
}
if ((db == null) || (db.equals(""))) {
throw new Exception("Unable to load data");
}
if ((query == null) || (query.equals(""))) {
throw new Exception("Unable to load data");
}
// set input parameters
Parameter resource = new Parameter("ResourceName", "", "String", "");
Parameter database = new Parameter("DatabaseName", "", "String", "");
Parameter readOnlyQuery = new Parameter("Read-Only Query", "",
"Boolean", "true");
Parameter applySmartCorrection = new Parameter(
"Apply Smart Correction", "", "Boolean", "true");
Parameter lng = new Parameter("Language", "", "NONE", "NONE");
Parameter q = new Parameter("Query", "", "String", "");
inputParameters.add(resource);
inputParameters.add(database);
inputParameters.add(readOnlyQuery);
inputParameters.add(applySmartCorrection);
inputParameters.add(lng);
inputParameters.add(q);
inputParameters.get(0).setValue(rs);
inputParameters.get(1).setValue(db);
inputParameters.get(2).setValue(String.valueOf(valueReadOnlyQuery));
inputParameters.get(3).setValue(
String.valueOf(smartCorrectionQuery));
inputParameters.get(4).setValue(language);
inputParameters.get(5).setValue(query);
// get data from cache
// check if data exist considering as key the input parameters
// parse the query in order to remove spaces
String queryParsed = parseQuery(inputParameters.get(5).getValue());
// get data sent to client calling the submitQuery
String keyData = scope + algorithmId
+ inputParameters.get(0).getValue()
+ inputParameters.get(1).getValue()
+ inputParameters.get(2).getValue()
+ inputParameters.get(3).getValue()
+ inputParameters.get(4).getValue() + queryParsed;
// System.out.println("submitQuery KEY:" + keyData);
net.sf.ehcache.Element dataFromCache = getDataFromCache(keyData);
// key to get query result sent to client calling loadSubmitResult
String keySubmitQueryResult = keyData + "_SubmitQueryResult";
updateListKeySubmitQueryResult(UID, keySubmitQueryResult);
net.sf.ehcache.Element submitQueryResultFromCache = getDataFromCache(keySubmitQueryResult);
Object data = null;
Object submitQueryResult = null;
if ((dataFromCache != null) && (submitQueryResultFromCache != null)) {
data = dataFromCache.getObjectValue();
submitQueryResult = submitQueryResultFromCache.getObjectValue();
// System.out.println("***GETTING DATA FROM CACHE");
}
if ((data != null) && (submitQueryResult != null)) {
result = (SubmitQueryResultWithFileFromServlet) data;
cacheHitsNumber++;
logger.info("dbmanager-> CheckDataInCache: data found in cache. cacheHitsNumber: " + cacheHitsNumber);
cacheQuerySamplingHitsNumber++;
// set variable to true value if cached data are used and a
// computation is not started
Boolean val = Boolean.TRUE;
updateListSubmitQueryUIDCachedData(UID, val);
} else {
smComputationNumber++;
logger.info("dbmanager-> CheckDataInCache: data not found in cache. Starting the Statistical Computation. smComputationNumber: " + smComputationNumber);
smComputationQuerySamplingNumber++;
// set variable to false value if cached data are not used and a
// computation is started
Boolean val = Boolean.FALSE;
updateListSubmitQueryUIDCachedData(UID, val);
// create data structure
ComputationOutput outputData = new ComputationOutput();
// computation id
String computationId = startComputation(algorithmId,
inputParameters, outputData, scope, UID);
// get JobID
if (checkJob(UID)) { // if the computation has not been removed
// the job uid is present
// computationIDMap.put(id, computationId);
// print check on retrieving data
logger.info("output data retrieved");
// data output values and keys
LinkedHashMap<String, String> mapValues = outputData.getMapValues();
LinkedHashMap<String, String> mapKeys = outputData.getmapKeys();
if (mapValues.size() != 0) {
output = new ArrayList<Result>();
// logger.info("build the result - started");
for (int i = 0; i < mapValues.size(); i++) {
Result row = new Result(mapKeys.get(String
.valueOf(i)), mapValues.get(String
.valueOf(i)));
output.add(row);
}
// System.out.println("output size submit: " +
// output.size());
// logger.info("build the result - finished");
// get the converted query
if (smartCorrectionQuery == true) {
convertedQuery = output.get(0).getValue();
output.remove(0);
}
// get the attributes list for the result table
listAttributes = getListAttributes(output.get(0)
.getValue());
if (listAttributes == null) {
logger.error("dbmanager-> Error in server while loading data. variable listAttributes null");
throw new Exception(
"Error in server while loading data.");
}
// remove the header in order to parse only the result
output.remove(0);
// store the result of the submit query operation
// updateSubmitQueryResultMap(UID, output);
// remove job with the specified uid
removeJob(UID);
// generate the file csv output
String name = "SubmitQuery";
String fileName = storeResultIntoCSVFile(output, name);
// get the web application path
HttpServletRequest request = this
.getThreadLocalRequest();
String applicationPath = request.getContextPath();
// logger.info("dbmanager-> Application Path: " +
// applicationPath);
String partialPathFile = applicationPath
+ "/computationResult/" + fileName;
result = new SubmitQueryResultWithFileFromServlet(
listAttributes, convertedQuery, partialPathFile);
// put the two data in cache
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
keyData, result);
insertDataIntoCache(dataToCache);
net.sf.ehcache.Element submitQueryResultToCache = new net.sf.ehcache.Element(
keySubmitQueryResult, output);
insertDataIntoCache(submitQueryResultToCache);
}
} else { // if the computation has been removed the job uid is
// not present and listAttributes is null.
listAttributes = null;
}
}
return result;
} catch (Exception e) {
// e.printStackTrace();
logger.error("dbmanager-> ", e);
// TODO Exception Statistical management to remove a
// computation
if (e.getMessage()
.contains(
"javax.xml.ws.soap.SOAPFaultException: java.lang.IndexOutOfBoundsException")) {
throw new Exception("ServerException");
}
if (!(e instanceof StatisticalManagerException)) {
// GWT can't serialize all exceptions
throw new Exception(
"Error in server while loading data. Exception: " + e);
}
throw e;
} finally {
// remove the element related to the uid submitQuery request if
// present
removeSubmitQueryUIDCachedData(UID);
// remove jobStatus
removeJobStatus(UID);
// remove job
removeJob(UID);
}
}
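// sampling on a table: the result is cached under a key built from the
// scope, the algorithm id and the input parameter values, so repeated
// requests on the same table do not start a new computation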
@Override
public SamplingResultWithFileFromServlet sample(
LinkedHashMap<String, String> dataInput, String elementType)
throws Exception {
// session check
if (isSessionExpired())
throw new SessionExpiredException();
try {
logger.info("dbmanager-> Sampling on table Request received. Starting to manage the request.");
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
// get scope
String scope = session.getScope();
// data input
List<Parameter> inputParameters = new ArrayList<Parameter>();
// output sample result
List<Result> output = new ArrayList<Result>();
SamplingResultWithFileFromServlet result;
String algorithmId = ConstantsPortlet.ALGID_SAMPLEONTABLE;
// print check
String rs = dataInput.get("ResourceName");
String db = dataInput.get("DatabaseName");
String scm = dataInput.get("SchemaName");
String tab = dataInput.get("TableName");
// print check
logger.info("dbmanager-> ResourceName: " + rs);
logger.info("dbmanager-> DatabaseName: " + db);
logger.info("dbmanager-> SchemaName: " + scm);
logger.info("dbmanager-> TableName: " + tab);
if ((elementType != null)
&& (elementType.equals(ConstantsPortlet.SCHEMA))) {
if ((rs == null) || (rs.equals(""))) {
throw new Exception("Unable to load data");
}
if ((db == null) || (db.equals(""))) {
throw new Exception("Unable to load data");
}
if ((scm == null) || (scm.equals(""))) {
throw new Exception("Unable to load data");
}
if ((tab == null) || (tab.equals(""))) {
throw new Exception("Unable to load data");
}
}
if ((elementType != null)
&& (elementType.equals(ConstantsPortlet.DATABASE))) {
if ((rs == null) || (rs.equals(""))) {
throw new Exception("Unable to load data");
}
if ((db == null) || (db.equals(""))) {
throw new Exception("Unable to load data");
}
if ((tab == null) || (tab.equals(""))) {
throw new Exception("Unable to load data");
}
}
// set input parameters
Parameter resource = new Parameter("ResourceName", "", "String", "");
Parameter database = new Parameter("DatabaseName", "", "String", "");
Parameter schema = new Parameter("SchemaName", "", "String", "");
Parameter table = new Parameter("TableName", "", "String", "");
inputParameters.add(resource);
inputParameters.add(database);
inputParameters.add(schema);
inputParameters.add(table);
inputParameters.get(0).setValue(rs);
inputParameters.get(1).setValue(db);
inputParameters.get(2).setValue(scm);
inputParameters.get(3).setValue(tab);
// get data from cache
// check if data exist considering as key the input parameters
String key = scope + algorithmId
+ inputParameters.get(0).getValue()
+ inputParameters.get(1).getValue()
+ inputParameters.get(2).getValue()
+ inputParameters.get(3).getValue();
// System.out.println("sampling KEY: " + key);
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
Object value = null;
if (dataFromCache != null) {
value = dataFromCache.getObjectValue();
// System.out.println("***GETTING DATA FROM CACHE");
}
if (value != null) {
result = (SamplingResultWithFileFromServlet) value;
cacheHitsNumber++;
logger.info("dbmanager-> CheckDataInCache: data found in cache. cacheHitsNumber: " + cacheHitsNumber);
cacheQuerySamplingHitsNumber++;
} else {
smComputationNumber++;
logger.info("dbmanager-> CheckDataInCache: data not found in cache. Starting the Statistical Computation. smComputationNumber: " + smComputationNumber);
smComputationQuerySamplingNumber++;
// start computation
// create data structure
ComputationOutput outputData = new ComputationOutput();
// computation id
String computationId = startComputation(algorithmId,
inputParameters, outputData, scope);
// print check on retrieving data
// logger.info("output data retrieved");
// data output values and keys
LinkedHashMap<String, String> mapValues = outputData.getMapValues();
LinkedHashMap<String, String> mapKeys = outputData.getmapKeys();
for (int i = 0; i < mapValues.size(); i++) {
Result row = new Result(mapKeys.get(String.valueOf(i)),
mapValues.get(String.valueOf(i)));
output.add(row);
}
String name = "Sampling" + "_" + tab;
String fileName = storeResultIntoCSVFile(output, name);
HttpServletRequest request = this.getThreadLocalRequest();
String applicationPath = request.getContextPath();
// logger.info("dbmanager-> Application Path: " +
// applicationPath);
String partialPathFile = applicationPath
+ "/computationResult/" + fileName;
result = new SamplingResultWithFileFromServlet(output,
partialPathFile);
// put data in cache
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
key, result);
insertDataIntoCache(dataToCache);
}
return result;
} catch (Exception e) {
logger.error("dbmanager-> ", e);
if (!(e instanceof StatisticalManagerException)) {
// GWT can't serialize all exceptions
throw new Exception(
"Error in server while loading data. Exception: " + e);
}
throw e;
}
}
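// smart sampling on a table: same caching scheme as sample(), with a
// different algorithm id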
@Override
public SamplingResultWithFileFromServlet smartSample(
LinkedHashMap<String, String> dataInput, String elementType)
throws Exception {
// session check
if (isSessionExpired())
throw new SessionExpiredException();
try {
logger.info("dbmanager-> Smart Sampling on table Request received. Starting to manage the request.");
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
// get scope
String scope = session.getScope();
// data input
List<Parameter> inputParameters = new ArrayList<Parameter>();
// output sample result
List<Result> output = new ArrayList<Result>();
SamplingResultWithFileFromServlet result;
String algorithmId = ConstantsPortlet.ALGID_SMARTSAMPLEONTABLE;
// print check
String rs = dataInput.get("ResourceName");
String db = dataInput.get("DatabaseName");
String scm = dataInput.get("SchemaName");
String tab = dataInput.get("TableName");
// print check
logger.info("dbmanager-> ResourceName: " + rs);
logger.info("dbmanager-> DatabaseName: " + db);
logger.info("dbmanager-> SchemaName: " + scm);
logger.info("dbmanager-> TableName: " + tab);
if ((elementType != null)
&& (elementType.equals(ConstantsPortlet.SCHEMA))) {
if ((rs == null) || (rs.equals(""))) {
throw new Exception("Unable to load data");
}
if ((db == null) || (db.equals(""))) {
throw new Exception("Unable to load data");
}
if ((scm == null) || (scm.equals(""))) {
throw new Exception("Unable to load data");
}
if ((tab == null) || (tab.equals(""))) {
throw new Exception("Unable to load data");
}
}
if ((elementType != null)
&& (elementType.equals(ConstantsPortlet.DATABASE))) {
if ((rs == null) || (rs.equals(""))) {
throw new Exception("Unable to load data");
}
if ((db == null) || (db.equals(""))) {
throw new Exception("Unable to load data");
}
if ((tab == null) || (tab.equals(""))) {
throw new Exception("Unable to load data");
}
}
// set input parameters
Parameter resource = new Parameter("ResourceName", "", "String", "");
Parameter database = new Parameter("DatabaseName", "", "String", "");
Parameter schema = new Parameter("SchemaName", "", "String", "");
Parameter table = new Parameter("TableName", "", "String", "");
inputParameters.add(resource);
inputParameters.add(database);
inputParameters.add(schema);
inputParameters.add(table);
inputParameters.get(0).setValue(rs);
inputParameters.get(1).setValue(db);
inputParameters.get(2).setValue(scm);
inputParameters.get(3).setValue(tab);
// get data from cache
// check if data exist considering as key the input parameters
String key = scope + algorithmId
+ inputParameters.get(0).getValue()
+ inputParameters.get(1).getValue()
+ inputParameters.get(2).getValue()
+ inputParameters.get(3).getValue();
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
Object value = null;
if (dataFromCache != null) {
value = dataFromCache.getObjectValue();
// System.out.println("***GETTING DATA FROM CACHE");
}
if (value != null) {
result = (SamplingResultWithFileFromServlet) value;
cacheHitsNumber++;
logger.info("dbmanager-> CheckDataInCache: data found in cache. cacheHitsNumber: " + cacheHitsNumber);
cacheQuerySamplingHitsNumber++;
} else {
smComputationNumber++;
logger.info("dbmanager-> CheckDataInCache: data not found in cache. Starting the Statistical Computation. smComputationNumber: " + smComputationNumber);
smComputationQuerySamplingNumber++;
// create data structure
ComputationOutput outputData = new ComputationOutput();
// computation id
String computationId = startComputation(algorithmId,
inputParameters, outputData, scope);
// print check on retrieving data
// logger.info("dbmanager-> output data retrieved");
// data output values and keys
LinkedHashMap<String, String> mapValues = outputData.getMapValues();
LinkedHashMap<String, String> mapKeys = outputData.getmapKeys();
for (int i = 0; i < mapValues.size(); i++) {
Result row = new Result(mapKeys.get(String.valueOf(i)),
mapValues.get(String.valueOf(i)));
output.add(row);
}
String name = "SmartSampling" + "_" + tab;
String fileName = storeResultIntoCSVFile(output, name);
HttpServletRequest request = this.getThreadLocalRequest();
String applicationPath = request.getContextPath();
// logger.info("dbmanager-> Application Path: " +
// applicationPath);
String partialPathFile = applicationPath
+ "/computationResult/" + fileName;
result = new SamplingResultWithFileFromServlet(output,
partialPathFile);
// put data in cache
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
key, result);
insertDataIntoCache(dataToCache);
}
return result;
} catch (Exception e) {
logger.error("dbmanager-> ", e);
if (!(e instanceof StatisticalManagerException)) {
// GWT can't serialize all exceptions
throw new Exception(
"Error in server while loading data. Exception: " + e);
}
throw e;
}
}
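// random sampling on a table: unlike sample() and smartSample() the
// result is not cached, presumably because each request is expected to
// return a different sample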
@Override
public SamplingResultWithFileFromServlet randomSample(
LinkedHashMap<String, String> dataInput, String elementType)
throws Exception {
// session check
if (isSessionExpired())
throw new SessionExpiredException();
try {
logger.info("dbmanager-> Random Sampling on table Request received. Starting to manage the request.");
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
// get scope
String scope = session.getScope();
// data input
List<Parameter> inputParameters = new ArrayList<Parameter>();
// output sample result
List<Result> output = new ArrayList<Result>();
String algorithmId = ConstantsPortlet.ALGID_RANDOMSAMPLEONTABLE;
// print check
String rs = dataInput.get("ResourceName");
String db = dataInput.get("DatabaseName");
String scm = dataInput.get("SchemaName");
String tab = dataInput.get("TableName");
// print check
logger.info("dbmanager-> ResourceName: " + rs);
logger.info("dbmanager-> DatabaseName: " + db);
logger.info("dbmanager-> SchemaName: " + scm);
logger.info("dbmanager-> TableName: " + tab);
if ((elementType != null)
&& (elementType.equals(ConstantsPortlet.SCHEMA))) {
if ((rs == null) || (rs.equals(""))) {
throw new Exception("Unable to load data");
}
if ((db == null) || (db.equals(""))) {
throw new Exception("Unable to load data");
}
if ((scm == null) || (scm.equals(""))) {
throw new Exception("Unable to load data");
}
if ((tab == null) || (tab.equals(""))) {
throw new Exception("Unable to load data");
}
}
if ((elementType != null)
&& (elementType.equals(ConstantsPortlet.DATABASE))) {
if ((rs == null) || (rs.equals(""))) {
throw new Exception("Unable to load data");
}
if ((db == null) || (db.equals(""))) {
throw new Exception("Unable to load data");
}
if ((tab == null) || (tab.equals(""))) {
throw new Exception("Unable to load data");
}
}
// set input parameters
Parameter resource = new Parameter("ResourceName", "", "String", "");
Parameter database = new Parameter("DatabaseName", "", "String", "");
Parameter schema = new Parameter("SchemaName", "", "String", "");
Parameter table = new Parameter("TableName", "", "String", "");
inputParameters.add(resource);
inputParameters.add(database);
inputParameters.add(schema);
inputParameters.add(table);
inputParameters.get(0).setValue(rs);
inputParameters.get(1).setValue(db);
inputParameters.get(2).setValue(scm);
inputParameters.get(3).setValue(tab);
// create data structure
ComputationOutput outputData = new ComputationOutput();
// computation id
String computationId = startComputation(algorithmId,
inputParameters, outputData, scope);
// print check on retrieving data
// logger.info("dbmanager-> output data retrieved");
// data output values and keys
LinkedHashMap<String, String> mapValues = outputData.getMapValues();
LinkedHashMap<String, String> mapKeys = outputData.getmapKeys();
for (int i = 0; i < mapValues.size(); i++) {
Result row = new Result(mapKeys.get(String.valueOf(i)),
mapValues.get(String.valueOf(i)));
output.add(row);
}
String name = "RandomSampling" + "_" + tab;
String fileName = storeResultIntoCSVFile(output, name);
HttpServletRequest request = this.getThreadLocalRequest();
String applicationPath = request.getContextPath();
// logger.info("dbmanager-> Application Path: " + applicationPath);
String partialPathFile = applicationPath + "/computationResult/"
+ fileName;
SamplingResultWithFileFromServlet obj = new SamplingResultWithFileFromServlet(
output, partialPathFile);
return obj;
} catch (Exception e) {
logger.error("dbmanager-> ", e);
if (!(e instanceof StatisticalManagerException)) {
// GWT can't serialize all exceptions
throw new Exception(
"Error in server while loading data. Exception: " + e);
}
throw e;
}
}
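// recover the details of a table through the corresponding Statistical
// Manager algorithm; the output map is converted into FileModel objects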
@Override
public LinkedHashMap<String, FileModel> getTableDetails(
LinkedHashMap<String, String> dataInput) throws Exception {
// session check
if (isSessionExpired())
throw new SessionExpiredException();
try {
logger.info("dbmanager-> Table Details Recovery Request received. Starting to manage the request.");
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
// get scope
String scope = session.getScope();
// data input
List<Parameter> inputParameters = new ArrayList<Parameter>();
// data output
LinkedHashMap<String, FileModel> outputParameters = new LinkedHashMap<String, FileModel>();
String algorithmId = ConstantsPortlet.ALGID_GETTABLEDETAILS;
// print check
String rs = dataInput.get("ResourceName");
String db = dataInput.get("DatabaseName");
String scm = dataInput.get("SchemaName");
String tab = dataInput.get("TableName");
// print check
logger.info("dbmanager-> ResourceName: " + rs);
logger.info("dbmanager-> DatabaseName: " + db);
logger.info("dbmanager-> SchemaName: " + scm);
logger.info("dbmanager-> TableName: " + tab);
// set input parameters
Parameter resource = new Parameter("ResourceName", "", "String", "");
Parameter database = new Parameter("DatabaseName", "", "String", "");
Parameter schema = new Parameter("SchemaName", "", "String", "");
Parameter table = new Parameter("TableName", "", "String", "");
inputParameters.add(resource);
inputParameters.add(database);
inputParameters.add(schema);
inputParameters.add(table);
inputParameters.get(0).setValue(rs);
inputParameters.get(1).setValue(db);
inputParameters.get(2).setValue(scm);
inputParameters.get(3).setValue(tab);
// create data structure
ComputationOutput outputData = new ComputationOutput();
// computation id
String computationId = startComputation(algorithmId,
inputParameters, outputData, scope);
// print check on retrieving data
// logger.info("output data retrieved");
// output data values and keys
LinkedHashMap<String, String> mapValues = outputData.getMapValues();
LinkedHashMap<String, String> mapKeys = outputData.getmapKeys();
for (int i = 0; i < mapValues.size(); i++) {
FileModel obj = new FileModel(mapValues.get(String.valueOf(i)));
// obj.setIsLoaded(true);
outputParameters.put(mapKeys.get(String.valueOf(i)), obj);
// print check
// logger.info("value: " + outputMap.get(String.valueOf(i)));
// logger.info("key: " + outputKey.get(String.valueOf(i)));
}
return outputParameters;
} catch (Exception e) {
logger.error("dbmanager-> ", e);
if (!(e instanceof StatisticalManagerException)) {
// GWT can't serialize all exceptions
throw new Exception(
"Error in server while loading data. Exception: " + e);
}
throw e;
}
}
// page the submit query result previously stored in cache, parsing its
// csv rows for the client grid
public PagingLoadResult<Row> loadSubmitResult(PagingLoadConfig config,
List<String> listAttributes, String UID) throws Exception {
// session check
if (isSessionExpired())
throw new SessionExpiredException();
try {
// Create a sublist and add data to list according
// to the limit and offset value of the config
List<Row> sublist = new ArrayList<Row>();
BasePagingLoadResult loadResult = null;
// data parsed
List<Row> data = new ArrayList<Row>();
// submit query result
List<Result> result = new ArrayList<Result>();
// get the key to retrieve the submitQuery result
String key = getKeySubmitQueryResult(UID);
if ((key != null) && (!key.equals(""))) {
// load data
// get data from cache
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
Object value = null;
if (dataFromCache != null) {
value = dataFromCache.getObjectValue();
}
if (value != null) {
result = (List<Result>) value;
data = parseCVSString(result, listAttributes);
int start = config.getOffset();
int limit = data.size();
if (config.getLimit() > 0) {
limit = Math.min(start + config.getLimit(), limit);
}
int totalNumber = data.size();
sublist = new ArrayList<Row>(data.subList(start, limit));
loadResult = new BasePagingLoadResult<Row>(sublist,
config.getOffset(), totalNumber);
// System.out.println("start: " + start);
// System.out.println("limit: " + limit);
// System.out.println("sublist size: " + sublist.size());
} else {
logger.error("dbmanager-> Error in server while loading data. variable value null");
throw new Exception("Error in server while loading data.");
}
} else {
logger.error("dbmanager-> Error in server while loading data. key null");
throw new Exception("Error in server while loading data.");
}
return loadResult;
} catch (Exception e) {
logger.error("dbmanager-> ", e);
throw new Exception(
"Error in server while loading data. Exception: " + e);
}
}
// get attributes list for display the result in a table
private List<String> getListAttributes(String value) {
List<String> listAttributes = new ArrayList<String>();
// recover attribute fields for the result table
String headers = value;
// logger.info("Headers fields table: " + headers);
listAttributes = parseAttributesTableResult(headers);
// logger.info("attributes number: " + listAttributes.size());
// logger.info("attributes list: ");
// print check
// for (int i = 0; i < listAttributes.size(); i++) {
// logger.info("attribute: " + listAttributes.get(i));
// }
return listAttributes;
}
private List<String> parseAttributesTableResult(String phrase) {
String delimiter = ",";
List<String> elements = new ArrayList<String>();
int idxdelim = -1;
phrase = phrase.trim();
while ((idxdelim = phrase.indexOf(delimiter)) >= 0) {
elements.add(phrase.substring(0, idxdelim));
phrase = phrase.substring(idxdelim + 1).trim();
}
elements.add(phrase);
return elements;
}
// parse csv result rows into Row objects
@Override
public List<Row> parseCVSString(List<Result> result, List<String> attrNames)
throws Exception {
// session check
if (isSessionExpired())
throw new SessionExpiredException();
List<Row> rows = new ArrayList<Row>();
if (result != null) {
for (int i = 0; i < result.size(); i++) {
List<String> attrValues = parse(result.get(i).getValue());
Row element = new Row(attrNames, attrValues, i);
rows.add(element);
}
}
return rows;
}
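// split a csv row into fields, honouring double-quoted values that may
// contain the delimiter; e.g. the row
// a,"b,c",d
// is parsed into the three fields [a] [b,c] [d]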
private List<String> parse(String row) throws Exception {
String delimiter = ",";
// print check
// logger.info("row: " + row);
List<String> elements = new ArrayList<String>();
String phrase = row;
int idxdelim = -1;
boolean quot = false;
phrase = phrase.trim();
while ((idxdelim = phrase.indexOf(delimiter)) >= 0) {
quot = phrase.startsWith("\"");
if (quot) {
phrase = phrase.substring(1);
String quoted = "";
if (phrase.startsWith("\""))
phrase = phrase.substring(1);
else {
RE regexp = new RE("[^\\\\]\"");
boolean matching = regexp.match(phrase);
if (matching) {
int i0 = regexp.getParenStart(0);
quoted = phrase.substring(0, i0 + 1).trim();
phrase = phrase.substring(i0 + 2).trim();
}
}
if (phrase.startsWith(delimiter))
phrase = phrase.substring(1);
elements.add(quoted);
} else {
elements.add(phrase.substring(0, idxdelim));
phrase = phrase.substring(idxdelim + 1).trim();
}
// logger.info("server token: " + phrase);
}
if (phrase.startsWith("\""))
phrase = phrase.substring(1);
if (phrase.endsWith("\""))
phrase = phrase.substring(0, phrase.length() - 1);
elements.add(phrase);
// logger.info("size: " + elements.size());
return elements;
}
// update job with the related status
private synchronized void updateJobStatus(String jobID, String status) {
if (jobID != null) {
// add the job status
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
HashMap<String, String> JobStatusMap = (HashMap<String, String>) session
.getAttribute("JobStatusList");
JobStatusMap.put(jobID, status);
session.setAttribute("JobStatusList", JobStatusMap);
}
}
// remove job with the related status
private synchronized void removeJobStatus(String jobID) {
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
HashMap<String, String> JobStatusMap = (HashMap<String, String>) session
.getAttribute("JobStatusList");
String status = JobStatusMap.get(jobID);
if (status != null) {
JobStatusMap.remove(jobID);
session.setAttribute("JobStatusList", JobStatusMap);
}
}
// get job status
private synchronized String getJobStatus(String jobID) {
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
HashMap<String, String> JobStatusMap = (HashMap<String, String>) session
.getAttribute("JobStatusList");
String status = JobStatusMap.get(jobID);
return status;
}
// update job with the computation id
private synchronized void updateJob(String jobID, String computationId) {
if (jobID != null) {
// add the computation in the map
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
HashMap<String, String> computationIDMap = (HashMap<String, String>) session
.getAttribute("ComputationIDList");
computationIDMap.put(jobID, computationId);
session.setAttribute("ComputationIDList", computationIDMap);
}
}
// remove job with the computation id
private synchronized String removeJob(String jobID) {
if (jobID != null) {
// System.out.println("remove jobID " + job);
// add the computation in the map
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
HashMap<String, String> computationIDMap = (HashMap<String, String>) session
.getAttribute("ComputationIDList");
String computationId = computationIDMap.get(jobID);
if (computationId != null) {
computationIDMap.remove(jobID);
session.setAttribute("ComputationIDList", computationIDMap);
return computationId;
}
}
return null;
}
private synchronized boolean checkJob(String jobID) {
boolean isContained = false;
if (jobID != null) {
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
HashMap<String, String> computationIDMap = (HashMap<String, String>) session
.getAttribute("ComputationIDList");
isContained = computationIDMap.containsKey(jobID);
}
// System.out.println("JobID isContained: " + isContained);
return isContained;
}
private synchronized void updateListSubmitQueryUIDCachedData(String UID,
Boolean value) {
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
HashMap<String, Boolean> listSubmitQueryUIDCachedData = (HashMap<String, Boolean>) session
.getAttribute("listSubmitQueryUIDCachedData");
listSubmitQueryUIDCachedData.put(UID, value);
session.setAttribute("listSubmitQueryUIDCachedData",
listSubmitQueryUIDCachedData);
}
private synchronized Boolean checkSubmitQueryUIDCachedData(String UID) {
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
HashMap<String, Boolean> listSubmitQueryUIDCachedData = (HashMap<String, Boolean>) session
.getAttribute("listSubmitQueryUIDCachedData");
return listSubmitQueryUIDCachedData.get(UID);
}
private synchronized void removeSubmitQueryUIDCachedData(String UID) {
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
HashMap<String, Boolean> listSubmitQueryUIDCachedData = (HashMap<String, Boolean>) session
.getAttribute("listSubmitQueryUIDCachedData");
if (listSubmitQueryUIDCachedData.containsKey(UID)) {
listSubmitQueryUIDCachedData.remove(UID);
session.setAttribute("listSubmitQueryUIDCachedData",
listSubmitQueryUIDCachedData);
}
}
private synchronized void removeKeySubmitQueryResult(String UID) {
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
HashMap<String, String> listKeySubmitQueryResult = (HashMap<String, String>) session
.getAttribute("listKeySubmitQueryResult");
if (listKeySubmitQueryResult.containsKey(UID)) {
listKeySubmitQueryResult.remove(UID);
session.setAttribute("listKeySubmitQueryResult",
listKeySubmitQueryResult);
}
}
private synchronized void updateListKeySubmitQueryResult(String UID,
String value) {
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
HashMap<String, String> listKeySubmitQueryResult = (HashMap<String, String>) session
.getAttribute("listKeySubmitQueryResult");
listKeySubmitQueryResult.put(UID, value);
session.setAttribute("listKeySubmitQueryResult",
listKeySubmitQueryResult);
}
private synchronized String getKeySubmitQueryResult(String UID) {
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
HashMap<String, String> listKeySubmitQueryResult = (HashMap<String, String>) session
.getAttribute("listKeySubmitQueryResult");
return listKeySubmitQueryResult.get(UID);
}
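// convenience overload used when no job uid has to be tracked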
private String startComputation(String algorithmName,
List<Parameter> parameters, ComputationOutput outputData,
String scope) throws Exception {
return startComputation(algorithmName, parameters, outputData, scope,
null);
}
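// build the SM computation request, submit it, then poll every three
// seconds via checkComputationStatus() until it reports 100%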
private String startComputation(String algorithmName,
List<Parameter> parameters, ComputationOutput outputData,
String scopeValue, String jobID) throws Exception {
SMComputationConfig config = new SMComputationConfig();
SMInputEntry[] list = new SMInputEntry[parameters.size()];
int i = 0;
for (Parameter p : parameters)
list[i++] = new SMInputEntry(p.getName(), p.getValue());
config.parameters(new SMEntries(list));
config.algorithm(algorithmName);
// create a computation request
SMComputationRequest request = new SMComputationRequest();
request.user(getUsername());
request.config(config);
try {
StatisticalManagerFactory factory = getFactory(scopeValue);
String computationId = factory.executeComputation(request);
float percentage = 0;
String scope = scopeValue;
String username = getUsername();
updateJobStatus(jobID, "computation started");
updateJob(jobID, computationId);
logger.info("dbmanager-> startComputation: the computation has started!");
while (percentage < 100) {
percentage = checkComputationStatus(scope, computationId,
username, outputData);
// sleep only while the computation is still running, so the
// final iteration returns without a needless 3 s delay
if (percentage < 100) {
Thread.sleep(3000);
}
}
logger.info("dbmanager-> startComputation: the computation has finished!");
updateJobStatus(jobID, "computation finished");
// removeJob(jobID);
return computationId;
} catch (Exception e) {
logger.info("dbmanager-> startComputation: the job submit has failed!");
// e.printStackTrace();
// logger.error("dbmanager-> ", e);
throw e;
}
}
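// poll the Statistical Manager: returns 0 while the computation is
// RUNNING and 100 once it is COMPLETED or FAILED; in both final states
// the result resource is examined through displayOutput()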
private float checkComputationStatus(String scope, String computationId,
String user, ComputationOutput outputData) throws Exception {
// System.out.println("checkComputation " + computationId);
ScopeProvider.instance.set(scope);
StatisticalManagerFactory factory = StatisticalManagerDSL
.createStateful().build();
SMComputation computation = factory.getComputation(computationId);
SMOperationStatus status = SMOperationStatus.values()[computation
.operationStatus()];
float percentage = 0;
if (status == SMOperationStatus.RUNNING) {
// progress info is fetched but the percentage is deliberately left
// at 0, so the caller keeps polling until the computation ends
SMOperationInfo infos = factory.getComputationInfo(computationId,
user);
// percentage = Float.parseFloat(infos.percentage());
} else if ((status == SMOperationStatus.COMPLETED)
|| (status == SMOperationStatus.FAILED)) {
// logger.info("computation COMPLETED");
// logger.info("COMPLETED OR FAILED");
SMAbstractResource abstractResource = computation
.abstractResource();
SMResource smResource = abstractResource.resource();
int resourceTypeIndex = smResource.resourceType();
SMResourceType smResType = SMResourceType.values()[resourceTypeIndex];
displayOutput(smResource, smResType, outputData);
// print check
// logger.info("SM resource Name: " + smResource.name());
// logger.info("SM resource Name: " + smResource.name());
// logger.info("SM resource ID: " + smResource.resourceId());
// logger.info("SM resource ID: " + smResource.resourceId());
// logger.info("SM resource Description: " +
// smResource.description());
percentage = 100;
}
return percentage;
}
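// inspect the computation resource: MAP objects are decoded through
// getMap(), while an ERROR resource is turned into a
// StatisticalManagerException carrying the resource description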
private void displayOutput(SMResource smResource, SMResourceType smResType,
ComputationOutput outputData) throws Exception {
if (smResType.equals(SMResourceType.OBJECT)) {
SMObject objRes = (SMObject) smResource;
if (objRes.name().contentEquals(PrimitiveTypes.MAP.toString())) {
logger.info("dbmanager-> Output is a map");
getMap(objRes, outputData);
} else if (objRes.name().contentEquals(
PrimitiveTypes.IMAGES.toString())) {
// image outputs are not handled by this portlet
}
}
// StatisticalManager EXCEPTION MANAGEMENT
if (smResType.equals(SMResourceType.ERROR)) {
StatisticalManagerException e = new StatisticalManagerException(
smResource.description());
// e.printStackTrace();
// logger.error("dbmanager-> ", e);
throw e;
}
}
// deserialize the output map returned by the computation and fill the
// key/value structures of outputData
private void getMap(SMObject objRes, ComputationOutput outputData)
throws Exception {
// output data values
LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
// output data keys
LinkedHashMap<String, String> mapKeys = new LinkedHashMap<String, String>();
// logger.info("getStorageInputStream - started");
InputStream is = getStorageClientInputStream(objRes.url());
// object serializer
XStream xstream = new XStream();
xstream.alias(
"org.gcube_system.namespaces.data.analysis.statisticalmanager.types.SMObject",
SMObject.class);
xstream.alias(
"org.gcube_system.namespaces.data.analysis.statisticalmanager.types.SMFile",
SMFile.class);
xstream.alias(
"org.gcube_system.namespaces.data.analysis.statisticalmanager.types.SMResource",
SMResource.class);
xstream.alias(
"org.gcube_system.namespaces.data.analysis.statisticalmanager.types.SMTable",
SMTable.class);
// logger.info("streaming");
@SuppressWarnings("unchecked")
Map<String, SMResource> smMap = (Map<String, SMResource>) (xstream
.fromXML(is));
is.close();
// logger.info("getStorageInputStream - finished");
// logger.info("build the resultMap - started");
		int i = 0;
		for (Map.Entry<String, SMResource> entry : smMap.entrySet()) {
			SMResource smres = entry.getValue();
			int resourceTypeIndex = smres.resourceType();
			SMResourceType smsubResType = SMResourceType.values()[resourceTypeIndex];
			// only OBJECT entries carry a URL to be reported in output
			if (smsubResType == SMResourceType.OBJECT) {
				SMObject obje = (SMObject) smres;
				mapKeys.put(String.valueOf(i), entry.getKey());
				mapValues.put(String.valueOf(i), obje.url());
				i++;
			}
		}
outputData.setMapValues(mapValues);
outputData.setmapKeys(mapKeys);
// logger.info("build the resultMap - finished");
}
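	// open an InputStream on a storage resource URL; a custom
	// URLStreamHandler backed by SMPURLConnection is used since the SMP
	// protocol is not registered with the standard URL machinery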
private InputStream getStorageClientInputStream(String url)
throws Exception {
URL u = new URL(null, url, new URLStreamHandler() {
@Override
protected URLConnection openConnection(URL u) throws IOException {
return new SMPURLConnection(u);
}
});
return u.openConnection().getInputStream();
}
private StatisticalManagerFactory getFactory(String scope) {
// HttpSession httpSession = this.getThreadLocalRequest().getSession();
return SessionUtil.getFactory(scope);
}
private String getUsername() {
// set the username of the user session to value "database.manager"
// HttpSession httpSession = this.getThreadLocalRequest().getSession();
// return SessionUtil.getUsername(httpSession);
return "database.manager";
}
private String getScope() {
HttpSession httpSession = this.getThreadLocalRequest().getSession();
return SessionUtil.getScope(httpSession);
}
// remove the computation
public Boolean removeComputation(String uidSubmitQuery) throws Exception {
// System.out.println("server UID: " + uidSubmitQuery);
// session check
if (isSessionExpired())
throw new SessionExpiredException();
try {
logger.info("dbmanager-> Remove Computation Request received. Starting to manage the request.");
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
// get scope
String scope = session.getScope();
Boolean isComputationRemoved = false;
// verify if this uid submitQuery request uses data in cache
Boolean value = checkSubmitQueryUIDCachedData(uidSubmitQuery);
if (value != null) {
// System.out.println("For uid " + uidSubmitQuery
// + " data are cached? " + value.booleanValue());
if (!value.booleanValue()) {
// remove computation
String computationId = null;
if ((uidSubmitQuery != null)
&& (!(uidSubmitQuery.equals("")))) {
// get job status
String status = getJobStatus(uidSubmitQuery);
					if (status == null) {
						// the computation has not yet started: poll until
						// removeJob returns the computation id
						while (computationId == null) {
							computationId = removeJob(uidSubmitQuery);
						}
} else if (status.equals("computation started")) {
// System.out.println("check status: computation started");
// the computation has started
computationId = removeJob(uidSubmitQuery);
}
}
if (computationId != null) {
StatisticalManagerFactory factory = getFactory(scope);
try {
factory.removeComputation(computationId);
logger.info("dbmanager-> Computation with UID: "+ uidSubmitQuery + " removed");
// remove submit query result
refreshDataOnServer(uidSubmitQuery);
// System.out.println("computation removed");
isComputationRemoved = true;
} catch (Exception e) {
// e.printStackTrace();
logger.info("dbmanager-> Could not remove the computation ID "
+ computationId
+ " corresponding to jobID "
+ uidSubmitQuery);
logger.error("dbmanager-> ", e);
}
}
}
}
return isComputationRemoved;
} catch (Exception e) {
logger.error("dbmanager-> ", e);
throw new Exception(
"Error in server while loading data. Exception: " + e);
}
}
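	// remove the stored submitQuery result associated with the given UID
	// from the server-side data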
public void refreshDataOnServer(String submitQueryUID) throws Exception {
// session check
if (isSessionExpired())
throw new SessionExpiredException();
if ((submitQueryUID != null) && (!submitQueryUID.equals(""))) {
removeKeySubmitQueryResult(submitQueryUID);
// removeResultParsed(submitQueryUID);
// removeResult(submitQueryUID);
// removeSubmitQueryUIDCachedData(submitQueryUID);
// System.out.println("data refreshed on server");
}
}
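	// store an element in the DBCache, provided the cache exists and its
	// status is STATUS_ALIVE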
private synchronized void insertDataIntoCache(net.sf.ehcache.Element data) throws Exception{
if(cacheManager.cacheExists("DBCache")){
logger.info("dbmanager-> disk store path for cache: " + cacheManager.getDiskStorePath() +"Cache Status: "+DBCache.getStatus().toString());
if (DBCache.getStatus().toString().equals(Status.STATUS_ALIVE.toString())){
DBCache.put(data);
logger.trace("dbmanager-> element with key: " + data.getKey().toString()+" added in cache");
}
}
}
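	// retrieve an element from the DBCache by key; null is returned if the
	// cache is not available or the key is not present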
private synchronized net.sf.ehcache.Element getDataFromCache(String key) throws Exception{
net.sf.ehcache.Element data = null;
if(cacheManager.cacheExists("DBCache")){
logger.info("dbmanager-> disk store path for cache: " + cacheManager.getDiskStorePath() +". Cache Status: "+DBCache.getStatus().toString());
if (DBCache.getStatus().toString().equals(Status.STATUS_ALIVE.toString())){
data = DBCache.get(key);
if (data!=null){
logger.trace("dbmanager-> element with key: " + key +" is in cache");
}
}
}
return data;
}
// clear the cache on the user request
public GeneralOutputFromServlet refreshDataTree(String ElementType,
LinkedHashMap<String, String> inputData, FileModel element)
throws Exception {
logger.info("dbmanager-> Refresh data request received from element "+ElementType+". Starting to manage the request.");
// session check
if (isSessionExpired())
throw new SessionExpiredException();
try {
ASLSession session = SessionUtil.getAslSession(this
.getThreadLocalRequest().getSession());
// get scope
String scope = session.getScope();
// // call the method related to the element selected
String resourceName = "";
String databaseName = "";
GeneralOutputFromServlet result = null;
if (inputData != null && inputData.size() != 0) {
DataExchangedThroughQueue dataQueue = null;
String key = "";
String keyUsedForQueryRefresh = "";
String keyUsedForSamplingsRefresh = "";
String keyUsedForSmartSamplingRefresh = "";
String keyUsedForRandomSamplingRefresh = "";
				boolean requestToAddInQueue = false;
if (!ElementType.equals("")) {
// build key
switch (ElementType) {
case ConstantsPortlet.RESOURCESLIST:
key = scope + inputData.get(ConstantsPortlet.RESOURCESLIST);
dataQueue = new DataExchangedThroughQueue(scope);
requestToAddInQueue = true;
break;
case ConstantsPortlet.RESOURCE:
key = scope + inputData.get("ResourceName");
dataQueue = new DataExchangedThroughQueue(scope,
ConstantsPortlet.RESOURCE, inputData.get("ResourceName"),
null, null, null);
requestToAddInQueue = true;
break;
case ConstantsPortlet.DATABASE:
					if (element.getDatabaseType().equals(
							ConstantsPortlet.POSTGRES)) { // refresh schema list
key = inputData.get("ResourceName")
+ inputData.get("DatabaseName");
// refresh submitted queries
keyUsedForQueryRefresh = scope
+ ConstantsPortlet.ALGID_SUBMITQUERY + key;
refreshSubmittedQueryInCache(keyUsedForQueryRefresh);
dataQueue = new DataExchangedThroughQueue(scope,
ConstantsPortlet.DATABASE, inputData.get("ResourceName"),
inputData.get("DatabaseName"), null,
element.getDatabaseType());
requestToAddInQueue = true;
key = scope + key;
}
					if (element.getDatabaseType().equals(
							ConstantsPortlet.MYSQL)) { // refresh table list
key = inputData.get("ResourceName")
+ inputData.get("DatabaseName")
+ inputData.get("SchemaName");
keyUsedForQueryRefresh = scope
+ ConstantsPortlet.ALGID_SUBMITQUERY + key;
refreshSubmittedQueryInCache(keyUsedForQueryRefresh);
keyUsedForSamplingsRefresh = scope
+ ConstantsPortlet.ALGID_SAMPLEONTABLE
+ key;
keyUsedForSmartSamplingRefresh = scope
+ ConstantsPortlet.ALGID_SMARTSAMPLEONTABLE
+ key;
key = scope + key;
refreshSamplingsInCache(keyUsedForSamplingsRefresh,
keyUsedForSmartSamplingRefresh);
requestToAddInQueue = false;
}
break;
case ConstantsPortlet.SCHEMA:
key = inputData.get("ResourceName")
+ inputData.get("DatabaseName")
+ inputData.get("SchemaName");
// refresh submitted query and samplings and tables list
keyUsedForQueryRefresh = scope
+ ConstantsPortlet.ALGID_SUBMITQUERY
+ inputData.get("ResourceName")
+ inputData.get("DatabaseName");
refreshSubmittedQueryInCache(keyUsedForQueryRefresh);
keyUsedForSamplingsRefresh = scope
+ ConstantsPortlet.ALGID_SAMPLEONTABLE + key;
keyUsedForSmartSamplingRefresh = scope
+ ConstantsPortlet.ALGID_SMARTSAMPLEONTABLE
+ key;
key = scope + key;
refreshSamplingsInCache(keyUsedForSamplingsRefresh,
keyUsedForSmartSamplingRefresh);
requestToAddInQueue = false;
break;
}
}
// logger.trace("dbmanager-> Check if data of the node is present in cache with key: : " + key);
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
if (dataFromCache != null) {
// logger.trace("dbmanager-> Data of the node is in cache");
logger.info("dbmanager-> Starting the data removing process in cache from the node with key: " + key);
// refresh data in cache. Remove data related to the subtree
// with the selected element as root
refreshDataInCache(element, ElementType, scope, key, null);
// recover the refresh data of the item selected
if (!ElementType.equals("")) {
switch (ElementType) {
case ConstantsPortlet.RESOURCESLIST:
List<FileModel> output1 = getResource();
result = new GeneralOutputFromServlet(output1);
// System.out.println("server-> output generated");
break;
case ConstantsPortlet.RESOURCE:
resourceName = inputData.get("ResourceName");
LinkedHashMap<String, FileModel> output2 = getDBInfo(resourceName);
result = new GeneralOutputFromServlet(output2);
break;
case ConstantsPortlet.DATABASE:
if (element.getDatabaseType() != null
&& (element.getDatabaseType()
.equals(ConstantsPortlet.POSTGRES))) { // refresh
// schema list
List<FileModel> output3 = getDBSchema(inputData);
result = new GeneralOutputFromServlet(output3);
}
if (element.getDatabaseType() != null
&& element.getDatabaseType().equals(
ConstantsPortlet.MYSQL)) { // refresh
// table list
getTables(inputData, ConstantsPortlet.DATABASE);
}
break;
case ConstantsPortlet.SCHEMA:
getTables(inputData, ConstantsPortlet.SCHEMA);
break;
}
}
				// Check whether the previous thread execution has finished.
				// If so, a new thread is started; otherwise the running
				// thread will also serve this request through the queue.
				// The thread runs in the background to load the data
				// related to the subtree with the selected item as root.
				if (requestToAddInQueue) {
if (isThreadExecutionFinished()) {
logger.info("dbmanager-> Starting the launch of the Thread DataLoader execution");
queue.offer(dataQueue);
Thread t = new Thread(dataLoader);
t.start();
} else {
logger.info("dbmanager-> The Thread DataLoader is already running. Tree Refresh request put in Queue");
queue.offer(dataQueue);
}
}
}
// else{
// logger.trace("dbmanager-> Data of the node is not in cache");
// }
}
return result;
} catch (Exception e) {
logger.error("dbmanager-> ", e);
if (!(e instanceof StatisticalManagerException)) {
// GWT can't serialize all exceptions
throw new Exception(
"Error in server while loading data. Exception: " + e);
}
throw e;
}
}
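	// recursively remove from the cache the data of the subtree rooted at
	// the given element: resources list -> resources -> databases ->
	// schemas/tables, according to ElementType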
public void refreshDataInCache(FileModel element, String ElementType,
String scope, String key, String DBType) {
logger.info("dbmanager-> Refresh Data Request of element "
+ ElementType + " in Cache with KEY: " + key);
try {
// get data from cache
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
Object value = null;
if (dataFromCache != null) {
// logger.info("dbmanager-> Data is in cache");
value = dataFromCache.getObjectValue();
if (value != null) {
if (!ElementType.equals("")) {
switch (ElementType) {
case ConstantsPortlet.RESOURCESLIST:
// refresh resources
List<FileModel> resources = (List<FileModel>) value;
// refresh cache
refreshCache(key);
// logger.trace("dbmanager-> element: " + ElementType
// + " with key: " + key + " removed in cache");
// apply the refresh on children
for (int i = 0; i < resources.size(); i++) {
key = scope + resources.get(i).getName();
refreshDataInCache(element, ConstantsPortlet.RESOURCE, null,
key, null);
}
break;
case ConstantsPortlet.RESOURCE:
// refresh databases
LinkedHashMap<String, FileModel> DBdata = (LinkedHashMap<String, FileModel>) value;
// refresh cache
refreshCache(key);
// logger.trace("dbmanager-> element " + ElementType
// + " with key: " + key + " removed in cache");
// apply the refresh on children
HashMap<String, String> DBlist = new HashMap<String, String>();
if (DBdata != null) {
Set<String> keys = DBdata.keySet();
Object[] array = keys.toArray();
							// each database is described by a group of 5
							// consecutive entries in the map
							int numIterations = (DBdata.size()) / 5;
int i = 0;
int j = 0;
for (i = 0; i < numIterations; i++) {
String DBName = "";
for (j = (i * 5); j < (i + 1) * 5; j++) {
if (array[j].toString().contains(
"Database Name")) {
DBName = DBdata.get(
array[j].toString())
.getName();
}
if (array[j].toString().contains(
"Driver Name")) {
String driver = DBdata.get(
array[j].toString())
.getName();
if (driver.toUpperCase().contains(
ConstantsPortlet.POSTGRES)) {
DBlist.put(
DBName,
ConstantsPortlet.POSTGRES);
}
if (driver.toUpperCase().contains(
ConstantsPortlet.MYSQL)) {
DBlist.put(DBName,
ConstantsPortlet.MYSQL);
}
}
}
}
}
Set<String> keys = DBlist.keySet();
Object[] array = keys.toArray();
for (int i = 0; i < array.length; i++) {
String databaseType = DBlist.get(array[i]
.toString());
// if (databaseType
// .equals(ConstantsPortlet.POSTGRES)) {
String newkey = key + array[i].toString();
refreshDataInCache(element, ConstantsPortlet.DATABASE, null,
newkey, databaseType);
// }
}
break;
					case ConstantsPortlet.DATABASE:
						if (DBType == null) {
							DBType = element.getDatabaseType();
						}
						// remove the cached schema list (Postgres) or table
						// list (MySQL)
						refreshCache(key);
						if (DBType.equals(ConstantsPortlet.POSTGRES)) {
							List<FileModel> schemaList = (List<FileModel>) value;
							// apply the refresh on the schema children
							for (int i = 0; i < schemaList.size(); i++) {
								String newkey = key
										+ schemaList.get(i).getName();
								refreshDataInCache(element, ConstantsPortlet.SCHEMA, null,
										newkey, null);
							}
						}
						if (DBType.equals(ConstantsPortlet.MYSQL)) {
							// the MySQL table list is cached under the
							// database key plus an empty schema name, so the
							// same key is refreshed again for safety
							refreshCache(key);
						}
break;
case ConstantsPortlet.SCHEMA:
// refresh tables (db postgres)
refreshCache(key);
// logger.trace("dbmanager-> element " + ElementType
// + " with key: " + key + " removed in cache");
break;
}
}
}
}
// else {
// logger.info("dbmanager-> Data not in cache");
// }
} catch (Exception e) {
logger.error("dbmanager-> ", e);
}
}
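	// remove a single element from the DBCache by key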
private synchronized void refreshCache(String key) throws Exception {
if(cacheManager.cacheExists("DBCache")){
logger.info("dbmanager-> disk store path for cache: " + cacheManager.getDiskStorePath() +"Cache Status: "+DBCache.getStatus().toString());
if (DBCache.getStatus().toString().equals(Status.STATUS_ALIVE.toString())){
DBCache.remove(key);
logger.trace("dbmanager-> element with key: " + key + " removed in cache");
}
}
}
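	// remove from the cache all the submitted query results whose key
	// starts with the given prefix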
private synchronized void refreshSubmittedQueryInCache(String key)
throws Exception {
if(cacheManager.cacheExists("DBCache")){
logger.info("dbmanager-> disk store path for cache: " + cacheManager.getDiskStorePath() +"Cache Status: "+DBCache.getStatus().toString());
if (DBCache.getStatus().toString().equals(Status.STATUS_ALIVE.toString())){
// logger.info("dbmanager-> Cache Status:"+ "STATUS_ALIVE");
List<String> keysInCache = DBCache.getKeys();
int listSize = keysInCache.size();
List<String> keysToBeRemoved = new ArrayList<>();
// recover keys list that match the key
for (int i = 0; i < listSize; i++) {
if (keysInCache.get(i).startsWith(key)) {
// System.out.println("data removed with key: " +
// keysInCache.get(i));
keysToBeRemoved.add(keysInCache.get(i));
}
}
// remove keys
DBCache.removeAll(keysToBeRemoved);
logger.trace("dbmanager-> submitted queries refreshed in cache with key:"
+ key);
}
}
}
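	// remove from the cache all the sampling and smart sampling results
	// whose keys start with the given prefixes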
private synchronized void refreshSamplingsInCache(
String keyUsedForSamplingsRefresh,
String keyUsedForSmartSamplingRefresh) throws Exception {
if(cacheManager.cacheExists("DBCache")){
logger.info("dbmanager-> disk store path for cache: " + cacheManager.getDiskStorePath() +"Cache Status: "+DBCache.getStatus().toString());
if (DBCache.getStatus().toString().equals(Status.STATUS_ALIVE.toString())){
List<String> keysInCache = DBCache.getKeys();
int listSize = keysInCache.size();
List<String> keysToBeRemoved = new ArrayList<>();
// recover keys list that match the key
for (int i = 0; i < listSize; i++) {
if ((keysInCache.get(i).startsWith(keyUsedForSamplingsRefresh))
|| (keysInCache.get(i)
.startsWith(keyUsedForSmartSamplingRefresh))) {
keysToBeRemoved.add(keysInCache.get(i));
}
}
// remove keys
DBCache.removeAll(keysToBeRemoved);
logger.trace("dbmanager-> samplings and smart sampling refreshed in cache with keys: "
+ keyUsedForSamplingsRefresh
+ " "
+ keyUsedForSmartSamplingRefresh);
}
}
}
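	// write the query result into a CSV file under the webapp
	// computationResult folder; n is used as the file name prefix and the
	// generated file name is returned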
private String storeResultIntoCSVFile(List<Result> result, String n)
throws Exception {
		if (result == null) {
			logger.info("dbmanager-> Error in server while storing data: the result object is null");
			throw new Exception("Error in server while loading data");
		}
// file that will contain result
BufferedWriter out = null;
String path = this.getServletContext().getRealPath("");
String fileName = "";
fileName = n + "_" + System.currentTimeMillis() + ".csv";
String filePath = path + "/computationResult/" + fileName;
File file = new File(filePath);
		try {
			// create the parent folder and the file if missing (the
			// computationResult folder may not exist yet)
			file.getParentFile().mkdirs();
			if (!file.exists()) {
				file.createNewFile();
			}
out = new BufferedWriter(new OutputStreamWriter(
new FileOutputStream(file), "UTF-8"));
// write into file
for (int i = 0; i < result.size(); i++) {
out.write(result.get(i).getValue());
out.newLine();
}
} catch (Exception e) {
logger.error("dbmanager-> ", e);
throw e;
} finally {
if (out != null) {
out.close();
}
}
return fileName;
}
// to check if the session is expired
private boolean isSessionExpired() throws Exception {
return SessionUtil.isSessionExpired(this.getThreadLocalRequest()
.getSession());
}
	// collapse multiple space occurrences so that only one space separates
	// two words in a query
private String parseQuery(String query) {
String queryParsed;
queryParsed = query.trim();
queryParsed = queryParsed.replaceAll(" +", " ");
return queryParsed;
}
private synchronized void setEndThreadvariable(boolean value) {
endThread = value;
logger.info("dbmanager-> Variable EndThread set in order to stop the thread execution");
}
private synchronized boolean getEndThreadvariable() {
return endThread;
}
private synchronized void setThreadExecutionFinished(boolean value) {
threadExecutionFinished = value;
}
private synchronized boolean isThreadExecutionFinished() {
return threadExecutionFinished;
}
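	// recover the list of resources for the given scope with a cache-aside
	// strategy: data is served from the DBCache when present, otherwise the
	// GETRESOURCE statistical computation is started and its output is
	// stored in the cache under the key scope + RESOURCESLIST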
private List<FileModel> recoverResources(String scope) throws Exception {
try {
logger.info("dbmanager-> Resources Recovery Request received. Starting to manage the request.");
// data input
List<Parameter> inputParameters = new ArrayList<Parameter>();
// data output
List<FileModel> outputParameters = new ArrayList<FileModel>();
// get algorithmId
String algorithmId = ConstantsPortlet.ALGID_GETRESOURCE;
Parameter maxNumber = new Parameter("MaxNumber", "", "String", "-1");
inputParameters.add(maxNumber);
// check if the value is in cache. If data does not exist in cache
// the computation is started otherwise data are retrieved from
// cache.
// get data from cache
// check if data exist considering as key the input parameters
// String key = inputParameters.get(0).getDefaultValue();
String key = scope + ConstantsPortlet.RESOURCESLIST;
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
Object value = null;
			if (dataFromCache != null) {
				value = dataFromCache.getObjectValue();
			}
			if (value != null) {
outputParameters = (List<FileModel>) value;
cacheHitsNumber++;
logger.info("dbmanager-> CheckDataInCache: data found in cache. cacheHitsNumber: " + cacheHitsNumber);
} else {
smComputationNumber++;
logger.info("dbmanager-> CheckDataInCache: data not found in cache. Starting the Statistical Computation. smComputationNumber: " + smComputationNumber);
// start the computation
// System.out.println("***STARTING THE COMPUTATION");
// create data structure for data output
ComputationOutput outputData = new ComputationOutput();
// computationId
String computationId = startComputation(algorithmId,
inputParameters, outputData, scope);
				// retrieve the output data returned by the computation
				LinkedHashMap<String, String> mapValues = outputData
						.getMapValues();
for (int i = 0; i < mapValues.size(); i++) {
FileModel obj = new FileModel(mapValues.get(String
.valueOf(i)));
// obj.setIsLoaded(true);
outputParameters.add(obj);
}
if (outputParameters != null && outputParameters.size() != 0) {
// put data in cache
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
key, outputParameters);
insertDataIntoCache(dataToCache);
// logger.trace("dbmanager-> element added in cache with key: " + key);
}
}
return outputParameters;
} catch (Exception e) {
// e.printStackTrace();
// throw new Exception("Failed to load data. " + e);
logger.error("dbmanager-> ", e);
if (!(e instanceof StatisticalManagerException)) {
// GWT can't serialize all exceptions
throw new Exception(
"Error in server while loading data. Exception: " + e);
}
throw e;
}
}
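	// recover the databases of a resource with the same cache-aside
	// strategy, using the key scope + resourceName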
private LinkedHashMap<String, FileModel> recoverDatabases(String scope,
String resourceName) throws Exception {
try {
logger.info("dbmanager-> Databases Recovery Request received. Starting to manage the request.");
// data input
List<Parameter> inputParameters = new ArrayList<Parameter>();
// data output
LinkedHashMap<String, FileModel> outputParameters = new LinkedHashMap<String, FileModel>();
String algorithmId = ConstantsPortlet.ALGID_GETDBINFO;
// print check
logger.info("dbmanager-> ResourceName: " + resourceName);
if ((resourceName == null) || (resourceName.equals(""))) {
throw new Exception("Unable to load data");
}
Parameter resource = new Parameter("ResourceName", "", "String", "");
inputParameters.add(resource);
inputParameters.get(0).setValue(resourceName);
// get data from cache
// check if data exist considering as key the input parameters
String key = scope + inputParameters.get(0).getValue();
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
Object value = null;
if (dataFromCache != null) {
value = dataFromCache.getObjectValue();
}
if (value != null) {
outputParameters = (LinkedHashMap<String, FileModel>) value;
cacheHitsNumber++;
logger.info("dbmanager-> CheckDataInCache: data found in cache. cacheHitsNumber: " + cacheHitsNumber);
// System.out.println("***GETTING DATA FROM CACHE");
} else {
smComputationNumber++;
logger.info("dbmanager-> CheckDataInCache: data not found in cache. Starting the Statistical Computation. smComputationNumber: " + smComputationNumber);
// start the computation
// System.out.println("***STARTING THE COMPUTATION");
// create data structure
ComputationOutput outputData = new ComputationOutput();
// computation id
String computationId = startComputation(algorithmId,
inputParameters, outputData, scope);
				// retrieve the output data (values and keys) returned by
				// the computation
				LinkedHashMap<String, String> mapValues = outputData
						.getMapValues();
				LinkedHashMap<String, String> mapKeys = outputData
						.getmapKeys();
for (int i = 0; i < mapValues.size(); i++) {
FileModel obj = new FileModel(mapValues.get(String
.valueOf(i)));
// obj.setIsLoaded(true);
// print check
// logger.info("value: " +
// mapValues.get(String.valueOf(i)));
// logger.info("key: " +
// mapKeys.get(String.valueOf(i)));
outputParameters.put(mapKeys.get(String.valueOf(i)), obj);
}
// write data in cache
if (outputParameters != null && outputParameters.size() != 0) {
// put data in cache
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
key, outputParameters);
insertDataIntoCache(dataToCache);
// logger.trace("dbmanager-> element added in cache with key: " + key);
}
}
return outputParameters;
} catch (Exception e) {
// e.printStackTrace();
// throw new Exception("Failed to load data " + );
logger.error("dbmanager-> ", e);
if (!(e instanceof StatisticalManagerException)) {
// GWT can't serialize all exceptions
throw new Exception(
"Error in server while loading data. Exception: " + e);
}
throw e;
}
}
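	// recover the schemas of a (Postgres) database, cached under the key
	// scope + resourceName + databaseName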
private List<FileModel> recoverSchema(String scope,
LinkedHashMap<String, String> dataInput) throws Exception {
try {
logger.info("dbmanager-> Schema Recovery Request received. Starting to manage the request.");
// data input
List<Parameter> inputParameters = new ArrayList<Parameter>();
// data output
List<FileModel> outputParameters = new ArrayList<FileModel>();
String algorithmId = ConstantsPortlet.ALGID_GETDBSCHEMA;
// print check
String rs = dataInput.get("ResourceName");
String db = dataInput.get("DatabaseName");
logger.info("dbmanager-> ResourceName: " + rs);
logger.info("dbmanager-> DatabaseName: " + db);
if ((rs == null) || (rs.equals(""))) {
throw new Exception("Unable to load data");
}
if ((db == null) || (db.equals(""))) {
throw new Exception("Unable to load data");
}
// set input parameters
Parameter resource = new Parameter("ResourceName", "", "String", "");
Parameter database = new Parameter("DatabaseName", "", "String", "");
inputParameters.add(resource);
inputParameters.add(database);
inputParameters.get(0).setValue(rs);
inputParameters.get(1).setValue(db);
// print check algorithm input parameters
// for (int i = 0; i < inputParameters.size(); i++) {
// logger.info(inputParameters.get(i).getName());
// }
// get data from cache
// check if data exist considering as key the input parameters
String key = scope + inputParameters.get(0).getValue()
+ inputParameters.get(1).getValue();
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
Object value = null;
if (dataFromCache != null) {
value = dataFromCache.getObjectValue();
}
if (value != null) {
outputParameters = (List<FileModel>) value;
cacheHitsNumber++;
logger.info("dbmanager-> CheckDataInCache: data found in cache. cacheHitsNumber: " + cacheHitsNumber);
} else {
smComputationNumber++;
logger.info("dbmanager-> CheckDataInCache: data not found in cache. Starting the Statistical Computation. smComputationNumber: " + smComputationNumber);
// start the computation
// create data structure
ComputationOutput outputData = new ComputationOutput();
// computation id
String computationId = startComputation(algorithmId,
inputParameters, outputData, scope);
				// retrieve the output data (values and keys) returned by
				// the computation
				LinkedHashMap<String, String> mapValues = outputData
						.getMapValues();
				LinkedHashMap<String, String> mapKeys = outputData
						.getmapKeys();
for (int i = 0; i < mapValues.size(); i++) {
FileModel obj = new FileModel(mapValues.get(String
.valueOf(i)));
// obj.setIsSchema(true);
// obj.setIsLoaded(true);
outputParameters.add(obj);
}
// write data in cache
if (outputParameters != null && outputParameters.size() != 0) {
// put data in cache
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
key, outputParameters);
insertDataIntoCache(dataToCache);
// logger.trace("dbmanager-> element added in cache with key: " + key);
// DBCache.put(dataToCache);
}
}
return outputParameters;
} catch (Exception e) {
// e.printStackTrace();
// throw new Exception("Failed to load data. " + e);
logger.error("dbmanager-> ", e);
if (!(e instanceof StatisticalManagerException)) {
// GWT can't serialize all exceptions
throw new Exception(
"Error in server while loading data. Exception: " + e);
}
throw e;
}
}
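	// recover the tables of a database or schema, cached under the key
	// scope + resourceName + databaseName + schemaName (the schema name is
	// empty for MySQL databases)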
private List<Result> recoverTables(String scope,
LinkedHashMap<String, String> dataInput, String elementType)
throws Exception {
try {
logger.info("dbmanager-> Tables Recovery Request received. Starting to manage the request.");
// data input
List<Parameter> inputParameters = new ArrayList<Parameter>();
// data output
List<Result> outputParameters = new ArrayList<Result>();
String algorithmId = ConstantsPortlet.ALGID_GETTABLES;
String rs = dataInput.get("ResourceName");
String db = dataInput.get("DatabaseName");
String scm = dataInput.get("SchemaName");
// print check
logger.info("dbmanager-> ResourceName: " + rs);
logger.info("dbmanager-> DatabaseName: " + db);
logger.info("dbmanager-> SchemaName: " + scm);
if ((elementType != null)
&& (elementType.equals(ConstantsPortlet.SCHEMA))) {
if ((rs == null) || (rs.equals(""))) {
throw new Exception("Unable to load data");
}
if ((db == null) || (db.equals(""))) {
throw new Exception("Unable to load data");
}
if ((scm == null) || (scm.equals(""))) {
throw new Exception("Unable to load data");
}
}
if ((elementType != null)
&& (elementType.equals(ConstantsPortlet.DATABASE))) {
if ((rs == null) || (rs.equals(""))) {
throw new Exception("Unable to load data");
}
if ((db == null) || (db.equals(""))) {
throw new Exception("Unable to load data");
}
}
// set input parameters
Parameter resource = new Parameter("ResourceName", "", "String", "");
Parameter database = new Parameter("DatabaseName", "", "String", "");
Parameter schema = new Parameter("SchemaName", "", "String", "");
inputParameters.add(resource);
inputParameters.add(database);
inputParameters.add(schema);
inputParameters.get(0).setValue(rs);
inputParameters.get(1).setValue(db);
inputParameters.get(2).setValue(scm);
// get data from cache
// check if data exist considering as key the input parameters
String key = scope + inputParameters.get(0).getValue()
+ inputParameters.get(1).getValue()
+ inputParameters.get(2).getValue();
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
Object value = null;
if (dataFromCache != null) {
value = dataFromCache.getObjectValue();
// System.out.println("***GETTING DATA FROM CACHE");
}
if (value != null) {
outputParameters = (List<Result>) value;
cacheHitsNumber++;
logger.info("dbmanager-> CheckDataInCache: data found in cache. cacheHitsNumber: " + cacheHitsNumber);
} else {
smComputationNumber++;
logger.info("dbmanager-> CheckDataInCache: data not found in cache. Starting the Statistical Computation. smComputationNumber: " + smComputationNumber);
// start computation
// create data structure
ComputationOutput outputData = new ComputationOutput();
// computation id
String computationId = startComputation(algorithmId,
inputParameters, outputData, scope);
				// retrieve the output data (values and keys) returned by
				// the computation
				LinkedHashMap<String, String> mapValues = outputData
						.getMapValues();
				LinkedHashMap<String, String> mapKeys = outputData
						.getmapKeys();
for (int i = 0; i < mapValues.size(); i++) {
Result row = new Result(String.valueOf(i),
mapValues.get(String.valueOf(i)));
outputParameters.add(row);
}
// write data in cache
if (outputParameters != null && outputParameters.size() != 0) {
// put data in cache
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
key, outputParameters);
insertDataIntoCache(dataToCache);
// logger.trace("dbmanager-> element added in cache with key: " + key);
}
}
return outputParameters;
		} catch (Exception e) {
			logger.error("dbmanager-> ", e);
if (!(e instanceof StatisticalManagerException)) {
// GWT can't serialize all exceptions
throw new Exception(
"Error in server while loading data. Exception: " + e);
}
throw e;
}
}
private synchronized void updateThreadsStarted(String scope, Boolean value) {
threadsStarted.put(scope, value);
}
private synchronized Boolean getThreadStarted(String scope) {
Boolean value = threadsStarted.get(scope);
return value;
}
// thread that loads data on the resources
private class ThreadDataLoader implements Runnable {
public ThreadDataLoader() {
}
@Override
public void run() {
logger.info("dbmanager-> Thread DataLoader running");
try {
setThreadExecutionFinished(false);
while (!queue.isEmpty()) {
logger.info("dbmanager-> Queue to exchange data with the thread not empty");
// recover data from queue
DataExchangedThroughQueue node = queue.poll();
if (node != null) {
String scope = node.getScope();
// add an element related to the thread in the hashmap
updateThreadsStarted(scope, true);
						boolean loadTree = node.treeToBeLoaded();
						if (!loadTree) {
							// load the subtree with the node as root
if (!getEndThreadvariable()) {
String elementType = node.elementType();
String resource = node.resource();
logger.info("dbmanager-> Starting the node refreshing process");
switch (elementType) {
case ConstantsPortlet.RESOURCE:
getDatabase(scope, resource);
break;
case ConstantsPortlet.DATABASE:
String DBType = node.DBType();
String database = node.database();
if (DBType
.equals(ConstantsPortlet.POSTGRES)) {
getSchema(scope, resource, database);
}
if (DBType.equals(ConstantsPortlet.MYSQL)) {
String schema = node.schema();
getTables(scope, resource, database,
schema,
ConstantsPortlet.DATABASE);
}
break;
case ConstantsPortlet.SCHEMA:
String db = node.database();
String schema = node.schema();
getTables(scope, resource, db, schema,
ConstantsPortlet.SCHEMA);
break;
}
}
} else { // load the tree
logger.info("dbmanager-> Starting the tree loading");
if (!getEndThreadvariable()) {
List<FileModel> resources = recoverResources(scope);
int i = 0;
while ((!getEndThreadvariable())
&& (i < resources.size())) {
getDatabase(scope, resources.get(i)
.getName());
i++;
// logger.info("dbmanager-> ***thread inside the while checking the EndThread variable");
}
// logger.info("dbmanager-> ***thread outside the while checking the EndThread variable");
}
}
}
}
} catch (Throwable e) {
logger.error("dbmanager-> ", e);
}finally{
// thread terminates its execution
setThreadExecutionFinished(true);
logger.info("dbmanager-> Thread DataLoader execution terminated");
}
}
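		// load the databases of a resource and, for each of them, eagerly
		// load the schemas (Postgres) or the tables (MySQL), honouring the
		// endThread stop flag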
private void getDatabase(String scope, String resourceName) {
try {
LinkedHashMap<String, FileModel> DBdata = recoverDatabases(
scope, resourceName);
if (DBdata != null) {
Set<String> keys = DBdata.keySet();
Object[] array = keys.toArray();
					// each database is described by a group of 5
					// consecutive entries in the map
					int numIterations = (DBdata.size()) / 5;
int i = 0;
int j = 0;
					for (i = 0; i < numIterations; i++) {
						String DBName = "";
						j = i * 5;
						// scan this database's 5 entries unless a stop has
						// been requested through the endThread flag
						while ((!getEndThreadvariable()) && (j < ((i + 1) * 5))) {
if (array[j].toString().contains("Database Name")) {
DBName = DBdata.get(array[j].toString())
.getName();
}
if (array[j].toString().contains("Driver Name")) {
String driver = DBdata.get(array[j].toString())
.getName();
								if (driver.toUpperCase().contains(
										ConstantsPortlet.POSTGRES)) {
									// load the schemas (and their tables)
									getSchema(scope, resourceName, DBName);
								}
if (driver.toUpperCase().contains(
ConstantsPortlet.MYSQL)) {
// get tables
getTables(scope, resourceName, DBName, "",
ConstantsPortlet.DATABASE);
}
}
j++;
}
}
}
} catch (Exception e) {
logger.error("dbmanager-> ", e);
}
}
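		// load the schemas of a database and the tables of each schema,
		// honouring the endThread stop flag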
private List<FileModel> getSchema(String scope, String resourceName,
String databaseName) {
List<FileModel> schemaList = null;
try {
LinkedHashMap<String, String> dataInputForSchema = new LinkedHashMap<String, String>();
dataInputForSchema.put("ResourceName", resourceName);
dataInputForSchema.put("DatabaseName", databaseName);
schemaList = recoverSchema(scope, dataInputForSchema);
// recover tables
if (schemaList != null) {
int z = 0;
					while ((!getEndThreadvariable()) && (z < schemaList.size())) {
						String schemaName = schemaList.get(z).getName();
getTables(scope, resourceName, databaseName,
schemaName, ConstantsPortlet.SCHEMA);
z++;
}
}
} catch (Exception e) {
logger.error("dbmanager-> ", e);
}
return schemaList;
}
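		// load the tables of a database or schema through recoverTables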
private void getTables(String scope, String resourceName,
String databaseName, String schemaName, String elementType) {
try {
LinkedHashMap<String, String> dataInputForTables = new LinkedHashMap<String, String>();
dataInputForTables.put("ResourceName", resourceName);
dataInputForTables.put("DatabaseName", databaseName);
dataInputForTables.put("SchemaName", schemaName);
				recoverTables(scope, dataInputForTables, elementType);
} catch (Exception e) {
logger.error("dbmanager-> ", e);
}
}
}
}