databases-manager-portlet/src/main/java/org/gcube/portlets/user/databasesmanager/server/GWTdbManagerServiceImpl.java


package org.gcube.portlets.user.databasesmanager.server;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLStreamHandler;
import javax.servlet.ServletException;
import javax.servlet.http.HttpSession;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Ehcache;
import net.sf.ehcache.config.CacheConfiguration;
import org.apache.regexp.RE;
import org.gcube.application.framework.core.session.ASLSession;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanager.storageclient.model.protocol.smp.SMPURLConnection;
import org.gcube.data.analysis.statisticalmanager.proxies.StatisticalManagerDSL;
import org.gcube.data.analysis.statisticalmanager.proxies.StatisticalManagerFactory;
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMComputationConfig;
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMComputationRequest;
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMOperationStatus;
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMResourceType;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMAbstractResource;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMComputation;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMEntries;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMFile;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMInputEntry;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMObject;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMOperationInfo;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMResource;
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMTable;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.portlets.user.databasesmanager.client.GWTdbManagerService;
import org.gcube.portlets.user.databasesmanager.client.datamodel.ComputationOutput;
import org.gcube.portlets.user.databasesmanager.client.datamodel.FileModel;
import org.gcube.portlets.user.databasesmanager.client.datamodel.GeneralOutputFromServlet;
import org.gcube.portlets.user.databasesmanager.client.datamodel.Parameter;
import org.gcube.portlets.user.databasesmanager.client.datamodel.Result;
import org.gcube.portlets.user.databasesmanager.client.datamodel.Row;
import org.gcube.portlets.user.databasesmanager.client.utils.ConstantsPortlet;
import org.gcube.portlets.user.databasesmanager.server.util.SessionUtil;
import org.gcube.portlets.user.databasesmanager.server.util.WsUtil;
import com.extjs.gxt.ui.client.data.BasePagingLoadResult;
import com.extjs.gxt.ui.client.data.PagingLoadConfig;
import com.extjs.gxt.ui.client.data.PagingLoadResult;
import com.google.gwt.user.server.rpc.RemoteServiceServlet;
import com.thoughtworks.xstream.XStream;
import org.apache.log4j.Logger;
public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements
GWTdbManagerService {
// logger
private static Logger logger = Logger
.getLogger(GWTdbManagerServiceImpl.class);
// cache for the database metadata (resources, databases, schemas, tables)
private static Ehcache employeeCache;
public static List<String> listAlgorithms;
public GWTdbManagerServiceImpl() {
}
@Override
public void init() throws ServletException {
super.init();
// create the cache
try {
URL url = getClass().getResource("/encache.xml");
CacheManager cacheManager = CacheManager.newInstance(url);
// get the cache declared in the configuration file
employeeCache = cacheManager.getEhcache("DBCache");
// keep the disk store in the container's temporary directory
String path = System.getenv("CATALINA_TMPDIR");
CacheConfiguration config = employeeCache.getCacheConfiguration();
config.setDiskStorePath(path);
} catch (Exception e) {
logger.error("Failed to get cache: " + e);
throw new ServletException("Failed to initialize the cache", e);
}
}
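/*
 * For reference, a minimal sketch of the equivalent programmatic cache
 * set-up, useful if the XML resource were missing from the classpath.
 * The capacity and TTL values below are illustrative assumptions, not
 * values taken from encache.xml:
 *
 *   CacheConfiguration cfg = new CacheConfiguration("DBCache", 1000); // assumed capacity
 *   cfg.setTimeToLiveSeconds(3600); // assumed TTL
 *   cfg.setDiskStorePath(System.getenv("CATALINA_TMPDIR"));
 *   CacheManager manager = CacheManager.create();
 *   manager.addCache(new net.sf.ehcache.Cache(cfg));
 *   Ehcache cache = manager.getEhcache("DBCache");
 */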
private void initVariables() {
ASLSession session = WsUtil.getAslSession(this.getThreadLocalRequest()
.getSession());
// map that contains the submit query result and the related uid
HashMap<String, List<Result>> submitQueryResult = new HashMap<String, List<Result>>();
session.setAttribute("submitQueryResult", submitQueryResult);
// map that contains the parsed submit query result and the related uid
HashMap<String, List<Row>> submitQueryResultParsed = new HashMap<String, List<Row>>();
session.setAttribute("submitQueryResultParsed", submitQueryResultParsed);
// map that contains the computation id with a uid key
HashMap<String, String> computationIDMap = new HashMap<String, String>();
session.setAttribute("ComputationIDList", computationIDMap);
// map that contains the job status with a uid key
HashMap<String, String> JobStatusMap = new HashMap<String, String>();
session.setAttribute("JobStatusList", JobStatusMap);
}
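/*
 * The attribute names set here ("submitQueryResult",
 * "submitQueryResultParsed", "ComputationIDList", "JobStatusList") are
 * the same keys the synchronized accessors below read back, each with an
 * unchecked cast. A small typed helper would factor out that repetition
 * (a sketch, not part of this class):
 *
 *   @SuppressWarnings("unchecked")
 *   private <T> T getSessionAttribute(String name) {
 *       ASLSession s = WsUtil.getAslSession(this.getThreadLocalRequest()
 *               .getSession());
 *       return (T) s.getAttribute(name);
 *   }
 *
 *   // HashMap<String, String> jobs = getSessionAttribute("JobStatusList");
 */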
// to get the resources from the IS
@Override
public List<FileModel> getResource() throws Exception {
// initialize the session variables
initVariables();
// data input
List<Parameter> inputParameters = new ArrayList<Parameter>();
// data output
List<FileModel> outputParameters = new ArrayList<FileModel>();
// algorithm id
String algorithmId = "LISTDBNAMES";
Parameter maxNumber = new Parameter("MaxNumber", "", "String", "-1");
inputParameters.add(maxNumber);
// check if the value is in cache: if not, the computation is started,
// otherwise data are retrieved from the cache
try {
String key = "listResources";
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
Object value = null;
if (dataFromCache != null) {
value = dataFromCache.getObjectValue();
}
if (value != null) {
outputParameters = (List<FileModel>) value;
} else {
// start the computation and collect its output
ComputationOutput outputData = new ComputationOutput();
startComputation(algorithmId, inputParameters, outputData);
LinkedHashMap<String, String> mapValues = outputData.getMapValues();
for (int i = 0; i < mapValues.size(); i++) {
FileModel obj = new FileModel(mapValues.get(String.valueOf(i)));
outputParameters.add(obj);
}
if (!outputParameters.isEmpty()) {
// put data in cache
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
key, outputParameters);
insertDataIntoCache(dataToCache);
}
}
} catch (Exception e) {
throw new Exception("Failed to load data. " + e);
}
return outputParameters;
}
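/*
 * Every lookup in this class follows the same cache-aside pattern shown
 * above: probe the cache by key, run the remote computation only on a
 * miss, then store the non-empty result. Condensed (the loader call is a
 * placeholder for the startComputation sequence):
 *
 *   net.sf.ehcache.Element hit = getDataFromCache(key);
 *   if (hit != null && hit.getObjectValue() != null) {
 *       return (List<FileModel>) hit.getObjectValue();
 *   }
 *   List<FileModel> fresh = loadFromStatisticalManager(); // placeholder
 *   if (!fresh.isEmpty()) {
 *       insertDataIntoCache(new net.sf.ehcache.Element(key, fresh));
 *   }
 *   return fresh;
 */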
// to get information about the databases of a resource
@Override
public LinkedHashMap<String, FileModel> getDBInfo(String resourceName)
throws Exception {
// data input
List<Parameter> inputParameters = new ArrayList<Parameter>();
// data output
LinkedHashMap<String, FileModel> outputParameters = new LinkedHashMap<String, FileModel>();
String algorithmId = "LISTDBINFO";
// print check
logger.info("dbmanager-> ResourceName: " + resourceName);
Parameter resource = new Parameter("ResourceName", "", "String", "");
inputParameters.add(resource);
inputParameters.get(0).setValue(resourceName);
try {
// check if data exist in cache, considering the input parameters
// as key
String key = inputParameters.get(0).getValue();
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
Object value = null;
if (dataFromCache != null) {
value = dataFromCache.getObjectValue();
}
if (value != null) {
outputParameters = (LinkedHashMap<String, FileModel>) value;
} else {
// start the computation and collect its output
ComputationOutput outputData = new ComputationOutput();
startComputation(algorithmId, inputParameters, outputData);
// data output values and keys
LinkedHashMap<String, String> mapValues = outputData.getMapValues();
LinkedHashMap<String, String> mapKeys = outputData.getmapKeys();
for (int i = 0; i < mapValues.size(); i++) {
FileModel obj = new FileModel(mapValues.get(String.valueOf(i)));
outputParameters.put(mapKeys.get(String.valueOf(i)), obj);
}
// write data in cache
if (!outputParameters.isEmpty()) {
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
key, outputParameters);
insertDataIntoCache(dataToCache);
}
}
} catch (Exception e) {
logger.error("dbmanager-> " + e);
throw new Exception("Failed to load data. " + e);
}
return outputParameters;
}
// to get the schemas of a database
@Override
public List<FileModel> getDBSchema(LinkedHashMap<String, String> dataInput)
throws Exception {
// data input
List<Parameter> inputParameters = new ArrayList<Parameter>();
// data output
List<FileModel> outputParameters = new ArrayList<FileModel>();
String algorithmId = "LISTDBSCHEMA";
// print check
String rs = dataInput.get("ResourceName");
String db = dataInput.get("DatabaseName");
logger.info("dbmanager-> ResourceName: " + rs);
logger.info("dbmanager-> DatabaseName: " + db);
// set input parameters
Parameter resource = new Parameter("ResourceName", "", "String", "");
Parameter database = new Parameter("DatabaseName", "", "String", "");
inputParameters.add(resource);
inputParameters.add(database);
inputParameters.get(0).setValue(rs);
inputParameters.get(1).setValue(db);
try {
// check if data exist in cache, considering the input parameters
// as key
String key = inputParameters.get(0).getValue()
+ inputParameters.get(1).getValue();
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
Object value = null;
if (dataFromCache != null) {
value = dataFromCache.getObjectValue();
}
if (value != null) {
outputParameters = (List<FileModel>) value;
} else {
// start the computation and collect its output
ComputationOutput outputData = new ComputationOutput();
startComputation(algorithmId, inputParameters, outputData);
LinkedHashMap<String, String> mapValues = outputData.getMapValues();
for (int i = 0; i < mapValues.size(); i++) {
FileModel obj = new FileModel(mapValues.get(String.valueOf(i)));
outputParameters.add(obj);
}
// write data in cache
if (!outputParameters.isEmpty()) {
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
key, outputParameters);
insertDataIntoCache(dataToCache);
}
}
} catch (Exception e) {
logger.error("dbmanager-> " + e);
throw new Exception("Failed to load data. " + e);
}
return outputParameters;
}
// to get the list of tables
private List<Result> getTables(LinkedHashMap<String, String> dataInput)
throws Exception {
// data input
List<Parameter> inputParameters = new ArrayList<Parameter>();
// data output
List<Result> outputParameters = new ArrayList<Result>();
String algorithmId = "LISTTABLES";
// print check
String rs = dataInput.get("ResourceName");
String db = dataInput.get("DatabaseName");
String scm = dataInput.get("SchemaName");
logger.info("dbmanager-> ResourceName: " + rs);
logger.info("dbmanager-> DatabaseName: " + db);
logger.info("dbmanager-> SchemaName: " + scm);
// set input parameters
Parameter resource = new Parameter("ResourceName", "", "String", "");
Parameter database = new Parameter("DatabaseName", "", "String", "");
Parameter schema = new Parameter("SchemaName", "", "String", "");
inputParameters.add(resource);
inputParameters.add(database);
inputParameters.add(schema);
inputParameters.get(0).setValue(rs);
inputParameters.get(1).setValue(db);
inputParameters.get(2).setValue(scm);
try {
// check if data exist in cache, considering the input parameters
// as key
String key = inputParameters.get(0).getValue()
+ inputParameters.get(1).getValue()
+ inputParameters.get(2).getValue();
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
Object value = null;
if (dataFromCache != null) {
value = dataFromCache.getObjectValue();
}
if (value != null) {
outputParameters = (List<Result>) value;
} else {
// start the computation and collect its output
ComputationOutput outputData = new ComputationOutput();
startComputation(algorithmId, inputParameters, outputData);
LinkedHashMap<String, String> mapValues = outputData.getMapValues();
for (int i = 0; i < mapValues.size(); i++) {
Result row = new Result(String.valueOf(i),
mapValues.get(String.valueOf(i)));
outputParameters.add(row);
}
// write data in cache
if (!outputParameters.isEmpty()) {
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
key, outputParameters);
insertDataIntoCache(dataToCache);
}
}
} catch (Exception e) {
throw new Exception("Failed to load data. " + e);
}
return outputParameters;
}
// to load the tables with paging and an optional search filter
@Override
public PagingLoadResult<Result> LoadTables(PagingLoadConfig config,
LinkedHashMap<String, String> dataInput, boolean SearchTable,
String keyword) throws Exception {
// get the tables (from the cache or by computation)
List<Result> result = getTables(dataInput);
// create a sublist according to the limit and offset values of the
// config
List<Result> sublist = new ArrayList<Result>();
BasePagingLoadResult loadResult = null;
int start = config.getOffset();
int limit = result.size();
if (config.getLimit() > 0) {
limit = Math.min(start + config.getLimit(), limit);
}
int totalNumber = result.size();
if (!SearchTable || keyword == null || keyword.length() == 0) {
sublist = new ArrayList<Result>(result.subList(start, limit));
} else {
// keep the tables whose name starts with the keyword
for (int i = 0; i < result.size(); i++) {
if (result.get(i).getValue().toLowerCase()
.startsWith(keyword.toLowerCase())) {
sublist.add(result.get(i));
}
}
// apply the page window to the filtered list
int sublen = sublist.size();
totalNumber = sublen;
limit = sublen;
if (config.getLimit() > 0) {
limit = Math.min(start + config.getLimit(), sublen);
}
if (start < sublen) {
sublist = new ArrayList<Result>(sublist.subList(start, limit));
} else {
sublist = new ArrayList<Result>();
}
}
loadResult = new BasePagingLoadResult<Result>(sublist,
config.getOffset(), totalNumber);
return loadResult;
}
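/*
 * The paging arithmetic above, in isolation: the returned window is
 * [offset, min(offset + pageSize, total)), and the loader reports the
 * size of the (possibly filtered) list so the grid can render its
 * pager. A self-contained sketch:
 *
 *   List<Result> all = getTables(dataInput);
 *   int offset = config.getOffset();
 *   int end = config.getLimit() > 0
 *           ? Math.min(offset + config.getLimit(), all.size())
 *           : all.size();
 *   List<Result> page = (offset < end)
 *           ? new ArrayList<Result>(all.subList(offset, end))
 *           : new ArrayList<Result>();
 *   return new BasePagingLoadResult<Result>(page, offset, all.size());
 */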
// to submit a query
@Override
public List<String> submitQuery(LinkedHashMap<String, String> dataDB,
String query, boolean valueReadOnlyQuery,
boolean smartCorrectionQuery, String language, String UID)
throws Exception {
logger.info("dbmanager-> Dialect used for smart correction: "
+ language);
// data input
List<Parameter> inputParameters = new ArrayList<Parameter>();
// data output
List<Result> output = null;
// list that contains the table attributes
List<String> listAttributes = null;
String algorithmId = "SUBMITQUERY";
// print check
String rs = dataDB.get("ResourceName");
String db = dataDB.get("DatabaseName");
logger.info("dbmanager-> ResourceName: " + rs);
logger.info("dbmanager-> DatabaseName: " + db);
logger.info("dbmanager-> Query: " + query);
logger.info("dbmanager-> SmartCorrections check: "
+ smartCorrectionQuery);
// set input parameters
Parameter resource = new Parameter("ResourceName", "", "String", "");
Parameter database = new Parameter("DatabaseName", "", "String", "");
Parameter readOnlyQuery = new Parameter("Read-Only Query", "",
"Boolean", "true");
Parameter applySmartCorrection = new Parameter("Apply Smart Correction",
"", "Boolean", "true");
Parameter lng = new Parameter("Language", "", "NONE", "NONE");
Parameter q = new Parameter("Query", "", "String", "");
inputParameters.add(resource);
inputParameters.add(database);
inputParameters.add(readOnlyQuery);
inputParameters.add(applySmartCorrection);
inputParameters.add(lng);
inputParameters.add(q);
inputParameters.get(0).setValue(rs);
inputParameters.get(1).setValue(db);
inputParameters.get(2).setValue(String.valueOf(valueReadOnlyQuery));
inputParameters.get(3).setValue(String.valueOf(smartCorrectionQuery));
inputParameters.get(4).setValue(language);
inputParameters.get(5).setValue(query);
// create the output data structure
ComputationOutput outputData = new ComputationOutput();
try {
startComputation(algorithmId, inputParameters, outputData, UID);
// build the result only if the job has not been cancelled
if (checkJob(UID)) {
// data output values and keys
LinkedHashMap<String, String> mapValues = outputData.getMapValues();
LinkedHashMap<String, String> mapKeys = outputData.getmapKeys();
if (mapValues.size() != 0) {
output = new ArrayList<Result>();
for (int i = 0; i < mapValues.size(); i++) {
Result row = new Result(mapKeys.get(String.valueOf(i)),
mapValues.get(String.valueOf(i)));
output.add(row);
}
// get the attributes list for the result table
listAttributes = getListAttributes(output.get(0).getValue());
// remove the header in order to parse only the result
output.remove(0);
// store the result of the submit query operation
updateSubmitQueryResultMap(UID, output);
// remove the job with the specified uid
removeJob(UID);
}
} else {
listAttributes = null;
}
} catch (Exception e) {
// TODO to remove: workaround for the exception raised by the
// Statistical Manager when a computation is removed
if (e.getMessage() != null
&& e.getMessage().contains(
"javax.xml.ws.soap.SOAPFaultException: java.lang.IndexOutOfBoundsException")) {
e = new Exception("ServerException");
}
throw e;
}
return listAttributes;
}
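/*
 * Typical flow for a query, as the client drives it (a sketch; the UID
 * is any unique string the client generates to tie the calls together):
 *
 *   String uid = "query-123"; // illustrative
 *   List<String> attributes = submitQuery(dataDB,
 *           "select * from mytable", true, false, "POSTGRES", uid);
 *   if (attributes != null) {
 *       PagingLoadResult<Row> page = loadSubmitResult(config, attributes, uid);
 *   }
 *   removeComputation(uid); // cancels a running job, or just clears its status
 *
 * A null attribute list means the job was removed before the result
 * could be stored.
 */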
@Override
public List<Result> sample(LinkedHashMap<String, String> dataInput)
throws Exception {
// data input
List<Parameter> inputParameters = new ArrayList<Parameter>();
// output sample result
List<Result> output = new ArrayList<Result>();
String algorithmId = "SAMPLEONTABLE";
// print check
String rs = dataInput.get("ResourceName");
String db = dataInput.get("DatabaseName");
String scm = dataInput.get("SchemaName");
String tab = dataInput.get("TableName");
logger.info("dbmanager-> ResourceName: " + rs);
logger.info("dbmanager-> DatabaseName: " + db);
logger.info("dbmanager-> SchemaName: " + scm);
logger.info("dbmanager-> TableName: " + tab);
//set input parameters
Parameter resource = new Parameter("ResourceName", "", "String", "");
Parameter database = new Parameter("DatabaseName", "", "String", "");
Parameter schema = new Parameter("SchemaName","", "String", "");
Parameter table = new Parameter("TableName","", "String", "");
inputParameters.add(resource);
inputParameters.add(database);
inputParameters.add(schema);
inputParameters.add(table);
inputParameters.get(0).setValue(rs);
inputParameters.get(1).setValue(db);
inputParameters.get(2).setValue(scm);
inputParameters.get(3).setValue(tab);
// create the output data structure and start the computation
ComputationOutput outputData = new ComputationOutput();
startComputation(algorithmId, inputParameters, outputData);
// data output values and keys
LinkedHashMap<String, String> mapValues = outputData.getMapValues();
LinkedHashMap<String, String> mapKeys = outputData.getmapKeys();
for (int i = 0; i < mapValues.size(); i++) {
Result row = new Result(mapKeys.get(String.valueOf(i)),
mapValues.get(String.valueOf(i)));
output.add(row);
}
return output;
}
@Override
public List<Result> smartSample(LinkedHashMap<String, String> dataInput)
throws Exception {
// data input
List<Parameter> inputParameters = new ArrayList<Parameter>();
// output sample result
List<Result> output = new ArrayList<Result>();
String algorithmId = "SMARTSAMPLEONTABLE";
// print check
String rs = dataInput.get("ResourceName");
String db = dataInput.get("DatabaseName");
String scm = dataInput.get("SchemaName");
String tab = dataInput.get("TableName");
logger.info("dbmanager-> ResourceName: " + rs);
logger.info("dbmanager-> DatabaseName: " + db);
logger.info("dbmanager-> SchemaName: " + scm);
logger.info("dbmanager-> TableName: " + tab);
//set input parameters
Parameter resource = new Parameter("ResourceName", "", "String", "");
Parameter database = new Parameter("DatabaseName", "", "String", "");
Parameter schema = new Parameter("SchemaName","", "String", "");
Parameter table = new Parameter("TableName","", "String", "");
inputParameters.add(resource);
inputParameters.add(database);
inputParameters.add(schema);
inputParameters.add(table);
inputParameters.get(0).setValue(rs);
inputParameters.get(1).setValue(db);
inputParameters.get(2).setValue(scm);
inputParameters.get(3).setValue(tab);
// create the output data structure and start the computation
ComputationOutput outputData = new ComputationOutput();
startComputation(algorithmId, inputParameters, outputData);
// data output values and keys
LinkedHashMap<String, String> mapValues = outputData.getMapValues();
LinkedHashMap<String, String> mapKeys = outputData.getmapKeys();
for (int i = 0; i < mapValues.size(); i++) {
Result row = new Result(mapKeys.get(String.valueOf(i)),
mapValues.get(String.valueOf(i)));
output.add(row);
}
return output;
}
@Override
public List<Result> randomSample(LinkedHashMap<String, String> dataInput)
throws Exception {
// data input
List<Parameter> inputParameters = new ArrayList<Parameter>();
// output sample result
List<Result> output = new ArrayList<Result>();
String algorithmId = "RANDOMSAMPLEONTABLE";
// print check
String rs = dataInput.get("ResourceName");
String db = dataInput.get("DatabaseName");
String scm = dataInput.get("SchemaName");
String tab = dataInput.get("TableName");
logger.info("dbmanager-> ResourceName: " + rs);
logger.info("dbmanager-> DatabaseName: " + db);
logger.info("dbmanager-> SchemaName: " + scm);
logger.info("dbmanager-> TableName: " + tab);
//set input parameters
Parameter resource = new Parameter("ResourceName", "", "String", "");
Parameter database = new Parameter("DatabaseName", "", "String", "");
Parameter schema = new Parameter("SchemaName","", "String", "");
Parameter table = new Parameter("TableName","", "String", "");
inputParameters.add(resource);
inputParameters.add(database);
inputParameters.add(schema);
inputParameters.add(table);
inputParameters.get(0).setValue(rs);
inputParameters.get(1).setValue(db);
inputParameters.get(2).setValue(scm);
inputParameters.get(3).setValue(tab);
// create the output data structure and start the computation
ComputationOutput outputData = new ComputationOutput();
startComputation(algorithmId, inputParameters, outputData);
// data output values and keys
LinkedHashMap<String, String> mapValues = outputData.getMapValues();
LinkedHashMap<String, String> mapKeys = outputData.getmapKeys();
for (int i = 0; i < mapValues.size(); i++) {
Result row = new Result(mapKeys.get(String.valueOf(i)),
mapValues.get(String.valueOf(i)));
output.add(row);
}
return output;
}
@Override
public LinkedHashMap<String, FileModel> getTableDetails(
LinkedHashMap<String, String> dataInput) throws Exception {
// data input
List<Parameter> inputParameters = new ArrayList<Parameter>();
// data output
LinkedHashMap<String, FileModel> outputParameters = new LinkedHashMap<String, FileModel>();
String algorithmId = "GETTABLEDETAILS";
// print check
String rs = dataInput.get("ResourceName");
String db = dataInput.get("DatabaseName");
String scm = dataInput.get("SchemaName");
String tab = dataInput.get("TableName");
logger.info("dbmanager-> ResourceName: " + rs);
logger.info("dbmanager-> DatabaseName: " + db);
logger.info("dbmanager-> SchemaName: " + scm);
logger.info("dbmanager-> TableName: " + tab);
//set input parameters
Parameter resource = new Parameter("ResourceName", "", "String", "");
Parameter database = new Parameter("DatabaseName", "", "String", "");
Parameter schema = new Parameter("SchemaName","", "String", "");
Parameter table = new Parameter("TableName","", "String", "");
inputParameters.add(resource);
inputParameters.add(database);
inputParameters.add(schema);
inputParameters.add(table);
inputParameters.get(0).setValue(rs);
inputParameters.get(1).setValue(db);
inputParameters.get(2).setValue(scm);
inputParameters.get(3).setValue(tab);
// create the output data structure and start the computation
ComputationOutput outputData = new ComputationOutput();
startComputation(algorithmId, inputParameters, outputData);
// data output values and keys
LinkedHashMap<String, String> mapValues = outputData.getMapValues();
LinkedHashMap<String, String> mapKeys = outputData.getmapKeys();
for (int i = 0; i < mapValues.size(); i++) {
FileModel obj = new FileModel(mapValues.get(String.valueOf(i)));
outputParameters.put(mapKeys.get(String.valueOf(i)), obj);
}
return outputParameters;
}
// parse the result of a submit query operation and return a page of rows
public PagingLoadResult<Row> loadSubmitResult(PagingLoadConfig config,
List<String> listAttributes, String UID) throws Exception {
// get the parsed data, parsing the stored result on the first call
List<Row> data = getSubmitQueryResultParsed(UID);
if (data == null) {
data = new ArrayList<Row>();
List<Result> result = getSubmitQueryResult(UID);
if ((result != null) && (result.size() != 0)) {
data = parseCVSString(result, listAttributes);
updateSubmitQueryResultParsed(UID, data);
}
}
// create a sublist according to the limit and offset values of the
// config
int start = config.getOffset();
int limit = data.size();
if (config.getLimit() > 0) {
limit = Math.min(start + config.getLimit(), limit);
}
int totalNumber = data.size();
List<Row> sublist = new ArrayList<Row>(data.subList(start, limit));
BasePagingLoadResult loadResult = new BasePagingLoadResult<Row>(sublist,
config.getOffset(), totalNumber);
return loadResult;
}
// get the attributes list used to display the result in a table
private List<String> getListAttributes(String value) {
// the value is the header row of the result table
List<String> listAttributes = parseAttributesTableResult(value);
return listAttributes;
}
private List<String> parseAttributesTableResult(String phrase) {
String delimiter = ",";
List<String> elements = new ArrayList<String>();
int idxdelim = -1;
phrase = phrase.trim();
while ((idxdelim = phrase.indexOf(delimiter)) >= 0) {
elements.add(phrase.substring(0, idxdelim));
phrase = phrase.substring(idxdelim + 1).trim();
}
elements.add(phrase);
return elements;
}
// parse the CSV result rows into Row objects
@Override
public List<Row> parseCVSString(List<Result> result, List<String> attrNames)
throws Exception {
List<Row> rows = new ArrayList<Row>();
if (result != null) {
for (int i = 0; i < result.size(); i++) {
List<String> attrValues = parse(result.get(i).getValue());
Row element = new Row(attrNames, attrValues, i);
rows.add(element);
}
}
return rows;
}
private List<String> parse(String row) throws Exception {
String delimiter = ",";
List<String> elements = new ArrayList<String>();
String phrase = row;
int idxdelim = -1;
boolean quot = false;
phrase = phrase.trim();
while ((idxdelim = phrase.indexOf(delimiter)) >= 0) {
quot = phrase.startsWith("\"");
if (quot) {
phrase = phrase.substring(1);
String quoted = "";
if (phrase.startsWith("\""))
phrase = phrase.substring(1);
else {
// find the closing (non-escaped) quote
RE regexp = new RE("[^\\\\]\"");
boolean matching = regexp.match(phrase);
if (matching) {
int i0 = regexp.getParenStart(0);
quoted = phrase.substring(0, i0 + 1).trim();
phrase = phrase.substring(i0 + 2).trim();
}
}
if (phrase.startsWith(delimiter))
phrase = phrase.substring(1);
elements.add(quoted);
} else {
elements.add(phrase.substring(0, idxdelim));
phrase = phrase.substring(idxdelim + 1).trim();
}
}
// strip surrounding quotes from the last element
if (phrase.startsWith("\""))
phrase = phrase.substring(1);
if (phrase.endsWith("\""))
phrase = phrase.substring(0, phrase.length() - 1);
elements.add(phrase);
return elements;
}
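/*
 * Behaviour of the parser above on representative rows (illustrative
 * values): a quoted field may contain the delimiter, and quotes around
 * the last field are stripped.
 *
 *   parse("1,Crax,2004")   -> ["1", "Crax", "2004"]
 *   parse("1,\"a, b\",c")  -> ["1", "a, b", "c"]
 */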
private synchronized List<Result> getSubmitQueryResult(String submitQueryUID) {
ASLSession session = WsUtil.getAslSession(this.getThreadLocalRequest()
.getSession());
HashMap<String, List<Result>> submitQueryResult = (HashMap<String, List<Result>>) session
.getAttribute("submitQueryResult");
if (submitQueryResult.containsKey(submitQueryUID)) {
return submitQueryResult.get(submitQueryUID);
} else {
return null;
}
}
private synchronized List<Row> getSubmitQueryResultParsed(
String submitQueryUID) {
ASLSession session = WsUtil.getAslSession(this.getThreadLocalRequest()
.getSession());
HashMap<String, List<Row>> submitQueryResultParsed = (HashMap<String, List<Row>>) session
.getAttribute("submitQueryResultParsed");
if (submitQueryResultParsed.containsKey(submitQueryUID)) {
// System.out.println("ResultParsed: UID contained");
return submitQueryResultParsed.get(submitQueryUID);
} else {
return null;
}
}
private synchronized void updateSubmitQueryResultParsed(
String submitQueryUID, List<Row> data) {
ASLSession session = WsUtil.getAslSession(this.getThreadLocalRequest()
.getSession());
HashMap<String, List<Row>> submitQueryResultParsed = (HashMap<String, List<Row>>) session
.getAttribute("submitQueryResultParsed");
// add data
if (data.size() != 0) {
submitQueryResultParsed.put(submitQueryUID, data);
session.setAttribute("submitQueryResultParsed",
submitQueryResultParsed);
}
}
private synchronized void removeResultParsed(String submitQueryUID) {
ASLSession session = WsUtil.getAslSession(this.getThreadLocalRequest()
.getSession());
HashMap<String, List<Row>> submitQueryResultParsed = (HashMap<String, List<Row>>) session
.getAttribute("submitQueryResultParsed");
if (submitQueryResultParsed.containsKey(submitQueryUID)) {
// remove data parsed
submitQueryResultParsed.remove(submitQueryUID);
// update
session.setAttribute("submitQueryResultParsed",
submitQueryResultParsed);
}
}
private synchronized void updateSubmitQueryResultMap(String submitQueryUID,
List<Result> result) {
if (submitQueryUID != null) {
ASLSession session = WsUtil.getAslSession(this
.getThreadLocalRequest().getSession());
HashMap<String, List<Result>> submitQueryResult = (HashMap<String, List<Result>>) session
.getAttribute("submitQueryResult");
submitQueryResult.put(submitQueryUID, result);
session.setAttribute("submitQueryResult", submitQueryResult);
}
}
private synchronized void removeResult(String submitQueryUID) {
ASLSession session = WsUtil.getAslSession(this.getThreadLocalRequest()
.getSession());
HashMap<String, List<Result>> submitQueryResult = (HashMap<String, List<Result>>) session
.getAttribute("submitQueryResult");
if (submitQueryResult.containsKey(submitQueryUID)) {
// remove the stored result
submitQueryResult.remove(submitQueryUID);
// update
session.setAttribute("submitQueryResult", submitQueryResult);
}
}
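/*
 * Lifecycle of a submit-query result in the two session maps above (a
 * sketch; uid, rows and attrs are illustrative):
 *
 *   updateSubmitQueryResultMap(uid, rows);      // raw rows, on submit
 *   List<Result> raw = getSubmitQueryResult(uid);
 *   updateSubmitQueryResultParsed(uid, parseCVSString(raw, attrs));
 *   // when the client discards the query:
 *   removeResultParsed(uid);
 *   removeResult(uid);
 *
 * The accessors are synchronized because concurrent RPC calls from the
 * same user session may touch these maps at the same time.
 */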
// update job with the related status
private synchronized void updateJobStatus(String jobID, String status) {
if (jobID != null) {
// add the job status
ASLSession session = WsUtil.getAslSession(this
.getThreadLocalRequest().getSession());
HashMap<String, String> JobStatusMap = (HashMap<String, String>) session
.getAttribute("JobStatusList");
JobStatusMap.put(jobID, status);
session.setAttribute("JobStatusList", JobStatusMap);
}
}
// remove job with the related status
private synchronized void removeJobStatus(String jobID) {
ASLSession session = WsUtil.getAslSession(this.getThreadLocalRequest()
.getSession());
HashMap<String, String> JobStatusMap = (HashMap<String, String>) session
.getAttribute("JobStatusList");
String status = JobStatusMap.get(jobID);
if (status != null) {
JobStatusMap.remove(jobID);
session.setAttribute("JobStatusList", JobStatusMap);
}
}
// get job status
private synchronized String getJobStatus(String jobID) {
ASLSession session = WsUtil.getAslSession(this.getThreadLocalRequest()
.getSession());
HashMap<String, String> JobStatusMap = (HashMap<String, String>) session
.getAttribute("JobStatusList");
String status = JobStatusMap.get(jobID);
return status;
}
// update job with the computation id
private synchronized void updateJob(String jobID, String computationId) {
if (jobID != null) {
// add the computation in the map
ASLSession session = WsUtil.getAslSession(this
.getThreadLocalRequest().getSession());
HashMap<String, String> computationIDMap = (HashMap<String, String>) session
.getAttribute("ComputationIDList");
computationIDMap.put(jobID, computationId);
session.setAttribute("ComputationIDList", computationIDMap);
}
}
// remove a job and return the related computation id
private synchronized String removeJob(String jobID) {
if (jobID != null) {
// remove the computation from the map
ASLSession session = WsUtil.getAslSession(this
.getThreadLocalRequest().getSession());
HashMap<String, String> computationIDMap = (HashMap<String, String>) session
.getAttribute("ComputationIDList");
String computationId = computationIDMap.get(jobID);
if (computationId != null) {
computationIDMap.remove(jobID);
session.setAttribute("ComputationIDList", computationIDMap);
return computationId;
}
}
return null;
}
// check whether a job with the given uid is registered
private synchronized boolean checkJob(String jobID) {
boolean isContained = false;
if (jobID != null) {
ASLSession session = WsUtil.getAslSession(this
.getThreadLocalRequest().getSession());
HashMap<String, String> computationIDMap = (HashMap<String, String>) session
.getAttribute("ComputationIDList");
isContained = computationIDMap.containsKey(jobID);
}
return isContained;
}
private String startComputation(String algorithmName,
List<Parameter> parameters, ComputationOutput outputData)
throws Exception {
return startComputation(algorithmName, parameters, outputData, null);
}
private String startComputation(String algorithmName,
List<Parameter> parameters, ComputationOutput outputData,
String jobID) throws Exception {
SMComputationConfig config = new SMComputationConfig();
SMInputEntry[] list = new SMInputEntry[parameters.size()];
int i = 0;
for (Parameter p : parameters)
list[i++] = new SMInputEntry(p.getName(), p.getValue());
config.parameters(new SMEntries(list));
config.algorithm(algorithmName);
// create a computation request
SMComputationRequest request = new SMComputationRequest();
request.user(getUsername());
request.config(config);
try {
StatisticalManagerFactory factory = getFactory();
String computationId = factory.executeComputation(request);
float percentage = 0;
String scope = getScope();
String username = getUsername();
updateJobStatus(jobID, "computation started");
updateJob(jobID, computationId);
logger.info("dbmanager-> startComputation: the computation has started!");
// poll the computation status every three seconds until it
// reaches 100% (COMPLETED or FAILED)
while (percentage < 100) {
percentage = checkComputationStatus(scope, computationId,
username, outputData);
Thread.sleep(3000);
}
logger.info("dbmanager-> startComputation: the computation has finished!");
updateJobStatus(jobID, "computation finished");
return computationId;
} catch (Exception e) {
logger.error("dbmanager-> startComputation: the job submission has failed!");
throw e;
}
}
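/*
 * startComputation blocks by polling every three seconds until
 * checkComputationStatus returns 100 (both COMPLETED and FAILED map to
 * 100, so the loop terminates once the service settles). The same
 * pattern with a timeout guard, which this class does not currently
 * apply (a sketch; the ten-minute deadline is an assumption):
 *
 *   long deadline = System.currentTimeMillis() + 10 * 60 * 1000L;
 *   float percentage = 0;
 *   while (percentage < 100) {
 *       if (System.currentTimeMillis() > deadline) {
 *           throw new Exception("computation timed out");
 *       }
 *       percentage = checkComputationStatus(scope, computationId,
 *               username, outputData);
 *       Thread.sleep(3000);
 *   }
 */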
private float checkComputationStatus(String scope, String computationId,
String user, ComputationOutput outputData) throws Exception {
ScopeProvider.instance.set(scope);
StatisticalManagerFactory factory = StatisticalManagerDSL
.createStateful().build();
SMComputation computation = factory.getComputation(computationId);
SMOperationStatus status = SMOperationStatus.values()[computation
.operationStatus()];
float percentage = 0;
if (status == SMOperationStatus.RUNNING) {
// still running: keep the percentage below 100 so the caller
// continues polling
SMOperationInfo infos = factory.getComputationInfo(computationId,
user);
} else if ((status == SMOperationStatus.COMPLETED)
|| (status == SMOperationStatus.FAILED)) {
// retrieve the output resource of the computation
SMAbstractResource abstractResource = computation
.abstractResource();
SMResource smResource = abstractResource.resource();
int resourceTypeIndex = smResource.resourceType();
SMResourceType smResType = SMResourceType.values()[resourceTypeIndex];
displayOutput(smResource, smResType, outputData);
percentage = 100;
}
return percentage;
}
private void displayOutput(SMResource smResource, SMResourceType smResType,
ComputationOutput outputData) throws Exception {
if (smResType.equals(SMResourceType.OBJECT)) {
SMObject objRes = (SMObject) smResource;
if (objRes.name().contentEquals(PrimitiveTypes.MAP.toString())) {
logger.info("dbmanager-> Output is a map");
getMap(objRes, outputData);
} else if (objRes.name().contentEquals(
PrimitiveTypes.IMAGES.toString())) {
// the output contains images: nothing to extract here
}
}
// exception management
if (smResType.equals(SMResourceType.ERROR)) {
Exception e = new Exception(smResource.description());
logger.error("dbmanager-> " + e);
throw e;
}
}
// get the output result as a map
private void getMap(SMObject objRes, ComputationOutput outputData)
throws Exception {
// output data values
LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
// output data keys
LinkedHashMap<String, String> mapKeys = new LinkedHashMap<String, String>();
InputStream is = getStorageClientInputStream(objRes.url());
// object serializer: map the service XML element names onto the stub
// classes
XStream xstream = new XStream();
xstream.alias(
"org.gcube_system.namespaces.data.analysis.statisticalmanager.types.SMObject",
SMObject.class);
xstream.alias(
"org.gcube_system.namespaces.data.analysis.statisticalmanager.types.SMFile",
SMFile.class);
xstream.alias(
"org.gcube_system.namespaces.data.analysis.statisticalmanager.types.SMResource",
SMResource.class);
xstream.alias(
"org.gcube_system.namespaces.data.analysis.statisticalmanager.types.SMTable",
SMTable.class);
@SuppressWarnings("unchecked")
Map<String, SMResource> smMap = (Map<String, SMResource>) (xstream
.fromXML(is));
is.close();
// build the result map
int i = 0;
for (String key : smMap.keySet()) {
// add the key value
mapKeys.put(String.valueOf(i), key);
SMResource smres = smMap.get(key);
int resourceTypeIndex = smres.resourceType();
SMResourceType smsubResType = SMResourceType.values()[resourceTypeIndex];
if (smsubResType == SMResourceType.OBJECT) {
SMObject obje = (SMObject) smres;
String outstring = obje.url();
mapValues.put(String.valueOf(i), outstring);
i++;
}
}
outputData.setMapValues(mapValues);
outputData.setmapKeys(mapKeys);
}
private InputStream getStorageClientInputStream(String url)
throws Exception {
// open the URL with a handler for the storage protocol, since the JVM
// has no registered handler for smp:// URLs
URL u = new URL(null, url, new URLStreamHandler() {
@Override
protected URLConnection openConnection(URL u) throws IOException {
return new SMPURLConnection(u);
}
});
return u.openConnection().getInputStream();
}
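/*
 * Passing a URLStreamHandler directly to the URL constructor routes this
 * one URL through SMPURLConnection without registering a global handler
 * via URL.setURLStreamHandlerFactory(...), which can be set only once
 * per JVM. Usage sketch (the URL is illustrative):
 *
 *   InputStream is = getStorageClientInputStream("smp://host/file.xml");
 *   try {
 *       // consume the stream, e.g. hand it to XStream as above
 *   } finally {
 *       is.close();
 *   }
 */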
private StatisticalManagerFactory getFactory() {
HttpSession httpSession = this.getThreadLocalRequest().getSession();
return SessionUtil.getFactory(httpSession);
}
private String getUsername() {
HttpSession httpSession = this.getThreadLocalRequest().getSession();
return SessionUtil.getUsername(httpSession);
}
private String getScope() {
HttpSession httpSession = this.getThreadLocalRequest().getSession();
return SessionUtil.getScope(httpSession);
}
// remove the computation
public Boolean removeComputation(String uidSubmitQuery) throws Exception {
String computationId = null;
if ((uidSubmitQuery != null) && (!(uidSubmitQuery.equals("")))) {
// get the job status
String status = getJobStatus(uidSubmitQuery);
if (status == null) {
// the computation has not started yet: spin until
// startComputation registers the computation id
while (computationId == null) {
computationId = removeJob(uidSubmitQuery);
}
} else if (status.equals("computation started")) {
// the computation has started
computationId = removeJob(uidSubmitQuery);
}
}
if (computationId != null) {
StatisticalManagerFactory factory = getFactory();
try {
factory.removeComputation(computationId);
// remove the submit query result
refreshDataOnServer(uidSubmitQuery);
} catch (Exception e) {
logger.info("dbmanager-> Could not remove the computation ID "
+ computationId + " corresponding to jobID "
+ uidSubmitQuery);
logger.error("dbmanager-> " + e);
}
}
// remove the job status
removeJobStatus(uidSubmitQuery);
return Boolean.TRUE;
}
public void refreshDataOnServer(String submitQueryUID) {
if ((submitQueryUID != null) && (!submitQueryUID.equals(""))) {
removeResultParsed(submitQueryUID);
removeResult(submitQueryUID);
}
}
private synchronized void insertDataIntoCache(net.sf.ehcache.Element data) {
employeeCache.put(data);
}
private synchronized net.sf.ehcache.Element getDataFromCache(String key) {
return employeeCache.get(key);
}
// clear the cache entry of the selected element and reload its data
public GeneralOutputFromServlet refreshDataTree(String ElementType,
LinkedHashMap<String, String> inputData, FileModel element)
throws Exception {
String resourceName = "";
GeneralOutputFromServlet result = null;
if (inputData != null && inputData.size() != 0) {
String key = "";
if (!ElementType.equals("")) {
// build the cache key of the element to refresh
switch (ElementType) {
case "listResources":
key = inputData.get("listResources");
break;
case "resource":
key = inputData.get("ResourceName");
break;
case "database":
if (element.getDatabaseType().equals(ConstantsPortlet.POSTGRES)) {
// refresh the schema list
key = inputData.get("ResourceName")
+ inputData.get("DatabaseName");
}
if (element.getDatabaseType().equals(ConstantsPortlet.MYSQL)) {
// refresh the table list
key = inputData.get("ResourceName")
+ inputData.get("DatabaseName")
+ inputData.get("SchemaName");
}
break;
case "schema":
key = inputData.get("ResourceName")
+ inputData.get("DatabaseName")
+ inputData.get("SchemaName");
break;
}
}
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
if (dataFromCache != null) {
// evict the entry, then reload the related data
refreshCache(key);
if (!ElementType.equals("")) {
switch (ElementType) {
case "listResources":
List<FileModel> output1 = getResource();
result = new GeneralOutputFromServlet(output1);
break;
case "resource":
resourceName = inputData.get("ResourceName");
LinkedHashMap<String, FileModel> output2 = getDBInfo(resourceName);
result = new GeneralOutputFromServlet(output2);
break;
case "database":
if (element.getDatabaseType() != null
&& element.getDatabaseType().equals(ConstantsPortlet.POSTGRES)) {
// refresh the schema list
List<FileModel> output3 = getDBSchema(inputData);
result = new GeneralOutputFromServlet(output3);
}
if (element.getDatabaseType() != null
&& element.getDatabaseType().equals(ConstantsPortlet.MYSQL)) {
// refresh the table list
getTables(inputData);
}
break;
case "schema":
getTables(inputData);
break;
}
}
}
}
return result;
}
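/*
 * Cache keys are plain concatenations of the tree coordinates, so a
 * refresh evicts exactly the entry the corresponding load would read:
 * "listResources" for the resource list, ResourceName for the database
 * list, ResourceName + DatabaseName for a Postgres schema list, and
 * ResourceName + DatabaseName + SchemaName for a table list. For
 * example, refreshing a schema node amounts to:
 *
 *   String key = inputData.get("ResourceName")
 *           + inputData.get("DatabaseName")
 *           + inputData.get("SchemaName");
 *   refreshCache(key);      // evict
 *   getTables(inputData);   // repopulate the same key
 *
 * Note that bare concatenation can collide ("a" + "bc" equals "ab" +
 * "c"); a separator between the parts would make keys unambiguous.
 */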
private synchronized void refreshCache(String key) throws Exception {
try {
employeeCache.remove(key);
} catch (Exception e) {
throw new Exception("Failed to clear the cache. " + e);
}
}
}