2014-07-02 12:57:14 +02:00
|
|
|
package org.gcube.portlets.user.databasesmanager.server;
|
|
|
|
|
|
|
|
import java.util.ArrayList;
|
2014-08-01 11:27:40 +02:00
|
|
|
import java.util.HashMap;
|
2014-07-02 12:57:14 +02:00
|
|
|
import java.util.LinkedHashMap;
|
|
|
|
import java.util.List;
|
|
|
|
import java.util.Map;
|
2014-10-07 11:42:14 +02:00
|
|
|
import java.util.Set;
|
2014-10-10 12:53:26 +02:00
|
|
|
import java.util.concurrent.ConcurrentLinkedQueue;
|
2014-09-19 17:36:36 +02:00
|
|
|
import java.io.BufferedWriter;
|
|
|
|
import java.io.File;
|
2014-07-02 12:57:14 +02:00
|
|
|
import java.io.IOException;
|
|
|
|
import java.io.InputStream;
|
|
|
|
import java.net.URL;
|
|
|
|
import java.net.URLConnection;
|
|
|
|
import java.net.URLStreamHandler;
|
2014-09-19 17:36:36 +02:00
|
|
|
import java.io.OutputStreamWriter;
|
|
|
|
import java.io.FileOutputStream;
|
2014-07-02 12:57:14 +02:00
|
|
|
|
2014-09-10 14:31:20 +02:00
|
|
|
import javax.servlet.ServletException;
|
2014-09-19 17:36:36 +02:00
|
|
|
import javax.servlet.http.HttpServletRequest;
|
2014-07-02 12:57:14 +02:00
|
|
|
import javax.servlet.http.HttpSession;
|
2014-09-26 16:56:42 +02:00
|
|
|
import net.sf.ehcache.Cache;
|
2014-09-10 14:31:20 +02:00
|
|
|
import net.sf.ehcache.CacheManager;
|
2014-10-22 19:16:37 +02:00
|
|
|
import net.sf.ehcache.Status;
|
2014-09-26 16:56:42 +02:00
|
|
|
import net.sf.ehcache.config.CacheConfiguration;
|
|
|
|
import net.sf.ehcache.config.MemoryUnit;
|
2014-10-13 16:34:03 +02:00
|
|
|
import net.sf.ehcache.config.SizeOfPolicyConfiguration;
|
|
|
|
import net.sf.ehcache.config.SizeOfPolicyConfiguration.MaxDepthExceededBehavior;
|
2014-09-26 16:56:42 +02:00
|
|
|
import net.sf.ehcache.store.MemoryStoreEvictionPolicy;
|
2014-07-02 12:57:14 +02:00
|
|
|
import org.apache.regexp.RE;
|
2014-08-01 11:27:40 +02:00
|
|
|
import org.gcube.application.framework.core.session.ASLSession;
|
2014-07-02 12:57:14 +02:00
|
|
|
import org.gcube.common.scope.api.ScopeProvider;
|
|
|
|
import org.gcube.contentmanager.storageclient.model.protocol.smp.SMPURLConnection;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.proxies.StatisticalManagerDSL;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.proxies.StatisticalManagerFactory;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMAlgorithm;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMComputationConfig;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMComputationRequest;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMGroupedAlgorithms;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMListGroupedAlgorithms;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMOperationStatus;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMParameter;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMParameters;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMResourceType;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.SMTypeParameter;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMAbstractResource;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMComputation;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMEntries;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMFile;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMInputEntry;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMObject;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMOperationInfo;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMResource;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.SMTable;
|
|
|
|
import org.gcube.data.analysis.statisticalmanager.stubs.types.schema.StatisticalServiceType;
|
|
|
|
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
|
|
|
import org.gcube.portlets.user.databasesmanager.client.GWTdbManagerService;
|
2014-07-25 20:03:14 +02:00
|
|
|
import org.gcube.portlets.user.databasesmanager.client.datamodel.ComputationOutput;
|
2014-07-02 12:57:14 +02:00
|
|
|
import org.gcube.portlets.user.databasesmanager.client.datamodel.FileModel;
|
2014-09-12 14:05:22 +02:00
|
|
|
import org.gcube.portlets.user.databasesmanager.client.datamodel.GeneralOutputFromServlet;
|
2014-07-02 12:57:14 +02:00
|
|
|
import org.gcube.portlets.user.databasesmanager.client.datamodel.Parameter;
|
|
|
|
import org.gcube.portlets.user.databasesmanager.client.datamodel.Result;
|
2014-09-19 17:36:36 +02:00
|
|
|
import org.gcube.portlets.user.databasesmanager.client.datamodel.SamplingResultWithFileFromServlet;
|
2014-07-02 12:57:14 +02:00
|
|
|
import org.gcube.portlets.user.databasesmanager.client.datamodel.Row;
|
2014-09-19 17:36:36 +02:00
|
|
|
import org.gcube.portlets.user.databasesmanager.client.datamodel.SubmitQueryResultWithFileFromServlet;
|
2014-10-10 12:53:26 +02:00
|
|
|
import org.gcube.portlets.user.databasesmanager.server.util.DataExchangedThroughQueue;
|
2014-07-02 12:57:14 +02:00
|
|
|
import org.gcube.portlets.user.databasesmanager.server.util.SessionUtil;
|
2014-10-13 16:34:03 +02:00
|
|
|
import org.gcube.portlets.user.databasesmanager.shared.ConstantsPortlet;
|
2014-09-29 15:13:02 +02:00
|
|
|
import org.gcube.portlets.user.databasesmanager.shared.SessionExpiredException;
|
2014-09-29 18:07:58 +02:00
|
|
|
import org.gcube.portlets.user.databasesmanager.shared.StatisticalManagerException;
|
2014-07-11 12:35:26 +02:00
|
|
|
import com.extjs.gxt.ui.client.data.BasePagingLoadResult;
|
|
|
|
import com.extjs.gxt.ui.client.data.PagingLoadConfig;
|
|
|
|
import com.extjs.gxt.ui.client.data.PagingLoadResult;
|
2014-07-02 12:57:14 +02:00
|
|
|
import com.google.gwt.user.server.rpc.RemoteServiceServlet;
|
|
|
|
import com.thoughtworks.xstream.XStream;
|
2014-07-22 18:32:39 +02:00
|
|
|
import org.apache.log4j.Logger;
|
2014-09-17 16:43:14 +02:00
|
|
|
|
2014-07-02 12:57:14 +02:00
|
|
|
public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements
		GWTdbManagerService {

	// logger
	private static Logger logger = Logger
			.getLogger(GWTdbManagerServiceImpl.class);

	// private CacheManager cacheManager;
	// ehcache instance ("DBCache") shared by all requests; built in init()
	private static Cache DBCache;
	// manager owning DBCache; created in init(), shut down in destroy()
	private static CacheManager cacheManager;
	// NOTE(review): presumably filled by the data-loader thread with the
	// available SM algorithm names — confirm against the rest of the file
	public static List<String> listAlgorithms;

	// variables to check thread execution
	// per-scope flag: has the DataLoader thread already been started?
	private static HashMap<String, Boolean> threadsStarted = new HashMap<String, Boolean>();
	// set to true by destroy() to ask the DataLoader thread to terminate
	private static boolean endThread = false;
	private static boolean threadExecutionFinished = false;
	// work items consumed by the DataLoader thread
	private static ConcurrentLinkedQueue<DataExchangedThroughQueue> queue = new ConcurrentLinkedQueue<DataExchangedThroughQueue>();
	// runnable executed by the DataLoader thread(s); created in init()
	private static ThreadDataLoader dataLoader;

	// counters logged by initVariables(): Statistical Manager computations
	// started vs. submitQuery requests served straight from the cache
	private static int smComputationNumber;
	private static int cacheHitsNumber;

	// same counters restricted to query/sampling requests
	private static int smComputationQuerySamplingNumber;
	private static int cacheQuerySamplingHitsNumber;
2014-09-10 14:31:20 +02:00
|
|
|
	/**
	 * No-op constructor; all real initialisation happens in {@code init()}.
	 */
	public GWTdbManagerServiceImpl() throws Exception {
	}
|
|
|
|
|
|
|
|
	/**
	 * Servlet start-up: (re)creates the shared ehcache instance, prepares the
	 * folder that will hold computation-result files, creates the DataLoader
	 * runnable and resets the cache/computation counters.
	 *
	 * @throws ServletException if the cache manager or the cache cannot be
	 *             created
	 */
	@Override
	public void init() throws ServletException {
		super.init();

		try {
			// cache folder
			String cachePath = System.getenv("CATALINA_TMPDIR") + "/DBManager";
			logger.info("dbmanager-> Creating cache in folder: " + cachePath);
			// CacheManager cacheManager = CacheManager.create();
			cacheManager = CacheManager.create();

			if (cacheManager == null) {
				logger.error("dbmanager-> Error while starting the servlet. Failed to get the cacheManager. cacheManager null");
				throw new ServletException(
						"Error while starting the servlet. Failed to get the cacheManager. cacheManager null");
			}

			// NOTE(review): redundant re-check — cacheManager cannot be null
			// here because the guard above already threw
			if (cacheManager != null) {
				// logger.info("dbmanager-> cacheManager not null");
				// a stale cache from a previous deployment is dropped so the
				// new one starts empty
				if (cacheManager.cacheExists("DBCache")) {
					// logger.info("dbmanager-> cache exists");
					cacheManager.removeCache("DBCache");
					logger.info("dbmanager-> cache removed");

				} else {
					// no live cache: remove any leftover spill file on disk
					File f = new File(cachePath + "/" + "DBCache.data");
					if (f.exists()) {
						logger.info("dbmanager-> File DBCache.data removed: "
								+ f.delete());
					}
				}

				// builds the DBCache static field
				createCache(cachePath);

				if (DBCache == null) {
					logger.error("dbmanager-> Error while starting the servlet. Failed to get the cache. cache null");
					throw new ServletException(
							"Error while starting the servlet. Failed to get the cache. cache null");
				} else {
					cacheManager.addCache(DBCache);
					logger.info("dbmanager-> cache added to the cacheManager");
				}
			}

			// create folder that will contain file samplings and submitquery
			// result
			// in the /webapps/folder_portlet
			String path = this.getServletContext().getRealPath("") + "/"
					+ "computationResult";

			File computationResult = new File(path);
			if (!computationResult.exists()) {
				computationResult.mkdir();
				logger.info("dbmanager-> Folder computationResult created in : "
						+ this.getServletContext().getRealPath(""));
			}

			// create the thread DataLoader
			dataLoader = new ThreadDataLoader();
			logger.info("dbmanager-> Thread Dataloader created");

			// reset the cache/computation statistics counters
			smComputationNumber = 0;
			cacheHitsNumber = 0;

			smComputationQuerySamplingNumber = 0;
			cacheQuerySamplingHitsNumber = 0;

		} catch (Exception e) {
			logger.error("dbmanager-> ", e);

			throw new ServletException(
					"Error while starting the servlet. Exception: " + e);
		}

	}
|
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
public void createCache(String cachePath) throws Exception {
|
2014-09-26 16:56:42 +02:00
|
|
|
|
|
|
|
try {
|
|
|
|
CacheConfiguration config = new CacheConfiguration();
|
|
|
|
config.setName("DBCache");
|
|
|
|
config.memoryStoreEvictionPolicy(MemoryStoreEvictionPolicy.LRU);
|
|
|
|
config.eternal(false);
|
|
|
|
config.timeToLiveSeconds(172800);
|
|
|
|
config.timeToIdleSeconds(0);
|
|
|
|
// config.maxEntriesLocalHeap(10000);
|
|
|
|
config.diskExpiryThreadIntervalSeconds(120);
|
|
|
|
config.maxBytesLocalDisk(2, MemoryUnit.GIGABYTES);
|
|
|
|
config.maxBytesLocalHeap(200, MemoryUnit.MEGABYTES);
|
|
|
|
config.diskSpoolBufferSizeMB(30);
|
|
|
|
config.overflowToDisk(true);
|
|
|
|
config.diskPersistent(false);
|
|
|
|
config.diskStorePath(cachePath);
|
2014-10-13 16:34:03 +02:00
|
|
|
// SizeOfPolicyConfiguration size = new SizeOfPolicyConfiguration();
|
|
|
|
// size.setMaxDepth(1000);
|
|
|
|
// size.maxDepthExceededBehavior(MaxDepthExceededBehavior.ABORT);
|
|
|
|
// config.sizeOfPolicy(size);
|
2014-10-02 12:00:58 +02:00
|
|
|
DBCache = new Cache(config);
|
2014-09-26 16:56:42 +02:00
|
|
|
} catch (Exception e) {
|
2014-09-30 14:32:27 +02:00
|
|
|
// logger.error("dbmanager-> Error while starting the servlet. Failed to create the cache",
|
|
|
|
// e);
|
|
|
|
throw new Exception(
|
|
|
|
"Error while starting the servlet. Failed to create the cache. Exception: "
|
|
|
|
+ e);
|
2014-09-19 17:36:36 +02:00
|
|
|
}
|
2014-09-17 10:27:30 +02:00
|
|
|
}
|
2014-09-12 14:05:22 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
	/**
	 * Servlet shutdown: signals the DataLoader thread to stop, removes the
	 * DBCache from the cache manager and shuts the manager down. Failures are
	 * logged but never propagated (the throw below is caught by this method's
	 * own catch block).
	 */
	@Override
	public void destroy() {
		super.destroy();

		// set endThread variable so the DataLoader loop can terminate
		setEndThreadvariable(true);

		try {
			CacheManager cacheManager = CacheManager.getInstance();

			if (cacheManager != null) {
				if (cacheManager.cacheExists("DBCache")) {
					// System.out.println("*** cache exist");
					cacheManager.removeCache("DBCache");
					// cacheManager.removalAll();
					// System.out.println("*** cache removed");
					logger.info("dbmanager-> DBCache removed");
				}

				// releases ehcache's background threads and disk resources
				cacheManager.shutdown();

			} else {
				logger.error("dbmanager-> Error while destroying the servlet. Failed to get the cacheManager. cacheManager null");
				throw new Exception(
						"Error while destroying the servlet. Failed to get the cacheManager. cacheManager null");
			}

		} catch (Exception e) {
			logger.error(
					"dbmanager-> Error while destroying the servlet. Exception:",
					e);
			// e.printStackTrace();
		}
	}
|
2014-08-01 18:11:36 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
private void initVariables(ASLSession session) {
|
2014-08-01 11:27:40 +02:00
|
|
|
// the result generated in the LoadTables method
|
2014-09-17 10:27:30 +02:00
|
|
|
// List<Result> result = new ArrayList<Result>();
|
|
|
|
// session.setAttribute("TablesResult", result);
|
2014-08-01 18:11:36 +02:00
|
|
|
|
2014-10-01 15:38:16 +02:00
|
|
|
// // map that contains the submit query result and the related uid
|
|
|
|
// HashMap<String, List<Result>> submitQueryResult = new HashMap<String,
|
|
|
|
// List<Result>>();
|
|
|
|
// session.setAttribute("submitQueryResult", submitQueryResult);
|
2014-08-29 15:39:04 +02:00
|
|
|
|
2014-10-01 15:38:16 +02:00
|
|
|
// // map that contains the submit query result parsed and the related
|
|
|
|
// // uid
|
|
|
|
// HashMap<String, List<Row>> submitQueryResultParsed = new
|
|
|
|
// HashMap<String, List<Row>>();
|
|
|
|
// session.setAttribute("submitQueryResultParsed",
|
|
|
|
// submitQueryResultParsed);
|
2014-08-29 15:39:04 +02:00
|
|
|
|
2014-08-01 11:27:40 +02:00
|
|
|
// information about a database
|
2014-09-17 10:27:30 +02:00
|
|
|
// String currentDB = "";
|
|
|
|
// session.setAttribute("currentDB", currentDB);
|
|
|
|
// String previousDB = "";
|
|
|
|
// session.setAttribute("previousDB", previousDB);
|
2014-08-01 18:11:36 +02:00
|
|
|
|
2014-08-01 11:27:40 +02:00
|
|
|
// information about a schema
|
2014-09-17 10:27:30 +02:00
|
|
|
// String currentSchema = "";
|
|
|
|
// session.setAttribute("currentSchema", currentSchema);
|
|
|
|
// String previousSchema = "";
|
|
|
|
// session.setAttribute("previousSchema", previousSchema);
|
2014-08-01 18:11:36 +02:00
|
|
|
|
|
|
|
// Hashmap that contains computationId with a uid key
|
|
|
|
HashMap<String, String> computationIDMap = new HashMap<String, String>();
|
|
|
|
session.setAttribute("ComputationIDList", computationIDMap);
|
|
|
|
|
2014-08-25 17:55:53 +02:00
|
|
|
// Hashmap that contains the job status with a uid key
|
|
|
|
HashMap<String, String> JobStatusMap = new HashMap<String, String>();
|
|
|
|
session.setAttribute("JobStatusList", JobStatusMap);
|
2014-10-01 15:38:16 +02:00
|
|
|
|
|
|
|
// map that keeps track if a uid submitQuery request uses cached data
|
|
|
|
// and it does not start a computation
|
|
|
|
HashMap<String, Boolean> listSubmitQueryUIDCachedData = new HashMap<String, Boolean>();
|
|
|
|
session.setAttribute("listSubmitQueryUIDCachedData",
|
|
|
|
listSubmitQueryUIDCachedData);
|
|
|
|
|
2014-10-02 12:27:08 +02:00
|
|
|
// map that contain key to retrieve data from cache for each uid
|
2014-10-01 15:38:16 +02:00
|
|
|
// submitQuery request
|
|
|
|
// map that stores information to send result of the rpc loadsubmitQuery
|
|
|
|
// to the client
|
|
|
|
HashMap<String, String> listKeySubmitQueryResult = new HashMap<String, String>();
|
|
|
|
session.setAttribute("listKeySubmitQueryResult",
|
|
|
|
listKeySubmitQueryResult);
|
2014-10-24 14:16:09 +02:00
|
|
|
|
2014-10-27 10:44:26 +01:00
|
|
|
//map that contains for each UID the submit query result in order
|
|
|
|
//to face the cache refreshing if a pagination is used
|
|
|
|
HashMap<String, List<Result>> listSubmitQueryResult = new HashMap<String, List<Result>>();
|
|
|
|
session.setAttribute("listSubmitQueryResult",
|
|
|
|
listSubmitQueryResult);
|
|
|
|
|
2014-10-24 14:16:09 +02:00
|
|
|
//print data
|
|
|
|
logger.info("dbmanager-> CheckInformation: cache hits number " + cacheHitsNumber);
|
|
|
|
logger.info("dbmanager-> CheckInformation: SM computation number " + smComputationNumber);
|
|
|
|
logger.info("dbmanager-> CheckInformation: cache Query Sampling hits number " + cacheQuerySamplingHitsNumber);
|
|
|
|
logger.info("dbmanager-> CheckInformation: SM Query Sampling computation number " + smComputationQuerySamplingNumber);
|
2014-07-02 12:57:14 +02:00
|
|
|
}
|
|
|
|
|
2014-07-30 14:05:11 +02:00
|
|
|
	// to get resources from IS
	/**
	 * Returns the tree of available resources for the caller's scope. On the
	 * first call for a scope it also kicks off the DataLoader thread that
	 * pre-loads the data tree, and resets the per-session maps.
	 *
	 * @return the resource models for the current scope
	 * @throws SessionExpiredException if the HTTP session has expired
	 * @throws Exception on any recovery failure (logged and rethrown)
	 */
	@Override
	public List<FileModel> getResource() throws Exception {

		// session check
		if (isSessionExpired())
			throw new SessionExpiredException();

		try {
			ASLSession session = SessionUtil.getAslSession(this
					.getThreadLocalRequest().getSession());
			// get scope
			String scope = session.getScope();

			// check if the thread is already started for this scope
			Boolean value = getThreadStarted(scope);
			if ((value == null) || (value.booleanValue() == false)) {
				// enqueue the work item BEFORE starting the thread so the
				// loader finds it immediately
				DataExchangedThroughQueue dataqueue = new DataExchangedThroughQueue(
						scope);
				queue.offer(dataqueue);
				Thread t = new Thread(dataLoader);
				t.start();

				logger.info("dbmanager-> Thread DataLoader started in order to load data tree");
			}

			// initialize variables with application startup
			initVariables(session);
			return recoverResources(scope);

		} catch (Exception e) {
			logger.error("dbmanager-> ", e);
			throw e;
		}

	}
|
|
|
|
|
2014-07-30 14:05:11 +02:00
|
|
|
// to get information about databases of a resource
|
2014-07-02 12:57:14 +02:00
|
|
|
@Override
|
|
|
|
public LinkedHashMap<String, FileModel> getDBInfo(String resourceName)
|
|
|
|
throws Exception {
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// session check
|
|
|
|
if (isSessionExpired())
|
|
|
|
throw new SessionExpiredException();
|
2014-07-25 20:03:14 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
try {
|
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
// get scope
|
|
|
|
String scope = session.getScope();
|
|
|
|
return recoverDatabases(scope, resourceName);
|
2014-09-17 10:27:30 +02:00
|
|
|
} catch (Exception e) {
|
2014-09-24 18:59:57 +02:00
|
|
|
logger.error("dbmanager-> ", e);
|
2014-09-17 10:27:30 +02:00
|
|
|
throw e;
|
|
|
|
}
|
2014-10-07 11:42:14 +02:00
|
|
|
|
2014-07-02 12:57:14 +02:00
|
|
|
}
|
|
|
|
|
2014-07-30 14:05:11 +02:00
|
|
|
// to get schema for a database
|
2014-07-02 12:57:14 +02:00
|
|
|
@Override
|
|
|
|
public List<FileModel> getDBSchema(LinkedHashMap<String, String> dataInput)
|
|
|
|
throws Exception {
|
2014-08-01 18:11:36 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// session check
|
|
|
|
if (isSessionExpired())
|
|
|
|
throw new SessionExpiredException();
|
2014-07-02 12:57:14 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
try {
|
2014-09-12 14:05:22 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
// get scope
|
|
|
|
String scope = session.getScope();
|
|
|
|
return recoverSchema(scope, dataInput);
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("dbmanager-> ", e);
|
|
|
|
throw e;
|
|
|
|
}
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
}
|
2014-09-29 18:07:58 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
// to get tables
|
|
|
|
private List<Result> getTables(LinkedHashMap<String, String> dataInput,
|
|
|
|
String elementType) throws Exception {
|
2014-09-29 18:07:58 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
try {
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
// get scope
|
|
|
|
String scope = session.getScope();
|
|
|
|
return recoverTables(scope, dataInput, elementType);
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("dbmanager-> ", e);
|
|
|
|
throw e;
|
|
|
|
}
|
2014-09-10 14:31:20 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
}
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
// to load tables
|
|
|
|
@Override
|
|
|
|
public PagingLoadResult<Result> LoadTables(PagingLoadConfig config,
|
|
|
|
LinkedHashMap<String, String> dataInput, String elementType,
|
|
|
|
boolean SearchTable, String keyword) throws Exception {
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
// tables' list stored for a user session
|
|
|
|
// ASLSession session =
|
|
|
|
// WsUtil.getAslSession(this.getThreadLocalRequest()
|
|
|
|
// .getSession());
|
|
|
|
// List<Result> result = (List<Result>) session
|
|
|
|
// .getAttribute("TablesResult");
|
|
|
|
// // check on a database
|
|
|
|
// String currentDB = "";
|
|
|
|
// currentDB = dataInput.get("DatabaseName");
|
|
|
|
// String previousDB = (String) session.getAttribute("previousDB");
|
|
|
|
//
|
|
|
|
// if (!currentDB.equals(previousDB)) {
|
|
|
|
// // result = null;
|
|
|
|
// result = new ArrayList<Result>();
|
|
|
|
// System.gc();
|
|
|
|
// }
|
|
|
|
//
|
|
|
|
// previousDB = currentDB;
|
|
|
|
// session.setAttribute("previousDB", previousDB);
|
|
|
|
//
|
|
|
|
// // check on a schema
|
|
|
|
// String currentSchema = "";
|
|
|
|
// currentSchema = dataInput.get("SchemaName");
|
|
|
|
// String previousSchema = (String)
|
|
|
|
// session.getAttribute("previousSchema");
|
|
|
|
// if (!currentSchema.equals(previousSchema)) {
|
|
|
|
// // result = null;
|
|
|
|
// result = new ArrayList<Result>();
|
|
|
|
// System.gc();
|
|
|
|
// }
|
|
|
|
//
|
|
|
|
// previousSchema = currentSchema;
|
|
|
|
// session.setAttribute("previousSchema", previousSchema);
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
// session check
|
|
|
|
if (isSessionExpired())
|
|
|
|
throw new SessionExpiredException();
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
try {
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
List<Result> result = new ArrayList<>();
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
// get tables
|
|
|
|
// if (result == null)
|
|
|
|
// result = getTables(dataInput);
|
|
|
|
if (result.size() == 0)
|
|
|
|
result = getTables(dataInput, elementType);
|
|
|
|
|
|
|
|
// Create a sublist and add data to list according
|
|
|
|
// to the limit and offset value of the config
|
|
|
|
List<Result> sublist = new ArrayList<Result>();
|
|
|
|
BasePagingLoadResult loadResult = null;
|
|
|
|
|
|
|
|
// print check on the search
|
|
|
|
// logger.info("Searching in the table: " + SearchTable);
|
|
|
|
// logger.info("Keyword to search: " + keyword);
|
|
|
|
|
|
|
|
int start = config.getOffset();
|
|
|
|
int limit = result.size();
|
|
|
|
|
|
|
|
if (config.getLimit() > 0) {
|
|
|
|
limit = Math.min(start + config.getLimit(), limit);
|
|
|
|
}
|
|
|
|
|
|
|
|
int totalNumber = result.size();
|
|
|
|
|
|
|
|
if ((SearchTable == false) || keyword == null
|
|
|
|
|| keyword.length() == 0) {
|
|
|
|
sublist = new ArrayList<Result>(result.subList(start, limit));
|
|
|
|
} else {
|
|
|
|
// print check
|
|
|
|
// logger.info("searching the table");
|
|
|
|
|
|
|
|
// search the table
|
|
|
|
for (int i = 0; i < result.size(); i++) {
|
|
|
|
if ((result.get(i).getValue().toLowerCase())
|
|
|
|
.startsWith(keyword.toLowerCase())) {
|
|
|
|
sublist.add(result.get(i));
|
|
|
|
}
|
2014-09-12 14:05:22 +02:00
|
|
|
}
|
2014-09-10 14:31:20 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
limit = sublist.size();
|
|
|
|
int sublen = sublist.size();
|
|
|
|
totalNumber = sublen;
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
if (start < sublen - 1) {
|
|
|
|
limit = Math.min(sublen, limit);
|
|
|
|
totalNumber = sublist.size();
|
|
|
|
sublist = new ArrayList<Result>(sublist.subList(start,
|
|
|
|
limit));
|
2014-09-17 10:27:30 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
// print check
|
|
|
|
// logger.info("result size: " + totalNumber);
|
|
|
|
// logger.info("limit: " + limit);
|
|
|
|
// logger.info("offset: " + config.getOffset());
|
|
|
|
// logger.info("start: " + start);
|
|
|
|
|
|
|
|
loadResult = new BasePagingLoadResult<Result>(sublist,
|
|
|
|
config.getOffset(), totalNumber);
|
|
|
|
// session.setAttribute("TablesResult", result);
|
|
|
|
return loadResult;
|
2014-09-29 18:07:58 +02:00
|
|
|
|
2014-09-17 10:27:30 +02:00
|
|
|
} catch (Exception e) {
|
2014-09-24 18:59:57 +02:00
|
|
|
logger.error("dbmanager-> ", e);
|
2014-10-07 11:42:14 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
if (!(e instanceof StatisticalManagerException)) {
|
|
|
|
// GWT can't serialize all exceptions
|
|
|
|
throw new Exception(
|
|
|
|
"Error in server while loading data. Exception: " + e);
|
|
|
|
}
|
2014-09-17 10:27:30 +02:00
|
|
|
throw e;
|
|
|
|
}
|
2014-07-02 12:57:14 +02:00
|
|
|
}
|
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
// to submit a query
|
|
|
|
@Override
|
|
|
|
public SubmitQueryResultWithFileFromServlet submitQuery(
|
|
|
|
LinkedHashMap<String, String> dataDB, String query,
|
|
|
|
boolean valueReadOnlyQuery, boolean smartCorrectionQuery,
|
|
|
|
String language, String UID) throws Exception {
|
|
|
|
|
|
|
|
// session check
|
|
|
|
if (isSessionExpired())
|
|
|
|
throw new SessionExpiredException();
|
2014-07-02 12:57:14 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
try {
|
2014-10-07 15:20:16 +02:00
|
|
|
|
2014-10-10 18:38:39 +02:00
|
|
|
logger.info("Submit Query Request received. Starting to manage the request.");
|
2014-10-07 11:42:14 +02:00
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
// get scope
|
|
|
|
String scope = session.getScope();
|
|
|
|
|
|
|
|
logger.info("dbmanager-> Dialect used for smart correction: "
|
|
|
|
+ language);
|
2014-07-25 20:03:14 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// data input
|
|
|
|
List<Parameter> inputParameters = new ArrayList<Parameter>();
|
|
|
|
// data output
|
2014-10-07 11:42:14 +02:00
|
|
|
// List<Result> output = new ArrayList<Result>();
|
|
|
|
List<Result> output = null;
|
|
|
|
SubmitQueryResultWithFileFromServlet result = null;
|
2014-07-02 12:57:14 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
// list that contains table attributes
|
|
|
|
List<String> listAttributes = null;
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// converted query
|
|
|
|
String convertedQuery = "";
|
2014-10-02 14:53:13 +02:00
|
|
|
String algorithmId = ConstantsPortlet.ALGID_SUBMITQUERY;
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// print check
|
|
|
|
String rs = dataDB.get("ResourceName");
|
|
|
|
String db = dataDB.get("DatabaseName");
|
2014-09-26 16:56:42 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// print check
|
|
|
|
logger.info("dbmanager-> ResourceName: " + rs);
|
|
|
|
logger.info("dbmanager-> DatabaseName: " + db);
|
|
|
|
|
|
|
|
logger.info("dbmanager-> Query: " + query);
|
|
|
|
logger.info("dbmanager-> SmartCorrections check: "
|
|
|
|
+ smartCorrectionQuery);
|
|
|
|
|
|
|
|
if ((rs == null) || (rs.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((db == null) || (db.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((query == null) || (query.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
|
|
|
|
// set input parameters
|
|
|
|
Parameter resource = new Parameter("ResourceName", "", "String", "");
|
|
|
|
Parameter database = new Parameter("DatabaseName", "", "String", "");
|
|
|
|
Parameter readOnlyQuery = new Parameter("Read-Only Query", "",
|
|
|
|
"Boolean", "true");
|
|
|
|
Parameter applySmartCorrection = new Parameter(
|
|
|
|
"Apply Smart Correction", "", "Boolean", "true");
|
|
|
|
Parameter lng = new Parameter("Language", "", "NONE", "NONE");
|
|
|
|
Parameter q = new Parameter("Query", "", "String", "");
|
|
|
|
|
|
|
|
inputParameters.add(resource);
|
|
|
|
inputParameters.add(database);
|
|
|
|
inputParameters.add(readOnlyQuery);
|
|
|
|
inputParameters.add(applySmartCorrection);
|
|
|
|
inputParameters.add(lng);
|
|
|
|
inputParameters.add(q);
|
|
|
|
|
|
|
|
inputParameters.get(0).setValue(rs);
|
|
|
|
inputParameters.get(1).setValue(db);
|
|
|
|
inputParameters.get(2).setValue(String.valueOf(valueReadOnlyQuery));
|
|
|
|
inputParameters.get(3).setValue(
|
|
|
|
String.valueOf(smartCorrectionQuery));
|
|
|
|
inputParameters.get(4).setValue(language);
|
|
|
|
inputParameters.get(5).setValue(query);
|
|
|
|
|
2014-10-01 15:38:16 +02:00
|
|
|
// get data from cache
|
|
|
|
// check if data exist considering as key the input parameters
|
2014-09-12 16:47:32 +02:00
|
|
|
|
2014-10-01 15:38:16 +02:00
|
|
|
// parse the query in order to remove spaces
|
|
|
|
String queryParsed = parseQuery(inputParameters.get(5).getValue());
|
|
|
|
// get data sent to client calling the submitQuery
|
2014-10-07 15:20:16 +02:00
|
|
|
String keyData = scope + algorithmId
|
|
|
|
+ inputParameters.get(0).getValue()
|
2014-10-01 15:38:16 +02:00
|
|
|
+ inputParameters.get(1).getValue()
|
|
|
|
+ inputParameters.get(2).getValue()
|
|
|
|
+ inputParameters.get(3).getValue()
|
|
|
|
+ inputParameters.get(4).getValue() + queryParsed;
|
|
|
|
|
2014-10-02 12:27:08 +02:00
|
|
|
// System.out.println("submitQuery KEY:" + keyData);
|
2014-10-01 15:38:16 +02:00
|
|
|
net.sf.ehcache.Element dataFromCache = getDataFromCache(keyData);
|
|
|
|
|
|
|
|
// key to get query result sent to client calling loadSubmitResult
|
|
|
|
String keySubmitQueryResult = keyData + "_SubmitQueryResult";
|
|
|
|
updateListKeySubmitQueryResult(UID, keySubmitQueryResult);
|
|
|
|
net.sf.ehcache.Element submitQueryResultFromCache = getDataFromCache(keySubmitQueryResult);
|
|
|
|
|
|
|
|
Object data = null;
|
|
|
|
Object submitQueryResult = null;
|
|
|
|
|
|
|
|
if ((dataFromCache != null) && (submitQueryResultFromCache != null)) {
|
|
|
|
data = dataFromCache.getObjectValue();
|
|
|
|
submitQueryResult = submitQueryResultFromCache.getObjectValue();
|
|
|
|
// System.out.println("***GETTING DATA FROM CACHE");
|
|
|
|
}
|
|
|
|
if ((data != null) && (submitQueryResult != null)) {
|
|
|
|
result = (SubmitQueryResultWithFileFromServlet) data;
|
2014-08-01 18:11:36 +02:00
|
|
|
|
2014-10-24 14:16:09 +02:00
|
|
|
cacheHitsNumber++;
|
|
|
|
logger.info("dbmanager-> CheckDataInCache: data found in cache. cacheHitsNumber: " + cacheHitsNumber);
|
|
|
|
cacheQuerySamplingHitsNumber++;
|
2014-10-01 15:38:16 +02:00
|
|
|
// set variable to true value if cached data are used and a
|
|
|
|
// computation is not started
|
|
|
|
Boolean val = new Boolean(true);
|
|
|
|
updateListSubmitQueryUIDCachedData(UID, val);
|
2014-08-25 17:55:53 +02:00
|
|
|
|
2014-10-01 15:38:16 +02:00
|
|
|
} else {
|
2014-10-24 14:16:09 +02:00
|
|
|
|
|
|
|
smComputationNumber++;
|
|
|
|
logger.info("dbmanager-> CheckDataInCache: data not found in cache. Starting the Statistical Computation. smComputationNumber: " + smComputationNumber);
|
|
|
|
smComputationQuerySamplingNumber++;
|
2014-08-01 18:11:36 +02:00
|
|
|
|
2014-10-01 15:38:16 +02:00
|
|
|
// set variable to false value if cached data are not used and a
|
|
|
|
// computation is started
|
|
|
|
Boolean val = new Boolean(false);
|
|
|
|
updateListSubmitQueryUIDCachedData(UID, val);
|
2014-09-05 11:30:05 +02:00
|
|
|
|
2014-10-01 15:38:16 +02:00
|
|
|
// create data structure
|
|
|
|
ComputationOutput outputData = new ComputationOutput();
|
2014-10-22 10:49:08 +02:00
|
|
|
|
2014-10-01 15:38:16 +02:00
|
|
|
// computation id
|
|
|
|
String computationId = startComputation(algorithmId,
|
2014-10-07 11:42:14 +02:00
|
|
|
inputParameters, outputData, scope, UID);
|
2014-10-22 10:49:08 +02:00
|
|
|
|
2014-10-24 14:16:09 +02:00
|
|
|
|
2014-10-01 15:38:16 +02:00
|
|
|
// get JobID
|
|
|
|
if (checkJob(UID)) { // if the computation has not been removed
|
|
|
|
// the job uid is present
|
|
|
|
// computationIDMap.put(id, computationId);
|
2014-09-12 14:05:22 +02:00
|
|
|
|
2014-10-01 15:38:16 +02:00
|
|
|
// print check on retrieving data
|
|
|
|
logger.info("output data retrieved");
|
|
|
|
|
|
|
|
// data output values
|
|
|
|
LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
|
|
|
|
// data output keys
|
|
|
|
LinkedHashMap<String, String> mapKeys = new LinkedHashMap<String, String>();
|
|
|
|
|
|
|
|
mapValues = outputData.getMapValues();
|
|
|
|
mapKeys = outputData.getmapKeys();
|
|
|
|
|
|
|
|
if (mapValues.size() != 0) {
|
|
|
|
output = new ArrayList<Result>();
|
|
|
|
|
|
|
|
// logger.info("build the result - started");
|
|
|
|
for (int i = 0; i < mapValues.size(); i++) {
|
|
|
|
Result row = new Result(mapKeys.get(String
|
|
|
|
.valueOf(i)), mapValues.get(String
|
|
|
|
.valueOf(i)));
|
|
|
|
output.add(row);
|
|
|
|
}
|
|
|
|
|
|
|
|
// System.out.println("output size submit: " +
|
|
|
|
// output.size());
|
|
|
|
// logger.info("build the result - finished");
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-10-01 15:38:16 +02:00
|
|
|
// get the converted query
|
|
|
|
if (smartCorrectionQuery == true) {
|
|
|
|
convertedQuery = output.get(0).getValue();
|
|
|
|
output.remove(0);
|
|
|
|
}
|
|
|
|
|
|
|
|
// get the attributes list for the result table
|
|
|
|
listAttributes = new ArrayList<String>();
|
|
|
|
listAttributes = getListAttributes(output.get(0)
|
|
|
|
.getValue());
|
2014-09-26 16:56:42 +02:00
|
|
|
|
2014-10-01 15:38:16 +02:00
|
|
|
if (listAttributes == null) {
|
|
|
|
logger.error("dbmanager-> Error in server while loading data. variable listAttributes null");
|
|
|
|
throw new Exception(
|
|
|
|
"Error in server while loading data.");
|
|
|
|
}
|
2014-10-24 14:50:47 +02:00
|
|
|
|
2014-10-01 15:38:16 +02:00
|
|
|
// store the result of the submit query operation
|
|
|
|
// updateSubmitQueryResultMap(UID, output);
|
|
|
|
// remove job with the specified uid
|
|
|
|
removeJob(UID);
|
|
|
|
|
|
|
|
// generate the file csv output
|
|
|
|
String name = "SubmitQuery";
|
|
|
|
String fileName = storeResultIntoCSVFile(output, name);
|
|
|
|
// get the web application path
|
|
|
|
HttpServletRequest request = this
|
|
|
|
.getThreadLocalRequest();
|
|
|
|
String applicationPath = request.getContextPath();
|
|
|
|
// logger.info("dbmanager-> Application Path: " +
|
|
|
|
// applicationPath);
|
|
|
|
String partialPathFile = applicationPath
|
|
|
|
+ "/computationResult/" + fileName;
|
|
|
|
|
|
|
|
result = new SubmitQueryResultWithFileFromServlet(
|
|
|
|
listAttributes, convertedQuery, partialPathFile);
|
|
|
|
|
|
|
|
// put the two data in cache
|
|
|
|
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
|
|
|
|
keyData, result);
|
|
|
|
insertDataIntoCache(dataToCache);
|
2014-10-24 14:50:47 +02:00
|
|
|
|
|
|
|
// remove the header in order to parse only the result
|
|
|
|
output.remove(0);
|
2014-10-01 15:38:16 +02:00
|
|
|
|
|
|
|
net.sf.ehcache.Element submitQueryResultToCache = new net.sf.ehcache.Element(
|
|
|
|
keySubmitQueryResult, output);
|
|
|
|
insertDataIntoCache(submitQueryResultToCache);
|
|
|
|
|
2014-09-24 11:02:22 +02:00
|
|
|
}
|
2014-10-01 15:38:16 +02:00
|
|
|
} else { // if the computation has been removed the job uid is
|
|
|
|
// not present and listAttributes is null.
|
|
|
|
listAttributes = null;
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-09-12 14:05:22 +02:00
|
|
|
}
|
2014-10-01 15:38:16 +02:00
|
|
|
}
|
2014-09-30 14:32:27 +02:00
|
|
|
return result;
|
2014-09-17 10:27:30 +02:00
|
|
|
} catch (Exception e) {
|
|
|
|
// e.printStackTrace();
|
2014-09-29 18:07:58 +02:00
|
|
|
logger.error("dbmanager-> ", e);
|
2014-10-02 12:27:08 +02:00
|
|
|
// TODO Exception Statistical management to remove a
|
2014-09-17 10:27:30 +02:00
|
|
|
// computation
|
|
|
|
if (e.getMessage()
|
|
|
|
.contains(
|
|
|
|
"javax.xml.ws.soap.SOAPFaultException: java.lang.IndexOutOfBoundsException")) {
|
2014-09-29 18:07:58 +02:00
|
|
|
throw new Exception("ServerException");
|
2014-09-01 12:58:31 +02:00
|
|
|
}
|
2014-09-29 18:07:58 +02:00
|
|
|
if (!(e instanceof StatisticalManagerException)) {
|
|
|
|
// GWT can't serialize all exceptions
|
|
|
|
throw new Exception(
|
|
|
|
"Error in server while loading data. Exception: " + e);
|
|
|
|
}
|
2014-09-17 10:27:30 +02:00
|
|
|
throw e;
|
2014-10-01 15:38:16 +02:00
|
|
|
} finally {
|
|
|
|
// remove the element related to the uid submitQuery request if
|
|
|
|
// present
|
|
|
|
removeSubmitQueryUIDCachedData(UID);
|
|
|
|
// remove jobStatus
|
|
|
|
removeJobStatus(UID);
|
|
|
|
// remove job
|
|
|
|
removeJob(UID);
|
2014-09-17 10:27:30 +02:00
|
|
|
}
|
2014-08-01 18:11:36 +02:00
|
|
|
}
|
|
|
|
|
2014-07-03 14:44:03 +02:00
|
|
|
@Override
|
2014-09-19 17:36:36 +02:00
|
|
|
public SamplingResultWithFileFromServlet sample(
|
2014-09-26 16:56:42 +02:00
|
|
|
LinkedHashMap<String, String> dataInput, String elementType)
|
|
|
|
throws Exception {
|
2014-07-03 14:44:03 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// session check
|
|
|
|
if (isSessionExpired())
|
|
|
|
throw new SessionExpiredException();
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
try {
|
2014-10-10 18:38:39 +02:00
|
|
|
logger.info("dbmanager-> Sampling on table Request received. Starting to manage the request.");
|
2014-10-07 11:42:14 +02:00
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
// get scope
|
|
|
|
String scope = session.getScope();
|
2014-10-07 15:20:16 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// data input
|
|
|
|
List<Parameter> inputParameters = new ArrayList<Parameter>();
|
|
|
|
// output sample result
|
|
|
|
List<Result> output = new ArrayList<Result>();
|
2014-09-30 14:32:27 +02:00
|
|
|
SamplingResultWithFileFromServlet result;
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-10-02 14:53:13 +02:00
|
|
|
String algorithmId = ConstantsPortlet.ALGID_SAMPLEONTABLE;
|
2014-09-26 16:56:42 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// print check
|
|
|
|
String rs = dataInput.get("ResourceName");
|
|
|
|
String db = dataInput.get("DatabaseName");
|
|
|
|
String scm = dataInput.get("SchemaName");
|
|
|
|
String tab = dataInput.get("TableName");
|
|
|
|
|
|
|
|
// print check
|
|
|
|
logger.info("dbmanager-> ResourceName: " + rs);
|
|
|
|
logger.info("dbmanager-> DatabaseName: " + db);
|
|
|
|
logger.info("dbmanager-> SchemaName: " + scm);
|
|
|
|
logger.info("dbmanager-> TableName: " + tab);
|
|
|
|
|
|
|
|
if ((elementType != null)
|
|
|
|
&& (elementType.equals(ConstantsPortlet.SCHEMA))) {
|
|
|
|
if ((rs == null) || (rs.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((db == null) || (db.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((scm == null) || (scm.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((tab == null) || (tab.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
2014-09-22 11:10:07 +02:00
|
|
|
}
|
2014-09-29 18:07:58 +02:00
|
|
|
if ((elementType != null)
|
|
|
|
&& (elementType.equals(ConstantsPortlet.DATABASE))) {
|
|
|
|
if ((rs == null) || (rs.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((db == null) || (db.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((tab == null) || (tab.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
2014-09-22 11:10:07 +02:00
|
|
|
}
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// set input parameters
|
|
|
|
Parameter resource = new Parameter("ResourceName", "", "String", "");
|
|
|
|
Parameter database = new Parameter("DatabaseName", "", "String", "");
|
|
|
|
Parameter schema = new Parameter("SchemaName", "", "String", "");
|
|
|
|
Parameter table = new Parameter("TableName", "", "String", "");
|
|
|
|
inputParameters.add(resource);
|
|
|
|
inputParameters.add(database);
|
|
|
|
inputParameters.add(schema);
|
|
|
|
inputParameters.add(table);
|
|
|
|
|
|
|
|
inputParameters.get(0).setValue(rs);
|
|
|
|
inputParameters.get(1).setValue(db);
|
|
|
|
inputParameters.get(2).setValue(scm);
|
|
|
|
inputParameters.get(3).setValue(tab);
|
|
|
|
|
2014-09-30 14:32:27 +02:00
|
|
|
// get data from cache
|
|
|
|
// check if data exist considering as key the input parameters
|
2014-10-07 15:20:16 +02:00
|
|
|
String key = scope + algorithmId
|
|
|
|
+ inputParameters.get(0).getValue()
|
2014-09-30 14:32:27 +02:00
|
|
|
+ inputParameters.get(1).getValue()
|
|
|
|
+ inputParameters.get(2).getValue()
|
|
|
|
+ inputParameters.get(3).getValue();
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-10-07 15:20:16 +02:00
|
|
|
// System.out.println("sampling KEY: " + key);
|
2014-09-30 14:32:27 +02:00
|
|
|
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-09-30 14:32:27 +02:00
|
|
|
Object value = null;
|
|
|
|
if (dataFromCache != null) {
|
|
|
|
value = dataFromCache.getObjectValue();
|
|
|
|
// System.out.println("***GETTING DATA FROM CACHE");
|
|
|
|
}
|
|
|
|
if (value != null) {
|
|
|
|
result = (SamplingResultWithFileFromServlet) value;
|
2014-10-24 14:16:09 +02:00
|
|
|
cacheHitsNumber++;
|
|
|
|
logger.info("dbmanager-> CheckDataInCache: data found in cache. cacheHitsNumber: " + cacheHitsNumber);
|
|
|
|
cacheQuerySamplingHitsNumber++;
|
2014-09-29 18:07:58 +02:00
|
|
|
|
2014-09-30 14:32:27 +02:00
|
|
|
} else {
|
2014-09-29 18:07:58 +02:00
|
|
|
|
2014-10-24 14:16:09 +02:00
|
|
|
smComputationNumber++;
|
|
|
|
logger.info("dbmanager-> CheckDataInCache: data not found in cache. Starting the Statistical Computation. smComputationNumber: " + smComputationNumber);
|
|
|
|
smComputationQuerySamplingNumber++;
|
2014-09-30 14:32:27 +02:00
|
|
|
// start computation
|
|
|
|
// create data structure
|
|
|
|
ComputationOutput outputData = new ComputationOutput();
|
|
|
|
// computation id
|
|
|
|
String computationId = startComputation(algorithmId,
|
2014-10-07 11:42:14 +02:00
|
|
|
inputParameters, outputData, scope);
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-09-30 14:32:27 +02:00
|
|
|
// print check on retrieving data
|
|
|
|
// logger.info("output data retrieved");
|
2014-09-19 17:36:36 +02:00
|
|
|
|
2014-09-30 14:32:27 +02:00
|
|
|
// data output values
|
|
|
|
LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
|
|
|
|
// data output keys
|
|
|
|
LinkedHashMap<String, String> mapKeys = new LinkedHashMap<String, String>();
|
|
|
|
|
|
|
|
mapValues = outputData.getMapValues();
|
|
|
|
mapKeys = outputData.getmapKeys();
|
|
|
|
|
|
|
|
for (int i = 0; i < mapValues.size(); i++) {
|
|
|
|
Result row = new Result(mapKeys.get(String.valueOf(i)),
|
|
|
|
mapValues.get(String.valueOf(i)));
|
|
|
|
output.add(row);
|
|
|
|
}
|
|
|
|
|
|
|
|
String name = "Sampling" + "_" + tab;
|
|
|
|
String fileName = storeResultIntoCSVFile(output, name);
|
|
|
|
|
|
|
|
HttpServletRequest request = this.getThreadLocalRequest();
|
|
|
|
String applicationPath = request.getContextPath();
|
|
|
|
// logger.info("dbmanager-> Application Path: " +
|
|
|
|
// applicationPath);
|
|
|
|
String partialPathFile = applicationPath
|
|
|
|
+ "/computationResult/" + fileName;
|
|
|
|
result = new SamplingResultWithFileFromServlet(output,
|
|
|
|
partialPathFile);
|
|
|
|
|
|
|
|
// put data in cache
|
|
|
|
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
|
|
|
|
key, result);
|
|
|
|
insertDataIntoCache(dataToCache);
|
|
|
|
|
|
|
|
}
|
|
|
|
return result;
|
2014-09-29 18:07:58 +02:00
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("dbmanager-> ", e);
|
|
|
|
if (!(e instanceof StatisticalManagerException)) {
|
|
|
|
// GWT can't serialize all exceptions
|
|
|
|
throw new Exception(
|
|
|
|
"Error in server while loading data. Exception: " + e);
|
|
|
|
}
|
|
|
|
throw e;
|
|
|
|
}
|
2014-07-03 14:44:03 +02:00
|
|
|
}
|
2014-07-04 12:09:47 +02:00
|
|
|
|
|
|
|
@Override
|
2014-09-19 17:36:36 +02:00
|
|
|
public SamplingResultWithFileFromServlet smartSample(
|
2014-09-26 16:56:42 +02:00
|
|
|
LinkedHashMap<String, String> dataInput, String elementType)
|
|
|
|
throws Exception {
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// session check
|
|
|
|
if (isSessionExpired())
|
|
|
|
throw new SessionExpiredException();
|
2014-07-04 12:09:47 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
try {
|
2014-10-10 18:38:39 +02:00
|
|
|
logger.info("dbmanager-> Smart Sampling on table Request received. Starting to manage the request.");
|
2014-10-07 11:42:14 +02:00
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
// get scope
|
|
|
|
String scope = session.getScope();
|
2014-10-07 15:20:16 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// data input
|
|
|
|
List<Parameter> inputParameters = new ArrayList<Parameter>();
|
|
|
|
// output sample result
|
|
|
|
List<Result> output = new ArrayList<Result>();
|
2014-09-30 14:32:27 +02:00
|
|
|
SamplingResultWithFileFromServlet result;
|
2014-07-04 12:09:47 +02:00
|
|
|
|
2014-10-02 14:53:13 +02:00
|
|
|
String algorithmId = ConstantsPortlet.ALGID_SMARTSAMPLEONTABLE;
|
2014-09-26 16:56:42 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// print check
|
|
|
|
String rs = dataInput.get("ResourceName");
|
|
|
|
String db = dataInput.get("DatabaseName");
|
|
|
|
String scm = dataInput.get("SchemaName");
|
|
|
|
String tab = dataInput.get("TableName");
|
|
|
|
|
|
|
|
// print check
|
|
|
|
logger.info("dbmanager-> ResourceName: " + rs);
|
|
|
|
logger.info("dbmanager-> DatabaseName: " + db);
|
|
|
|
logger.info("dbmanager-> SchemaName: " + scm);
|
|
|
|
logger.info("dbmanager-> TableName: " + tab);
|
|
|
|
|
|
|
|
if ((elementType != null)
|
|
|
|
&& (elementType.equals(ConstantsPortlet.SCHEMA))) {
|
|
|
|
if ((rs == null) || (rs.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((db == null) || (db.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((scm == null) || (scm.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((tab == null) || (tab.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
2014-09-22 11:10:07 +02:00
|
|
|
}
|
2014-09-29 18:07:58 +02:00
|
|
|
if ((elementType != null)
|
|
|
|
&& (elementType.equals(ConstantsPortlet.DATABASE))) {
|
|
|
|
if ((rs == null) || (rs.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((db == null) || (db.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((tab == null) || (tab.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
2014-09-22 11:10:07 +02:00
|
|
|
}
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// set input parameters
|
|
|
|
Parameter resource = new Parameter("ResourceName", "", "String", "");
|
|
|
|
Parameter database = new Parameter("DatabaseName", "", "String", "");
|
|
|
|
Parameter schema = new Parameter("SchemaName", "", "String", "");
|
|
|
|
Parameter table = new Parameter("TableName", "", "String", "");
|
|
|
|
inputParameters.add(resource);
|
|
|
|
inputParameters.add(database);
|
|
|
|
inputParameters.add(schema);
|
|
|
|
inputParameters.add(table);
|
|
|
|
|
|
|
|
inputParameters.get(0).setValue(rs);
|
|
|
|
inputParameters.get(1).setValue(db);
|
|
|
|
inputParameters.get(2).setValue(scm);
|
|
|
|
inputParameters.get(3).setValue(tab);
|
|
|
|
|
2014-09-30 14:32:27 +02:00
|
|
|
// get data from cache
|
|
|
|
// check if data exist considering as key the input parameters
|
2014-10-07 15:20:16 +02:00
|
|
|
String key = scope + algorithmId
|
|
|
|
+ inputParameters.get(0).getValue()
|
2014-09-30 14:32:27 +02:00
|
|
|
+ inputParameters.get(1).getValue()
|
|
|
|
+ inputParameters.get(2).getValue()
|
|
|
|
+ inputParameters.get(3).getValue();
|
2014-07-04 12:09:47 +02:00
|
|
|
|
2014-09-30 14:32:27 +02:00
|
|
|
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
|
2014-07-04 12:09:47 +02:00
|
|
|
|
2014-09-30 14:32:27 +02:00
|
|
|
Object value = null;
|
|
|
|
if (dataFromCache != null) {
|
|
|
|
value = dataFromCache.getObjectValue();
|
|
|
|
// System.out.println("***GETTING DATA FROM CACHE");
|
|
|
|
}
|
|
|
|
if (value != null) {
|
|
|
|
result = (SamplingResultWithFileFromServlet) value;
|
2014-10-24 14:16:09 +02:00
|
|
|
cacheHitsNumber++;
|
|
|
|
logger.info("dbmanager-> CheckDataInCache: data found in cache. cacheHitsNumber: " + cacheHitsNumber);
|
|
|
|
cacheQuerySamplingHitsNumber++;
|
2014-09-30 14:32:27 +02:00
|
|
|
} else {
|
2014-10-24 14:16:09 +02:00
|
|
|
smComputationNumber++;
|
|
|
|
logger.info("dbmanager-> CheckDataInCache: data not found in cache. Starting the Statistical Computation. smComputationNumber: " + smComputationNumber);
|
|
|
|
smComputationQuerySamplingNumber++;
|
2014-09-30 14:32:27 +02:00
|
|
|
// create data structure
|
|
|
|
ComputationOutput outputData = new ComputationOutput();
|
|
|
|
// computation id
|
|
|
|
String computationId = startComputation(algorithmId,
|
2014-10-07 11:42:14 +02:00
|
|
|
inputParameters, outputData, scope);
|
2014-09-29 18:07:58 +02:00
|
|
|
|
2014-09-30 14:32:27 +02:00
|
|
|
// print check on retrieving data
|
|
|
|
// logger.info("dbmanager-> output data retrieved");
|
2014-07-25 20:03:14 +02:00
|
|
|
|
2014-09-30 14:32:27 +02:00
|
|
|
// data output values
|
|
|
|
LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
|
|
|
|
// data output keys
|
|
|
|
LinkedHashMap<String, String> mapKeys = new LinkedHashMap<String, String>();
|
2014-09-19 17:36:36 +02:00
|
|
|
|
2014-09-30 14:32:27 +02:00
|
|
|
mapValues = outputData.getMapValues();
|
|
|
|
mapKeys = outputData.getmapKeys();
|
|
|
|
|
|
|
|
for (int i = 0; i < mapValues.size(); i++) {
|
|
|
|
Result row = new Result(mapKeys.get(String.valueOf(i)),
|
|
|
|
mapValues.get(String.valueOf(i)));
|
|
|
|
output.add(row);
|
|
|
|
}
|
|
|
|
|
|
|
|
String name = "SmartSampling" + "_" + tab;
|
|
|
|
String fileName = storeResultIntoCSVFile(output, name);
|
|
|
|
|
|
|
|
HttpServletRequest request = this.getThreadLocalRequest();
|
|
|
|
String applicationPath = request.getContextPath();
|
|
|
|
// logger.info("dbmanager-> Application Path: " +
|
|
|
|
// applicationPath);
|
|
|
|
String partialPathFile = applicationPath
|
|
|
|
+ "/computationResult/" + fileName;
|
|
|
|
result = new SamplingResultWithFileFromServlet(output,
|
|
|
|
partialPathFile);
|
|
|
|
|
|
|
|
// put data in cache
|
|
|
|
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
|
|
|
|
key, result);
|
|
|
|
insertDataIntoCache(dataToCache);
|
|
|
|
}
|
|
|
|
|
|
|
|
return result;
|
2014-09-19 17:36:36 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("dbmanager-> ", e);
|
|
|
|
if (!(e instanceof StatisticalManagerException)) {
|
|
|
|
// GWT can't serialize all exceptions
|
|
|
|
throw new Exception(
|
|
|
|
"Error in server while loading data. Exception: " + e);
|
|
|
|
}
|
|
|
|
throw e;
|
|
|
|
}
|
2014-07-04 12:09:47 +02:00
|
|
|
}
|
2014-07-11 12:35:26 +02:00
|
|
|
|
2014-07-04 12:36:43 +02:00
|
|
|
@Override
|
2014-09-19 17:36:36 +02:00
|
|
|
public SamplingResultWithFileFromServlet randomSample(
|
2014-09-26 16:56:42 +02:00
|
|
|
LinkedHashMap<String, String> dataInput, String elementType)
|
|
|
|
throws Exception {
|
2014-07-04 12:36:43 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// session check
|
|
|
|
if (isSessionExpired())
|
|
|
|
throw new SessionExpiredException();
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
try {
|
2014-10-10 18:38:39 +02:00
|
|
|
logger.info("dbmanager-> Random Sampling on table Request received. Starting to manage the request.");
|
2014-10-07 11:42:14 +02:00
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
// get scope
|
|
|
|
String scope = session.getScope();
|
2014-10-07 15:20:16 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// data input
|
|
|
|
List<Parameter> inputParameters = new ArrayList<Parameter>();
|
|
|
|
// output sample result
|
|
|
|
List<Result> output = new ArrayList<Result>();
|
2014-07-04 12:36:43 +02:00
|
|
|
|
2014-10-02 14:53:13 +02:00
|
|
|
String algorithmId = ConstantsPortlet.ALGID_RANDOMSAMPLEONTABLE;
|
2014-07-04 12:36:43 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// print check
|
|
|
|
String rs = dataInput.get("ResourceName");
|
|
|
|
String db = dataInput.get("DatabaseName");
|
|
|
|
String scm = dataInput.get("SchemaName");
|
|
|
|
String tab = dataInput.get("TableName");
|
2014-09-26 16:56:42 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// print check
|
|
|
|
logger.info("dbmanager-> ResourceName: " + rs);
|
|
|
|
logger.info("dbmanager-> DatabaseName: " + db);
|
|
|
|
logger.info("dbmanager-> SchemaName: " + scm);
|
|
|
|
logger.info("dbmanager-> TableName: " + tab);
|
|
|
|
|
|
|
|
if ((elementType != null)
|
|
|
|
&& (elementType.equals(ConstantsPortlet.SCHEMA))) {
|
|
|
|
if ((rs == null) || (rs.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((db == null) || (db.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((scm == null) || (scm.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((tab == null) || (tab.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
2014-09-22 11:10:07 +02:00
|
|
|
}
|
2014-09-29 18:07:58 +02:00
|
|
|
if ((elementType != null)
|
|
|
|
&& (elementType.equals(ConstantsPortlet.DATABASE))) {
|
|
|
|
if ((rs == null) || (rs.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((db == null) || (db.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((tab == null) || (tab.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
2014-09-22 11:10:07 +02:00
|
|
|
}
|
2014-09-17 10:27:30 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// set input parameters
|
|
|
|
Parameter resource = new Parameter("ResourceName", "", "String", "");
|
|
|
|
Parameter database = new Parameter("DatabaseName", "", "String", "");
|
|
|
|
Parameter schema = new Parameter("SchemaName", "", "String", "");
|
|
|
|
Parameter table = new Parameter("TableName", "", "String", "");
|
|
|
|
inputParameters.add(resource);
|
|
|
|
inputParameters.add(database);
|
|
|
|
inputParameters.add(schema);
|
|
|
|
inputParameters.add(table);
|
|
|
|
|
|
|
|
inputParameters.get(0).setValue(rs);
|
|
|
|
inputParameters.get(1).setValue(db);
|
|
|
|
inputParameters.get(2).setValue(scm);
|
|
|
|
inputParameters.get(3).setValue(tab);
|
|
|
|
|
|
|
|
// create data structure
|
|
|
|
ComputationOutput outputData = new ComputationOutput();
|
|
|
|
// computation id
|
|
|
|
String computationId = startComputation(algorithmId,
|
2014-10-07 11:42:14 +02:00
|
|
|
inputParameters, outputData, scope);
|
2014-07-25 20:03:14 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// print check on retrieving data
|
|
|
|
// logger.info("dbmanager-> output data retrieved");
|
2014-07-25 20:03:14 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// data output values
|
|
|
|
LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
|
|
|
|
// data output keys
|
|
|
|
LinkedHashMap<String, String> mapKeys = new LinkedHashMap<String, String>();
|
|
|
|
|
|
|
|
mapValues = outputData.getMapValues();
|
|
|
|
mapKeys = outputData.getmapKeys();
|
|
|
|
|
|
|
|
for (int i = 0; i < mapValues.size(); i++) {
|
|
|
|
Result row = new Result(mapKeys.get(String.valueOf(i)),
|
|
|
|
mapValues.get(String.valueOf(i)));
|
|
|
|
output.add(row);
|
|
|
|
}
|
2014-07-04 12:36:43 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
String name = "RandomSampling" + "_" + tab;
|
|
|
|
String fileName = storeResultIntoCSVFile(output, name);
|
2014-09-19 17:36:36 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
HttpServletRequest request = this.getThreadLocalRequest();
|
|
|
|
String applicationPath = request.getContextPath();
|
|
|
|
// logger.info("dbmanager-> Application Path: " + applicationPath);
|
|
|
|
String partialPathFile = applicationPath + "/computationResult/"
|
|
|
|
+ fileName;
|
|
|
|
SamplingResultWithFileFromServlet obj = new SamplingResultWithFileFromServlet(
|
|
|
|
output, partialPathFile);
|
2014-09-19 17:36:36 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
return obj;
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("dbmanager-> ", e);
|
|
|
|
if (!(e instanceof StatisticalManagerException)) {
|
|
|
|
// GWT can't serialize all exceptions
|
|
|
|
throw new Exception(
|
|
|
|
"Error in server while loading data. Exception: " + e);
|
|
|
|
}
|
|
|
|
throw e;
|
|
|
|
}
|
2014-07-04 12:36:43 +02:00
|
|
|
}
|
2014-07-04 12:09:47 +02:00
|
|
|
|
2014-07-04 11:07:19 +02:00
|
|
|
@Override
|
|
|
|
public LinkedHashMap<String, FileModel> getTableDetails(
|
|
|
|
LinkedHashMap<String, String> dataInput) throws Exception {
|
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// session check
|
|
|
|
if (isSessionExpired())
|
|
|
|
throw new SessionExpiredException();
|
2014-07-25 20:03:14 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
try {
|
2014-10-10 18:38:39 +02:00
|
|
|
logger.info("dbmanager-> Table Details Recovery Request received. Starting to manage the request.");
|
2014-10-07 11:42:14 +02:00
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
// get scope
|
|
|
|
String scope = session.getScope();
|
2014-10-07 15:20:16 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// data input
|
|
|
|
List<Parameter> inputParameters = new ArrayList<Parameter>();
|
|
|
|
// data ouptut
|
|
|
|
LinkedHashMap<String, FileModel> outputParameters = new LinkedHashMap<String, FileModel>();
|
2014-07-25 20:03:14 +02:00
|
|
|
|
2014-10-02 14:53:13 +02:00
|
|
|
String algorithmId = ConstantsPortlet.ALGID_GETTABLEDETAILS;
|
2014-09-17 10:27:30 +02:00
|
|
|
|
|
|
|
// print check
|
2014-09-29 18:07:58 +02:00
|
|
|
String rs = dataInput.get("ResourceName");
|
|
|
|
String db = dataInput.get("DatabaseName");
|
|
|
|
String scm = dataInput.get("SchemaName");
|
|
|
|
String tab = dataInput.get("TableName");
|
2014-09-12 16:47:32 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// print check
|
|
|
|
logger.info("dbmanager-> ResourceName: " + rs);
|
|
|
|
logger.info("dbmanager-> DatabaseName: " + db);
|
|
|
|
logger.info("dbmanager-> SchemaName: " + scm);
|
|
|
|
logger.info("dbmanager-> TableName: " + tab);
|
|
|
|
|
|
|
|
// set input parameters
|
|
|
|
Parameter resource = new Parameter("ResourceName", "", "String", "");
|
|
|
|
Parameter database = new Parameter("DatabaseName", "", "String", "");
|
|
|
|
Parameter schema = new Parameter("SchemaName", "", "String", "");
|
|
|
|
Parameter table = new Parameter("TableName", "", "String", "");
|
|
|
|
inputParameters.add(resource);
|
|
|
|
inputParameters.add(database);
|
|
|
|
inputParameters.add(schema);
|
|
|
|
inputParameters.add(table);
|
|
|
|
|
|
|
|
inputParameters.get(0).setValue(rs);
|
|
|
|
inputParameters.get(1).setValue(db);
|
|
|
|
inputParameters.get(2).setValue(scm);
|
|
|
|
inputParameters.get(3).setValue(tab);
|
|
|
|
|
|
|
|
// create data structure
|
|
|
|
ComputationOutput outputData = new ComputationOutput();
|
|
|
|
// computation id
|
|
|
|
String computationId = startComputation(algorithmId,
|
2014-10-07 11:42:14 +02:00
|
|
|
inputParameters, outputData, scope);
|
2014-09-29 18:07:58 +02:00
|
|
|
|
|
|
|
// print check on retrieving data
|
|
|
|
// logger.info("output data retrieved");
|
|
|
|
|
|
|
|
// output data values
|
|
|
|
LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
|
|
|
|
// output data keys
|
|
|
|
LinkedHashMap<String, String> mapKeys = new LinkedHashMap<String, String>();
|
|
|
|
|
|
|
|
mapValues = outputData.getMapValues();
|
|
|
|
mapKeys = outputData.getmapKeys();
|
|
|
|
|
|
|
|
for (int i = 0; i < mapValues.size(); i++) {
|
|
|
|
FileModel obj = new FileModel(mapValues.get(String.valueOf(i)));
|
|
|
|
// obj.setIsLoaded(true);
|
|
|
|
outputParameters.put(mapKeys.get(String.valueOf(i)), obj);
|
|
|
|
// print check
|
|
|
|
// logger.info("value: " + outputMap.get(String.valueOf(i)));
|
|
|
|
// logger.info("key: " + outputKey.get(String.valueOf(i)));
|
|
|
|
}
|
|
|
|
|
|
|
|
return outputParameters;
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("dbmanager-> ", e);
|
|
|
|
if (!(e instanceof StatisticalManagerException)) {
|
|
|
|
// GWT can't serialize all exceptions
|
|
|
|
throw new Exception(
|
|
|
|
"Error in server while loading data. Exception: " + e);
|
|
|
|
}
|
|
|
|
throw e;
|
|
|
|
}
|
2014-07-04 11:07:19 +02:00
|
|
|
}
|
2014-07-03 14:44:03 +02:00
|
|
|
|
2014-08-29 15:39:04 +02:00
|
|
|
// parse result for Submit query
|
|
|
|
public PagingLoadResult<Row> loadSubmitResult(PagingLoadConfig config,
|
|
|
|
List<String> listAttributes, String UID) throws Exception {
|
2014-09-29 18:07:58 +02:00
|
|
|
|
|
|
|
// session check
|
|
|
|
if (isSessionExpired())
|
|
|
|
throw new SessionExpiredException();
|
|
|
|
|
|
|
|
try {
|
2014-10-01 15:38:16 +02:00
|
|
|
|
|
|
|
// Create a sublist and add data to list according
|
|
|
|
// to the limit and offset value of the config
|
|
|
|
List<Row> sublist = new ArrayList<Row>();
|
|
|
|
BasePagingLoadResult loadResult = null;
|
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
// data parsed
|
|
|
|
List<Row> data = new ArrayList<Row>();
|
|
|
|
// submit query result
|
|
|
|
List<Result> result = new ArrayList<Result>();
|
2014-10-01 15:38:16 +02:00
|
|
|
// get the key to retrieve the submitQuery result
|
|
|
|
String key = getKeySubmitQueryResult(UID);
|
2014-09-29 18:07:58 +02:00
|
|
|
|
2014-10-01 15:38:16 +02:00
|
|
|
if ((key != null) && (!key.equals(""))) {
|
|
|
|
// load data
|
2014-09-29 18:07:58 +02:00
|
|
|
|
2014-10-01 15:38:16 +02:00
|
|
|
// get data from cache
|
|
|
|
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
|
|
|
|
Object value = null;
|
|
|
|
if (dataFromCache != null) {
|
|
|
|
value = dataFromCache.getObjectValue();
|
2014-09-29 18:07:58 +02:00
|
|
|
}
|
2014-10-01 15:38:16 +02:00
|
|
|
if (value != null) {
|
|
|
|
result = (List<Result>) value;
|
2014-10-27 10:44:26 +01:00
|
|
|
updateListSubmitQueryResult(UID, result);
|
|
|
|
|
|
|
|
// data = parseCVSString(result, listAttributes);
|
|
|
|
//
|
|
|
|
// int start = config.getOffset();
|
|
|
|
// int limit = data.size();
|
|
|
|
//
|
|
|
|
// if (config.getLimit() > 0) {
|
|
|
|
// limit = Math.min(start + config.getLimit(), limit);
|
|
|
|
// }
|
|
|
|
//
|
|
|
|
// int totalNumber = data.size();
|
|
|
|
// sublist = new ArrayList<Row>(data.subList(start, limit));
|
|
|
|
// loadResult = new BasePagingLoadResult<Row>(sublist,
|
|
|
|
// config.getOffset(), totalNumber);
|
|
|
|
//
|
|
|
|
// // System.out.println("start: " + start);
|
|
|
|
// // System.out.println("limit: " + limit);
|
|
|
|
// // System.out.println("sublist size: " + sublist.size());
|
|
|
|
} else {
|
|
|
|
|
|
|
|
//get the result bound to session
|
|
|
|
result = getSubmitQueryResult(UID);
|
2014-08-29 15:39:04 +02:00
|
|
|
|
2014-10-27 10:44:26 +01:00
|
|
|
// logger.error("dbmanager-> Error in server while loading data. variable value null");
|
|
|
|
// throw new Exception("Error in server while loading data.");
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
data = parseCVSString(result, listAttributes);
|
2014-08-29 15:39:04 +02:00
|
|
|
|
2014-10-27 10:44:26 +01:00
|
|
|
int start = config.getOffset();
|
|
|
|
int limit = data.size();
|
2014-10-01 15:38:16 +02:00
|
|
|
|
2014-10-27 10:44:26 +01:00
|
|
|
if (config.getLimit() > 0) {
|
|
|
|
limit = Math.min(start + config.getLimit(), limit);
|
|
|
|
}
|
2014-10-01 15:38:16 +02:00
|
|
|
|
2014-10-27 10:44:26 +01:00
|
|
|
int totalNumber = data.size();
|
|
|
|
sublist = new ArrayList<Row>(data.subList(start, limit));
|
|
|
|
loadResult = new BasePagingLoadResult<Row>(sublist,
|
|
|
|
config.getOffset(), totalNumber);
|
2014-10-01 15:38:16 +02:00
|
|
|
|
2014-10-27 10:44:26 +01:00
|
|
|
// System.out.println("start: " + start);
|
|
|
|
// System.out.println("limit: " + limit);
|
|
|
|
// System.out.println("sublist size: " + sublist.size());
|
2014-10-01 15:38:16 +02:00
|
|
|
|
|
|
|
} else {
|
|
|
|
logger.error("dbmanager-> Error in server while loading data. key null");
|
|
|
|
throw new Exception("Error in server while loading data.");
|
2014-09-29 18:07:58 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
return loadResult;
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("dbmanager-> ", e);
|
|
|
|
throw new Exception(
|
|
|
|
"Error in server while loading data. Exception: " + e);
|
|
|
|
}
|
2014-10-27 10:44:26 +01:00
|
|
|
|
2014-08-29 15:39:04 +02:00
|
|
|
}
|
2014-09-05 11:30:05 +02:00
|
|
|
|
2014-08-29 15:39:04 +02:00
|
|
|
// get attributes list for display the result in a table
|
|
|
|
private List<String> getListAttributes(String value) {
|
|
|
|
|
|
|
|
List<String> listAttributes = new ArrayList<String>();
|
|
|
|
// recover attribute fields for the result table
|
|
|
|
String headers = value;
|
|
|
|
// logger.info("Headers fields table: " + headers);
|
|
|
|
listAttributes = parseAttributesTableResult(headers);
|
|
|
|
// logger.info("attributes number: " + listAttributes.size());
|
|
|
|
// logger.info("attributes list: ");
|
|
|
|
// print check
|
|
|
|
// for (int i = 0; i < listAttributes.size(); i++) {
|
|
|
|
// logger.info("attribute: " + listAttributes.get(i));
|
|
|
|
// }
|
|
|
|
return listAttributes;
|
|
|
|
}
|
|
|
|
|
|
|
|
private List<String> parseAttributesTableResult(String phrase) {
|
|
|
|
String delimiter = ",";
|
|
|
|
List<String> elements = new ArrayList<String>();
|
|
|
|
int idxdelim = -1;
|
|
|
|
phrase = phrase.trim();
|
|
|
|
|
|
|
|
while ((idxdelim = phrase.indexOf(delimiter)) >= 0) {
|
|
|
|
elements.add(phrase.substring(0, idxdelim));
|
|
|
|
phrase = phrase.substring(idxdelim + 1).trim();
|
|
|
|
}
|
|
|
|
elements.add(phrase);
|
|
|
|
return elements;
|
|
|
|
}
|
|
|
|
|
2014-07-30 14:05:11 +02:00
|
|
|
// parse a csv row in a list of values
|
2014-08-01 11:27:40 +02:00
|
|
|
@Override
|
2014-07-02 12:57:14 +02:00
|
|
|
public List<Row> parseCVSString(List<Result> result, List<String> attrNames)
|
|
|
|
throws Exception {
|
2014-09-29 18:07:58 +02:00
|
|
|
|
|
|
|
// session check
|
|
|
|
if (isSessionExpired())
|
2014-09-29 15:13:02 +02:00
|
|
|
throw new SessionExpiredException();
|
2014-07-02 12:57:14 +02:00
|
|
|
|
|
|
|
List<Row> rows = new ArrayList<Row>();
|
|
|
|
|
|
|
|
if (result != null) {
|
|
|
|
for (int i = 0; i < result.size(); i++) {
|
|
|
|
List<String> attrValues = parse(result.get(i).getValue());
|
2014-07-08 17:53:15 +02:00
|
|
|
Row element = new Row(attrNames, attrValues, i);
|
2014-07-02 12:57:14 +02:00
|
|
|
rows.add(element);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return rows;
|
|
|
|
}
|
|
|
|
|
|
|
|
	/**
	 * Splits one CSV row into its field values, with minimal support for
	 * double-quoted fields (a quoted field may contain commas).
	 *
	 * Quote handling is intentionally lenient: a quoted field is terminated by
	 * the first unescaped double quote found by the regexp below; if no such
	 * quote exists the field is left empty. NOTE(review): malformed input
	 * (e.g. an unterminated quote) is not rejected — confirm upstream always
	 * produces well-formed rows.
	 *
	 * @param row one CSV line
	 * @return the field values, in order; never empty
	 */
	private List<String> parse(String row) throws Exception {
		String delimiter = ",";
		// print check
		// logger.info("row: " + row);
		List<String> elements = new ArrayList<String>();
		String phrase = row;
		int idxdelim = -1;
		boolean quot = false;
		phrase = phrase.trim();
		// consume the row token by token; each iteration removes one field
		// (and its trailing delimiter) from the front of 'phrase'
		while ((idxdelim = phrase.indexOf(delimiter)) >= 0) {
			quot = phrase.startsWith("\"");
			if (quot) {
				// quoted field: drop the opening quote
				phrase = phrase.substring(1);
				String quoted = "";
				if (phrase.startsWith("\""))
					// empty quoted field ("") — drop the closing quote too
					phrase = phrase.substring(1);
				else {
					// find the first double quote not preceded by a backslash:
					// that character pair marks the end of the quoted field
					RE regexp = new RE("[^\\\\]\"");
					boolean matching = regexp.match(phrase);

					if (matching) {
						int i0 = regexp.getParenStart(0);
						// keep the character matched before the quote,
						// discard the quote itself
						quoted = phrase.substring(0, i0 + 1).trim();
						phrase = phrase.substring(i0 + 2).trim();
					}
				}
				// skip the delimiter that followed the closing quote
				if (phrase.startsWith(delimiter))
					phrase = phrase.substring(1);

				elements.add(quoted);

			} else {
				// unquoted field: everything up to the next delimiter
				elements.add(phrase.substring(0, idxdelim));
				phrase = phrase.substring(idxdelim + 1).trim();
			}
			// logger.info("server token: " + phrase);
		}
		// last field: strip surrounding quotes if present
		if (phrase.startsWith("\""))
			phrase = phrase.substring(1);

		if (phrase.endsWith("\""))
			phrase = phrase.substring(0, phrase.length() - 1);

		elements.add(phrase);
		// logger.info("size: " + elements.size());
		return elements;
	}
|
|
|
|
|
2014-08-29 15:39:04 +02:00
|
|
|
// update job with the related status
|
2014-08-25 17:55:53 +02:00
|
|
|
private synchronized void updateJobStatus(String jobID, String status) {
|
|
|
|
if (jobID != null) {
|
|
|
|
// add the job status
|
2014-09-17 16:43:14 +02:00
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
2014-08-25 17:55:53 +02:00
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
HashMap<String, String> JobStatusMap = (HashMap<String, String>) session
|
|
|
|
.getAttribute("JobStatusList");
|
|
|
|
JobStatusMap.put(jobID, status);
|
|
|
|
session.setAttribute("JobStatusList", JobStatusMap);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-08-29 15:39:04 +02:00
|
|
|
// remove job with the related status
|
2014-08-25 17:55:53 +02:00
|
|
|
private synchronized void removeJobStatus(String jobID) {
|
2014-09-19 17:36:36 +02:00
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
2014-08-25 17:55:53 +02:00
|
|
|
HashMap<String, String> JobStatusMap = (HashMap<String, String>) session
|
|
|
|
.getAttribute("JobStatusList");
|
|
|
|
String status = JobStatusMap.get(jobID);
|
|
|
|
if (status != null) {
|
|
|
|
JobStatusMap.remove(jobID);
|
|
|
|
session.setAttribute("JobStatusList", JobStatusMap);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-08-29 15:39:04 +02:00
|
|
|
// get job status
|
|
|
|
private synchronized String getJobStatus(String jobID) {
|
2014-09-19 17:36:36 +02:00
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
2014-08-29 15:39:04 +02:00
|
|
|
HashMap<String, String> JobStatusMap = (HashMap<String, String>) session
|
|
|
|
.getAttribute("JobStatusList");
|
|
|
|
String status = JobStatusMap.get(jobID);
|
|
|
|
return status;
|
|
|
|
}
|
|
|
|
|
|
|
|
// update job with the computation id
|
2014-08-01 18:11:36 +02:00
|
|
|
private synchronized void updateJob(String jobID, String computationId) {
|
|
|
|
if (jobID != null) {
|
|
|
|
// add the computation in the map
|
2014-09-17 16:43:14 +02:00
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
2014-08-01 18:11:36 +02:00
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
HashMap<String, String> computationIDMap = (HashMap<String, String>) session
|
|
|
|
.getAttribute("ComputationIDList");
|
|
|
|
computationIDMap.put(jobID, computationId);
|
|
|
|
session.setAttribute("ComputationIDList", computationIDMap);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-08-29 15:39:04 +02:00
|
|
|
// remove job with the computation id
|
2014-08-01 18:11:36 +02:00
|
|
|
private synchronized String removeJob(String jobID) {
|
|
|
|
if (jobID != null) {
|
2014-08-25 17:55:53 +02:00
|
|
|
// System.out.println("remove jobID " + job);
|
2014-08-01 18:11:36 +02:00
|
|
|
// add the computation in the map
|
2014-09-17 16:43:14 +02:00
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
2014-08-01 18:11:36 +02:00
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
HashMap<String, String> computationIDMap = (HashMap<String, String>) session
|
|
|
|
.getAttribute("ComputationIDList");
|
|
|
|
String computationId = computationIDMap.get(jobID);
|
|
|
|
if (computationId != null) {
|
|
|
|
computationIDMap.remove(jobID);
|
|
|
|
session.setAttribute("ComputationIDList", computationIDMap);
|
|
|
|
return computationId;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return null;
|
|
|
|
}
|
|
|
|
|
2014-09-05 11:30:05 +02:00
|
|
|
private synchronized boolean checkJob(String jobID) {
|
|
|
|
boolean isContained = false;
|
|
|
|
if (jobID != null) {
|
2014-09-17 16:43:14 +02:00
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
2014-09-05 11:30:05 +02:00
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
HashMap<String, String> computationIDMap = (HashMap<String, String>) session
|
|
|
|
.getAttribute("ComputationIDList");
|
|
|
|
if (computationIDMap.containsKey(jobID)) {
|
|
|
|
isContained = true;
|
|
|
|
} else {
|
|
|
|
isContained = false;
|
|
|
|
}
|
|
|
|
}
|
2014-09-08 12:36:49 +02:00
|
|
|
// System.out.println("JobID isContained: " + isContained);
|
2014-09-05 11:30:05 +02:00
|
|
|
return isContained;
|
|
|
|
}
|
|
|
|
|
2014-10-01 15:38:16 +02:00
|
|
|
private synchronized void updateListSubmitQueryUIDCachedData(String UID,
|
|
|
|
Boolean value) {
|
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
|
|
|
|
HashMap<String, Boolean> listSubmitQueryUIDCachedData = (HashMap<String, Boolean>) session
|
|
|
|
.getAttribute("listSubmitQueryUIDCachedData");
|
|
|
|
listSubmitQueryUIDCachedData.put(UID, value);
|
|
|
|
session.setAttribute("listSubmitQueryUIDCachedData",
|
|
|
|
listSubmitQueryUIDCachedData);
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
private synchronized Boolean checkSubmitQueryUIDCachedData(String UID) {
|
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
|
|
|
|
HashMap<String, Boolean> listSubmitQueryUIDCachedData = (HashMap<String, Boolean>) session
|
|
|
|
.getAttribute("listSubmitQueryUIDCachedData");
|
|
|
|
return listSubmitQueryUIDCachedData.get(UID);
|
|
|
|
}
|
|
|
|
|
|
|
|
private synchronized void removeSubmitQueryUIDCachedData(String UID) {
|
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
|
|
|
|
HashMap<String, Boolean> listSubmitQueryUIDCachedData = (HashMap<String, Boolean>) session
|
|
|
|
.getAttribute("listSubmitQueryUIDCachedData");
|
|
|
|
|
|
|
|
if (listSubmitQueryUIDCachedData.containsKey(UID)) {
|
|
|
|
listSubmitQueryUIDCachedData.remove(UID);
|
|
|
|
session.setAttribute("listSubmitQueryUIDCachedData",
|
|
|
|
listSubmitQueryUIDCachedData);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
private synchronized void removeKeySubmitQueryResult(String UID) {
|
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
|
|
|
|
HashMap<String, String> listKeySubmitQueryResult = (HashMap<String, String>) session
|
|
|
|
.getAttribute("listKeySubmitQueryResult");
|
|
|
|
|
|
|
|
if (listKeySubmitQueryResult.containsKey(UID)) {
|
|
|
|
listKeySubmitQueryResult.remove(UID);
|
|
|
|
session.setAttribute("listKeySubmitQueryResult",
|
|
|
|
listKeySubmitQueryResult);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
private synchronized void updateListKeySubmitQueryResult(String UID,
|
|
|
|
String value) {
|
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
|
|
|
|
HashMap<String, String> listKeySubmitQueryResult = (HashMap<String, String>) session
|
|
|
|
.getAttribute("listKeySubmitQueryResult");
|
|
|
|
listKeySubmitQueryResult.put(UID, value);
|
|
|
|
session.setAttribute("listKeySubmitQueryResult",
|
|
|
|
listKeySubmitQueryResult);
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
private synchronized String getKeySubmitQueryResult(String UID) {
|
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
|
|
|
|
HashMap<String, String> listKeySubmitQueryResult = (HashMap<String, String>) session
|
|
|
|
.getAttribute("listKeySubmitQueryResult");
|
|
|
|
return listKeySubmitQueryResult.get(UID);
|
|
|
|
}
|
2014-10-27 10:44:26 +01:00
|
|
|
|
|
|
|
private synchronized List<Result> getSubmitQueryResult(String UID){
|
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
|
|
|
|
HashMap<String, List<Result>> listSubmitQueryResult = (HashMap<String, List<Result>>)session.getAttribute("listSubmitQueryResult");
|
|
|
|
return listSubmitQueryResult.get(UID);
|
|
|
|
}
|
|
|
|
|
|
|
|
private synchronized void updateListSubmitQueryResult(String UID, List<Result> value){
|
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
|
|
|
|
HashMap<String, List<Result>> listSubmitQueryResult = (HashMap<String, List<Result>>)session.getAttribute("listSubmitQueryResult");
|
|
|
|
listSubmitQueryResult.put(UID, value);
|
|
|
|
session.setAttribute("listSubmitQueryResult",
|
|
|
|
listSubmitQueryResult);
|
|
|
|
}
|
|
|
|
|
|
|
|
private synchronized void removeSubmitQueryResult(String UID){
|
|
|
|
|
|
|
|
ASLSession session = SessionUtil.getAslSession(this
|
|
|
|
.getThreadLocalRequest().getSession());
|
|
|
|
|
|
|
|
HashMap<String, List<Result>> listSubmitQueryResult = (HashMap<String, List<Result>>) session
|
|
|
|
.getAttribute("listSubmitQueryResult");
|
|
|
|
|
|
|
|
if (listSubmitQueryResult.containsKey(UID)) {
|
|
|
|
listSubmitQueryResult.remove(UID);
|
|
|
|
session.setAttribute("listSubmitQueryResult",
|
|
|
|
listSubmitQueryResult);
|
|
|
|
}
|
|
|
|
}
|
2014-10-01 15:38:16 +02:00
|
|
|
|
2014-07-02 12:57:14 +02:00
|
|
|
private String startComputation(String algorithmName,
|
2014-10-07 11:42:14 +02:00
|
|
|
List<Parameter> parameters, ComputationOutput outputData,
|
|
|
|
String scope) throws Exception {
|
|
|
|
return startComputation(algorithmName, parameters, outputData, scope,
|
|
|
|
null);
|
2014-08-01 18:11:36 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
	/**
	 * Starts a StatisticalManager computation and blocks until it completes
	 * (successfully or not), polling its status every 3 seconds.
	 *
	 * @param algorithmName the algorithm to execute
	 * @param parameters    the algorithm input parameters (name/value pairs)
	 * @param outputData    receives the computation output maps on completion
	 * @param scopeValue    the gCube scope to run in
	 * @param jobID         optional job identifier used to track status and
	 *                      computation id in the session ({@code null} is
	 *                      accepted: the update helpers ignore it)
	 * @return the computation id assigned by the service
	 * @throws Exception when the submission fails, polling fails, or the
	 *                   computation ends in error (propagated by
	 *                   checkComputationStatus/displayOutput)
	 */
	private String startComputation(String algorithmName,
			List<Parameter> parameters, ComputationOutput outputData,
			String scopeValue, String jobID) throws Exception {
		// build the computation configuration from the input parameters
		SMComputationConfig config = new SMComputationConfig();
		SMInputEntry[] list = new SMInputEntry[parameters.size()];
		int i = 0;
		for (Parameter p : parameters)
			list[i++] = new SMInputEntry(p.getName(), p.getValue());
		config.parameters(new SMEntries(list));
		config.algorithm(algorithmName);

		// create a computation request on behalf of the service account
		SMComputationRequest request = new SMComputationRequest();
		request.user(getUsername());
		request.config(config);

		try {
			StatisticalManagerFactory factory = getFactory(scopeValue);
			String computationId = factory.executeComputation(request);
			float percentage = 0;
			// String scope = getScope();
			String scope = scopeValue;
			String username = getUsername();

			// track the running computation in the session so that
			// removeComputation() can find and cancel it
			updateJobStatus(jobID, "computation started");
			updateJob(jobID, computationId);
			logger.info("dbmanager-> startComputation: the computation has started!");
			// poll until checkComputationStatus reports 100 (COMPLETED or
			// FAILED); each round sleeps 3 seconds
			while (percentage < 100) {
				percentage = checkComputationStatus(scope, computationId,
						username, outputData);
				Thread.sleep(3000);
			}
			logger.info("dbmanager-> startComputation: the computation has finished!");
			updateJobStatus(jobID, "computation finished");
			// removeJob(jobID);

			return computationId;
		} catch (Exception e) {
			logger.info("dbmanager-> startComputation: the job submit has failed!");
			// e.printStackTrace();
			// logger.error("dbmanager-> ", e);
			throw e;
		}
	}
|
|
|
|
|
|
|
|
	/**
	 * Polls the status of a StatisticalManager computation once.
	 *
	 * When the computation is COMPLETED or FAILED the output resource is
	 * forwarded to displayOutput (which throws on an ERROR resource) and 100
	 * is returned; otherwise 0 is returned so the caller keeps polling.
	 * NOTE(review): despite the name, 'percentage' is only ever 0 or 100 here
	 * — the fine-grained progress parsing is commented out.
	 *
	 * @param scope         the gCube scope to resolve the service in
	 * @param computationId the computation to check
	 * @param user          the user the computation was submitted as
	 * @param outputData    receives the output maps when the computation ends
	 * @return 100 when the computation has ended, 0 otherwise
	 * @throws Exception propagated from the service calls or displayOutput
	 */
	private float checkComputationStatus(String scope, String computationId,
			String user, ComputationOutput outputData) throws Exception {
		// System.out.println("checkComputation " + computationId);
		// bind the current thread to the requested scope before calling SM
		ScopeProvider.instance.set(scope);

		StatisticalManagerFactory factory = StatisticalManagerDSL
				.createStateful().build();

		SMComputation computation = factory.getComputation(computationId);
		// the service reports the status as an ordinal into SMOperationStatus
		SMOperationStatus status = SMOperationStatus.values()[computation
				.operationStatus()];

		float percentage = 0;
		if (status == SMOperationStatus.RUNNING) {
			// logger.info("RUNNING");
			SMOperationInfo infos = factory.getComputationInfo(computationId,
					user);
			// percentage = Float.parseFloat(infos.percentage());
			// logger.info("Percentage:" +
			// percentage);
			// computation = factory.getComputation(computationId);
			// re-read the status from the same computation snapshot
			status = SMOperationStatus.values()[computation.operationStatus()];
		} else if ((status == SMOperationStatus.COMPLETED)
				|| (status == SMOperationStatus.FAILED)) {
			// logger.info("computation COMPLETED");
			// logger.info("COMPLETED OR FAILED");
			// unwrap the result resource and hand it to displayOutput, which
			// extracts the output map or throws on an ERROR resource
			SMAbstractResource abstractResource = computation
					.abstractResource();
			SMResource smResource = abstractResource.resource();
			int resourceTypeIndex = smResource.resourceType();
			SMResourceType smResType = SMResourceType.values()[resourceTypeIndex];

			displayOutput(smResource, smResType, outputData);

			// print check
			// logger.info("SM resource Name: " + smResource.name());
			// logger.info("SM resource ID: " + smResource.resourceId());
			// logger.info("SM resource Description: " +
			// smResource.description());
			// signal the polling loop in startComputation to stop
			percentage = 100;
		}
		return percentage;
	}
|
|
|
|
|
2014-07-25 20:03:14 +02:00
|
|
|
	/**
	 * Extracts the output of a finished computation into {@code outputData},
	 * or raises the service-side failure as an exception.
	 *
	 * Only MAP-named OBJECT resources are actually read (via getMap); IMAGES
	 * and other resource kinds are ignored. An ERROR resource is converted
	 * into a StatisticalManagerException carrying the service description.
	 *
	 * @param smResource the result resource of the computation
	 * @param smResType  the already-decoded type of {@code smResource}
	 * @param outputData receives the key/value output maps
	 * @throws StatisticalManagerException when the resource type is ERROR
	 * @throws Exception                   propagated from getMap
	 */
	private void displayOutput(SMResource smResource, SMResourceType smResType,
			ComputationOutput outputData) throws Exception {
		if (smResType.equals(SMResourceType.OBJECT)) {
			// switch (smResType) {
			// case FILE:
			// SMFile fileRes = (SMFile) smResource;
			// System.out.println("Output is a file");
			// break;
			// case OBJECT:
			SMObject objRes = (SMObject) smResource;
			// the resource name encodes the primitive output type
			if (objRes.name().contentEquals(PrimitiveTypes.MAP.toString())) {
				logger.info("dbmanager-> Output is a map");
				getMap(objRes, outputData);
			} else if (objRes.name().contentEquals(
					PrimitiveTypes.IMAGES.toString())) {
				// logger.info("Output are images")
				// images are not handled by this portlet
			}
			// else
			// System.out.println("Output is other");
			// rootLogger.log(Level.SEVERE, "Output is other");
		}

		// StatisticalManager EXCEPTION MANAGEMENT
		if (smResType.equals(SMResourceType.ERROR)) {
			// surface the remote failure to the caller with the service's
			// own description as the message
			StatisticalManagerException e = new StatisticalManagerException(
					smResource.description());
			// e.printStackTrace();
			// logger.error("dbmanager-> ", e);
			throw e;
		}
	}
|
|
|
|
|
2014-08-01 11:27:40 +02:00
|
|
|
	// get output result
	/**
	 * Downloads and deserializes the MAP output of a computation, filling
	 * {@code outputData} with two parallel index-keyed maps: position -&gt;
	 * output key, and position -&gt; output value (the URL of an OBJECT
	 * resource).
	 *
	 * NOTE(review): the position counter {@code i} is only advanced for
	 * OBJECT entries while every key is recorded — if the service ever
	 * returns non-OBJECT entries the two maps would fall out of step;
	 * confirm the MAP output only contains OBJECT resources.
	 *
	 * @param objRes     the OBJECT resource whose url() points at the
	 *                   serialized map in storage
	 * @param outputData receives the key and value maps
	 * @throws Exception on storage or deserialization failures
	 */
	private void getMap(SMObject objRes, ComputationOutput outputData)
			throws Exception {
		// output data values
		LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
		// output data keys
		LinkedHashMap<String, String> mapKeys = new LinkedHashMap<String, String>();

		// logger.info("getStorageInputStream - started");
		InputStream is = getStorageClientInputStream(objRes.url());
		// object serializer: alias the SM wire names onto the local classes
		XStream xstream = new XStream();
		xstream.alias(
				"org.gcube_system.namespaces.data.analysis.statisticalmanager.types.SMObject",
				SMObject.class);
		xstream.alias(
				"org.gcube_system.namespaces.data.analysis.statisticalmanager.types.SMFile",
				SMFile.class);
		xstream.alias(
				"org.gcube_system.namespaces.data.analysis.statisticalmanager.types.SMResource",
				SMResource.class);
		xstream.alias(
				"org.gcube_system.namespaces.data.analysis.statisticalmanager.types.SMTable",
				SMTable.class);
		// logger.info("streaming");

		@SuppressWarnings("unchecked")
		Map<String, SMResource> smMap = (Map<String, SMResource>) (xstream
				.fromXML(is));
		is.close();
		// logger.info("getStorageInputStream - finished");

		// logger.info("build the resultMap - started");
		int i = 0;
		for (String key : smMap.keySet()) {
			// add key value
			mapKeys.put(String.valueOf(i), key);
			SMResource smres = smMap.get(key);
			int resourceTypeIndex = smres.resourceType();
			SMResourceType smsubResType = SMResourceType.values()[resourceTypeIndex];
			// logger.info("ResourceType: " + smsubResType);

			if (smsubResType == SMResourceType.OBJECT) {
				// the value of an OBJECT entry is its storage URL
				SMObject obje = (SMObject) smres;
				String outstring = obje.url();
				// logger.info("key: " + smsubResType);
				// logger.info("object: " + outstring);
				mapValues.put(String.valueOf(i), outstring);
				i++;
			}
		}
		outputData.setMapValues(mapValues);
		outputData.setmapKeys(mapKeys);
		// logger.info("build the resultMap - finished");
	}
|
|
|
|
|
|
|
|
private InputStream getStorageClientInputStream(String url)
|
|
|
|
throws Exception {
|
|
|
|
URL u = new URL(null, url, new URLStreamHandler() {
|
|
|
|
@Override
|
|
|
|
protected URLConnection openConnection(URL u) throws IOException {
|
|
|
|
return new SMPURLConnection(u);
|
|
|
|
}
|
|
|
|
});
|
|
|
|
return u.openConnection().getInputStream();
|
|
|
|
}
|
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
private StatisticalManagerFactory getFactory(String scope) {
|
|
|
|
// HttpSession httpSession = this.getThreadLocalRequest().getSession();
|
|
|
|
return SessionUtil.getFactory(scope);
|
2014-07-02 12:57:14 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
private String getUsername() {
|
2014-10-02 14:53:13 +02:00
|
|
|
// set the username of the user session to value "database.manager"
|
2014-09-19 17:36:36 +02:00
|
|
|
// HttpSession httpSession = this.getThreadLocalRequest().getSession();
|
|
|
|
// return SessionUtil.getUsername(httpSession);
|
2014-09-17 16:12:32 +02:00
|
|
|
return "database.manager";
|
2014-07-02 12:57:14 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
private String getScope() {
|
|
|
|
HttpSession httpSession = this.getThreadLocalRequest().getSession();
|
|
|
|
return SessionUtil.getScope(httpSession);
|
|
|
|
}
|
|
|
|
|
2014-08-29 15:39:04 +02:00
|
|
|
	// remove the computation
	/**
	 * Cancels the StatisticalManager computation associated with a
	 * submitQuery request, unless that request was served from the cache
	 * (in which case there is no live computation to remove).
	 *
	 * @param uidSubmitQuery identifier of the submitQuery request
	 * @return {@code true} when a computation was found and removed
	 * @throws SessionExpiredException when the HTTP session has expired
	 * @throws Exception               on any other failure
	 */
	public Boolean removeComputation(String uidSubmitQuery) throws Exception {
		// System.out.println("server UID: " + uidSubmitQuery);

		// session check
		if (isSessionExpired())
			throw new SessionExpiredException();

		try {
			logger.info("dbmanager-> Remove Computation Request received. Starting to manage the request.");
			ASLSession session = SessionUtil.getAslSession(this
					.getThreadLocalRequest().getSession());
			// get scope
			String scope = session.getScope();

			Boolean isComputationRemoved = false;
			// verify if this uid submitQuery request uses data in cache
			Boolean value = checkSubmitQueryUIDCachedData(uidSubmitQuery);

			if (value != null) {
				// System.out.println("For uid " + uidSubmitQuery
				// + " data are cached? " + value.booleanValue());
				if (!value.booleanValue()) {
					// the request started a real computation: find it
					String computationId = null;

					if ((uidSubmitQuery != null)
							&& (!(uidSubmitQuery.equals("")))) {
						// get job status
						String status = getJobStatus(uidSubmitQuery);

						if (status == null) {
							// the computation has not started yet: wait for
							// the id to appear in the session map.
							// NOTE(review): this is an unbounded busy-wait —
							// if the id is never registered (e.g. the submit
							// failed) this loop spins forever; consider a
							// bounded retry. Left unchanged here.
							while (computationId == null) {
								computationId = removeJob(uidSubmitQuery);
							}
						} else if (status.equals("computation started")) {
							// System.out.println("check status: computation started");
							// the computation has started
							computationId = removeJob(uidSubmitQuery);
						}
					}

					if (computationId != null) {
						StatisticalManagerFactory factory = getFactory(scope);
						try {
							factory.removeComputation(computationId);
							logger.info("dbmanager-> Computation with UID: "+ uidSubmitQuery + " removed");
							// remove submit query result
							refreshDataOnServer(uidSubmitQuery);
							// System.out.println("computation removed");
							isComputationRemoved = true;
						} catch (Exception e) {
							// best-effort: log and report failure through the
							// return value instead of propagating
							// e.printStackTrace();
							logger.info("dbmanager-> Could not remove the computation ID "
									+ computationId
									+ " corresponding to jobID "
									+ uidSubmitQuery);
							logger.error("dbmanager-> ", e);
						}
					}
					// // remove job status
					// removeJobStatus(uidSubmitQuery);
					// //remove the element related to the uid submitQuery
					// request
					// removeSubmitQueryUIDCachedData(uidSubmitQuery);
				}
			}
			return isComputationRemoved;
		} catch (Exception e) {
			logger.error("dbmanager-> ", e);
			throw new Exception(
					"Error in server while loading data. Exception: " + e);
		}
		// finally {
		// // remove the element related to the uid submitQuery request
		// removeSubmitQueryUIDCachedData(uidSubmitQuery);
		// // remove job status
		// System.out.println("remove job status");
		// removeJobStatus(uidSubmitQuery);
		// removeJob(uidSubmitQuery);
		// }
	}
|
2014-08-01 11:27:40 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
public void refreshDataOnServer(String submitQueryUID) throws Exception {
|
|
|
|
|
|
|
|
// session check
|
|
|
|
if (isSessionExpired())
|
2014-09-29 15:13:02 +02:00
|
|
|
throw new SessionExpiredException();
|
2014-09-29 18:07:58 +02:00
|
|
|
|
2014-08-29 15:39:04 +02:00
|
|
|
if ((submitQueryUID != null) && (!submitQueryUID.equals(""))) {
|
2014-10-02 12:27:08 +02:00
|
|
|
removeKeySubmitQueryResult(submitQueryUID);
|
2014-10-27 10:44:26 +01:00
|
|
|
removeSubmitQueryResult(submitQueryUID);
|
2014-10-01 15:38:16 +02:00
|
|
|
// removeResultParsed(submitQueryUID);
|
|
|
|
// removeResult(submitQueryUID);
|
|
|
|
// removeSubmitQueryUIDCachedData(submitQueryUID);
|
2014-10-02 11:57:21 +02:00
|
|
|
// System.out.println("data refreshed on server");
|
2014-08-29 15:39:04 +02:00
|
|
|
}
|
|
|
|
}
|
2014-09-10 14:31:20 +02:00
|
|
|
|
2014-10-24 10:01:50 +02:00
|
|
|
private synchronized void insertDataIntoCache(net.sf.ehcache.Element data) throws Exception{
|
2014-10-22 19:16:37 +02:00
|
|
|
|
|
|
|
if(cacheManager.cacheExists("DBCache")){
|
2014-10-24 10:01:50 +02:00
|
|
|
logger.info("dbmanager-> disk store path for cache: " + cacheManager.getDiskStorePath() +"Cache Status: "+DBCache.getStatus().toString());
|
2014-10-22 19:16:37 +02:00
|
|
|
if (DBCache.getStatus().toString().equals(Status.STATUS_ALIVE.toString())){
|
|
|
|
DBCache.put(data);
|
2014-10-24 10:01:50 +02:00
|
|
|
logger.trace("dbmanager-> element with key: " + data.getKey().toString()+" added in cache");
|
2014-10-07 11:42:14 +02:00
|
|
|
}
|
2014-10-24 10:01:50 +02:00
|
|
|
}
|
2014-09-10 14:31:20 +02:00
|
|
|
}
|
|
|
|
|
2014-10-24 10:01:50 +02:00
|
|
|
private synchronized net.sf.ehcache.Element getDataFromCache(String key) throws Exception{
|
2014-10-07 11:42:14 +02:00
|
|
|
net.sf.ehcache.Element data = null;
|
2014-10-24 10:01:50 +02:00
|
|
|
|
2014-10-22 19:16:37 +02:00
|
|
|
if(cacheManager.cacheExists("DBCache")){
|
2014-10-24 10:01:50 +02:00
|
|
|
logger.info("dbmanager-> disk store path for cache: " + cacheManager.getDiskStorePath() +". Cache Status: "+DBCache.getStatus().toString());
|
2014-10-22 19:16:37 +02:00
|
|
|
if (DBCache.getStatus().toString().equals(Status.STATUS_ALIVE.toString())){
|
|
|
|
data = DBCache.get(key);
|
2014-10-24 10:01:50 +02:00
|
|
|
if (data!=null){
|
|
|
|
logger.trace("dbmanager-> element with key: " + key +" is in cache");
|
|
|
|
}
|
2014-10-22 19:16:37 +02:00
|
|
|
}
|
2014-10-07 11:42:14 +02:00
|
|
|
}
|
2014-10-24 10:01:50 +02:00
|
|
|
return data;
|
2014-09-10 14:31:20 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
// clear the cache on the user request
|
2014-09-12 14:05:22 +02:00
|
|
|
/**
 * Handles a user-initiated refresh of a node of the database tree.
 * Builds the cache key for the selected element, invalidates the cached
 * subtree rooted at it, recomputes the data for the selected node, and
 * (for resource-list/resource/postgres-database nodes) enqueues a request
 * so the background DataLoader thread re-populates the subtree.
 *
 * @param ElementType node type (one of ConstantsPortlet.RESOURCESLIST,
 *                    RESOURCE, DATABASE, SCHEMA); empty string means no-op
 * @param inputData   element coordinates ("ResourceName", "DatabaseName",
 *                    "SchemaName") — assumed keys; TODO confirm against caller
 * @param element     the tree node model (provides the database type)
 * @return the refreshed data for the selected node, or null when nothing
 *         was refreshed
 * @throws SessionExpiredException when the user session has expired
 * @throws Exception wrapped server-side failure (GWT-serializable)
 */
public GeneralOutputFromServlet refreshDataTree(String ElementType,
        LinkedHashMap<String, String> inputData, FileModel element)
        throws Exception {
    logger.info("dbmanager-> Refresh data request received from element " + ElementType + ". Starting to manage the request.");
    // session check
    if (isSessionExpired())
        throw new SessionExpiredException();
    try {
        ASLSession session = SessionUtil.getAslSession(this
                .getThreadLocalRequest().getSession());
        // get scope
        String scope = session.getScope();
        String resourceName = "";
        String databaseName = "";  // NOTE(review): never assigned below
        GeneralOutputFromServlet result = null;
        if (inputData != null && inputData.size() != 0) {
            DataExchangedThroughQueue dataQueue = null;
            String key = "";
            String keyUsedForQueryRefresh = "";
            String keyUsedForSamplingsRefresh = "";
            String keyUsedForSmartSamplingRefresh = "";
            String keyUsedForRandomSamplingRefresh = "";  // NOTE(review): unused
            boolean requestToAddInQueue = false;
            if (!ElementType.equals("")) {
                // build the cache key for the selected node and, where
                // needed, invalidate dependent cached computations
                switch (ElementType) {
                case ConstantsPortlet.RESOURCESLIST:
                    key = scope + inputData.get(ConstantsPortlet.RESOURCESLIST);
                    dataQueue = new DataExchangedThroughQueue(scope);
                    requestToAddInQueue = true;
                    break;
                case ConstantsPortlet.RESOURCE:
                    key = scope + inputData.get("ResourceName");
                    dataQueue = new DataExchangedThroughQueue(scope,
                            ConstantsPortlet.RESOURCE, inputData.get("ResourceName"),
                            null, null, null);
                    requestToAddInQueue = true;
                    break;
                case ConstantsPortlet.DATABASE:
                    if (element.getDatabaseType().equals(
                            ConstantsPortlet.POSTGRES)) { // refresh schema list
                        key = inputData.get("ResourceName")
                                + inputData.get("DatabaseName");
                        // invalidate submitted queries cached for this database
                        keyUsedForQueryRefresh = scope
                                + ConstantsPortlet.ALGID_SUBMITQUERY + key;
                        refreshSubmittedQueryInCache(keyUsedForQueryRefresh);
                        dataQueue = new DataExchangedThroughQueue(scope,
                                ConstantsPortlet.DATABASE, inputData.get("ResourceName"),
                                inputData.get("DatabaseName"), null,
                                element.getDatabaseType());
                        requestToAddInQueue = true;
                        key = scope + key;
                    }
                    if (element.getDatabaseType().equals(
                            ConstantsPortlet.MYSQL)) { // refresh table list
                        key = inputData.get("ResourceName")
                                + inputData.get("DatabaseName")
                                + inputData.get("SchemaName");
                        keyUsedForQueryRefresh = scope
                                + ConstantsPortlet.ALGID_SUBMITQUERY + key;
                        refreshSubmittedQueryInCache(keyUsedForQueryRefresh);
                        // invalidate samplings cached for this table list
                        keyUsedForSamplingsRefresh = scope
                                + ConstantsPortlet.ALGID_SAMPLEONTABLE
                                + key;
                        keyUsedForSmartSamplingRefresh = scope
                                + ConstantsPortlet.ALGID_SMARTSAMPLEONTABLE
                                + key;
                        key = scope + key;
                        refreshSamplingsInCache(keyUsedForSamplingsRefresh,
                                keyUsedForSmartSamplingRefresh);
                        // MySQL table lists are not reloaded via the queue
                        requestToAddInQueue = false;
                    }
                    break;
                case ConstantsPortlet.SCHEMA:
                    key = inputData.get("ResourceName")
                            + inputData.get("DatabaseName")
                            + inputData.get("SchemaName");
                    // refresh submitted query and samplings and tables list
                    keyUsedForQueryRefresh = scope
                            + ConstantsPortlet.ALGID_SUBMITQUERY
                            + inputData.get("ResourceName")
                            + inputData.get("DatabaseName");
                    refreshSubmittedQueryInCache(keyUsedForQueryRefresh);
                    keyUsedForSamplingsRefresh = scope
                            + ConstantsPortlet.ALGID_SAMPLEONTABLE + key;
                    keyUsedForSmartSamplingRefresh = scope
                            + ConstantsPortlet.ALGID_SMARTSAMPLEONTABLE
                            + key;
                    key = scope + key;
                    refreshSamplingsInCache(keyUsedForSamplingsRefresh,
                            keyUsedForSmartSamplingRefresh);
                    requestToAddInQueue = false;
                    break;
                }
            }
            net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
            if (dataFromCache != null) {
                logger.info("dbmanager-> Starting the data removing process in cache from the node with key: " + key);
                // refresh data in cache. Remove data related to the subtree
                // with the selected element as root
                refreshDataInCache(element, ElementType, scope, key, null);
                // recover the refresh data of the item selected
                if (!ElementType.equals("")) {
                    switch (ElementType) {
                    case ConstantsPortlet.RESOURCESLIST:
                        List<FileModel> output1 = getResource();
                        result = new GeneralOutputFromServlet(output1);
                        break;
                    case ConstantsPortlet.RESOURCE:
                        resourceName = inputData.get("ResourceName");
                        LinkedHashMap<String, FileModel> output2 = getDBInfo(resourceName);
                        result = new GeneralOutputFromServlet(output2);
                        break;
                    case ConstantsPortlet.DATABASE:
                        if (element.getDatabaseType() != null
                                && (element.getDatabaseType()
                                        .equals(ConstantsPortlet.POSTGRES))) { // refresh
                            // schema list
                            List<FileModel> output3 = getDBSchema(inputData);
                            result = new GeneralOutputFromServlet(output3);
                        }
                        if (element.getDatabaseType() != null
                                && element.getDatabaseType().equals(
                                        ConstantsPortlet.MYSQL)) { // refresh
                            // table list; NOTE(review): result is not set here
                            getTables(inputData, ConstantsPortlet.DATABASE);
                        }
                        break;
                    case ConstantsPortlet.SCHEMA:
                        getTables(inputData, ConstantsPortlet.SCHEMA);
                        break;
                    }
                }
                // check if the thread execution is terminated. If yes, a
                // new thread is started otherwise no because the thread in
                // running state will also serve this request received in the queue.
                // The thread will run in backgroung to load the data
                // related to the subtree with the item selected as root.
                if (requestToAddInQueue == true) {
                    if (isThreadExecutionFinished()) {
                        logger.info("dbmanager-> Starting the launch of the Thread DataLoader execution");
                        queue.offer(dataQueue);
                        Thread t = new Thread(dataLoader);
                        t.start();
                    } else {
                        logger.info("dbmanager-> The Thread DataLoader is already running. Tree Refresh request put in Queue");
                        queue.offer(dataQueue);
                    }
                }
            }
        }
        return result;
    } catch (Exception e) {
        logger.error("dbmanager-> ", e);
        if (!(e instanceof StatisticalManagerException)) {
            // GWT can't serialize all exceptions
            throw new Exception(
                    "Error in server while loading data. Exception: " + e);
        }
        throw e;
    }
}
|
|
|
|
|
2014-10-10 12:53:26 +02:00
|
|
|
/**
 * Recursively removes from the cache the data of the subtree rooted at the
 * given element: resources list -> resources -> databases -> schemas/tables.
 * Exceptions are logged and swallowed so a partial refresh does not abort
 * the caller.
 *
 * @param element     tree node model (used for the database type)
 * @param ElementType node type constant from ConstantsPortlet
 * @param scope       ASL scope, used to prefix child keys (only needed for
 *                    RESOURCESLIST recursion)
 * @param key         cache key of the node being refreshed
 * @param DBType      database type (POSTGRES/MYSQL); when null it is taken
 *                    from the element
 */
public void refreshDataInCache(FileModel element, String ElementType,
        String scope, String key, String DBType) {
    logger.info("dbmanager-> Refresh Data Request of element "
            + ElementType + " in Cache with KEY: " + key);
    try {
        // get data from cache
        net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
        Object value = null;
        if (dataFromCache != null) {
            value = dataFromCache.getObjectValue();
            if (value != null) {
                if (!ElementType.equals("")) {
                    switch (ElementType) {
                    case ConstantsPortlet.RESOURCESLIST:
                        // refresh resources
                        List<FileModel> resources = (List<FileModel>) value;
                        // refresh cache
                        refreshCache(key);
                        // apply the refresh on children
                        for (int i = 0; i < resources.size(); i++) {
                            key = scope + resources.get(i).getName();
                            refreshDataInCache(element, ConstantsPortlet.RESOURCE, null,
                                    key, null);
                        }
                        break;
                    case ConstantsPortlet.RESOURCE:
                        // refresh databases
                        LinkedHashMap<String, FileModel> DBdata = (LinkedHashMap<String, FileModel>) value;
                        // refresh cache
                        refreshCache(key);
                        // apply the refresh on children
                        HashMap<String, String> DBlist = new HashMap<String, String>();
                        if (DBdata != null) {
                            Set<String> keys = DBdata.keySet();
                            Object[] array = keys.toArray();
                            // the map stores 5 descriptive entries per
                            // database; walk them in groups of 5 to pair
                            // each "Database Name" with its "Driver Name"
                            int numIterations = (DBdata.size()) / 5;
                            int i = 0;
                            int j = 0;
                            for (i = 0; i < numIterations; i++) {
                                String DBName = "";
                                for (j = (i * 5); j < (i + 1) * 5; j++) {
                                    if (array[j].toString().contains(
                                            "Database Name")) {
                                        DBName = DBdata.get(
                                                array[j].toString())
                                                .getName();
                                    }
                                    if (array[j].toString().contains(
                                            "Driver Name")) {
                                        String driver = DBdata.get(
                                                array[j].toString())
                                                .getName();
                                        // map the driver string to a DB type
                                        if (driver.toUpperCase().contains(
                                                ConstantsPortlet.POSTGRES)) {
                                            DBlist.put(
                                                    DBName,
                                                    ConstantsPortlet.POSTGRES);
                                        }
                                        if (driver.toUpperCase().contains(
                                                ConstantsPortlet.MYSQL)) {
                                            DBlist.put(DBName,
                                                    ConstantsPortlet.MYSQL);
                                        }
                                    }
                                }
                            }
                        }
                        // recurse into each discovered database
                        Set<String> keys = DBlist.keySet();
                        Object[] array = keys.toArray();
                        for (int i = 0; i < array.length; i++) {
                            String databaseType = DBlist.get(array[i]
                                    .toString());
                            String newkey = key + array[i].toString();
                            refreshDataInCache(element, ConstantsPortlet.DATABASE, null,
                                    newkey, databaseType);
                        }
                        break;
                    case ConstantsPortlet.DATABASE:
                        if (DBType == null) {
                            DBType = element.getDatabaseType();
                        }
                        // refresh schema (db postgres) or tables (db mysql)
                        List<FileModel> schemaList = (List<FileModel>) value;
                        refreshCache(key); // refresh schema
                        if (DBType.equals(ConstantsPortlet.POSTGRES)) {
                            // SCHEMA
                            for (int i = 0; i < schemaList.size(); i++) {
                                String newkey = key
                                        + schemaList.get(i).getName();
                                refreshDataInCache(element, ConstantsPortlet.SCHEMA, null,
                                        newkey, null);
                            }
                        }
                        if (DBType.equals(ConstantsPortlet.MYSQL)) {
                            // refresh tables
                            key = key + "";  // NOTE(review): no-op concatenation
                            refreshCache(key); // refresh tables
                        }
                        break;
                    case ConstantsPortlet.SCHEMA:
                        // refresh tables (db postgres)
                        refreshCache(key);
                        break;
                    }
                }
            }
        }
    } catch (Exception e) {
        // best-effort refresh: log and continue
        logger.error("dbmanager-> ", e);
    }
}
|
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
private synchronized void refreshCache(String key) throws Exception {
|
2014-10-22 19:16:37 +02:00
|
|
|
if(cacheManager.cacheExists("DBCache")){
|
2014-10-24 10:01:50 +02:00
|
|
|
logger.info("dbmanager-> disk store path for cache: " + cacheManager.getDiskStorePath() +"Cache Status: "+DBCache.getStatus().toString());
|
2014-10-22 19:16:37 +02:00
|
|
|
if (DBCache.getStatus().toString().equals(Status.STATUS_ALIVE.toString())){
|
|
|
|
DBCache.remove(key);
|
2014-10-24 10:01:50 +02:00
|
|
|
logger.trace("dbmanager-> element with key: " + key + " removed in cache");
|
2014-10-22 19:16:37 +02:00
|
|
|
}
|
|
|
|
}
|
2014-10-07 11:42:14 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
private synchronized void refreshSubmittedQueryInCache(String key)
|
|
|
|
throws Exception {
|
2014-10-22 19:16:37 +02:00
|
|
|
|
|
|
|
if(cacheManager.cacheExists("DBCache")){
|
2014-10-24 10:01:50 +02:00
|
|
|
logger.info("dbmanager-> disk store path for cache: " + cacheManager.getDiskStorePath() +"Cache Status: "+DBCache.getStatus().toString());
|
2014-10-22 19:16:37 +02:00
|
|
|
if (DBCache.getStatus().toString().equals(Status.STATUS_ALIVE.toString())){
|
2014-10-24 10:01:50 +02:00
|
|
|
// logger.info("dbmanager-> Cache Status:"+ "STATUS_ALIVE");
|
2014-10-22 19:16:37 +02:00
|
|
|
|
|
|
|
List<String> keysInCache = DBCache.getKeys();
|
|
|
|
int listSize = keysInCache.size();
|
|
|
|
List<String> keysToBeRemoved = new ArrayList<>();
|
|
|
|
|
|
|
|
// recover keys list that match the key
|
|
|
|
for (int i = 0; i < listSize; i++) {
|
|
|
|
if (keysInCache.get(i).startsWith(key)) {
|
|
|
|
// System.out.println("data removed with key: " +
|
|
|
|
// keysInCache.get(i));
|
|
|
|
keysToBeRemoved.add(keysInCache.get(i));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// remove keys
|
|
|
|
DBCache.removeAll(keysToBeRemoved);
|
2014-10-24 10:01:50 +02:00
|
|
|
logger.trace("dbmanager-> submitted queries refreshed in cache with key:"
|
|
|
|
+ key);
|
2014-10-07 11:42:14 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
private synchronized void refreshSamplingsInCache(
|
|
|
|
String keyUsedForSamplingsRefresh,
|
|
|
|
String keyUsedForSmartSamplingRefresh) throws Exception {
|
|
|
|
|
2014-10-22 19:16:37 +02:00
|
|
|
if(cacheManager.cacheExists("DBCache")){
|
2014-10-24 10:01:50 +02:00
|
|
|
logger.info("dbmanager-> disk store path for cache: " + cacheManager.getDiskStorePath() +"Cache Status: "+DBCache.getStatus().toString());
|
2014-10-22 19:16:37 +02:00
|
|
|
if (DBCache.getStatus().toString().equals(Status.STATUS_ALIVE.toString())){
|
2014-10-24 10:01:50 +02:00
|
|
|
|
2014-10-22 19:16:37 +02:00
|
|
|
List<String> keysInCache = DBCache.getKeys();
|
|
|
|
int listSize = keysInCache.size();
|
|
|
|
List<String> keysToBeRemoved = new ArrayList<>();
|
|
|
|
|
|
|
|
// recover keys list that match the key
|
|
|
|
for (int i = 0; i < listSize; i++) {
|
|
|
|
if ((keysInCache.get(i).startsWith(keyUsedForSamplingsRefresh))
|
|
|
|
|| (keysInCache.get(i)
|
|
|
|
.startsWith(keyUsedForSmartSamplingRefresh))) {
|
|
|
|
keysToBeRemoved.add(keysInCache.get(i));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// remove keys
|
|
|
|
DBCache.removeAll(keysToBeRemoved);
|
2014-10-24 10:01:50 +02:00
|
|
|
logger.trace("dbmanager-> samplings and smart sampling refreshed in cache with keys: "
|
|
|
|
+ keyUsedForSamplingsRefresh
|
|
|
|
+ " "
|
|
|
|
+ keyUsedForSmartSamplingRefresh);
|
|
|
|
}
|
|
|
|
}
|
2014-10-07 11:42:14 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
private String storeResultIntoCSVFile(List<Result> result, String n)
|
|
|
|
throws Exception {
|
|
|
|
|
|
|
|
if (result == null) {
|
|
|
|
logger.info("Error in server while loading data. object result null");
|
|
|
|
throw new Exception("Error in server while loading data");
|
|
|
|
}
|
|
|
|
|
|
|
|
// file that will contain result
|
|
|
|
BufferedWriter out = null;
|
|
|
|
|
|
|
|
String path = this.getServletContext().getRealPath("");
|
|
|
|
String fileName = "";
|
|
|
|
fileName = n + "_" + System.currentTimeMillis() + ".csv";
|
|
|
|
|
|
|
|
String filePath = path + "/computationResult/" + fileName;
|
|
|
|
File file = new File(filePath);
|
|
|
|
|
|
|
|
try {
|
|
|
|
|
|
|
|
// create the file
|
|
|
|
if (!file.exists()) {
|
|
|
|
file.createNewFile();
|
|
|
|
}
|
|
|
|
|
|
|
|
out = new BufferedWriter(new OutputStreamWriter(
|
|
|
|
new FileOutputStream(file), "UTF-8"));
|
|
|
|
|
|
|
|
// write into file
|
|
|
|
for (int i = 0; i < result.size(); i++) {
|
|
|
|
out.write(result.get(i).getValue());
|
|
|
|
out.newLine();
|
|
|
|
}
|
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("dbmanager-> ", e);
|
|
|
|
throw e;
|
|
|
|
|
|
|
|
} finally {
|
|
|
|
if (out != null) {
|
|
|
|
out.close();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return fileName;
|
|
|
|
}
|
|
|
|
|
|
|
|
// to check if the session is expired
|
|
|
|
private boolean isSessionExpired() throws Exception {
|
|
|
|
return SessionUtil.isSessionExpired(this.getThreadLocalRequest()
|
|
|
|
.getSession());
|
|
|
|
}
|
|
|
|
|
|
|
|
// to delete more space occurences in order to have only one space between
|
|
|
|
// two words in a query
|
|
|
|
private String parseQuery(String query) {
|
|
|
|
String queryParsed;
|
|
|
|
|
|
|
|
queryParsed = query.trim();
|
|
|
|
queryParsed = queryParsed.replaceAll(" +", " ");
|
|
|
|
return queryParsed;
|
|
|
|
}
|
|
|
|
|
2014-10-10 12:53:26 +02:00
|
|
|
/**
 * Sets the flag that asks the background DataLoader thread to stop.
 *
 * @param value true to request thread termination
 */
private synchronized void setEndThreadvariable(boolean value) {
    endThread = value;
    logger.info("dbmanager-> Variable EndThread set in order to stop the thread execution");
}
|
|
|
|
|
|
|
|
/**
 * @return true when the background DataLoader thread has been asked to stop
 */
private synchronized boolean getEndThreadvariable() {
    return endThread;
}
|
|
|
|
|
2014-10-10 12:53:26 +02:00
|
|
|
/**
 * Records whether the background DataLoader thread has finished executing.
 *
 * @param value true when the thread has terminated
 */
private synchronized void setThreadExecutionFinished(boolean value) {
    threadExecutionFinished = value;
}
|
|
|
|
|
|
|
|
/**
 * @return true when the background DataLoader thread is not running
 */
private synchronized boolean isThreadExecutionFinished() {
    return threadExecutionFinished;
}
|
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
/**
 * Returns the list of database resources for a scope, serving it from the
 * cache when available and otherwise running the Statistical Manager
 * GETRESOURCE algorithm and caching the outcome.
 *
 * @param scope the ASL scope used both for the computation and as part of
 *              the cache key
 * @return the resources as FileModel items (possibly empty)
 * @throws Exception wrapped server-side failure (GWT-serializable)
 */
private List<FileModel> recoverResources(String scope) throws Exception {
    try {
        logger.info("dbmanager-> Resources Recovery Request received. Starting to manage the request.");
        // data input
        List<Parameter> inputParameters = new ArrayList<Parameter>();
        // data output
        List<FileModel> outputParameters = new ArrayList<FileModel>();
        // get algorithmId
        String algorithmId = ConstantsPortlet.ALGID_GETRESOURCE;
        // -1 means: no limit on the number of resources returned
        Parameter maxNumber = new Parameter("MaxNumber", "", "String", "-1");
        inputParameters.add(maxNumber);
        // check if the value is in cache. If data does not exist in cache
        // the computation is started otherwise data are retrieved from
        // cache.
        String key = scope + ConstantsPortlet.RESOURCESLIST;
        net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
        Object value = null;
        if (dataFromCache != null) {
            value = dataFromCache.getObjectValue();
        }
        if (value != null) {
            // cache hit: reuse the cached list
            outputParameters = (List<FileModel>) value;
            cacheHitsNumber++;
            logger.info("dbmanager-> CheckDataInCache: data found in cache. cacheHitsNumber: " + cacheHitsNumber);
        } else {
            // cache miss: run the Statistical Manager computation
            smComputationNumber++;
            logger.info("dbmanager-> CheckDataInCache: data not found in cache. Starting the Statistical Computation. smComputationNumber: " + smComputationNumber);
            // create data structure for data output
            ComputationOutput outputData = new ComputationOutput();
            // computationId
            String computationId = startComputation(algorithmId,
                    inputParameters, outputData, scope);
            // data output: values are indexed by their position as strings
            LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
            mapValues = outputData.getMapValues();
            for (int i = 0; i < mapValues.size(); i++) {
                FileModel obj = new FileModel(mapValues.get(String
                        .valueOf(i)));
                outputParameters.add(obj);
            }
            if (outputParameters != null && outputParameters.size() != 0) {
                // put data in cache
                net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
                        key, outputParameters);
                insertDataIntoCache(dataToCache);
            }
        }
        return outputParameters;
    } catch (Exception e) {
        logger.error("dbmanager-> ", e);
        if (!(e instanceof StatisticalManagerException)) {
            // GWT can't serialize all exceptions
            throw new Exception(
                    "Error in server while loading data. Exception: " + e);
        }
        throw e;
    }
}
|
|
|
|
|
|
|
|
/**
 * Returns the database descriptors of a resource, serving them from the
 * cache when available and otherwise running the Statistical Manager
 * GETDBINFO algorithm and caching the outcome.
 *
 * @param scope        the ASL scope, used for the computation and cache key
 * @param resourceName the resource whose databases are requested; must be
 *                     non-null and non-empty
 * @return map from descriptor key to FileModel (insertion-ordered)
 * @throws Exception when resourceName is missing or the computation fails
 */
private LinkedHashMap<String, FileModel> recoverDatabases(String scope,
        String resourceName) throws Exception {
    try {
        logger.info("dbmanager-> Databases Recovery Request received. Starting to manage the request.");
        // data input
        List<Parameter> inputParameters = new ArrayList<Parameter>();
        // data output
        LinkedHashMap<String, FileModel> outputParameters = new LinkedHashMap<String, FileModel>();
        String algorithmId = ConstantsPortlet.ALGID_GETDBINFO;
        // print check
        logger.info("dbmanager-> ResourceName: " + resourceName);
        if ((resourceName == null) || (resourceName.equals(""))) {
            throw new Exception("Unable to load data");
        }
        Parameter resource = new Parameter("ResourceName", "", "String", "");
        inputParameters.add(resource);
        inputParameters.get(0).setValue(resourceName);
        // get data from cache
        // check if data exist considering as key the input parameters
        String key = scope + inputParameters.get(0).getValue();
        net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
        Object value = null;
        if (dataFromCache != null) {
            value = dataFromCache.getObjectValue();
        }
        if (value != null) {
            // cache hit: reuse the cached map
            outputParameters = (LinkedHashMap<String, FileModel>) value;
            cacheHitsNumber++;
            logger.info("dbmanager-> CheckDataInCache: data found in cache. cacheHitsNumber: " + cacheHitsNumber);
        } else {
            // cache miss: run the Statistical Manager computation
            smComputationNumber++;
            logger.info("dbmanager-> CheckDataInCache: data not found in cache. Starting the Statistical Computation. smComputationNumber: " + smComputationNumber);
            // create data structure
            ComputationOutput outputData = new ComputationOutput();
            // computation id
            String computationId = startComputation(algorithmId,
                    inputParameters, outputData, scope);
            // data output values
            LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
            // data output keys
            LinkedHashMap<String, String> mapKeys = new LinkedHashMap<String, String>();
            mapValues = outputData.getMapValues();
            mapKeys = outputData.getmapKeys();
            // values and keys are aligned by their stringified index
            for (int i = 0; i < mapValues.size(); i++) {
                FileModel obj = new FileModel(mapValues.get(String
                        .valueOf(i)));
                outputParameters.put(mapKeys.get(String.valueOf(i)), obj);
            }
            // write data in cache
            if (outputParameters != null && outputParameters.size() != 0) {
                // put data in cache
                net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
                        key, outputParameters);
                insertDataIntoCache(dataToCache);
            }
        }
        return outputParameters;
    } catch (Exception e) {
        logger.error("dbmanager-> ", e);
        if (!(e instanceof StatisticalManagerException)) {
            // GWT can't serialize all exceptions
            throw new Exception(
                    "Error in server while loading data. Exception: " + e);
        }
        throw e;
    }
}
|
|
|
|
|
|
|
|
private List<FileModel> recoverSchema(String scope,
|
|
|
|
LinkedHashMap<String, String> dataInput) throws Exception {
|
|
|
|
|
|
|
|
try {
|
2014-10-10 18:38:39 +02:00
|
|
|
logger.info("dbmanager-> Schema Recovery Request received. Starting to manage the request.");
|
2014-10-07 11:42:14 +02:00
|
|
|
// data input
|
|
|
|
List<Parameter> inputParameters = new ArrayList<Parameter>();
|
|
|
|
// data output
|
|
|
|
List<FileModel> outputParameters = new ArrayList<FileModel>();
|
|
|
|
|
|
|
|
String algorithmId = ConstantsPortlet.ALGID_GETDBSCHEMA;
|
|
|
|
// print check
|
|
|
|
String rs = dataInput.get("ResourceName");
|
|
|
|
String db = dataInput.get("DatabaseName");
|
|
|
|
|
|
|
|
logger.info("dbmanager-> ResourceName: " + rs);
|
|
|
|
logger.info("dbmanager-> DatabaseName: " + db);
|
|
|
|
|
|
|
|
if ((rs == null) || (rs.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((db == null) || (db.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
|
|
|
|
// set input parameters
|
|
|
|
Parameter resource = new Parameter("ResourceName", "", "String", "");
|
|
|
|
Parameter database = new Parameter("DatabaseName", "", "String", "");
|
|
|
|
inputParameters.add(resource);
|
|
|
|
inputParameters.add(database);
|
|
|
|
|
|
|
|
inputParameters.get(0).setValue(rs);
|
|
|
|
inputParameters.get(1).setValue(db);
|
|
|
|
|
|
|
|
// print check algorithm input parameters
|
|
|
|
// for (int i = 0; i < inputParameters.size(); i++) {
|
|
|
|
// logger.info(inputParameters.get(i).getName());
|
|
|
|
// }
|
|
|
|
|
|
|
|
// get data from cache
|
|
|
|
// check if data exist considering as key the input parameters
|
2014-10-07 15:20:16 +02:00
|
|
|
String key = scope + inputParameters.get(0).getValue()
|
2014-10-07 11:42:14 +02:00
|
|
|
+ inputParameters.get(1).getValue();
|
|
|
|
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
|
2014-10-10 18:38:39 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
Object value = null;
|
|
|
|
if (dataFromCache != null) {
|
|
|
|
value = dataFromCache.getObjectValue();
|
|
|
|
}
|
|
|
|
if (value != null) {
|
|
|
|
outputParameters = (List<FileModel>) value;
|
2014-10-24 14:16:09 +02:00
|
|
|
cacheHitsNumber++;
|
|
|
|
logger.info("dbmanager-> CheckDataInCache: data found in cache. cacheHitsNumber: " + cacheHitsNumber);
|
2014-10-07 11:42:14 +02:00
|
|
|
} else {
|
2014-10-24 14:16:09 +02:00
|
|
|
smComputationNumber++;
|
|
|
|
logger.info("dbmanager-> CheckDataInCache: data not found in cache. Starting the Statistical Computation. smComputationNumber: " + smComputationNumber);
|
2014-10-07 11:42:14 +02:00
|
|
|
|
|
|
|
// start the computation
|
|
|
|
// create data structure
|
|
|
|
ComputationOutput outputData = new ComputationOutput();
|
|
|
|
// computation id
|
|
|
|
String computationId = startComputation(algorithmId,
|
|
|
|
inputParameters, outputData, scope);
|
|
|
|
|
|
|
|
// print check
|
|
|
|
// retrieve data
|
|
|
|
// logger.info("dbmanager-> output data retrieved");
|
|
|
|
|
|
|
|
// data output values
|
|
|
|
LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
|
|
|
|
// data output keys
|
|
|
|
LinkedHashMap<String, String> mapKeys = new LinkedHashMap<String, String>();
|
|
|
|
|
|
|
|
mapValues = outputData.getMapValues();
|
|
|
|
mapKeys = outputData.getmapKeys();
|
|
|
|
|
|
|
|
for (int i = 0; i < mapValues.size(); i++) {
|
|
|
|
FileModel obj = new FileModel(mapValues.get(String
|
|
|
|
.valueOf(i)));
|
|
|
|
// obj.setIsSchema(true);
|
|
|
|
// obj.setIsLoaded(true);
|
|
|
|
outputParameters.add(obj);
|
|
|
|
}
|
|
|
|
|
|
|
|
// write data in cache
|
|
|
|
if (outputParameters != null && outputParameters.size() != 0) {
|
|
|
|
// put data in cache
|
2014-10-07 15:20:16 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
|
2014-10-07 15:20:16 +02:00
|
|
|
key, outputParameters);
|
2014-10-07 11:42:14 +02:00
|
|
|
|
|
|
|
insertDataIntoCache(dataToCache);
|
2014-10-24 10:01:50 +02:00
|
|
|
// logger.trace("dbmanager-> element added in cache with key: " + key);
|
2014-10-07 11:42:14 +02:00
|
|
|
// DBCache.put(dataToCache);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return outputParameters;
|
|
|
|
} catch (Exception e) {
|
|
|
|
// e.printStackTrace();
|
|
|
|
// throw new Exception("Failed to load data. " + e);
|
|
|
|
logger.error("dbmanager-> ", e);
|
|
|
|
if (!(e instanceof StatisticalManagerException)) {
|
|
|
|
// GWT can't serialize all exceptions
|
|
|
|
throw new Exception(
|
|
|
|
"Error in server while loading data. Exception: " + e);
|
|
|
|
}
|
|
|
|
throw e;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
private List<Result> recoverTables(String scope,
|
|
|
|
LinkedHashMap<String, String> dataInput, String elementType)
|
|
|
|
throws Exception {
|
|
|
|
try {
|
|
|
|
|
2014-10-10 18:38:39 +02:00
|
|
|
logger.info("dbmanager-> Tables Recovery Request received. Starting to manage the request.");
|
2014-10-07 11:42:14 +02:00
|
|
|
// data input
|
|
|
|
List<Parameter> inputParameters = new ArrayList<Parameter>();
|
|
|
|
// data output
|
|
|
|
List<Result> outputParameters = new ArrayList<Result>();
|
|
|
|
|
|
|
|
String algorithmId = ConstantsPortlet.ALGID_GETTABLES;
|
|
|
|
|
|
|
|
String rs = dataInput.get("ResourceName");
|
|
|
|
String db = dataInput.get("DatabaseName");
|
|
|
|
String scm = dataInput.get("SchemaName");
|
|
|
|
|
|
|
|
// print check
|
|
|
|
logger.info("dbmanager-> ResourceName: " + rs);
|
|
|
|
logger.info("dbmanager-> DatabaseName: " + db);
|
|
|
|
logger.info("dbmanager-> SchemaName: " + scm);
|
2014-10-02 11:57:21 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
if ((elementType != null)
|
|
|
|
&& (elementType.equals(ConstantsPortlet.SCHEMA))) {
|
|
|
|
if ((rs == null) || (rs.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((db == null) || (db.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((scm == null) || (scm.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if ((elementType != null)
|
|
|
|
&& (elementType.equals(ConstantsPortlet.DATABASE))) {
|
|
|
|
if ((rs == null) || (rs.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
if ((db == null) || (db.equals(""))) {
|
|
|
|
throw new Exception("Unable to load data");
|
|
|
|
}
|
|
|
|
}
|
2014-10-02 11:57:21 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
// set input parameters
|
|
|
|
Parameter resource = new Parameter("ResourceName", "", "String", "");
|
|
|
|
Parameter database = new Parameter("DatabaseName", "", "String", "");
|
|
|
|
Parameter schema = new Parameter("SchemaName", "", "String", "");
|
|
|
|
inputParameters.add(resource);
|
|
|
|
inputParameters.add(database);
|
|
|
|
inputParameters.add(schema);
|
2014-10-02 12:27:08 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
inputParameters.get(0).setValue(rs);
|
|
|
|
inputParameters.get(1).setValue(db);
|
|
|
|
inputParameters.get(2).setValue(scm);
|
2014-10-02 11:57:21 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
// get data from cache
|
|
|
|
// check if data exist considering as key the input parameters
|
2014-10-07 15:20:16 +02:00
|
|
|
String key = scope + inputParameters.get(0).getValue()
|
2014-10-07 11:42:14 +02:00
|
|
|
+ inputParameters.get(1).getValue()
|
|
|
|
+ inputParameters.get(2).getValue();
|
|
|
|
net.sf.ehcache.Element dataFromCache = getDataFromCache(key);
|
2014-10-10 18:38:39 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
Object value = null;
|
|
|
|
if (dataFromCache != null) {
|
|
|
|
value = dataFromCache.getObjectValue();
|
|
|
|
// System.out.println("***GETTING DATA FROM CACHE");
|
|
|
|
}
|
|
|
|
if (value != null) {
|
|
|
|
outputParameters = (List<Result>) value;
|
2014-10-24 14:16:09 +02:00
|
|
|
cacheHitsNumber++;
|
|
|
|
logger.info("dbmanager-> CheckDataInCache: data found in cache. cacheHitsNumber: " + cacheHitsNumber);
|
2014-10-02 12:27:08 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
} else {
|
2014-10-02 12:27:08 +02:00
|
|
|
|
2014-10-24 14:16:09 +02:00
|
|
|
smComputationNumber++;
|
|
|
|
logger.info("dbmanager-> CheckDataInCache: data not found in cache. Starting the Statistical Computation. smComputationNumber: " + smComputationNumber);
|
2014-10-07 11:42:14 +02:00
|
|
|
// start computation
|
|
|
|
// create data structure
|
|
|
|
ComputationOutput outputData = new ComputationOutput();
|
|
|
|
// computation id
|
|
|
|
String computationId = startComputation(algorithmId,
|
|
|
|
inputParameters, outputData, scope);
|
2014-09-12 14:05:22 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
// print check on retrieving data
|
|
|
|
// logger.info("output data retrieved");
|
2014-09-29 18:07:58 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
// data output values
|
|
|
|
LinkedHashMap<String, String> mapValues = new LinkedHashMap<String, String>();
|
|
|
|
// data output keys
|
|
|
|
LinkedHashMap<String, String> mapKeys = new LinkedHashMap<String, String>();
|
2014-09-29 18:07:58 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
mapValues = outputData.getMapValues();
|
|
|
|
mapKeys = outputData.getmapKeys();
|
2014-09-29 18:07:58 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
for (int i = 0; i < mapValues.size(); i++) {
|
|
|
|
Result row = new Result(String.valueOf(i),
|
|
|
|
mapValues.get(String.valueOf(i)));
|
|
|
|
outputParameters.add(row);
|
|
|
|
}
|
2014-10-02 14:53:13 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
// write data in cache
|
|
|
|
if (outputParameters != null && outputParameters.size() != 0) {
|
|
|
|
// put data in cache
|
|
|
|
net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element(
|
|
|
|
key, outputParameters);
|
2014-09-29 18:07:58 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
insertDataIntoCache(dataToCache);
|
2014-10-24 10:01:50 +02:00
|
|
|
// logger.trace("dbmanager-> element added in cache with key: " + key);
|
2014-09-29 18:07:58 +02:00
|
|
|
}
|
2014-10-07 11:42:14 +02:00
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
}
|
2014-10-07 11:42:14 +02:00
|
|
|
return outputParameters;
|
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
} catch (Exception e) {
|
2014-10-07 11:42:14 +02:00
|
|
|
// e.printStackTrace();
|
|
|
|
// throw new Exception("Failed to load data. " + e);
|
|
|
|
// logger.error("dbmanager-> ", e);
|
|
|
|
|
2014-09-29 18:07:58 +02:00
|
|
|
if (!(e instanceof StatisticalManagerException)) {
|
|
|
|
// GWT can't serialize all exceptions
|
|
|
|
throw new Exception(
|
|
|
|
"Error in server while loading data. Exception: " + e);
|
|
|
|
}
|
|
|
|
throw e;
|
2014-09-12 14:05:22 +02:00
|
|
|
}
|
2014-10-07 11:42:14 +02:00
|
|
|
|
2014-09-10 14:31:20 +02:00
|
|
|
}
|
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
/**
 * Records whether the data-loader thread has been started for the given
 * scope. Synchronized because the map is shared with the loader thread
 * (see {@link #getThreadStarted(String)}).
 *
 * @param scope the gCube scope used as map key
 * @param value {@code true} when a loader thread has been started for the
 *              scope
 */
private synchronized void updateThreadsStarted(String scope, Boolean value) {
	threadsStarted.put(scope, value);
}
|
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
private synchronized Boolean getThreadStarted(String scope) {
|
|
|
|
Boolean value = threadsStarted.get(scope);
|
|
|
|
return value;
|
|
|
|
}
|
2014-10-02 11:57:21 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
// thread that loads data on the resources
|
2014-10-10 12:53:26 +02:00
|
|
|
private class ThreadDataLoader implements Runnable {
|
2014-10-02 11:57:21 +02:00
|
|
|
|
2014-10-10 12:53:26 +02:00
|
|
|
public ThreadDataLoader() {
|
2014-10-02 11:57:21 +02:00
|
|
|
}
|
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
@Override
|
|
|
|
public void run() {
|
2014-10-10 12:53:26 +02:00
|
|
|
logger.info("dbmanager-> Thread DataLoader running");
|
2014-10-07 11:42:14 +02:00
|
|
|
try {
|
2014-10-02 12:27:08 +02:00
|
|
|
|
2014-10-10 12:53:26 +02:00
|
|
|
setThreadExecutionFinished(false);
|
|
|
|
|
|
|
|
while (!queue.isEmpty()) {
|
|
|
|
|
|
|
|
logger.info("dbmanager-> Queue to exchange data with the thread not empty");
|
|
|
|
|
|
|
|
// recover data from queue
|
|
|
|
DataExchangedThroughQueue node = queue.poll();
|
|
|
|
if (node != null) {
|
|
|
|
String scope = node.getScope();
|
|
|
|
// add an element related to the thread in the hashmap
|
|
|
|
updateThreadsStarted(scope, true);
|
|
|
|
boolean loadTree = node.treeToBeLoaded();
|
|
|
|
|
|
|
|
// System.out.println("value loadTree: " + loadTree);
|
|
|
|
if (loadTree == false) { // load the subtree with the
|
|
|
|
// node as root
|
|
|
|
if (!getEndThreadvariable()) {
|
|
|
|
String elementType = node.elementType();
|
|
|
|
String resource = node.resource();
|
|
|
|
|
|
|
|
logger.info("dbmanager-> Starting the node refreshing process");
|
|
|
|
|
|
|
|
switch (elementType) {
|
2014-10-10 18:38:39 +02:00
|
|
|
case ConstantsPortlet.RESOURCE:
|
2014-10-10 12:53:26 +02:00
|
|
|
getDatabase(scope, resource);
|
|
|
|
break;
|
2014-10-10 18:38:39 +02:00
|
|
|
case ConstantsPortlet.DATABASE:
|
2014-10-10 12:53:26 +02:00
|
|
|
String DBType = node.DBType();
|
|
|
|
String database = node.database();
|
|
|
|
if (DBType
|
|
|
|
.equals(ConstantsPortlet.POSTGRES)) {
|
|
|
|
getSchema(scope, resource, database);
|
|
|
|
}
|
|
|
|
if (DBType.equals(ConstantsPortlet.MYSQL)) {
|
|
|
|
String schema = node.schema();
|
|
|
|
getTables(scope, resource, database,
|
|
|
|
schema,
|
|
|
|
ConstantsPortlet.DATABASE);
|
|
|
|
}
|
|
|
|
break;
|
2014-10-10 18:38:39 +02:00
|
|
|
case ConstantsPortlet.SCHEMA:
|
2014-10-10 12:53:26 +02:00
|
|
|
String db = node.database();
|
|
|
|
String schema = node.schema();
|
|
|
|
getTables(scope, resource, db, schema,
|
|
|
|
ConstantsPortlet.SCHEMA);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
2014-10-02 11:57:21 +02:00
|
|
|
|
2014-10-10 12:53:26 +02:00
|
|
|
} else { // load the tree
|
|
|
|
logger.info("dbmanager-> Starting the tree loading");
|
2014-10-07 11:42:14 +02:00
|
|
|
|
2014-10-10 12:53:26 +02:00
|
|
|
if (!getEndThreadvariable()) {
|
|
|
|
List<FileModel> resources = recoverResources(scope);
|
|
|
|
|
|
|
|
int i = 0;
|
|
|
|
while ((!getEndThreadvariable())
|
|
|
|
&& (i < resources.size())) {
|
|
|
|
getDatabase(scope, resources.get(i)
|
|
|
|
.getName());
|
|
|
|
i++;
|
|
|
|
// logger.info("dbmanager-> ***thread inside the while checking the EndThread variable");
|
|
|
|
}
|
|
|
|
// logger.info("dbmanager-> ***thread outside the while checking the EndThread variable");
|
|
|
|
}
|
|
|
|
}
|
2014-10-07 11:42:14 +02:00
|
|
|
}
|
|
|
|
}
|
2014-10-21 10:29:45 +02:00
|
|
|
} catch (Throwable e) {
|
|
|
|
logger.error("dbmanager-> ", e);
|
|
|
|
}finally{
|
2014-10-10 12:53:26 +02:00
|
|
|
// thread terminates its execution
|
|
|
|
setThreadExecutionFinished(true);
|
|
|
|
logger.info("dbmanager-> Thread DataLoader execution terminated");
|
2014-10-02 11:57:21 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-10-10 12:53:26 +02:00
|
|
|
private void getDatabase(String scope, String resourceName) {
|
2014-09-19 17:36:36 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
try {
|
2014-10-01 15:38:16 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
LinkedHashMap<String, FileModel> DBdata = recoverDatabases(
|
|
|
|
scope, resourceName);
|
|
|
|
if (DBdata != null) {
|
|
|
|
Set<String> keys = DBdata.keySet();
|
|
|
|
Object[] array = keys.toArray();
|
2014-09-19 17:36:36 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
int numIterations = (DBdata.size()) / 5;
|
|
|
|
int i = 0;
|
|
|
|
int j = 0;
|
|
|
|
for (i = 0; i < numIterations; i++) {
|
|
|
|
// String DBName = "";
|
|
|
|
// for (j = (i * 5); j < (i + 1) * 5; j++) {
|
|
|
|
String DBName = "";
|
|
|
|
j = (i * 5);
|
|
|
|
while ((!getEndThreadvariable()) && (j < ((i + 1) * 5))) {
|
2014-09-19 17:36:36 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
if (array[j].toString().contains("Database Name")) {
|
|
|
|
DBName = DBdata.get(array[j].toString())
|
|
|
|
.getName();
|
|
|
|
}
|
2014-09-19 17:36:36 +02:00
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
if (array[j].toString().contains("Driver Name")) {
|
|
|
|
String driver = DBdata.get(array[j].toString())
|
|
|
|
.getName();
|
|
|
|
|
|
|
|
if (driver.toUpperCase().contains(
|
|
|
|
ConstantsPortlet.POSTGRES)) {
|
|
|
|
// get schema
|
|
|
|
List<FileModel> schemaList = getSchema(
|
2014-10-10 12:53:26 +02:00
|
|
|
scope, resourceName, DBName);
|
2014-10-07 11:42:14 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
if (driver.toUpperCase().contains(
|
|
|
|
ConstantsPortlet.MYSQL)) {
|
|
|
|
// get tables
|
2014-10-10 12:53:26 +02:00
|
|
|
getTables(scope, resourceName, DBName, "",
|
2014-10-07 11:42:14 +02:00
|
|
|
ConstantsPortlet.DATABASE);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
j++;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("dbmanager-> ", e);
|
2014-09-19 17:36:36 +02:00
|
|
|
}
|
2014-10-07 11:42:14 +02:00
|
|
|
}
|
2014-09-19 17:36:36 +02:00
|
|
|
|
2014-10-10 12:53:26 +02:00
|
|
|
private List<FileModel> getSchema(String scope, String resourceName,
|
2014-10-07 11:42:14 +02:00
|
|
|
String databaseName) {
|
|
|
|
|
|
|
|
List<FileModel> schemaList = null;
|
|
|
|
try {
|
|
|
|
|
|
|
|
LinkedHashMap<String, String> dataInputForSchema = new LinkedHashMap<String, String>();
|
|
|
|
dataInputForSchema.put("ResourceName", resourceName);
|
|
|
|
dataInputForSchema.put("DatabaseName", databaseName);
|
|
|
|
|
|
|
|
schemaList = recoverSchema(scope, dataInputForSchema);
|
2014-10-10 12:53:26 +02:00
|
|
|
|
|
|
|
// recover tables
|
|
|
|
if (schemaList != null) {
|
|
|
|
int z = 0;
|
|
|
|
while ((!getEndThreadvariable()) && (z < schemaList.size())) {
|
|
|
|
// for (int i = 0; i <
|
|
|
|
// schemaList.size(); i++) {
|
|
|
|
String schemaName = schemaList.get(z).getName();
|
|
|
|
getTables(scope, resourceName, databaseName,
|
|
|
|
schemaName, ConstantsPortlet.SCHEMA);
|
|
|
|
z++;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("dbmanager-> ", e);
|
|
|
|
}
|
|
|
|
return schemaList;
|
|
|
|
}
|
2014-09-19 17:36:36 +02:00
|
|
|
|
2014-10-10 12:53:26 +02:00
|
|
|
private void getTables(String scope, String resourceName,
|
|
|
|
String databaseName, String schemaName, String elementType) {
|
|
|
|
|
2014-10-07 11:42:14 +02:00
|
|
|
try {
|
|
|
|
LinkedHashMap<String, String> dataInputForTables = new LinkedHashMap<String, String>();
|
|
|
|
dataInputForTables.put("ResourceName", resourceName);
|
|
|
|
dataInputForTables.put("DatabaseName", databaseName);
|
|
|
|
dataInputForTables.put("SchemaName", schemaName);
|
|
|
|
recoverTables(scope, dataInputForTables,
|
|
|
|
ConstantsPortlet.DATABASE);
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("dbmanager-> ", e);
|
2014-09-19 17:36:36 +02:00
|
|
|
}
|
2014-10-07 11:42:14 +02:00
|
|
|
}
|
2014-10-01 15:38:16 +02:00
|
|
|
}
|
2014-07-02 12:57:14 +02:00
|
|
|
}
|