From 538613113038422a3de060d147dfcae1364370e8 Mon Sep 17 00:00:00 2001 From: Loredana Liccardo Date: Fri, 12 Sep 2014 12:05:22 +0000 Subject: [PATCH] - ehcache caching mechanism added for the information in the tree model (the disk cache is created in the Tomcat temp directory) - for submitquery and samplings, the message "Result not available" changed to "the table has 0 rows or the query returned 0 values" - cache directory now created correctly and the ehcache dependency set to provided in the pom file - servlet computation made faster by removing, for each algorithm, the retrieval of the algorithm's name and input parameters; they are now set within each algorithm's method git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/portlets/user/databases-manager-portlet@99777 82a268e6-3cf1-43bd-a215-b396298e98cf --- distro/changelog.xml | 8 +- .../client/GWTdbManagerService.java | 5 + .../client/GWTdbManagerServiceAsync.java | 4 + .../client/datamodel/FileModel.java | 9 + .../datamodel/GeneralOutputFromServlet.java | 36 + .../client/events/EventsTypeEnum.java | 3 +- .../client/events/RefreshDataEvent.java | 25 + .../interfaces/RefreshDataEventHandler.java | 9 + .../client/panels/GxtBorderLayoutPanel.java | 30 +- .../client/panels/GxtTreePanel.java | 290 ++- .../toolbar/GxtToolbarFunctionality.java | 97 +- .../client/utils/ConstantsPortlet.java | 1 + .../server/GWTdbManagerServiceImpl.java | 1804 ++++++++++------- src/main/resources/encache.xml | 2 +- 14 files changed, 1566 insertions(+), 757 deletions(-) create mode 100644 src/main/java/org/gcube/portlets/user/databasesmanager/client/datamodel/GeneralOutputFromServlet.java create mode 100644 src/main/java/org/gcube/portlets/user/databasesmanager/client/events/RefreshDataEvent.java create mode 100644 src/main/java/org/gcube/portlets/user/databasesmanager/client/events/interfaces/RefreshDataEventHandler.java diff --git a/distro/changelog.xml b/distro/changelog.xml index d212e12..170fcc4 100644 --- a/distro/changelog.xml +++ b/distro/changelog.xml @@ -3,5 +3,11 @@ date="2014-06-30"> first release - + + first release + first release + first release + first release + \ No newline at end of file diff --git a/src/main/java/org/gcube/portlets/user/databasesmanager/client/GWTdbManagerService.java b/src/main/java/org/gcube/portlets/user/databasesmanager/client/GWTdbManagerService.java index dd964d7..e6cbeb7 100644 --- a/src/main/java/org/gcube/portlets/user/databasesmanager/client/GWTdbManagerService.java +++ b/src/main/java/org/gcube/portlets/user/databasesmanager/client/GWTdbManagerService.java @@ -4,6 +4,7 @@ import java.util.LinkedHashMap; import java.util.List; import org.gcube.portlets.user.databasesmanager.client.datamodel.FileModel; +import org.gcube.portlets.user.databasesmanager.client.datamodel.GeneralOutputFromServlet; import org.gcube.portlets.user.databasesmanager.client.datamodel.Result; import org.gcube.portlets.user.databasesmanager.client.datamodel.Row; import com.extjs.gxt.ui.client.data.PagingLoadConfig; @@ -53,4 +54,8 @@ public interface GWTdbManagerService extends RemoteService { Boolean removeComputation(String submitQueryUID) throws Exception; void refreshDataOnServer(String submitQueryUID); + + GeneralOutputFromServlet refreshDataTree(String ElementType, + LinkedHashMap inputData, FileModel element) throws Exception; + } diff --git a/src/main/java/org/gcube/portlets/user/databasesmanager/client/GWTdbManagerServiceAsync.java 
b/src/main/java/org/gcube/portlets/user/databasesmanager/client/GWTdbManagerServiceAsync.java index ee0d2a0..df9a05e 100644 --- a/src/main/java/org/gcube/portlets/user/databasesmanager/client/GWTdbManagerServiceAsync.java +++ b/src/main/java/org/gcube/portlets/user/databasesmanager/client/GWTdbManagerServiceAsync.java @@ -4,6 +4,7 @@ import java.util.LinkedHashMap; import java.util.List; import org.gcube.portlets.user.databasesmanager.client.datamodel.FileModel; +import org.gcube.portlets.user.databasesmanager.client.datamodel.GeneralOutputFromServlet; import org.gcube.portlets.user.databasesmanager.client.datamodel.Result; import org.gcube.portlets.user.databasesmanager.client.datamodel.Row; import com.extjs.gxt.ui.client.data.PagingLoadConfig; @@ -51,4 +52,7 @@ public interface GWTdbManagerServiceAsync { AsyncCallback> callback); void refreshDataOnServer(String submitQueryUID, AsyncCallback callback); + + void refreshDataTree(String ElementType,LinkedHashMap inputData, FileModel element, + AsyncCallback callback); } diff --git a/src/main/java/org/gcube/portlets/user/databasesmanager/client/datamodel/FileModel.java b/src/main/java/org/gcube/portlets/user/databasesmanager/client/datamodel/FileModel.java index 34c3b01..ec0800d 100644 --- a/src/main/java/org/gcube/portlets/user/databasesmanager/client/datamodel/FileModel.java +++ b/src/main/java/org/gcube/portlets/user/databasesmanager/client/datamodel/FileModel.java @@ -38,6 +38,8 @@ public class FileModel extends BaseModelData implements Serializable { // resource data private String ResourceName = null; private String DatabaseName = null; + //flag data cached +// private boolean isDataCached = false; public FileModel() { setId(); @@ -96,6 +98,13 @@ public class FileModel extends BaseModelData implements Serializable { // public boolean isSchema() { // return get("isSchema"); // } + + // public void setIsDataCached(boolean value){ + // set("isDataCached", value); + // } + // public boolean IsDataCached(){ + // return get("isDataCached"); + // } // set if the object is a schema public void setIsSchema(boolean value) { diff --git a/src/main/java/org/gcube/portlets/user/databasesmanager/client/datamodel/GeneralOutputFromServlet.java b/src/main/java/org/gcube/portlets/user/databasesmanager/client/datamodel/GeneralOutputFromServlet.java new file mode 100644 index 0000000..3d44cf0 --- /dev/null +++ b/src/main/java/org/gcube/portlets/user/databasesmanager/client/datamodel/GeneralOutputFromServlet.java @@ -0,0 +1,36 @@ +package org.gcube.portlets.user.databasesmanager.client.datamodel; + +import java.io.Serializable; +import java.util.LinkedHashMap; +import java.util.List; + +//class that allows to wrap output generated by the servlet +import com.extjs.gxt.ui.client.data.BaseModelData; + +public class GeneralOutputFromServlet extends BaseModelData implements + Serializable { + + private static final long serialVersionUID = 1L; + + private List listOutput; + private LinkedHashMap mapOutput; + + public GeneralOutputFromServlet() { + + } + + public GeneralOutputFromServlet(List data) { + set("listOutput", data); + } + + public GeneralOutputFromServlet(LinkedHashMap data) { + set("mapOutput", data); + } + + public List getListOutput(){ + return get("listOutput"); + } + public LinkedHashMap getMapOutput(){ + return get("mapOutput"); + } +} diff --git a/src/main/java/org/gcube/portlets/user/databasesmanager/client/events/EventsTypeEnum.java b/src/main/java/org/gcube/portlets/user/databasesmanager/client/events/EventsTypeEnum.java index 514014b..33c653d 
100644 --- a/src/main/java/org/gcube/portlets/user/databasesmanager/client/events/EventsTypeEnum.java +++ b/src/main/java/org/gcube/portlets/user/databasesmanager/client/events/EventsTypeEnum.java @@ -9,5 +9,6 @@ public enum EventsTypeEnum { RANDOM_SAMPLING_EVENT, LOAD_TABLES_EVENT, SELECTED_TABLE_EVENT, - CANCEL_EXECUTION_QUERY; + CANCEL_EXECUTION_QUERY, + REFRESH_DATA; } \ No newline at end of file diff --git a/src/main/java/org/gcube/portlets/user/databasesmanager/client/events/RefreshDataEvent.java b/src/main/java/org/gcube/portlets/user/databasesmanager/client/events/RefreshDataEvent.java new file mode 100644 index 0000000..62745cf --- /dev/null +++ b/src/main/java/org/gcube/portlets/user/databasesmanager/client/events/RefreshDataEvent.java @@ -0,0 +1,25 @@ +package org.gcube.portlets.user.databasesmanager.client.events; + +import org.gcube.portlets.user.databasesmanager.client.events.interfaces.RefreshDataEventHandler; +import com.google.gwt.event.shared.GwtEvent; + + +public class RefreshDataEvent extends GwtEvent { + + public static Type TYPE = new Type(); + + @Override + public Type getAssociatedType() { + return TYPE; + } + + @Override + protected void dispatch(RefreshDataEventHandler handler) { + handler.onRefreshData(this); + } + + public EventsTypeEnum getKey() { + return EventsTypeEnum.REFRESH_DATA; + } + +} diff --git a/src/main/java/org/gcube/portlets/user/databasesmanager/client/events/interfaces/RefreshDataEventHandler.java b/src/main/java/org/gcube/portlets/user/databasesmanager/client/events/interfaces/RefreshDataEventHandler.java new file mode 100644 index 0000000..9a8636f --- /dev/null +++ b/src/main/java/org/gcube/portlets/user/databasesmanager/client/events/interfaces/RefreshDataEventHandler.java @@ -0,0 +1,9 @@ +package org.gcube.portlets.user.databasesmanager.client.events.interfaces; + +import org.gcube.portlets.user.databasesmanager.client.events.RefreshDataEvent; + +import com.google.gwt.event.shared.EventHandler; + +public interface RefreshDataEventHandler extends EventHandler { + public void onRefreshData(RefreshDataEvent refreshDataEvent); +} diff --git a/src/main/java/org/gcube/portlets/user/databasesmanager/client/panels/GxtBorderLayoutPanel.java b/src/main/java/org/gcube/portlets/user/databasesmanager/client/panels/GxtBorderLayoutPanel.java index e268e15..dd477fb 100644 --- a/src/main/java/org/gcube/portlets/user/databasesmanager/client/panels/GxtBorderLayoutPanel.java +++ b/src/main/java/org/gcube/portlets/user/databasesmanager/client/panels/GxtBorderLayoutPanel.java @@ -14,6 +14,7 @@ import org.gcube.portlets.user.databasesmanager.client.datamodel.Row; import org.gcube.portlets.user.databasesmanager.client.datamodel.SubmitQueryData; import org.gcube.portlets.user.databasesmanager.client.events.LoadTablesEvent; import org.gcube.portlets.user.databasesmanager.client.events.RandomSamplingEvent; +import org.gcube.portlets.user.databasesmanager.client.events.RefreshDataEvent; import org.gcube.portlets.user.databasesmanager.client.events.SamplingEvent; import org.gcube.portlets.user.databasesmanager.client.events.SelectedItemEvent; import org.gcube.portlets.user.databasesmanager.client.events.SelectedTableEvent; @@ -22,6 +23,7 @@ import org.gcube.portlets.user.databasesmanager.client.events.SmartSamplingEvent import org.gcube.portlets.user.databasesmanager.client.events.SubmitQueryEvent; import org.gcube.portlets.user.databasesmanager.client.events.interfaces.LoadTablesEventHandler; import 
org.gcube.portlets.user.databasesmanager.client.events.interfaces.RandomSamplingEventHandler; +import org.gcube.portlets.user.databasesmanager.client.events.interfaces.RefreshDataEventHandler; import org.gcube.portlets.user.databasesmanager.client.events.interfaces.SamplingEventHandler; import org.gcube.portlets.user.databasesmanager.client.events.interfaces.SelectedItemEventHandler; import org.gcube.portlets.user.databasesmanager.client.events.interfaces.ShowCreateTableEventHandler; @@ -309,6 +311,15 @@ public class GxtBorderLayoutPanel extends ContentPanel { getTableDetails(table); } }); + + eventBus.addHandler(RefreshDataEvent.TYPE, new RefreshDataEventHandler() { + + @Override + public void onRefreshData(RefreshDataEvent refreshDataEvent) { + + + } + }); } // method to load the tables list @@ -383,8 +394,8 @@ public class GxtBorderLayoutPanel extends ContentPanel { if (caught .getMessage() - .equals("java.lang.Exception: Result not available")) { - MessageBox.alert("Error ", "
Message:" + .contains("Result not available")) { + MessageBox.alert("Warning ", "
Message:" + "no tables available", null); } else { MessageBox.alert("Error ", "
Message:" @@ -401,11 +412,26 @@ public class GxtBorderLayoutPanel extends ContentPanel { rootLogger.log(Level.SEVERE, "SUCCESS RPC LoadTables"); callback.onSuccess(result); + if (keyword == null) { startSearchTable = false; } // enable toolbar in the dialog toolBarTop.enable(); + + if (result!=null){ + + List data = result.getData(); + if (data.size()==0){ + MessageBox.alert("Warning ", "
Message:" + + "no tables availables", null); + return; + } + } + + + + } }); // rootLogger.log(Level.SEVERE, "End RPC LoadTables"); diff --git a/src/main/java/org/gcube/portlets/user/databasesmanager/client/panels/GxtTreePanel.java b/src/main/java/org/gcube/portlets/user/databasesmanager/client/panels/GxtTreePanel.java index a07bafe..efeca0b 100644 --- a/src/main/java/org/gcube/portlets/user/databasesmanager/client/panels/GxtTreePanel.java +++ b/src/main/java/org/gcube/portlets/user/databasesmanager/client/panels/GxtTreePanel.java @@ -10,7 +10,9 @@ import java.util.logging.Logger; import org.gcube.portlets.user.databasesmanager.client.GWTdbManagerServiceAsync; import org.gcube.portlets.user.databasesmanager.client.datamodel.FileModel; import org.gcube.portlets.user.databasesmanager.client.datamodel.Result; +import org.gcube.portlets.user.databasesmanager.client.events.RefreshDataEvent; import org.gcube.portlets.user.databasesmanager.client.events.SelectedItemEvent; +import org.gcube.portlets.user.databasesmanager.client.events.interfaces.RefreshDataEventHandler; import org.gcube.portlets.user.databasesmanager.client.resources.Images; import org.gcube.portlets.user.databasesmanager.client.utils.ConstantsPortlet; import com.extjs.gxt.ui.client.Style.Scroll; @@ -30,6 +32,7 @@ import com.google.gwt.event.shared.HandlerManager; import com.google.gwt.user.client.Event; import com.google.gwt.user.client.rpc.AsyncCallback; import com.google.gwt.user.client.ui.AbstractImagePrototype; +import org.gcube.portlets.user.databasesmanager.client.datamodel.GeneralOutputFromServlet; //class that implements the tree object public class GxtTreePanel extends LayoutContainer { @@ -57,6 +60,7 @@ public class GxtTreePanel extends LayoutContainer { // tree is used) this.setScrollMode(Scroll.NONE); this.initLayout(); + addHandler(); } private void initLayout() throws Exception { @@ -441,6 +445,18 @@ public class GxtTreePanel extends LayoutContainer { }); } + private void addHandler() { + eventBus.addHandler(RefreshDataEvent.TYPE, + new RefreshDataEventHandler() { + + @Override + public void onRefreshData(RefreshDataEvent refreshDataEvent) { + + refreshData(); + } + }); + } + // load the root private void loadRootItemTree() throws Exception { this.mask("Loading", "x-mask-loading"); @@ -712,11 +728,19 @@ public class GxtTreePanel extends LayoutContainer { } } + private void removeChildren(FileModel parent) { + + if (parent != null) { + store.removeAll(parent); + rootLogger.log(Level.INFO, "children removed from the store"); + } + } + // set information (useful for the submitquery operation) for the selected // item. The selected item can be a schema, a table and a database. 
private void setInfoOnSelectedItem(FileModel element) { // the selected element is a schema - if ((treeDepthSelectedItem == 4) && (element.isSchema() == true)) { + if ((treeDepthSelectedItem == 4) && (element.isSchema() == true)) { rootLogger.log(Level.SEVERE, "setInfo for selected item schema"); // recover database name FileModel database = store.getParent(element); @@ -730,8 +754,8 @@ public class GxtTreePanel extends LayoutContainer { element.setDatabaseName(DatabaseName); } - //the selected element is a table because the database is mysql type - else if ((treeDepthSelectedItem == 4) && (element.isSchema() == false)) { + // the selected element is a table because the database is mysql type + else if ((treeDepthSelectedItem == 4) && (element.isSchema() == false)) { rootLogger.log(Level.SEVERE, "setInfo for selected item table"); // recover database name @@ -744,8 +768,9 @@ public class GxtTreePanel extends LayoutContainer { element.setResourceName(ResourceName); element.setDatabaseName(DatabaseName); - } - else if (treeDepthSelectedItem == 5) { //the selected item is a table so the database is postgres type + } else if (treeDepthSelectedItem == 5) { // the selected item is a table + // so the database is + // postgres type rootLogger.log(Level.SEVERE, "setInfo for selected item table"); // recover schema name @@ -761,7 +786,8 @@ public class GxtTreePanel extends LayoutContainer { element.setResourceName(ResourceName); element.setDatabaseName(DatabaseName); - } else if (treeDepthSelectedItem == 3) { // the selected item is a database + } else if (treeDepthSelectedItem == 3) { // the selected item is a + // database rootLogger.log(Level.SEVERE, "setInfo for selected item database"); // recover database name @@ -774,6 +800,7 @@ public class GxtTreePanel extends LayoutContainer { element.setResourceName(ResourceName); element.setDatabaseName(DatabaseName); } + } // get the tree panel @@ -785,4 +812,255 @@ public class GxtTreePanel extends LayoutContainer { public TreeStore getTreeStore() { return this.store; } + + // refresh data + private void refreshData() { + + final GxtTreePanel tree = this; + this.mask("Loading", "x-mask-loading"); + + List items = treePanel.getSelectionModel() + .getSelectedItems(); + + final FileModel selectedItem = items.get(0); + int Depth = store.getDepth(selectedItem); + LinkedHashMap inputData = new LinkedHashMap(); + + String elementType = ""; + String value; + + if (Depth == 1) { // root tree + elementType = "listResources"; + value = "listResources"; + inputData.put(value, value); + } else if (Depth == 2) { // resource + elementType = "resource"; + value = selectedItem.getName(); + inputData.put("ResourceName", value); + + } else if (Depth == 3) { // database + elementType = "database"; + String database = selectedItem.getName(); + FileModel parent = store.getParent(selectedItem); + String resource = parent.getName(); + inputData.put("ResourceName", resource); + inputData.put("DatabaseName", database); + + } + + final String elemType = elementType; + RPCservice.refreshDataTree(elemType, inputData, selectedItem, + new AsyncCallback() { + + @Override + public void onFailure(Throwable caught) { + rootLogger.log(Level.SEVERE, "FAILURE refreshDataTree"); + + MessageBox.alert("Error ", + "
Message:" + caught.getMessage(), null); + + if (tree.isMasked()) { + tree.unmask(); + } + } + + @Override + public void onSuccess(GeneralOutputFromServlet result) { + + rootLogger.log(Level.SEVERE, "SUCCESS refreshDataTree"); + + if (result != null) { + // remove children + removeChildren(selectedItem); + tree.layout(true); + + if (elemType.equals("listResources")) { + List output = new ArrayList(); + output = result.getListOutput(); + + if (output.size() == 0) { + selectedItem.setIsExpanded(false); + treePanel.setExpanded(selectedItem, false); + } else { + // add the children + addChildren(selectedItem, output); + // System.out.println("added new data"); + treePanel.setExpanded(selectedItem, true); + } + + selectedItem.setIsLoaded(true); + + } else if (elemType.equals("resource")) { + LinkedHashMap output = new LinkedHashMap(); + output = result.getMapOutput(); + + if (output.size() != 0) { + // recover keys from the result + Set keys = output.keySet(); + Object[] array = keys.toArray(); + + // recover information for each database + List children = new ArrayList(); + int numIterations = (output.size()) / 5; + + int i = 0; + int j = 0; + for (i = 0; i < numIterations; i++) { + + String DatabaseType = null; + // System.out.println("index i: " + i); + String DBName = null; + List DBInfo = new ArrayList(); + FileModel child = null; + + for (j = (i * 5); j < (i + 1) * 5; j++) { + // System.out.println("index j: " + + // j); + if (array[j].toString().contains( + "Database Name")) { + // recover the database name + DBName = output.get( + array[j].toString()) + .getName(); + + Result row = new Result( + "Database Name", + output.get( + array[j].toString()) + .getName()); + + child = output.get(array[j] + .toString()); + DBInfo.add(row); + } + + if (array[j].toString().contains( + "URL")) { + Result row = new Result( + "URL", + output.get( + array[j].toString()) + .getName()); + DBInfo.add(row); + } + + if (array[j].toString().contains( + "Driver Name")) { + Result row = new Result( + "Driver Name", + output.get( + array[j].toString()) + .getName()); + + String driver = output.get( + array[j].toString()) + .getName(); + DBInfo.add(row); + + if (driver + .toUpperCase() + .contains( + ConstantsPortlet.POSTGRES)) { + DatabaseType = ConstantsPortlet.POSTGRES; + } + + if (driver + .toUpperCase() + .contains( + ConstantsPortlet.MYSQL)) { + DatabaseType = ConstantsPortlet.MYSQL; + } + } + + if (array[j].toString().contains( + "Dialect Name")) { + Result row = new Result( + "Dialect Name", + output.get( + array[j].toString()) + .getName()); + DBInfo.add(row); + } + + if (array[j].toString().contains( + "Platform Name")) { + Result row = new Result( + "Platform Name", + output.get( + array[j].toString()) + .getName()); + DBInfo.add(row); + } + } + + // FileModel child = new + // FileModel(DBName); + + if (child != null) { + // set that the item is a database + child.setIsDatabase(true); + child.setDBInfo(DBInfo); + + // check print + // rootLogger.log(Level.INFO, + // "DatabaseType: " + DatabaseType); + + // set the database type considering + // the + // driver information + child.setDatabaseType(DatabaseType); + + if (DatabaseType + .equals(ConstantsPortlet.MYSQL)) { + child.setIsExpanded(false); + treePanel.setExpanded(child, + false); + } + children.add(child); + } + } + addChildren(selectedItem, children); + rootLogger + .log(Level.INFO, + "children number: " + + store.getChildCount(selectedItem)); + + treePanel.setExpanded(selectedItem, true); + } else if (output.size() == 0) { + 
selectedItem.setIsExpanded(false); + treePanel.setExpanded(selectedItem, false); + } + + selectedItem.setIsLoaded(true); + + } else if (elemType.equals("database")) { + List output = new ArrayList(); + output = result.getListOutput(); + + if (output == null) { + rootLogger.log(Level.INFO, + "the database has not schema"); + selectedItem.setIsExpanded(false); + treePanel.setExpanded(selectedItem, false); + } else { + for (int i = 0; i < output.size(); i++) { + // element.setIsSchema(true); + output.get(i).setIsSchema(true); + output.get(i).setIsExpanded(false); + treePanel.setExpanded(output.get(i), + false); + } + addChildren(selectedItem, output); + // element.setIsLoaded(true); + treePanel.setExpanded(selectedItem, true); + } + selectedItem.setIsLoaded(true); + } + } + if (tree.isMasked()) { + tree.unmask(); + } + } + }); + } } diff --git a/src/main/java/org/gcube/portlets/user/databasesmanager/client/toolbar/GxtToolbarFunctionality.java b/src/main/java/org/gcube/portlets/user/databasesmanager/client/toolbar/GxtToolbarFunctionality.java index f213a40..1b5f56a 100644 --- a/src/main/java/org/gcube/portlets/user/databasesmanager/client/toolbar/GxtToolbarFunctionality.java +++ b/src/main/java/org/gcube/portlets/user/databasesmanager/client/toolbar/GxtToolbarFunctionality.java @@ -8,6 +8,7 @@ import org.gcube.portlets.user.databasesmanager.client.GWTdbManagerServiceAsync; import org.gcube.portlets.user.databasesmanager.client.datamodel.SubmitQueryData; import org.gcube.portlets.user.databasesmanager.client.events.LoadTablesEvent; import org.gcube.portlets.user.databasesmanager.client.events.RandomSamplingEvent; +import org.gcube.portlets.user.databasesmanager.client.events.RefreshDataEvent; import org.gcube.portlets.user.databasesmanager.client.events.SamplingEvent; import org.gcube.portlets.user.databasesmanager.client.events.SelectedTableEvent; import org.gcube.portlets.user.databasesmanager.client.events.ShowCreateTableEvent; @@ -46,6 +47,7 @@ public class GxtToolbarFunctionality { private Button btnSimpleSample; private Button btnSmartSample; private Button btnRandomSample; + private Button btnRefresCachedData; // toolbar private ToolBar toolBar; // dialog list. 
Each dialog contains a form @@ -132,6 +134,15 @@ public class GxtToolbarFunctionality { btnRandomSample .setToolTip("retrieves 100 randomly picked rows from the table"); toolBar.add(btnRandomSample); + toolBar.add(new SeparatorToolItem()); + + // button for Random Sample + btnRefresCachedData = new Button(ConstantsPortlet.REFRESHCACHEDDATA); + btnRefresCachedData.setScale(ButtonScale.SMALL); + btnRefresCachedData.setArrowAlign(ButtonArrowAlign.BOTTOM); + btnRefresCachedData + .setToolTip("refreshes data"); + toolBar.add(btnRefresCachedData); // add(toolBar, new FlowData(10)); @@ -143,6 +154,7 @@ public class GxtToolbarFunctionality { btnSimpleSample.disable(); btnSmartSample.disable(); btnRandomSample.disable(); + btnRefresCachedData.disable(); } private void addHandler() { @@ -235,8 +247,9 @@ public class GxtToolbarFunctionality { new SelectionListener() { @Override public void componentSelected(ButtonEvent ce) { - //deactivate the button - dialog.getButtonById(Dialog.OK).disable(); + // deactivate the button + dialog.getButtonById(Dialog.OK) + .disable(); // recover info from dialog setInfoOnSubmitQuery(form, dialog, dialogID); @@ -256,36 +269,37 @@ public class GxtToolbarFunctionality { "button Cancel event"); if (uidSubmitQueryList - .get(dialogID)!=null){ - - // remove computation - RPCservice.removeComputation( - uidSubmitQueryList - .get(dialogID), - new AsyncCallback() { - @Override - public void onSuccess( - Boolean result) { - rootLogger - .log(Level.SEVERE, - "SUCCESS RPC removeComputation"); - if (result - .booleanValue() == true) { + .get(dialogID) != null) { + + // remove computation + RPCservice.removeComputation( + uidSubmitQueryList + .get(dialogID), + new AsyncCallback() { + @Override + public void onSuccess( + Boolean result) { rootLogger - .log(Level.INFO, - "computation removed with uid: " - + uidSubmitQueryList - .get(dialogID)); + .log(Level.SEVERE, + "SUCCESS RPC removeComputation"); + if (result + .booleanValue() == true) { + rootLogger + .log(Level.INFO, + "computation removed with uid: " + + uidSubmitQueryList + .get(dialogID)); + } } - } - @Override - public void onFailure( - Throwable caught) { - rootLogger - .log(Level.SEVERE, - "FAILURE RPC removeComputation"); - } - }); + + @Override + public void onFailure( + Throwable caught) { + rootLogger + .log(Level.SEVERE, + "FAILURE RPC removeComputation"); + } + }); } dialog.hide(); } @@ -337,6 +351,20 @@ public class GxtToolbarFunctionality { eventBus.fireEvent(new LoadTablesEvent()); } }); + + btnRefresCachedData.addSelectionListener(new SelectionListener(){ + + @Override + public void componentSelected(ButtonEvent ce) { + + rootLogger + .log(Level.INFO, + "REFRESH BUTTON"); + //fire event + eventBus.fireEvent(new RefreshDataEvent()); + } + + }); } // buttons enable/disable operation depending from the item selected in the @@ -350,6 +378,7 @@ public class GxtToolbarFunctionality { switch (infoTreeDepthSelectedItem) { case 1: + btnRefresCachedData.enable(); btnTablesList.disable(); btnSubmitQuery.disable(); // btnGetInfo.disable(); @@ -357,9 +386,11 @@ public class GxtToolbarFunctionality { btnSimpleSample.disable(); btnSmartSample.disable(); btnRandomSample.disable(); + break; case 2: + btnRefresCachedData.enable(); btnTablesList.disable(); btnSubmitQuery.disable(); // btnGetInfo.enable(); @@ -367,6 +398,7 @@ public class GxtToolbarFunctionality { btnSimpleSample.disable(); btnSmartSample.disable(); btnRandomSample.disable(); + break; case 3: @@ -374,6 +406,7 @@ public class GxtToolbarFunctionality { && 
(databaseType.equals(ConstantsPortlet.MYSQL))) { btnTablesList.enable(); btnSubmitQuery.enable(); + btnRefresCachedData.enable(); // btnGetInfo.disable(); btnShowCreateTable.disable(); btnSimpleSample.disable(); @@ -385,6 +418,7 @@ public class GxtToolbarFunctionality { && (databaseType.equals(ConstantsPortlet.POSTGRES))) { btnTablesList.disable(); btnSubmitQuery.enable(); + btnRefresCachedData.enable(); // btnGetInfo.disable(); btnShowCreateTable.disable(); btnSimpleSample.disable(); @@ -400,6 +434,7 @@ public class GxtToolbarFunctionality { // this tree level is a schema btnTablesList.enable(); btnSubmitQuery.enable(); + btnRefresCachedData.disable(); // btnShowCreateTable.enable(); btnShowCreateTable.disable(); btnSimpleSample.disable(); @@ -414,6 +449,7 @@ public class GxtToolbarFunctionality { btnSimpleSample.enable(); btnSmartSample.enable(); btnRandomSample.enable(); + btnRefresCachedData.disable(); } break; @@ -425,6 +461,7 @@ public class GxtToolbarFunctionality { btnSimpleSample.enable(); btnSmartSample.enable(); btnRandomSample.enable(); + btnRefresCachedData.disable(); break; } } diff --git a/src/main/java/org/gcube/portlets/user/databasesmanager/client/utils/ConstantsPortlet.java b/src/main/java/org/gcube/portlets/user/databasesmanager/client/utils/ConstantsPortlet.java index 991beff..0431dc2 100644 --- a/src/main/java/org/gcube/portlets/user/databasesmanager/client/utils/ConstantsPortlet.java +++ b/src/main/java/org/gcube/portlets/user/databasesmanager/client/utils/ConstantsPortlet.java @@ -13,6 +13,7 @@ public class ConstantsPortlet { public static final String SAMPLING = "Sampling"; public static final String SMARTSAMPLING = "Smart Sampling"; public static final String RANDOMSAMPLING = "Random Sampling"; + public static final String REFRESHCACHEDDATA = "Refresh Data"; //sql Dialects public static final String POSTGRES = "POSTGRES"; diff --git a/src/main/java/org/gcube/portlets/user/databasesmanager/server/GWTdbManagerServiceImpl.java b/src/main/java/org/gcube/portlets/user/databasesmanager/server/GWTdbManagerServiceImpl.java index d15db47..9892c56 100644 --- a/src/main/java/org/gcube/portlets/user/databasesmanager/server/GWTdbManagerServiceImpl.java +++ b/src/main/java/org/gcube/portlets/user/databasesmanager/server/GWTdbManagerServiceImpl.java @@ -15,7 +15,6 @@ import java.net.URLStreamHandler; import javax.servlet.ServletException; import javax.servlet.http.HttpSession; -import net.didion.jwnl.data.Exc; import net.sf.ehcache.CacheManager; import net.sf.ehcache.Ehcache; import net.sf.ehcache.config.CacheConfiguration; @@ -50,12 +49,14 @@ import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; import org.gcube.portlets.user.databasesmanager.client.GWTdbManagerService; import org.gcube.portlets.user.databasesmanager.client.datamodel.ComputationOutput; import org.gcube.portlets.user.databasesmanager.client.datamodel.FileModel; +import org.gcube.portlets.user.databasesmanager.client.datamodel.GeneralOutputFromServlet; import org.gcube.portlets.user.databasesmanager.client.datamodel.Parameter; import org.gcube.portlets.user.databasesmanager.client.datamodel.Result; import org.gcube.portlets.user.databasesmanager.client.datamodel.Row; import org.gcube.portlets.user.databasesmanager.server.util.SessionUtil; import org.gcube.portlets.user.databasesmanager.server.util.WsUtil; +import com.extjs.gxt.ui.client.core.El; import com.extjs.gxt.ui.client.data.BasePagingLoadResult; import com.extjs.gxt.ui.client.data.PagingLoadConfig; import 
com.extjs.gxt.ui.client.data.PagingLoadResult; @@ -79,6 +80,7 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements // private CacheManager cacheManager; private static Ehcache employeeCache; + public static List listAlgorithms; public GWTdbManagerServiceImpl() throws Exception { @@ -91,45 +93,56 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements // cacheManager = CacheManager.create(resourceAsStream); // TODO: DA MODIFICARE LA MODALITÀ DI RECUPERO DEL FILE -// try { -// -// // CacheManager cacheManager = CacheManager -// // .newInstance("/home/loredana/workspace/databases-manager-portlet-TRUNK/configCache/encache.xml"); -// -// // InputStream is = -// // ClassLoader.getSystemResourceAsStream("encache.xml"); -// // CacheManager cacheManager = CacheManager.newInstance(is); -// // is.close(); -// -// URL url = getClass().getResource("/encache.xml"); -// CacheManager cacheManager = CacheManager.newInstance(url); -// // getcache -// employeeCache = cacheManager.getEhcache("DBCache"); -// -// //set Disk Store Path in the configuration file encache.xml -//// CacheConfiguration config = employeeCache.getCacheConfiguration(); -//// String DiskCacheFolderName="DBManagerDisk"; -////// config.setDiskStorePath(this.getServletContext().getRealPath("")+DiskCacheFolderName); -//// -//// config.setDiskStorePath(this.getServletContext().getRealPath("")+DiskCacheFolderName); -// -// } catch (Exception e) { -// // TODO: handle exception -// e.printStackTrace(); -// throw new Exception("Failed to get cache. " + e); -// } + // try { + // + // // CacheManager cacheManager = CacheManager + // // + // .newInstance("/home/loredana/workspace/databases-manager-portlet-TRUNK/configCache/encache.xml"); + // + // // InputStream is = + // // ClassLoader.getSystemResourceAsStream("encache.xml"); + // // CacheManager cacheManager = CacheManager.newInstance(is); + // // is.close(); + // + // URL url = getClass().getResource("/encache.xml"); + // CacheManager cacheManager = CacheManager.newInstance(url); + // // getcache + // employeeCache = cacheManager.getEhcache("DBCache"); + // + // //set Disk Store Path in the configuration file encache.xml + // // CacheConfiguration config = employeeCache.getCacheConfiguration(); + // // String DiskCacheFolderName="DBManagerDisk"; + // //// + // config.setDiskStorePath(this.getServletContext().getRealPath("")+DiskCacheFolderName); + // // + // // + // config.setDiskStorePath(this.getServletContext().getRealPath("")+DiskCacheFolderName); + // + // } catch (Exception e) { + // // TODO: handle exception + // e.printStackTrace(); + // throw new Exception("Failed to get cache. 
" + e); + // } } @Override public void init() throws ServletException { - - - super.init(); + super.init(); - //TODO MODIFY: SET THE NAME OF THE CACHE DISK WITH CODE AND NOT IN FILE ENCACHE.XML - +// //build the list of available algorithms +// listAlgorithms.add("LISTDBNAMES"); +// listAlgorithms.add("LISTDBINFO"); +// listAlgorithms.add("LISTDBSCHEMA"); +// listAlgorithms.add("LISTTABLES"); +// listAlgorithms.add("GETTABLEDETAILS"); +// listAlgorithms.add("SUBMITQUERY"); +// listAlgorithms.add("SAMPLEONTABLE"); +// listAlgorithms.add("SMARTSAMPLEONTABLE"); +// listAlgorithms.add("RANDOMSAMPLEONTABLE"); + + //craete cache try { // CacheManager cacheManager = CacheManager @@ -143,61 +156,57 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements URL url = getClass().getResource("/encache.xml"); CacheManager cacheManager = CacheManager.newInstance(url); // getcache - employeeCache = cacheManager.getEhcache("DBCache"); - - //set Disk Store Path in the configuration file encache.xml -// CacheConfiguration config = employeeCache.getCacheConfiguration(); -// String DiskCacheFolderName="DBManagerDisk"; -//// config.setDiskStorePath(this.getServletContext().getRealPath("")+DiskCacheFolderName); -// -// config.setDiskStorePath(this.getServletContext().getRealPath("")+DiskCacheFolderName); + employeeCache = cacheManager.getEhcache("DBCache"); + + // set Disk Store Path in the configuration file encache.xml + // CacheConfiguration config = + // employeeCache.getCacheConfiguration(); + // String DiskCacheFolderName="DBManagerDisk"; + // // + // config.setDiskStorePath(this.getServletContext().getRealPath("")+DiskCacheFolderName); + // + // config.setDiskStorePath(this.getServletContext().getRealPath("")+DiskCacheFolderName); + + String path = System.getenv("CATALINA_TMPDIR"); + System.out.println("Path: " + + this.getServletContext().getRealPath("")); + + // System.out.println("Path: " + path); + + // create folder for caching data + // String DiskCacheFolderName="DBManagerDisk"; + // File f = new + // File(this.getServletContext().getRealPath("")+DiskCacheFolderName); + + CacheConfiguration config = employeeCache.getCacheConfiguration(); + String DiskCacheFolderName = "DBManagerDisk"; + // config.setDiskStorePath(this.getServletContext().getRealPath("")+DiskCacheFolderName); + + // File f = new File(path+"/"+DiskCacheFolderName); + + // if (!f.exists()){ + // f.mkdir(); + // + // } + + // config.setDiskStorePath(path+"/"+DiskCacheFolderName); + config.setDiskStorePath(path); + // config.setDiskPersistent(true); + // config.setOverflowToDisk(true); - - - String path = System.getenv("CATALINA_TMPDIR"); -// System.out.println("Path: " + this.getServletContext().getRealPath("")); - -// System.out.println("Path: " + path); - - - //create folder for caching data -// String DiskCacheFolderName="DBManagerDisk"; -// File f = new File(this.getServletContext().getRealPath("")+DiskCacheFolderName); - - CacheConfiguration config = employeeCache.getCacheConfiguration(); - String DiskCacheFolderName="DBManagerDisk"; -// config.setDiskStorePath(this.getServletContext().getRealPath("")+DiskCacheFolderName); - -// File f = new File(path+"/"+DiskCacheFolderName); - -// if (!f.exists()){ -// f.mkdir(); -// -// } - - - -// config.setDiskStorePath(path+"/"+DiskCacheFolderName); - config.setDiskStorePath(path); -// config.setDiskPersistent(true); -// config.setOverflowToDisk(true); - - - } catch (Exception e) { - // TODO: handle exception + logger.error("Failed to get cache: " + e); 
e.printStackTrace(); -// throw new Exception("Failed to get cache. " + e); -// Exception exc = new Exception("Failed to get cache. " + e.getMessage()); + throw e; } - -// File f = new File(path+"/"+DiskCacheFolderName); -// -// if (!f.exists()){ -// f.mkdir(); -// } - + + // File f = new File(path+"/"+DiskCacheFolderName); + // + // if (!f.exists()){ + // f.mkdir(); + // } + } private void initVariables() { @@ -242,6 +251,7 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements // to get resources from IS @Override public List getResource() throws Exception { + // initialize variables with application startup initVariables(); @@ -250,27 +260,35 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements // data output List outputParameters = new ArrayList(); - // get list of algorithms - List algorithms = new ArrayList(); - algorithms = getDatabaseManagerAlgorithms(); - +// if (listAlgorithms==null){ +// // get list of algorithms +//// List algorithms = new ArrayList(); +//// algorithms = getDatabaseManagerAlgorithms(); +// +// listAlgorithms = getDatabaseManagerAlgorithms(); +// +// } // get algorithmId - String algorithmId = null; - - for (int i = 0; i < algorithms.size(); i++) { - if (algorithms.get(i).equals("LISTDBNAMES")) { - algorithmId = algorithms.get(i); - // print check - // logger.info("algorithmId: " + algorithmId); - } - } + String algorithmId = "LISTDBNAMES"; +// for (int i = 0; i < listAlgorithms.size(); i++) { +// if (listAlgorithms.get(i).equals("LISTDBNAMES")) { +// algorithmId = listAlgorithms.get(i); +// // print check +// // logger.info("algorithmId: " + algorithmId); +// } +// } + + // get input parameters of the algorithm specified by id - inputParameters = getParameters(algorithmId); +// inputParameters = getParameters(algorithmId); + + Parameter maxNumber = new Parameter("MaxNumber", "", "String", "-1"); + inputParameters.add(maxNumber); - if (inputParameters.size() != 0) { - // print check - logger.info("dbmanager-> algorithm input parameters retrieved"); +// if (inputParameters.size() != 0) { +// print check +// logger.info("dbmanager-> algorithm input parameters retrieved"); // // print check input parameters // for (int i = 0; i < inputParameters.size(); i++) { @@ -280,15 +298,15 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements // check if the value is in cache. If data does not exist in cache // the computation is started otherwise data are retrieved from // cache. 
+ + try { - // //get data from cache - // Ehcache employeeCache; - // employeeCache = cacheManager.getEhcache("myCache"); - + // get data from cache // check if data exist considering as key the input parameters - String key = inputParameters.get(0).getDefaultValue(); + // String key = inputParameters.get(0).getDefaultValue(); + String key = "listResources"; // System.out.println("***KEY: " + key); // net.sf.ehcache.Element dataFromCache = @@ -303,70 +321,51 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements if (value != null) { outputParameters = (List) value; } else { - // start the computation -// System.out.println("***STARTING THE COMPUTATION"); - // create data structure for data output - ComputationOutput outputData = new ComputationOutput(); - // computationId - String computationId = startComputation(algorithmId, - inputParameters, outputData); + + + // start the computation +// System.out.println("***STARTING THE COMPUTATION"); + // create data structure for data output + ComputationOutput outputData = new ComputationOutput(); + // computationId + String computationId = startComputation(algorithmId, + inputParameters, outputData); - // print check - // retrieve data - // logger.info("output data retrieved"); + // print check + // retrieve data + // logger.info("output data retrieved"); - // data output - LinkedHashMap mapValues = new LinkedHashMap(); - mapValues = outputData.getMapValues(); + // data output + LinkedHashMap mapValues = new LinkedHashMap(); + mapValues = outputData.getMapValues(); - for (int i = 0; i < mapValues.size(); i++) { - FileModel obj = new FileModel(mapValues.get(String - .valueOf(i))); - // obj.setIsLoaded(true); - outputParameters.add(obj); - } + for (int i = 0; i < mapValues.size(); i++) { + FileModel obj = new FileModel(mapValues.get(String + .valueOf(i))); + // obj.setIsLoaded(true); + outputParameters.add(obj); + } - if (outputParameters != null - && outputParameters.size() != 0) { - // put data in cache - net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element( - inputParameters.get(0).getDefaultValue(), - outputParameters); + if (outputParameters != null + && outputParameters.size() != 0) { + // put data in cache + net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element( + key, outputParameters); - insertDataIntoCache(dataToCache); - // employeeCache.put(dataToCache); - } + insertDataIntoCache(dataToCache); + // employeeCache.put(dataToCache); + + } + + } } catch (Exception e) { // TODO: handle exception - // e.printStackTrace(); +// e.printStackTrace(); throw new Exception("Failed to load data. 
" + e); } - - // // create data structure for data output - // ComputationOutput outputData = new ComputationOutput(); - // // computationId - // String computationId = startComputation(algorithmId, - // inputParameters, - // outputData); - // - // // print check - // // retrieve data - // // logger.info("output data retrieved"); - // - // // data output - // LinkedHashMap mapValues = new - // LinkedHashMap(); - // mapValues = outputData.getMapValues(); - // - // for (int i = 0; i < mapValues.size(); i++) { - // FileModel obj = new FileModel(mapValues.get(String.valueOf(i))); - // // obj.setIsLoaded(true); - // outputParameters.add(obj); - // } - - } +// } return outputParameters; } @@ -380,32 +379,43 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements LinkedHashMap outputParameters = new LinkedHashMap(); // get list of algorithms - List algorithms = new ArrayList(); - algorithms = getDatabaseManagerAlgorithms(); - - // get algorithmId - String algorithmId = null; - - for (int i = 0; i < algorithms.size(); i++) { - if (algorithms.get(i).equals("LISTDBINFO")) { - algorithmId = algorithms.get(i); - // print check - // logger.info("algorithmId: " + algorithmId); - } - } +// List algorithms = new ArrayList(); +// algorithms = getDatabaseManagerAlgorithms(); + +// if (listAlgorithms==null){ +// listAlgorithms = getDatabaseManagerAlgorithms(); +// } +// +// // get algorithmId +// String algorithmId = null; +// +// for (int i = 0; i < listAlgorithms.size(); i++) { +// if (listAlgorithms.get(i).equals("LISTDBINFO")) { +// algorithmId = listAlgorithms.get(i); +// // print check +// // logger.info("algorithmId: " + algorithmId); +// } +// } // get input parameters of the algorithm specified by id - inputParameters = getParameters(algorithmId); +// inputParameters = getParameters(algorithmId); - if (inputParameters.size() != 0) { +// if (inputParameters.size() != 0) { // print check - logger.info("dbmanager-> algorithm input parameters retrieved"); - +// logger.info("dbmanager-> algorithm input parameters retrieved"); + + String algorithmId = "LISTDBINFO"; + + //print check + logger.info("dbmanager-> ResourceName: " + + resourceName); + +// if (!resourceName.equals("")){ + + Parameter resource = new Parameter("ResourceName", "", "String", ""); + inputParameters.add(resource); inputParameters.get(0).setValue(resourceName); - - // print check - // logger.info(inputParameters.get(0).getName()); - + try { // get data from cache // check if data exist considering as key the input parameters @@ -423,53 +433,57 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements } if (value != null) { outputParameters = (LinkedHashMap) value; -// System.out.println("***GETTING DATA FROM CACHE"); + // System.out.println("***GETTING DATA FROM CACHE"); } else { - // start the computation -// System.out.println("***STARTING THE COMPUTATION"); - // create data structure - ComputationOutput outputData = new ComputationOutput(); - // computation id - String computationId = startComputation(algorithmId, - inputParameters, outputData); - - // print check - // retrieve data - // logger.info("output data retrieved"); - - // data output values - LinkedHashMap mapValues = new LinkedHashMap(); - // data output keys - LinkedHashMap mapKeys = new LinkedHashMap(); - - mapValues = outputData.getMapValues(); - mapKeys = outputData.getmapKeys(); - - for (int i = 0; i < mapValues.size(); i++) { - FileModel obj = new FileModel(mapValues.get(String - .valueOf(i))); - // 
obj.setIsLoaded(true); + + // start the computation + // System.out.println("***STARTING THE COMPUTATION"); + // create data structure + ComputationOutput outputData = new ComputationOutput(); + // computation id + String computationId = startComputation(algorithmId, + inputParameters, outputData); // print check - // logger.info("value: " + - // mapValues.get(String.valueOf(i))); - // logger.info("key: " + - // mapKeys.get(String.valueOf(i))); - outputParameters.put(mapKeys.get(String.valueOf(i)), - obj); - } + // retrieve data + // logger.info("output data retrieved"); - // write data in cache - if (outputParameters != null - && outputParameters.size() != 0) { - // put data in cache - net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element( - inputParameters.get(0).getValue(), - outputParameters); + // data output values + LinkedHashMap mapValues = new LinkedHashMap(); + // data output keys + LinkedHashMap mapKeys = new LinkedHashMap(); - insertDataIntoCache(dataToCache); - // employeeCache.put(dataToCache); - } + mapValues = outputData.getMapValues(); + mapKeys = outputData.getmapKeys(); + + for (int i = 0; i < mapValues.size(); i++) { + FileModel obj = new FileModel(mapValues.get(String + .valueOf(i))); + // obj.setIsLoaded(true); + + // print check + // logger.info("value: " + + // mapValues.get(String.valueOf(i))); + // logger.info("key: " + + // mapKeys.get(String.valueOf(i))); + outputParameters.put(mapKeys.get(String.valueOf(i)), + obj); + } + + // write data in cache + if (outputParameters != null + && outputParameters.size() != 0) { + // put data in cache + net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element( + inputParameters.get(0).getValue(), + outputParameters); + + insertDataIntoCache(dataToCache); + // employeeCache.put(dataToCache); + } + + + } @@ -478,46 +492,17 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements e.printStackTrace(); throw new Exception("Failed to load data. 
" + e); } + +// } - } +// inputParameters.get(0).setValue(resourceName); - // // create data structure - // ComputationOutput outputData = new ComputationOutput(); - // // computation id - // String computationId = startComputation(algorithmId, inputParameters, - // outputData); - // - // // print check - // // retrieve data - // // logger.info("output data retrieved"); - // - // // data output values - // LinkedHashMap mapValues = new LinkedHashMap(); - // // data output keys - // LinkedHashMap mapKeys = new LinkedHashMap(); - // - // mapValues = outputData.getMapValues(); - // mapKeys = outputData.getmapKeys(); - // - // for (int i = 0; i < mapValues.size(); i++) { - // FileModel obj = new FileModel(mapValues.get(String.valueOf(i))); - // // obj.setIsLoaded(true); - // - // // print check - // // logger.info("value: " + mapValues.get(String.valueOf(i))); - // // logger.info("key: " + mapKeys.get(String.valueOf(i))); - // outputParameters.put(mapKeys.get(String.valueOf(i)), obj); - // } - // - // // write data in cache - // if (outputParameters != null && outputParameters.size() != 0) { - // // put data in cache - // net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element( - // inputParameters.get(0).getDefaultValue(), outputParameters); - // employeeCache.put(dataToCache); - // } + // print check + // logger.info(inputParameters.get(0).getName()); + + + +// } return outputParameters; } @@ -533,148 +518,135 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements List outputParameters = new ArrayList(); // get list of algorithms - List algorithms = new ArrayList(); - algorithms = getDatabaseManagerAlgorithms(); - - // get algorithmId - String algorithmId = null; - - for (int i = 0; i < algorithms.size(); i++) { - if (algorithms.get(i).equals("LISTDBSCHEMA")) { - algorithmId = algorithms.get(i); - // print check - // logger.info("algorithmId: " + algorithmId); - } - } +// List algorithms = new ArrayList(); +// algorithms = getDatabaseManagerAlgorithms(); + +// if (listAlgorithms==null){ +// listAlgorithms = getDatabaseManagerAlgorithms(); +// } +// +// // get algorithmId +// String algorithmId = null; +// +// for (int i = 0; i < listAlgorithms.size(); i++) { +// if (listAlgorithms.get(i).equals("LISTDBSCHEMA")) { +// algorithmId = listAlgorithms.get(i); +// // print check +// // logger.info("algorithmId: " + algorithmId); +// } +// } // get input parameters of the algorithm specified by id - inputParameters = getParameters(algorithmId); +// inputParameters = getParameters(algorithmId); + String algorithmId = "LISTDBSCHEMA"; // print check + String rs = dataInput.get("ResourceName"); + String db = dataInput.get("DatabaseName"); + logger.info("dbmanager-> ResourceName: " - + dataInput.get("ResourceName")); + + rs); logger.info("dbmanager-> DatabaseName: " - + dataInput.get("DatabaseName")); + + db); + + - if (inputParameters.size() != 0) { +// if (inputParameters.size() != 0) { // print check - logger.info("dbmanager-> algorithm input parameters retrieved"); +// logger.info("dbmanager-> algorithm input parameters retrieved"); + +// if ((!rs.equals(""))&&(!db.equals(""))){ + + //set input parameters + Parameter resource = new Parameter("ResourceName", "", "String", ""); + Parameter database = new Parameter("DatabaseName", "", "String", ""); + inputParameters.add(resource); + inputParameters.add(database); - inputParameters.get(0).setValue(dataInput.get("ResourceName")); - inputParameters.get(1).setValue(dataInput.get("DatabaseName")); + 
inputParameters.get(0).setValue(rs); + inputParameters.get(1).setValue(db); - // print check algorithm input parameters - // for (int i = 0; i < inputParameters.size(); i++) { - // logger.info(inputParameters.get(i).getName()); - // } + // print check algorithm input parameters + // for (int i = 0; i < inputParameters.size(); i++) { + // logger.info(inputParameters.get(i).getName()); + // } - try { - // get data from cache - // check if data exist considering as key the input parameters - String key = inputParameters.get(0).getValue() - + inputParameters.get(1).getValue(); + try { + // get data from cache + // check if data exist considering as key the input parameters + String key = inputParameters.get(0).getValue() + + inputParameters.get(1).getValue(); + +// System.out.println("key in GETSCHEMA: " + key); - // System.out.println("key: " + key); - // net.sf.ehcache.Element dataFromCache = - // employeeCache.get(key); + net.sf.ehcache.Element dataFromCache = getDataFromCache(key); - net.sf.ehcache.Element dataFromCache = getDataFromCache(key); + Object value = null; + if (dataFromCache != null) { + value = dataFromCache.getObjectValue(); + } + if (value != null) { + outputParameters = (List) value; + } else { + +// if (algorithmId!=null){ + // start the computation + // create data structure + ComputationOutput outputData = new ComputationOutput(); + // computation id + String computationId = startComputation(algorithmId, + inputParameters, outputData); - Object value = null; - if (dataFromCache != null) { - value = dataFromCache.getObjectValue(); - } - if (value != null) { - outputParameters = (List) value; - } else { - // start the computation - // create data structure - ComputationOutput outputData = new ComputationOutput(); - // computation id - String computationId = startComputation(algorithmId, - inputParameters, outputData); + // print check + // retrieve data + // logger.info("dbmanager-> output data retrieved"); - // print check - // retrieve data - // logger.info("dbmanager-> output data retrieved"); + // data output values + LinkedHashMap mapValues = new LinkedHashMap(); + // data output keys + LinkedHashMap mapKeys = new LinkedHashMap(); - // data output values - LinkedHashMap mapValues = new LinkedHashMap(); - // data output keys - LinkedHashMap mapKeys = new LinkedHashMap(); + mapValues = outputData.getMapValues(); + mapKeys = outputData.getmapKeys(); - mapValues = outputData.getMapValues(); - mapKeys = outputData.getmapKeys(); + for (int i = 0; i < mapValues.size(); i++) { + FileModel obj = new FileModel(mapValues.get(String + .valueOf(i))); + // obj.setIsSchema(true); + // obj.setIsLoaded(true); + outputParameters.add(obj); + } + + // write data in cache + if (outputParameters != null + && outputParameters.size() != 0) { + // put data in cache + net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element( + inputParameters.get(0).getValue() + + inputParameters.get(1).getValue(), + outputParameters); + + insertDataIntoCache(dataToCache); + // employeeCache.put(dataToCache); + } + +// } + - for (int i = 0; i < mapValues.size(); i++) { - FileModel obj = new FileModel(mapValues.get(String - .valueOf(i))); - // obj.setIsSchema(true); - // obj.setIsLoaded(true); - outputParameters.add(obj); } - // write data in cache - if (outputParameters != null - && outputParameters.size() != 0) { - // put data in cache - net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element( - inputParameters.get(0).getValue() - + inputParameters.get(1).getValue(), - outputParameters); - - 
insertDataIntoCache(dataToCache); - // employeeCache.put(dataToCache); - } + } catch (Exception e) { + // TODO: handle exception + e.printStackTrace(); + throw new Exception("Failed to load data. " + e); } + +// } + + - } catch (Exception e) { - // TODO: handle exception - e.printStackTrace(); - throw new Exception("Failed to load data. " + e); - - } - - } - - // // create data structure - // ComputationOutput outputData = new ComputationOutput(); - // // computation id - // String computationId = startComputation(algorithmId, inputParameters, - // outputData); - // - // // print check - // // retrieve data - // // logger.info("dbmanager-> output data retrieved"); - // - // // data output values - // LinkedHashMap mapValues = new LinkedHashMap(); - // // data output keys - // LinkedHashMap mapKeys = new LinkedHashMap(); - // - // mapValues = outputData.getMapValues(); - // mapKeys = outputData.getmapKeys(); - // - // for (int i = 0; i < mapValues.size(); i++) { - // FileModel obj = new FileModel(mapValues.get(String.valueOf(i))); - // // obj.setIsSchema(true); - // // obj.setIsLoaded(true); - // outputParameters.add(obj); - // } - // - // // write data in cache - // if (outputParameters != null - // && outputParameters.size() != 0) { - // // put data in cache - // net.sf.ehcache.Element dataToCache = new net.sf.ehcache.Element( - // inputParameters.get(0).getDefaultValue(), - // outputParameters); - // employeeCache.put(dataToCache); - // } - +// } return outputParameters; } @@ -687,66 +659,127 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements List outputParameters = new ArrayList(); // get list of algorithms - List algorithms = new ArrayList(); - algorithms = getDatabaseManagerAlgorithms(); - - // get algorithmId - String algorithmId = null; - - for (int i = 0; i < algorithms.size(); i++) { - if (algorithms.get(i).equals("LISTTABLES")) { - algorithmId = algorithms.get(i); - // print check - // logger.info("algorithmId: " + algorithmId); - } - } +// List algorithms = new ArrayList(); +// algorithms = getDatabaseManagerAlgorithms(); + +// if (listAlgorithms==null){ +// listAlgorithms = getDatabaseManagerAlgorithms(); +// } +// +// // get algorithmId +// String algorithmId = null; +// +// for (int i = 0; i < listAlgorithms.size(); i++) { +// if (listAlgorithms.get(i).equals("LISTTABLES")) { +// algorithmId = listAlgorithms.get(i); +// // print check +// // logger.info("algorithmId: " + algorithmId); +// } +// } // get input parameters of the algorithm - inputParameters = getParameters(algorithmId); +// inputParameters = getParameters(algorithmId); - if (inputParameters.size() != 0) { - // print check - logger.info("dbmanager-> algorithm input parameters retrieved"); - } +// if (inputParameters.size() != 0) { +// // print check +// logger.info("dbmanager-> algorithm input parameters retrieved"); +// } // print check algorithm input parameters // for (int i = 0; i < inputParameters.size(); i++) { // logger.info(inputParameters.get(i).getName()); // } - - inputParameters.get(0).setValue(dataInput.get("ResourceName")); - inputParameters.get(1).setValue(dataInput.get("DatabaseName")); - inputParameters.get(2).setValue(dataInput.get("SchemaName")); - - // print check + + String algorithmId = "LISTTABLES"; + + String rs= dataInput.get("ResourceName"); + String db= dataInput.get("DatabaseName"); + String scm= dataInput.get("SchemaName"); + + //print check logger.info("dbmanager-> ResourceName: " - + dataInput.get("ResourceName")); + + rs); logger.info("dbmanager-> 
DatabaseName: " - + dataInput.get("DatabaseName")); - logger.info("dbmanager-> SchemaName: " + dataInput.get("SchemaName")); + + db); + logger.info("dbmanager-> SchemaName: " + + scm); + +// if ((!rs.equals(""))&&(!db.equals(""))){ + + //set input parameters + Parameter resource = new Parameter("ResourceName", "", "String", ""); + Parameter database = new Parameter("DatabaseName", "", "String", ""); + Parameter schema = new Parameter("SchemaName","", "String", ""); + inputParameters.add(resource); + inputParameters.add(database); + inputParameters.add(schema); - // create data structure - ComputationOutput outputData = new ComputationOutput(); - // computation id - String computationId = startComputation(algorithmId, inputParameters, - outputData); + inputParameters.get(0).setValue(rs); + inputParameters.get(1).setValue(db); + inputParameters.get(2).setValue(scm); + + + // create data structure + ComputationOutput outputData = new ComputationOutput(); + // computation id + String computationId = startComputation(algorithmId, inputParameters, + outputData); - // print check on retrieving data - // logger.info("output data retrieved"); + // print check on retrieving data + // logger.info("output data retrieved"); - // data output values - LinkedHashMap mapValues = new LinkedHashMap(); - // data output keys - LinkedHashMap mapKeys = new LinkedHashMap(); + // data output values + LinkedHashMap mapValues = new LinkedHashMap(); + // data output keys + LinkedHashMap mapKeys = new LinkedHashMap(); - mapValues = outputData.getMapValues(); - mapKeys = outputData.getmapKeys(); + mapValues = outputData.getMapValues(); + mapKeys = outputData.getmapKeys(); - for (int i = 0; i < mapValues.size(); i++) { - Result row = new Result(String.valueOf(i), mapValues.get(String - .valueOf(i))); - outputParameters.add(row); - } + for (int i = 0; i < mapValues.size(); i++) { + Result row = new Result(String.valueOf(i), mapValues.get(String + .valueOf(i))); + outputParameters.add(row); + } + + + + +// } + +// inputParameters.get(0).setValue(dataInput.get("ResourceName")); +// inputParameters.get(1).setValue(dataInput.get("DatabaseName")); +// inputParameters.get(2).setValue(dataInput.get("SchemaName")); + +// // print check +// logger.info("dbmanager-> ResourceName: " +// + dataInput.get("ResourceName")); +// logger.info("dbmanager-> DatabaseName: " +// + dataInput.get("DatabaseName")); +// logger.info("dbmanager-> SchemaName: " + dataInput.get("SchemaName")); + +// // create data structure +// ComputationOutput outputData = new ComputationOutput(); +// // computation id +// String computationId = startComputation(algorithmId, inputParameters, +// outputData); +// +// // print check on retrieving data +// // logger.info("output data retrieved"); +// +// // data output values +// LinkedHashMap mapValues = new LinkedHashMap(); +// // data output keys +// LinkedHashMap mapKeys = new LinkedHashMap(); +// +// mapValues = outputData.getMapValues(); +// mapKeys = outputData.getmapKeys(); +// +// for (int i = 0; i < mapValues.size(); i++) { +// Result row = new Result(String.valueOf(i), mapValues.get(String +// .valueOf(i))); +// outputParameters.add(row); +// } return outputParameters; } @@ -869,107 +902,154 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements List listAttributes = null; // get list of algorithms - List algorithms = new ArrayList(); - algorithms = getDatabaseManagerAlgorithms(); +// List algorithms = new ArrayList(); +// algorithms = getDatabaseManagerAlgorithms(); + +// if 
(listAlgorithms==null){ +// listAlgorithms = getDatabaseManagerAlgorithms(); +// } +// +// // get algorithmId +// String algorithmId = null; +// +// for (int i = 0; i < listAlgorithms.size(); i++) { +// if (listAlgorithms.get(i).equals("SUBMITQUERY")) { +// algorithmId = listAlgorithms.get(i); +// // print check +// // logger.info("algorithmId: " + algorithmId); +// } +// } - // get algorithmId - String algorithmId = null; - - for (int i = 0; i < algorithms.size(); i++) { - if (algorithms.get(i).equals("SUBMITQUERY")) { - algorithmId = algorithms.get(i); - // print check - // logger.info("algorithmId: " + algorithmId); - } - } - - // get input parameters of the algorithm - inputParameters = getParameters(algorithmId); - - if (inputParameters.size() != 0) { - // print check - logger.info("dbmanager-> algorithm input parameters retrieved"); - } +// // get input parameters of the algorithm +// inputParameters = getParameters(algorithmId); +// +// if (inputParameters.size() != 0) { +// // print check +// logger.info("dbmanager-> algorithm input parameters retrieved"); +// } // print check // for (int i = 0; i < inputParameters.size(); i++) { // logger.info(inputParameters.get(i).getName()); // } + + String algorithmId = "SUBMITQUERY"; + // print check - logger.info("dbmanager-> ResourceName: " + dataDB.get("ResourceName")); - logger.info("dbmanager-> DatabaseName: " + dataDB.get("DatabaseName")); + String rs= dataDB.get("ResourceName"); + String db= dataDB.get("DatabaseName"); + + + //print check + logger.info("dbmanager-> ResourceName: " + + rs); + logger.info("dbmanager-> DatabaseName: " + + db); + +// logger.info("dbmanager-> ResourceName: " + dataDB.get("ResourceName")); +// logger.info("dbmanager-> DatabaseName: " + dataDB.get("DatabaseName")); logger.info("dbmanager-> Query: " + query); logger.info("dbmanager-> SmartCorrections check: " + smartCorrectionQuery); + +// if ((!rs.equals(""))&&(!db.equals(""))){ + //set input parameters + + Parameter resource = new Parameter("ResourceName", "", "String", ""); + Parameter database = new Parameter("DatabaseName", "", "String", ""); + Parameter readOnlyQuery = new Parameter("Read-Only Query", "", "Boolean", "true"); + Parameter applySmartCorrection = new Parameter("Apply Smart Correction", "", "Boolean", "true"); + Parameter lng = new Parameter("Language", "", "NONE", "NONE"); + Parameter q = new Parameter("Query", "", "String", ""); + + inputParameters.add(resource); + inputParameters.add(database); + inputParameters.add(readOnlyQuery); + inputParameters.add(applySmartCorrection); + inputParameters.add(lng); + inputParameters.add(q); + - inputParameters.get(0).setValue(dataDB.get("ResourceName")); - inputParameters.get(1).setValue(dataDB.get("DatabaseName")); - inputParameters.get(2).setValue(String.valueOf(valueReadOnlyQuery)); - inputParameters.get(3).setValue(String.valueOf(smartCorrectionQuery)); - inputParameters.get(4).setValue(language); - inputParameters.get(5).setValue(query); + inputParameters.get(0).setValue(rs); + inputParameters.get(1).setValue(db); + inputParameters.get(2).setValue(String.valueOf(valueReadOnlyQuery)); + inputParameters.get(3).setValue(String.valueOf(smartCorrectionQuery)); + inputParameters.get(4).setValue(language); + inputParameters.get(5).setValue(query); + + // create data structure + ComputationOutput outputData = new ComputationOutput(); + try { + // computation id + String computationId = startComputation(algorithmId, + inputParameters, outputData, UID); - // create data structure - ComputationOutput 
outputData = new ComputationOutput(); - try { - // computation id - String computationId = startComputation(algorithmId, - inputParameters, outputData, UID); + // get JobID + if (checkJob(UID)) { + // computationIDMap.put(id, computationId); - // get JobID - if (checkJob(UID)) { - // computationIDMap.put(id, computationId); + // print check on retrieving data + // logger.info("output data retrieved"); - // print check on retrieving data - // logger.info("output data retrieved"); + // data output values + LinkedHashMap mapValues = new LinkedHashMap(); + // data output keys + LinkedHashMap mapKeys = new LinkedHashMap(); - // data output values - LinkedHashMap mapValues = new LinkedHashMap(); - // data output keys - LinkedHashMap mapKeys = new LinkedHashMap(); + mapValues = outputData.getMapValues(); + mapKeys = outputData.getmapKeys(); - mapValues = outputData.getMapValues(); - mapKeys = outputData.getmapKeys(); + if (mapValues.size() != 0) { + output = new ArrayList(); - if (mapValues.size() != 0) { - output = new ArrayList(); + // logger.info("build the result - started"); + for (int i = 0; i < mapValues.size(); i++) { + Result row = new Result(mapKeys.get(String.valueOf(i)), + mapValues.get(String.valueOf(i))); + output.add(row); + } - // logger.info("build the result - started"); - for (int i = 0; i < mapValues.size(); i++) { - Result row = new Result(mapKeys.get(String.valueOf(i)), - mapValues.get(String.valueOf(i))); - output.add(row); + // System.out.println("output size submit: " + + // output.size()); + // logger.info("build the result - finished"); + + // get the attributes list for the result table + listAttributes = new ArrayList(); + listAttributes = getListAttributes(output.get(0).getValue()); + // remove the header in order to parse only the result + output.remove(0); + // store the result of the submit query operation + updateSubmitQueryResultMap(UID, output); + // remove job with the specified uid + removeJob(UID); } - - // System.out.println("output size submit: " + - // output.size()); - // logger.info("build the result - finished"); - - // get the attributes list for the result table - listAttributes = new ArrayList(); - listAttributes = getListAttributes(output.get(0).getValue()); - // remove the header in order to parse only the result - output.remove(0); - // store the result of the submit query operation - updateSubmitQueryResultMap(UID, output); - // remove job with the specified uid - removeJob(UID); + } else { + listAttributes = null; } - } else { - listAttributes = null; + } catch (Exception e) { + // e.printStackTrace(); + // TODO TO REMOVE. Exception Statistical management to remove a + // computation + if (e.getMessage() + .contains( + "javax.xml.ws.soap.SOAPFaultException: java.lang.IndexOutOfBoundsException")) { + e = new Exception("ServerException"); + } + throw e; } - } catch (Exception e) { - // e.printStackTrace(); - // TODO TO REMOVE. 
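Note on the submit-query refactor in this hunk: the algorithm id is now fixed to "SUBMITQUERY" and the six input parameters are assembled locally, so the servlet no longer asks the Statistical Manager for the algorithm list or its parameter templates on every request. The fragment below is a condensed, non-authoritative sketch of that setup, reusing the members already present in GWTdbManagerServiceImpl (Parameter, ComputationOutput, startComputation, checkJob) and the submitQuery arguments dataDB, valueReadOnlyQuery, smartCorrectionQuery, language, query and UID; generic types and the null guard in the catch block are additions of the sketch, not of the patch.

    // Fragment of submitQuery(...): fixed algorithm id, parameters built in place.
    String algorithmId = "SUBMITQUERY";

    List<Parameter> inputParameters = new ArrayList<Parameter>();
    inputParameters.add(new Parameter("ResourceName", "", "String", ""));
    inputParameters.add(new Parameter("DatabaseName", "", "String", ""));
    inputParameters.add(new Parameter("Read-Only Query", "", "Boolean", "true"));
    inputParameters.add(new Parameter("Apply Smart Correction", "", "Boolean", "true"));
    inputParameters.add(new Parameter("Language", "", "NONE", "NONE"));
    inputParameters.add(new Parameter("Query", "", "String", ""));

    inputParameters.get(0).setValue(dataDB.get("ResourceName"));
    inputParameters.get(1).setValue(dataDB.get("DatabaseName"));
    inputParameters.get(2).setValue(String.valueOf(valueReadOnlyQuery));
    inputParameters.get(3).setValue(String.valueOf(smartCorrectionQuery));
    inputParameters.get(4).setValue(language);
    inputParameters.get(5).setValue(query);

    ComputationOutput outputData = new ComputationOutput();
    try {
        startComputation(algorithmId, inputParameters, outputData, UID);
        // ... when checkJob(UID) succeeds, the Result rows and the attribute
        // list are built exactly as in the hunk above ...
    } catch (Exception e) {
        // a known Statistical Manager fault is normalised before being rethrown;
        // the null check is added only in this sketch
        if (e.getMessage() != null
                && e.getMessage().contains("java.lang.IndexOutOfBoundsException")) {
            e = new Exception("ServerException");
        }
        throw e;
    }
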
Exception Statistical management to remove a - // computation - if (e.getMessage() - .contains( - "javax.xml.ws.soap.SOAPFaultException: java.lang.IndexOutOfBoundsException")) { - e = new Exception("ServerException"); - } - throw e; - } + + +// } + +// inputParameters.get(0).setValue(dataDB.get("ResourceName")); +// inputParameters.get(1).setValue(dataDB.get("DatabaseName")); +// inputParameters.get(2).setValue(String.valueOf(valueReadOnlyQuery)); +// inputParameters.get(3).setValue(String.valueOf(smartCorrectionQuery)); +// inputParameters.get(4).setValue(language); +// inputParameters.get(5).setValue(query); + + return listAttributes; } @@ -981,69 +1061,111 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements // output sample result List output = new ArrayList(); - // get list of algorithms - List algorithms = new ArrayList(); - algorithms = getDatabaseManagerAlgorithms(); - - // get algorithmId - String algorithmId = null; - - for (int i = 0; i < algorithms.size(); i++) { - if (algorithms.get(i).equals("SAMPLEONTABLE")) { - algorithmId = algorithms.get(i); - // print check - // logger.info("algorithmId: " + algorithmId); - } - } - - // get input parameters of the algorithm specified by id - inputParameters = getParameters(algorithmId); - - if (inputParameters.size() != 0) { - // print check - logger.info("dbmanager-> algorithm input parameters retrieved"); - } +// // get list of algorithms +// List algorithms = new ArrayList(); +// algorithms = getDatabaseManagerAlgorithms(); +// +// // get algorithmId +//// String algorithmId = null; +// +// for (int i = 0; i < algorithms.size(); i++) { +// if (algorithms.get(i).equals("SAMPLEONTABLE")) { +// algorithmId = algorithms.get(i); +// // print check +// // logger.info("algorithmId: " + algorithmId); +// } +// } +// +// // get input parameters of the algorithm specified by id +// inputParameters = getParameters(algorithmId); +// +// if (inputParameters.size() != 0) { +// // print check +// logger.info("dbmanager-> algorithm input parameters retrieved"); +// } // print check // for (int i = 0; i < inputParameters.size(); i++) { // logger.info(inputParameters.get(i).getName()); // } + + String algorithmId = "SAMPLEONTABLE"; + + //print check + String rs= dataInput.get("ResourceName"); + String db= dataInput.get("DatabaseName"); + String scm= dataInput.get("SchemaName"); + String tab= dataInput.get("TableName"); + + //print check + logger.info("dbmanager-> ResourceName: " + + rs); + logger.info("dbmanager-> DatabaseName: " + + db); + logger.info("dbmanager-> SchemaName: " + + scm); + logger.info("dbmanager-> TableName: " + + tab); + +// if ((!rs.equals(""))&&(!db.equals(""))&&(!scm.equals(""))&&(!tab.equals(""))){ + + //set input parameters + Parameter resource = new Parameter("ResourceName", "", "String", ""); + Parameter database = new Parameter("DatabaseName", "", "String", ""); + Parameter schema = new Parameter("SchemaName","", "String", ""); + Parameter table = new Parameter("TableName","", "String", ""); + inputParameters.add(resource); + inputParameters.add(database); + inputParameters.add(schema); + inputParameters.add(table); + + inputParameters.get(0).setValue(rs); + inputParameters.get(1).setValue(db); + inputParameters.get(2).setValue(scm); + inputParameters.get(3).setValue(tab); + + + // create data structure + ComputationOutput outputData = new ComputationOutput(); + // computation id + String computationId = startComputation(algorithmId, inputParameters, + outputData); - // print check - 
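The SAMPLEONTABLE method refactored just above and the SMARTSAMPLEONTABLE, RANDOMSAMPLEONTABLE and GETTABLEDETAILS hunks that follow all apply the same change: the algorithm id is hard-coded and the four ResourceName/DatabaseName/SchemaName/TableName parameters are rebuilt inline instead of being fetched through getDatabaseManagerAlgorithms() and getParameters(). The patch repeats that setup in each method; purely as an illustration of the shared pattern (this helper does not exist in the patch, and the generic types are spelled out only for the sketch), it could be factored as:

    // Hypothetical helper, not part of the patch: the table-scoped input
    // parameters that the sampling and table-details methods all build inline.
    // Parameter constructor order (name, description, type, default) follows the diff.
    private List<Parameter> buildTableParameters(LinkedHashMap<String, String> dataInput) {
        List<Parameter> inputParameters = new ArrayList<Parameter>();
        inputParameters.add(new Parameter("ResourceName", "", "String", ""));
        inputParameters.add(new Parameter("DatabaseName", "", "String", ""));
        inputParameters.add(new Parameter("SchemaName", "", "String", ""));
        inputParameters.add(new Parameter("TableName", "", "String", ""));

        inputParameters.get(0).setValue(dataInput.get("ResourceName"));
        inputParameters.get(1).setValue(dataInput.get("DatabaseName"));
        inputParameters.get(2).setValue(dataInput.get("SchemaName"));
        inputParameters.get(3).setValue(dataInput.get("TableName"));
        return inputParameters;
    }

With such a helper the remaining differences between these methods would be the hard-coded algorithm id and how each one maps the ComputationOutput back to Result or FileModel objects.
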
logger.info("dbmanager-> ResourceName: " - + dataInput.get("ResourceName")); - logger.info("dbmanager-> DatabaseName: " - + dataInput.get("DatabaseName")); - logger.info("dbmanager-> SchemaName: " + dataInput.get("SchemaName")); - logger.info("dbmanager-> TableName: " + dataInput.get("TableName")); + // print check on retrieving data + // logger.info("output data retrieved"); - inputParameters.get(0).setValue(dataInput.get("ResourceName")); - inputParameters.get(1).setValue(dataInput.get("DatabaseName")); - inputParameters.get(2).setValue(dataInput.get("SchemaName")); - inputParameters.get(3).setValue(dataInput.get("TableName")); + // data output values + LinkedHashMap mapValues = new LinkedHashMap(); + // data output keys + LinkedHashMap mapKeys = new LinkedHashMap(); - // create data structure - ComputationOutput outputData = new ComputationOutput(); - // computation id - String computationId = startComputation(algorithmId, inputParameters, - outputData); + mapValues = outputData.getMapValues(); + mapKeys = outputData.getmapKeys(); - // print check on retrieving data - // logger.info("output data retrieved"); + for (int i = 0; i < mapValues.size(); i++) { + Result row = new Result(mapKeys.get(String.valueOf(i)), + mapValues.get(String.valueOf(i))); + output.add(row); + } + + +// } - // data output values - LinkedHashMap mapValues = new LinkedHashMap(); - // data output keys - LinkedHashMap mapKeys = new LinkedHashMap(); + +// // print check +// logger.info("dbmanager-> ResourceName: " +// + dataInput.get("ResourceName")); +// logger.info("dbmanager-> DatabaseName: " +// + dataInput.get("DatabaseName")); +// logger.info("dbmanager-> SchemaName: " + dataInput.get("SchemaName")); +// logger.info("dbmanager-> TableName: " + dataInput.get("TableName")); - mapValues = outputData.getMapValues(); - mapKeys = outputData.getmapKeys(); +// inputParameters.get(0).setValue(dataInput.get("ResourceName")); +// inputParameters.get(1).setValue(dataInput.get("DatabaseName")); +// inputParameters.get(2).setValue(dataInput.get("SchemaName")); +// inputParameters.get(3).setValue(dataInput.get("TableName")); - for (int i = 0; i < mapValues.size(); i++) { - Result row = new Result(mapKeys.get(String.valueOf(i)), - mapValues.get(String.valueOf(i))); - output.add(row); - } + return output; } @@ -1055,69 +1177,130 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements // output sample result List output = new ArrayList(); - // get list of algorithms - List algorithms = new ArrayList(); - algorithms = getDatabaseManagerAlgorithms(); - - // get algorithmId - String algorithmId = null; - - for (int i = 0; i < algorithms.size(); i++) { - if (algorithms.get(i).equals("SMARTSAMPLEONTABLE")) { - algorithmId = algorithms.get(i); - // print check - // logger.info("algorithmId: " + algorithmId); - } - } - - // get input parameters of the algorithm - inputParameters = getParameters(algorithmId); - - if (inputParameters.size() != 0) { - // print check - logger.info("dbmanager-> algorithm input parameters retrieved"); - } +// // get list of algorithms +// List algorithms = new ArrayList(); +// algorithms = getDatabaseManagerAlgorithms(); +// +// // get algorithmId +//// String algorithmId = null; +// +// for (int i = 0; i < algorithms.size(); i++) { +// if (algorithms.get(i).equals("SMARTSAMPLEONTABLE")) { +// algorithmId = algorithms.get(i); +// // print check +// // logger.info("algorithmId: " + algorithmId); +// } +// } +// +// // get input parameters of the algorithm +// inputParameters = 
getParameters(algorithmId); +// +// if (inputParameters.size() != 0) { +// // print check +// logger.info("dbmanager-> algorithm input parameters retrieved"); +// } // print check // for (int i = 0; i < inputParameters.size(); i++) { // logger.info(inputParameters.get(i).getName()); // } - - // print check + + String algorithmId = "SMARTSAMPLEONTABLE"; + + //print check + String rs= dataInput.get("ResourceName"); + String db= dataInput.get("DatabaseName"); + String scm= dataInput.get("SchemaName"); + String tab= dataInput.get("TableName"); + + //print check logger.info("dbmanager-> ResourceName: " - + dataInput.get("ResourceName")); + + rs); logger.info("dbmanager-> DatabaseName: " - + dataInput.get("DatabaseName")); - logger.info("dbmanager-> SchemaName: " + dataInput.get("SchemaName")); - logger.info("dbmanager-> TableName: " + dataInput.get("TableName")); + + db); + logger.info("dbmanager-> SchemaName: " + + scm); + logger.info("dbmanager-> TableName: " + + tab); + +// if ((!rs.equals(""))&&(!db.equals(""))&&(!scm.equals(""))&&(!tab.equals(""))){ + + //set input parameters + Parameter resource = new Parameter("ResourceName", "", "String", ""); + Parameter database = new Parameter("DatabaseName", "", "String", ""); + Parameter schema = new Parameter("SchemaName","", "String", ""); + Parameter table = new Parameter("TableName","", "String", ""); + inputParameters.add(resource); + inputParameters.add(database); + inputParameters.add(schema); + inputParameters.add(table); + + inputParameters.get(0).setValue(rs); + inputParameters.get(1).setValue(db); + inputParameters.get(2).setValue(scm); + inputParameters.get(3).setValue(tab); + + // create data structure + ComputationOutput outputData = new ComputationOutput(); + // computation id + String computationId = startComputation(algorithmId, inputParameters, + outputData); - inputParameters.get(0).setValue(dataInput.get("ResourceName")); - inputParameters.get(1).setValue(dataInput.get("DatabaseName")); - inputParameters.get(2).setValue(dataInput.get("SchemaName")); - inputParameters.get(3).setValue(dataInput.get("TableName")); + // print check on retrieving data + // logger.info("dbmanager-> output data retrieved"); - // create data structure - ComputationOutput outputData = new ComputationOutput(); - // computation id - String computationId = startComputation(algorithmId, inputParameters, - outputData); + // data output values + LinkedHashMap mapValues = new LinkedHashMap(); + // data output keys + LinkedHashMap mapKeys = new LinkedHashMap(); - // print check on retrieving data - // logger.info("dbmanager-> output data retrieved"); + mapValues = outputData.getMapValues(); + mapKeys = outputData.getmapKeys(); - // data output values - LinkedHashMap mapValues = new LinkedHashMap(); - // data output keys - LinkedHashMap mapKeys = new LinkedHashMap(); + for (int i = 0; i < mapValues.size(); i++) { + Result row = new Result(mapKeys.get(String.valueOf(i)), + mapValues.get(String.valueOf(i))); + output.add(row); + } + +// } + - mapValues = outputData.getMapValues(); - mapKeys = outputData.getmapKeys(); +// // print check +// logger.info("dbmanager-> ResourceName: " +// + dataInput.get("ResourceName")); +// logger.info("dbmanager-> DatabaseName: " +// + dataInput.get("DatabaseName")); +// logger.info("dbmanager-> SchemaName: " + dataInput.get("SchemaName")); +// logger.info("dbmanager-> TableName: " + dataInput.get("TableName")); +// +// inputParameters.get(0).setValue(dataInput.get("ResourceName")); +// 
inputParameters.get(1).setValue(dataInput.get("DatabaseName")); +// inputParameters.get(2).setValue(dataInput.get("SchemaName")); +// inputParameters.get(3).setValue(dataInput.get("TableName")); - for (int i = 0; i < mapValues.size(); i++) { - Result row = new Result(mapKeys.get(String.valueOf(i)), - mapValues.get(String.valueOf(i))); - output.add(row); - } +// // create data structure +// ComputationOutput outputData = new ComputationOutput(); +// // computation id +// String computationId = startComputation(algorithmId, inputParameters, +// outputData); +// +// // print check on retrieving data +// // logger.info("dbmanager-> output data retrieved"); +// +// // data output values +// LinkedHashMap mapValues = new LinkedHashMap(); +// // data output keys +// LinkedHashMap mapKeys = new LinkedHashMap(); +// +// mapValues = outputData.getMapValues(); +// mapKeys = outputData.getmapKeys(); +// +// for (int i = 0; i < mapValues.size(); i++) { +// Result row = new Result(mapKeys.get(String.valueOf(i)), +// mapValues.get(String.valueOf(i))); +// output.add(row); +// } return output; } @@ -1130,69 +1313,130 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements // output sample result List output = new ArrayList(); - // get list of algorithms - List algorithms = new ArrayList(); - algorithms = getDatabaseManagerAlgorithms(); - - // get algorithmId - String algorithmId = null; - - for (int i = 0; i < algorithms.size(); i++) { - if (algorithms.get(i).equals("RANDOMSAMPLEONTABLE")) { - algorithmId = algorithms.get(i); - // print check - // logger.info("algorithmId: " + algorithmId); - } - } - - // get input parameters of the algorithm specified by id - inputParameters = getParameters(algorithmId); - - if (inputParameters.size() != 0) { - // print check - logger.info("dbmanager-> algorithm input parameters retrieved"); - } +// // get list of algorithms +// List algorithms = new ArrayList(); +// algorithms = getDatabaseManagerAlgorithms(); +// +// // get algorithmId +//// String algorithmId = null; +// +// for (int i = 0; i < algorithms.size(); i++) { +// if (algorithms.get(i).equals("RANDOMSAMPLEONTABLE")) { +// algorithmId = algorithms.get(i); +// // print check +// // logger.info("algorithmId: " + algorithmId); +// } +// } +// +// // get input parameters of the algorithm specified by id +// inputParameters = getParameters(algorithmId); +// +// if (inputParameters.size() != 0) { +// // print check +// logger.info("dbmanager-> algorithm input parameters retrieved"); +// } // print check algorithm input parameters // for (int i = 0; i < inputParameters.size(); i++) { // logger.info(inputParameters.get(i).getName()); // } - - // print check + + String algorithmId = "RANDOMSAMPLEONTABLE"; + + //print check + String rs= dataInput.get("ResourceName"); + String db= dataInput.get("DatabaseName"); + String scm= dataInput.get("SchemaName"); + String tab= dataInput.get("TableName"); + + //print check logger.info("dbmanager-> ResourceName: " - + dataInput.get("ResourceName")); + + rs); logger.info("dbmanager-> DatabaseName: " - + dataInput.get("DatabaseName")); - logger.info("dbmanager-> SchemaName: " + dataInput.get("SchemaName")); - logger.info("dbmanager-> TableName: " + dataInput.get("TableName")); + + db); + logger.info("dbmanager-> SchemaName: " + + scm); + logger.info("dbmanager-> TableName: " + + tab); + + +// if ((!rs.equals(""))&&(!db.equals(""))&&(!scm.equals(""))&&(!tab.equals(""))){ + + //set input parameters + Parameter resource = new Parameter("ResourceName", "", 
"String", ""); + Parameter database = new Parameter("DatabaseName", "", "String", ""); + Parameter schema = new Parameter("SchemaName","", "String", ""); + Parameter table = new Parameter("TableName","", "String", ""); + inputParameters.add(resource); + inputParameters.add(database); + inputParameters.add(schema); + inputParameters.add(table); + + inputParameters.get(0).setValue(rs); + inputParameters.get(1).setValue(db); + inputParameters.get(2).setValue(scm); + inputParameters.get(3).setValue(tab); + + // create data structure + ComputationOutput outputData = new ComputationOutput(); + // computation id + String computationId = startComputation(algorithmId, inputParameters, + outputData); - inputParameters.get(0).setValue(dataInput.get("ResourceName")); - inputParameters.get(1).setValue(dataInput.get("DatabaseName")); - inputParameters.get(2).setValue(dataInput.get("SchemaName")); - inputParameters.get(3).setValue(dataInput.get("TableName")); + // print check on retrieving data + // logger.info("dbmanager-> output data retrieved"); - // create data structure - ComputationOutput outputData = new ComputationOutput(); - // computation id - String computationId = startComputation(algorithmId, inputParameters, - outputData); + // data output values + LinkedHashMap mapValues = new LinkedHashMap(); + // data output keys + LinkedHashMap mapKeys = new LinkedHashMap(); - // print check on retrieving data - // logger.info("dbmanager-> output data retrieved"); + mapValues = outputData.getMapValues(); + mapKeys = outputData.getmapKeys(); - // data output values - LinkedHashMap mapValues = new LinkedHashMap(); - // data output keys - LinkedHashMap mapKeys = new LinkedHashMap(); + for (int i = 0; i < mapValues.size(); i++) { + Result row = new Result(mapKeys.get(String.valueOf(i)), + mapValues.get(String.valueOf(i))); + output.add(row); + } + +// } - mapValues = outputData.getMapValues(); - mapKeys = outputData.getmapKeys(); +// // print check +// logger.info("dbmanager-> ResourceName: " +// + dataInput.get("ResourceName")); +// logger.info("dbmanager-> DatabaseName: " +// + dataInput.get("DatabaseName")); +// logger.info("dbmanager-> SchemaName: " + dataInput.get("SchemaName")); +// logger.info("dbmanager-> TableName: " + dataInput.get("TableName")); +// +// inputParameters.get(0).setValue(dataInput.get("ResourceName")); +// inputParameters.get(1).setValue(dataInput.get("DatabaseName")); +// inputParameters.get(2).setValue(dataInput.get("SchemaName")); +// inputParameters.get(3).setValue(dataInput.get("TableName")); - for (int i = 0; i < mapValues.size(); i++) { - Result row = new Result(mapKeys.get(String.valueOf(i)), - mapValues.get(String.valueOf(i))); - output.add(row); - } +// // create data structure +// ComputationOutput outputData = new ComputationOutput(); +// // computation id +// String computationId = startComputation(algorithmId, inputParameters, +// outputData); +// +// // print check on retrieving data +// // logger.info("dbmanager-> output data retrieved"); +// +// // data output values +// LinkedHashMap mapValues = new LinkedHashMap(); +// // data output keys +// LinkedHashMap mapKeys = new LinkedHashMap(); +// +// mapValues = outputData.getMapValues(); +// mapKeys = outputData.getmapKeys(); +// +// for (int i = 0; i < mapValues.size(); i++) { +// Result row = new Result(mapKeys.get(String.valueOf(i)), +// mapValues.get(String.valueOf(i))); +// output.add(row); +// } return output; } @@ -1204,72 +1448,136 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements 
// data ouptut LinkedHashMap outputParameters = new LinkedHashMap(); - // get list of algorithms - List algorithms = new ArrayList(); - algorithms = getDatabaseManagerAlgorithms(); - - // get algorithmId - String algorithmId = null; - - for (int i = 0; i < algorithms.size(); i++) { - if (algorithms.get(i).equals("GETTABLEDETAILS")) { - algorithmId = algorithms.get(i); - // print check - // logger.info("algorithmId: " + algorithmId); - } - } - - // get input parameters of the algorithm specified by id - inputParameters = getParameters(algorithmId); - - if (inputParameters.size() != 0) { - // print check - logger.info("dbmanager-> algorithm input parameters retrieved"); - } +// // get list of algorithms +// List algorithms = new ArrayList(); +// algorithms = getDatabaseManagerAlgorithms(); +// +// // get algorithmId +// String algorithmId = null; +// +// for (int i = 0; i < algorithms.size(); i++) { +// if (algorithms.get(i).equals("GETTABLEDETAILS")) { +// algorithmId = algorithms.get(i); +// // print check +// // logger.info("algorithmId: " + algorithmId); +// } +// } +// +// // get input parameters of the algorithm specified by id +// inputParameters = getParameters(algorithmId); +// +// if (inputParameters.size() != 0) { +// // print check +// logger.info("dbmanager-> algorithm input parameters retrieved"); +// } // print check algorithm input parameters // for (int i = 0; i < inputParameters.size(); i++) { // logger.info(inputParameters.get(i).getName()); // } - - // print check + String algorithmId = "GETTABLEDETAILS"; + + //print check + String rs= dataInput.get("ResourceName"); + String db= dataInput.get("DatabaseName"); + String scm= dataInput.get("SchemaName"); + String tab= dataInput.get("TableName"); + + //print check logger.info("dbmanager-> ResourceName: " - + dataInput.get("ResourceName")); + + rs); logger.info("dbmanager-> DatabaseName: " - + dataInput.get("DatabaseName")); - logger.info("dbmanager-> SchemaName: " + dataInput.get("SchemaName")); - logger.info("dbmanager-> TableName: " + dataInput.get("TableName")); + + db); + logger.info("dbmanager-> SchemaName: " + + scm); + logger.info("dbmanager-> TableName: " + + tab); + + +// if ((!rs.equals(""))&&(!db.equals(""))&&(!scm.equals(""))&&(!tab.equals(""))){ + + //set input parameters + Parameter resource = new Parameter("ResourceName", "", "String", ""); + Parameter database = new Parameter("DatabaseName", "", "String", ""); + Parameter schema = new Parameter("SchemaName","", "String", ""); + Parameter table = new Parameter("TableName","", "String", ""); + inputParameters.add(resource); + inputParameters.add(database); + inputParameters.add(schema); + inputParameters.add(table); + + inputParameters.get(0).setValue(rs); + inputParameters.get(1).setValue(db); + inputParameters.get(2).setValue(scm); + inputParameters.get(3).setValue(tab); + + + // create data structure + ComputationOutput outputData = new ComputationOutput(); + // computation id + String computationId = startComputation(algorithmId, inputParameters, + outputData); - inputParameters.get(0).setValue(dataInput.get("ResourceName")); - inputParameters.get(1).setValue(dataInput.get("DatabaseName")); - inputParameters.get(2).setValue(dataInput.get("SchemaName")); - inputParameters.get(3).setValue(dataInput.get("TableName")); + // print check on retrieving data + // logger.info("output data retrieved"); - // create data structure - ComputationOutput outputData = new ComputationOutput(); - // computation id - String computationId = startComputation(algorithmId, 
inputParameters, - outputData); + // output data values + LinkedHashMap mapValues = new LinkedHashMap(); + // output data keys + LinkedHashMap mapKeys = new LinkedHashMap(); - // print check on retrieving data - // logger.info("output data retrieved"); + mapValues = outputData.getMapValues(); + mapKeys = outputData.getmapKeys(); - // output data values - LinkedHashMap mapValues = new LinkedHashMap(); - // output data keys - LinkedHashMap mapKeys = new LinkedHashMap(); + for (int i = 0; i < mapValues.size(); i++) { + FileModel obj = new FileModel(mapValues.get(String.valueOf(i))); + // obj.setIsLoaded(true); + outputParameters.put(mapKeys.get(String.valueOf(i)), obj); + // print check + // logger.info("value: " + outputMap.get(String.valueOf(i))); + // logger.info("key: " + outputKey.get(String.valueOf(i))); + } + +// } + +// // print check +// logger.info("dbmanager-> ResourceName: " +// + dataInput.get("ResourceName")); +// logger.info("dbmanager-> DatabaseName: " +// + dataInput.get("DatabaseName")); +// logger.info("dbmanager-> SchemaName: " + dataInput.get("SchemaName")); +// logger.info("dbmanager-> TableName: " + dataInput.get("TableName")); +// +// inputParameters.get(0).setValue(dataInput.get("ResourceName")); +// inputParameters.get(1).setValue(dataInput.get("DatabaseName")); +// inputParameters.get(2).setValue(dataInput.get("SchemaName")); +// inputParameters.get(3).setValue(dataInput.get("TableName")); - mapValues = outputData.getMapValues(); - mapKeys = outputData.getmapKeys(); - - for (int i = 0; i < mapValues.size(); i++) { - FileModel obj = new FileModel(mapValues.get(String.valueOf(i))); - // obj.setIsLoaded(true); - outputParameters.put(mapKeys.get(String.valueOf(i)), obj); - // print check - // logger.info("value: " + outputMap.get(String.valueOf(i))); - // logger.info("key: " + outputKey.get(String.valueOf(i))); - } +// // create data structure +// ComputationOutput outputData = new ComputationOutput(); +// // computation id +// String computationId = startComputation(algorithmId, inputParameters, +// outputData); +// +// // print check on retrieving data +// // logger.info("output data retrieved"); +// +// // output data values +// LinkedHashMap mapValues = new LinkedHashMap(); +// // output data keys +// LinkedHashMap mapKeys = new LinkedHashMap(); +// +// mapValues = outputData.getMapValues(); +// mapKeys = outputData.getmapKeys(); +// +// for (int i = 0; i < mapValues.size(); i++) { +// FileModel obj = new FileModel(mapValues.get(String.valueOf(i))); +// // obj.setIsLoaded(true); +// outputParameters.put(mapKeys.get(String.valueOf(i)), obj); +// // print check +// // logger.info("value: " + outputMap.get(String.valueOf(i))); +// // logger.info("key: " + outputKey.get(String.valueOf(i))); +// } return outputParameters; } @@ -1451,11 +1759,11 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements String objectType = smType.values().get(0); // print CHECK - // logger.info("parameters: "); - // logger.info(paramName); - // logger.info(paramDescription); - // logger.info(objectType); - // logger.info(defaultValue); + logger.info("parameters: "); + logger.info(paramName); + logger.info(paramDescription); + logger.info(objectType); + logger.info(defaultValue); Parameter objectParam = new Parameter(paramName, paramDescription, objectType, defaultValue); @@ -1898,6 +2206,7 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements private synchronized void insertDataIntoCache(net.sf.ehcache.Element data) { employeeCache.put(data); 
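These synchronized helpers back the read-through caching added to getSchema earlier in this file: the cache key is simply ResourceName + DatabaseName, a hit short-circuits the computation, and a non-empty result is written back for the next request. A condensed sketch of that flow follows; it is a fragment of getSchema reusing its locals (outputParameters, inputParameters, algorithmId) and the employeeCache-based helpers, with generics and the empty-result guard spelled out only for readability.

    // Read-through cache for the schema list, keyed by resource + database.
    String key = inputParameters.get(0).getValue()
            + inputParameters.get(1).getValue();

    net.sf.ehcache.Element cached = getDataFromCache(key);
    if (cached != null && cached.getObjectValue() != null) {
        // cache hit: reuse the FileModel list already computed for this key
        outputParameters = (List<FileModel>) cached.getObjectValue();
    } else {
        // cache miss: run the computation and materialise the schema names
        ComputationOutput outputData = new ComputationOutput();
        startComputation(algorithmId, inputParameters, outputData);

        LinkedHashMap<String, String> mapValues = outputData.getMapValues();
        for (int i = 0; i < mapValues.size(); i++) {
            outputParameters.add(new FileModel(mapValues.get(String.valueOf(i))));
        }
        // only cache non-empty results, so a failed computation is not pinned
        if (!outputParameters.isEmpty()) {
            insertDataIntoCache(new net.sf.ehcache.Element(key, outputParameters));
        }
    }
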
+// System.out.println("data inserted"); } private synchronized net.sf.ehcache.Element getDataFromCache(String key) { @@ -1906,13 +2215,76 @@ public class GWTdbManagerServiceImpl extends RemoteServiceServlet implements } // clear the cache on the user request - public void refreshDataTree() throws Exception { - refreshCache(); + public GeneralOutputFromServlet refreshDataTree(String ElementType, + LinkedHashMap inputData, FileModel element) throws Exception { + + // // call the method related to the element selected + // + String resourceName = ""; + String databaseName = ""; + + GeneralOutputFromServlet result = null; + + if (inputData != null && inputData.size() != 0) { +// System.out.println("server-> input size not null"); + String key = ""; + + if (!ElementType.equals("")) { +// System.out.println("server-> element type not null"); + switch (ElementType) { + case "listResources": + key = inputData.get("listResources"); + break; + case "resource": + key = inputData.get("ResourceName"); + break; + case "database": + key = inputData.get("ResourceName") + + inputData.get("DatabaseName"); + break; + } + } + + +// System.out.println("server->KEY: " + key); + net.sf.ehcache.Element dataFromCache = getDataFromCache(key); + + if (dataFromCache != null) { +// System.out.println("server-> data in cache with key: " + key); + refreshCache(key); +// System.out.println("server-> data removed from cache with key: " + key); + + if (!ElementType.equals("")) { +// System.out.println("server-> element type not null"); + switch (ElementType) { + case "listResources": + List output1 = getResource(); + result = new GeneralOutputFromServlet(output1); +// System.out.println("server-> output generated"); + break; + case "resource": + resourceName = inputData.get("ResourceName"); + LinkedHashMap output2 = getDBInfo(resourceName); + result = new GeneralOutputFromServlet(output2); + break; + case "database": + resourceName = inputData.get("ResourceName"); + databaseName = inputData.get("DatabaseName"); + List output3 = getDBSchema(inputData); + result = new GeneralOutputFromServlet(output3); + break; + } + } + + } + + } + return result; } - private synchronized void refreshCache() throws Exception { + private synchronized void refreshCache(String key) throws Exception { try { - employeeCache.removeAll(); + employeeCache.remove(key); } catch (Exception e) { // TODO: handle exception throw new Exception("Failure to clear the cache. " + e); diff --git a/src/main/resources/encache.xml b/src/main/resources/encache.xml index a7e3e49..b931a1d 100644 --- a/src/main/resources/encache.xml +++ b/src/main/resources/encache.xml @@ -3,7 +3,7 @@ xsi:noNamespaceSchemaLocation="ehcache.xsd" updateCheck="true" monitoring="autodetect" dynamicConfig="true"> - +
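
To summarise the new refreshDataTree entry point above: instead of wiping the whole cache, it reconstructs the cache key of the selected tree node, evicts only that entry through refreshCache(key) (now employeeCache.remove(key) rather than removeAll()), and immediately reloads the data so the client receives it wrapped in a GeneralOutputFromServlet. The sketch below is a trimmed rendering of the method body, assuming the getResource/getDBInfo/getDBSchema helpers already defined in this servlet; the parameter is called ElementType in the patch and inputData is treated as a LinkedHashMap<String, String>, both simplified here only for the sketch.

    // Per-node cache refresh: rebuild the key used when the node was first loaded.
    String key = null;
    if (elementType.equals("listResources")) {
        key = inputData.get("listResources");                                  // tree root
    } else if (elementType.equals("resource")) {
        key = inputData.get("ResourceName");                                   // databases of a resource
    } else if (elementType.equals("database")) {
        key = inputData.get("ResourceName") + inputData.get("DatabaseName");   // schemas of a database
    }

    GeneralOutputFromServlet result = null;
    if (key != null && getDataFromCache(key) != null) {
        refreshCache(key); // evict only this entry (employeeCache.remove(key))

        if (elementType.equals("listResources")) {
            result = new GeneralOutputFromServlet(getResource());
        } else if (elementType.equals("resource")) {
            result = new GeneralOutputFromServlet(getDBInfo(inputData.get("ResourceName")));
        } else if (elementType.equals("database")) {
            result = new GeneralOutputFromServlet(getDBSchema(inputData));
        }
    }
    return result;

Evicting a single key keeps the other cached branches of the tree warm, which is the point of moving from removeAll() to a per-key remove.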