diff --git a/.classpath b/.classpath
index e7f33f3..404bed0 100644
--- a/.classpath
+++ b/.classpath
@@ -1,13 +1,13 @@
-
+
-
+
@@ -41,5 +41,5 @@
-
+
diff --git a/.settings/com.gwtplugins.gdt.eclipse.core.prefs b/.settings/com.gwtplugins.gdt.eclipse.core.prefs
index 680c947..f0559bd 100644
--- a/.settings/com.gwtplugins.gdt.eclipse.core.prefs
+++ b/.settings/com.gwtplugins.gdt.eclipse.core.prefs
@@ -1,4 +1,4 @@
eclipse.preferences.version=1
-lastWarOutDir=/home/francesco/git/species-discovery/target/species-discovery-3.10.0
+lastWarOutDir=/home/francescomangiacrapa/git/species-discovery/target/species-discovery-3.11.0-SNAPSHOT
warSrcDir=src/main/webapp
warSrcDirIsOutput=false
diff --git a/.settings/org.eclipse.wst.common.component b/.settings/org.eclipse.wst.common.component
index e80dfae..4fa362a 100644
--- a/.settings/org.eclipse.wst.common.component
+++ b/.settings/org.eclipse.wst.common.component
@@ -1,10 +1,19 @@
-
+
+
+
-
-
+
+
+
+
+
-
+
+
+
-
+
+
+
diff --git a/changelog.md b/CHANGELOG.md
similarity index 63%
rename from changelog.md
rename to CHANGELOG.md
index e9ec1d7..d5cbe14 100644
--- a/changelog.md
+++ b/CHANGELOG.md
@@ -3,37 +3,40 @@
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
-## [v3-10-0] [r4-23-0] - 2020-05-22
+## [v3.11.0-SNAPSHOT] - 27-10-2021
-[#19221] Ported to git/jenkins
+- [#21969] Removed HL dependency
+- Ported to maven-portal-bom v3.6.3
+- Ported to workspace-explorer v2.0.0
+- Ported to geonetwork [3.4.5,4.0.0-SNAPSHOT)
+- Ported to storagehub-client-wrapper [1.0.0, 2.0.0-SNAPSHOT)
+- Ported to spd-client-library [4.1.0-SNAPSHOT, 5.0.0-SNAPSHOT)
-**Fixes**
+## [v3.10.0] - 22-05-2020
-[#19312] the WorkspaceExplorer window appears behind the Job Window (z-index issue)
+- [#19221] Migrated to git/jenkins
+#### Fixes
+- [#19312] the WorkspaceExplorer window appears behind the Job Window (z-index issue)
-
-## [3-9-1] - [20-03-2017]
+## [v3.9.1] - 20-03-2017
[Bug #7568] Managed ASL session expiration during polling on SPD jobs
-
-## [3-9-0] - [11-01-2016]
+## [v3.9.0] - 11-01-2016
[Feature #6313] SPD portlet upgrade: porting to spd-client-library 4.0.0
[Task #7001] Create Gis Layer via job
-
-## [3-8-1] - [15-09-2016]
+## [v3.8.1] - 15-09-2016
Removed Gis -viewer dependency
-
-## [3-8-0] - [15-05-2015]
+## [v3.8.0] - 15-05-2015
Upgraded to gwt 2.6.1
@@ -42,86 +45,73 @@ Integrated with Workspace Explorer
Updated to Liferay 6.2
-
-## [3-7-2] - [15-05-2015]
+## [v3.7.2] - 15-05-2015
Changed dateformat at dd-MM-yyyy
-
-## [3-7-1] - [09-12-2014]
+## [v3.7.1] - 09-12-2014
Removed openlayers dependency from pom
-
-## [3-7-0] - [04-06-2014]
+## [v3.7.0] - 04-06-2014
Updated pom to support new portal configuration (gcube release 3.2)
-
-## [3-6-0] - [23-04-2014]
+## [v3.6.0] - 23-04-2014
Portlet updated to support service updating
-
-## [3-5-0] - [24-10-2013]
+## [v3.5.0] - 24-10-2013
Portlet updated to support GWT 2.5.1
Ticket 2224: was implemented
-
-## [3-4-0] - [06-09-2013]
+## [v3.4.0] - 06-09-2013
Enhancements on GUI of SPD portlet was realized (view last query, new expand button are available)
The functionalities was updated in order to fit the changes in the service client. New advanced options are now available: "Expand with synonyms", "Unfold the taxa group by".
-
-## [3-3-0] - [09-07-2013]
+## [v3.3.0] - 09-07-2013
Bug Fixed: on species classification
-
-## [3-2-0] - [29-05-2013]
+## [v3.2.0] - 29-05-2013
Bug Fixed: #612 (SPD: Error when saving csv file in workspace)
-
-## [3-1-0] - [14-04-2013]
+## [v3.1.0] - 14-04-2013
Synch with SPD service changes
-
-## [3-0-0] - [05-03-2013]
+## [v3.0.0] - 05-03-2013
Ticket #1260: This component has been mavenized
-
-## [2-3-0] - [17-01-2013]
+## [v2.3.0] - 17-01-2013
Ticket #986: Species Occurrence jobs / Re-submit job and "info"
Ticket #1002: SPD portlet persistence refactoring
-
-## [2-2-0] - [30-11-2012]
+## [v2.2.0] - 30-11-2012
Ticket #508: Implementation of requirements resulting from ticket #508
-
-## [2-0-0] - [13-09-2012]
+## [v2.0.0] - 13-09-2012
Ticket #81: Implementation of requirements resulting from user feedback
@@ -130,15 +120,11 @@ Dynamic clustering of result set items
Revised the occurrence points visualization strategy through the GIS Viewer
-
-## [1-0-1] - [23-07-2012]
+## [v1.0.1] - 23-07-2012
Fixed 447: Map generation fails in Species Discovery Portlet
-
-## [1-0-0] - [04-05-2012]
+## [v1.0.0] - 04-05-2012
First release
-
-
diff --git a/SpeciesDiscoveryTest-dev.launch b/SpeciesDiscoveryTest-dev.launch
deleted file mode 100644
index 546b1d2..0000000
--- a/SpeciesDiscoveryTest-dev.launch
+++ /dev/null
@@ -1,22 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/SpeciesDiscoveryTest-prod.launch b/SpeciesDiscoveryTest-prod.launch
deleted file mode 100644
index f4cb15a..0000000
--- a/SpeciesDiscoveryTest-prod.launch
+++ /dev/null
@@ -1,22 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/pom.xml b/pom.xml
index 1996104..9e0ec32 100644
--- a/pom.xml
+++ b/pom.xml
@@ -14,14 +14,14 @@
eclipselink
- http://maven.research-infrastructures.eu:8081/nexus/content/repositories/eclipselink/
+ https://maven.research-infrastructures.eu/nexus/content/repositories/eclipselink/
org.gcube.portlets.user
species-discovery
war
- 3.10.0
+ 3.11.0-SNAPSHOT
gCube Species Discovery
gCube Species Discovery Portlet lets the users discovery species information from the Species Service
@@ -51,7 +51,7 @@
org.gcube.distribution
maven-portal-bom
- 3.6.0
+ 3.6.3
pom
import
@@ -94,7 +94,7 @@
org.gcube.data.spd
spd-client-library
- [4.0.0, 5.0.0-SNAPSHOT)
+ [4.1.0-SNAPSHOT, 5.0.0-SNAPSHOT)
compile
@@ -170,7 +170,7 @@
org.gcube.portlets.widgets
workspace-explorer
- [1.0.0, 2.0.0-SNAPSHOT)
+ [2.0.0, 3.0.0-SNAPSHOT)
compile
@@ -190,7 +190,7 @@
org.gcube.spatial.data
geonetwork
- [3.0.0,4.0.0-SNAPSHOT)
+ [3.4.5,4.0.0-SNAPSHOT)
@@ -245,6 +245,7 @@
aslcore
provided
+
org.gcube.portal
social-networking-library
@@ -252,15 +253,29 @@
+
+
+
+
+
+
+
+
+
+
+
+
- org.gcube.common
- home-library-jcr
+ org.gcube.common.portal
+ portal-manager
provided
+
org.gcube.common
- home-library
- provided
+ storagehub-client-wrapper
+ [1.0.0, 2.0.0-SNAPSHOT)
+ compile
@@ -280,22 +295,18 @@
provided
-
-
-
- log4j
- log4j
-
org.slf4j
slf4j-log4j12
- runtime
+
+ provided
org.slf4j
slf4j-api
- runtime
+
+ provided
diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/TaxonomySearchServiceImpl.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/TaxonomySearchServiceImpl.java
index d6a0028..5acb897 100644
--- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/TaxonomySearchServiceImpl.java
+++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/TaxonomySearchServiceImpl.java
@@ -23,9 +23,7 @@ import javax.persistence.criteria.Predicate;
import org.apache.log4j.Logger;
import org.gcube.application.framework.core.session.ASLSession;
-import org.gcube.common.homelibrary.home.workspace.Workspace;
-import org.gcube.common.homelibrary.home.workspace.WorkspaceFolder;
-import org.gcube.common.homelibrary.util.WorkspaceUtil;
+import org.gcube.common.storagehubwrapper.server.tohl.Workspace;
import org.gcube.data.spd.model.products.OccurrencePoint;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.model.products.TaxonomyItem;
@@ -110,6 +108,7 @@ import net.sf.csv4j.CSVWriter;
/**
* The server side implementation of the RPC service.
+ *
* @author "Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it"
* @author "Federico De Faveri defaveri@isti.cnr.it" -
*/
@@ -134,8 +133,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
*
* @return the ASL session
*/
- protected ASLSession getASLSession()
- {
+ protected ASLSession getASLSession() {
return SessionUtil.getAslSession(this.getThreadLocalRequest().getSession());
}
@@ -145,15 +143,14 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
* @return the species service
* @throws SearchServiceException the search service exception
*/
- protected SpeciesService getSpeciesService() throws SearchServiceException
- {
+ protected SpeciesService getSpeciesService() throws SearchServiceException {
try {
ASLSession session = getASLSession();
return SessionUtil.getService(session);
} catch (Exception e) {
e.printStackTrace();
logger.error("An error occurred when contacting the species service", e);
- // System.out.println("An error occurred retrieving the service" +e);
+ // System.out.println("An error occurred retrieving the service" +e);
throw new SearchServiceException("contacting the species service.");
}
}
@@ -164,35 +161,31 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
* @return the search session
* @throws SearchServiceException the search service exception
*/
- protected FetchingSession extends FetchingElement> getSearchSession() throws SearchServiceException
- {
+ protected FetchingSession extends FetchingElement> getSearchSession() throws SearchServiceException {
ASLSession session = getASLSession();
FetchingSession extends FetchingElement> searchSession = SessionUtil.getCurrentSearchSession(session);
if (searchSession == null) {
- logger.error("No search session found for user "+session.getUsername());
- throw new SearchServiceException("No search session found for user "+session.getUsername());
+ logger.error("No search session found for user " + session.getUsername());
+ throw new SearchServiceException("No search session found for user " + session.getUsername());
}
return searchSession;
}
-
-
/**
* Gets the occurrence session.
*
* @return the occurrence session
* @throws SearchServiceException the search service exception
*/
- protected FetchingSession getOccurrenceSession() throws SearchServiceException
- {
+ protected FetchingSession getOccurrenceSession() throws SearchServiceException {
ASLSession session = getASLSession();
FetchingSession occurrenceSession = SessionUtil.getCurrentOccurrenceSession(session);
if (occurrenceSession == null) {
- logger.error("No occurrence session found for user "+session.getUsername());
- throw new SearchServiceException("No occurrence session found for user "+session.getUsername());
+ logger.error("No occurrence session found for user " + session.getUsername());
+ throw new SearchServiceException("No occurrence session found for user " + session.getUsername());
}
return occurrenceSession;
@@ -203,7 +196,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
*/
@Override
public void searchByScientificName(String searchTerm, SearchFilters searchFilters) throws SearchServiceException {
- logger.info("searchByScientificName searchTerm: "+searchTerm+" searchFilters: "+searchFilters);
+ logger.info("searchByScientificName searchTerm: " + searchTerm + " searchFilters: " + searchFilters);
stopSearch();
search(searchTerm, SearchType.BY_SCIENTIFIC_NAME, searchFilters);
@@ -214,7 +207,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
*/
@Override
public void searchByCommonName(String searchTerm, SearchFilters searchFilters) throws SearchServiceException {
- logger.info("searchByCommonName searchTerm: "+searchTerm+" searchFilters: "+searchFilters);
+ logger.info("searchByCommonName searchTerm: " + searchTerm + " searchFilters: " + searchFilters);
stopSearch();
search(searchTerm, SearchType.BY_COMMON_NAME, searchFilters);
@@ -223,20 +216,21 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
/**
* Search.
*
- * @param searchTerm the search term
- * @param searchType the search type
+ * @param searchTerm the search term
+ * @param searchType the search type
* @param searchFilters the search filters
* @return the search result type
* @throws SearchServiceException the search service exception
*/
- protected SearchResultType search(String searchTerm, SearchType searchType, SearchFilters searchFilters) throws SearchServiceException
- {
+ protected SearchResultType search(String searchTerm, SearchType searchType, SearchFilters searchFilters)
+ throws SearchServiceException {
ASLSession aslSession = getASLSession();
try {
- deleteAllRowIntoDaoTable(); //RESET TABLE
+ deleteAllRowIntoDaoTable(); // RESET TABLE
SpeciesService taxonomyService = getSpeciesService();
- CloseableIterator input = taxonomyService.searchByFilters(searchTerm, searchType, searchFilters);
+ CloseableIterator input = taxonomyService.searchByFilters(searchTerm, searchType,
+ searchFilters);
SessionUtil.setCurrentQuery(aslSession, taxonomyService.getLastQuery());
// System.out.println("returned input stream by service...");
@@ -247,25 +241,36 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
FetchingSessionUtil.createFetchingSession(output, resultType, aslSession);
return resultType;
} catch (Exception e) {
- logger.error("Error starting search "+searchType+" for term \""+searchTerm+"\" with filters "+searchFilters, e);
+ logger.error("Error starting search " + searchType + " for term \"" + searchTerm + "\" with filters "
+ + searchFilters, e);
SessionUtil.setCurrentQuery(aslSession, "invalid query");
throw new SearchServiceException(e.getMessage());
}
}
-
- /* (non-Javadoc)
- * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#searchByQuery(java.lang.String)
+ /**
+ * Search by query.
+ *
+ * @param query the query
+ * @return the search by query parameter
+ * @throws SearchServiceException the search service exception
+ */
+ /*
+ * (non-Javadoc)
+ *
+ * @see
+ * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+ * searchByQuery(java.lang.String)
*/
@Override
public SearchByQueryParameter searchByQuery(String query) throws SearchServiceException {
- logger.info("searchByQuery - query: "+query);
+ logger.info("searchByQuery - query: " + query);
stopSearch();
ASLSession aslSession = getASLSession();
try {
- deleteAllRowIntoDaoTable(); //RESET TABLE
+ deleteAllRowIntoDaoTable(); // RESET TABLE
SpeciesService taxonomyService = getSpeciesService();
CloseableIterator input = taxonomyService.searchByQuery(query);
@@ -273,7 +278,8 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
logger.info("service return iterator searched...");
SearchByQueryParameter queryParameters = QueryUtil.getQueryResultType(query);
- CloseableIterator output = IteratorChainBuilder.buildChain(input, queryParameters.getSearchResultType(), aslSession);
+ CloseableIterator output = IteratorChainBuilder.buildChain(input,
+ queryParameters.getSearchResultType(), aslSession);
FetchingSessionUtil.createFetchingSession(output, queryParameters.getSearchResultType(), aslSession);
logger.info("creating fetching session completed!");
return queryParameters;
@@ -285,13 +291,12 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
}
}
-
/**
* Delete all row into dao table.
*
* @throws Exception the exception
*/
- private void deleteAllRowIntoDaoTable() throws Exception{
+ private void deleteAllRowIntoDaoTable() throws Exception {
logger.info("deleting all row into dao's");
ResultRowPersistence daoResultRow = null;
TaxonRowPersistence daoTaxon = null;
@@ -304,18 +309,18 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
daoTaxonomyRow = DaoSession.getTaxonomyDAO(session);
} catch (Exception e) {
- logger.error("Error in delete all row - getDao's " +e.getMessage(), e);
+ logger.error("Error in delete all row - getDao's " + e.getMessage(), e);
throw new Exception("Error in delete all row- getDao's " + e.getMessage(), e);
}
try {
- if(daoResultRow!=null)
+ if (daoResultRow != null)
daoResultRow.removeAll();
- if(daoTaxon!=null)
+ if (daoTaxon != null)
daoTaxon.removeAll();
- if(daoTaxonomyRow!=null)
+ if (daoTaxonomyRow != null)
daoTaxonomyRow.removeAll();
logger.info("delete all row into Dao's - completed");
@@ -330,9 +335,10 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
*/
@SuppressWarnings("unchecked")
@Override
- public SearchResult getSearchResultRows(int start, int limit, ResultFilter activeFiltersObject, boolean onlySelected) throws SessionExpired, SearchServiceException {
- logger.info("getSearchResultRows start: "+start+" limit: "+limit+" onlySelected: "+onlySelected);
- Long startTime = System.currentTimeMillis();
+ public SearchResult getSearchResultRows(int start, int limit, ResultFilter activeFiltersObject,
+ boolean onlySelected) throws SessionExpired, SearchServiceException {
+ logger.info("getSearchResultRows start: " + start + " limit: " + limit + " onlySelected: " + onlySelected);
+ Long startTime = System.currentTimeMillis();
FetchingSession searchSession = (FetchingSession) getSearchSession();
ArrayList chunk = new ArrayList();
@@ -340,53 +346,58 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
try {
List data = new ArrayList();
if (onlySelected) {
- SelectableFetchingBuffer buffer = (SelectableFetchingBuffer) searchSession.getBuffer();
+ SelectableFetchingBuffer buffer = (SelectableFetchingBuffer) searchSession
+ .getBuffer();
data = buffer.getSelected();
- int end = Math.min(start+limit, data.size());
+ int end = Math.min(start + limit, data.size());
start = Math.min(start, end);
- logger.info("chunk selected data bounds [start: "+start+" end: " + end+"]");
+ logger.info("chunk selected data bounds [start: " + start + " end: " + end + "]");
data = data.subList(start, end);
} else if (activeFiltersObject == null || !activeFiltersObject.isActiveFilters()) {
- if(limit>0){
- data = searchSession.getBuffer().getList(start,limit);
+ if (limit > 0) {
+ data = searchSession.getBuffer().getList(start, limit);
}
} else {
- FilterableFetchingBuffer buffer = (FilterableFetchingBuffer) searchSession.getBuffer();
+ FilterableFetchingBuffer buffer = (FilterableFetchingBuffer) searchSession
+ .getBuffer();
data = buffer.getFilteredList(activeFiltersObject);
- int end = Math.min(start+limit, data.size());
+ int end = Math.min(start + limit, data.size());
start = Math.min(start, end);
- logger.info("chunk filtered data bounds [start: "+start+" end: " + end+"]");
+ logger.info("chunk filtered data bounds [start: " + start + " end: " + end + "]");
data = data.subList(start, end);
}
- logger.info("Fetching data from search session buffer, size: "+data.size());
+ logger.info("Fetching data from search session buffer, size: " + data.size());
for (ResultRow resultRow : data) {
- //return common names?
- if(activeFiltersObject == null || !activeFiltersObject.isLoadCommonName() || !resultRow.existsCommonName()){
+ // return common names?
+ if (activeFiltersObject == null || !activeFiltersObject.isLoadCommonName()
+ || !resultRow.existsCommonName()) {
resultRow.setCommonNames(null);
}
- //return properties?
- if(activeFiltersObject == null || !resultRow.existsProperties() || !activeFiltersObject.isLoadAllProperties()){
+ // return properties?
+ if (activeFiltersObject == null || !resultRow.existsProperties()
+ || !activeFiltersObject.isLoadAllProperties()) {
resultRow.setProperties(null);
}
chunk.add(resultRow);
- logger.debug("getSearchResultRows returning on client result item with id: " +resultRow.getId() + " service id: "+resultRow.getServiceId());
+ logger.debug("getSearchResultRows returning on client result item with id: " + resultRow.getId()
+ + " service id: " + resultRow.getServiceId());
}
Long endTime = System.currentTimeMillis() - startTime;
String time = String.format("%d msc %d sec", endTime, TimeUnit.MILLISECONDS.toSeconds(endTime));
- logger.info("returning "+chunk.size()+" elements in " + time);
+ logger.info("returning " + chunk.size() + " elements in " + time);
} catch (Exception e) {
- if(e instanceof SessionExpired)
+ if (e instanceof SessionExpired)
throw new SessionExpired("The session is expired");
logger.error("Error in getSearchResultRows ", e);
@@ -401,86 +412,107 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
*
* @param properties the properties
*/
- private void printProperties(List properties){
+ private void printProperties(List properties) {
for (ItemParameter itemParameter : properties) {
- System.out.println("Property "+itemParameter);
+ System.out.println("Property " + itemParameter);
}
}
- /* (non-Javadoc)
- * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getSearchTaxonomyRow(int, int, org.gcube.portlets.user.speciesdiscovery.shared.filter.ResultFilter, boolean)
+ /**
+ * Gets the search taxonomy row.
+ *
+ * @param start the start
+ * @param limit the limit
+ * @param activeFiltersObject the active filters object
+ * @param onlySelected the only selected
+ * @return the search taxonomy row
+ * @throws SearchServiceException the search service exception
+ */
+ /*
+ * (non-Javadoc)
+ *
+ * @see
+ * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+ * getSearchTaxonomyRow(int, int,
+ * org.gcube.portlets.user.speciesdiscovery.shared.filter.ResultFilter, boolean)
*/
@SuppressWarnings("unchecked")
@Override
- public SearchResult getSearchTaxonomyRow(int start, int limit, ResultFilter activeFiltersObject, boolean onlySelected) throws SearchServiceException {
+ public SearchResult getSearchTaxonomyRow(int start, int limit, ResultFilter activeFiltersObject,
+ boolean onlySelected) throws SearchServiceException {
- logger.info("getSearchResultRows start: "+start+" limit: "+limit+" onlySelected: "+onlySelected);
- Long startTime = System.currentTimeMillis();
+ logger.info("getSearchResultRows start: " + start + " limit: " + limit + " onlySelected: " + onlySelected);
+ Long startTime = System.currentTimeMillis();
FetchingSession searchSession = (FetchingSession) getSearchSession();
ArrayList chunk = new ArrayList();
try {
- logger.info("current buffer size "+searchSession.getBuffer().size());
+ logger.info("current buffer size " + searchSession.getBuffer().size());
List data = new ArrayList();
if (onlySelected) {
logger.info("getting only selected data");
- SelectableFetchingBuffer buffer = (SelectableFetchingBuffer) searchSession.getBuffer();
+ SelectableFetchingBuffer buffer = (SelectableFetchingBuffer) searchSession
+ .getBuffer();
data = buffer.getSelected();
- int end = Math.min(start+limit, data.size());
+ int end = Math.min(start + limit, data.size());
start = Math.min(start, end);
- logger.info("chunk selected data bounds [start: "+start+" end: " + end+"]");
+ logger.info("chunk selected data bounds [start: " + start + " end: " + end + "]");
data = data.subList(start, end);
} else if (activeFiltersObject == null || !activeFiltersObject.isActiveFilters()) {
logger.info("getting all available data");
- if(limit>0){
+ if (limit > 0) {
Map filterAndMap = new HashMap();
filterAndMap.put(TaxonomyRow.IS_PARENT, "false");
- data = searchSession.getBuffer().getList(filterAndMap, start,limit);
+ data = searchSession.getBuffer().getList(filterAndMap, start, limit);
}
} else {
logger.info("getting filtered data");
- FilterableFetchingBuffer buffer = (FilterableFetchingBuffer) searchSession.getBuffer();
+ FilterableFetchingBuffer buffer = (FilterableFetchingBuffer) searchSession
+ .getBuffer();
data = buffer.getFilteredList(activeFiltersObject);
- int end = Math.min(start+limit, data.size());
+ int end = Math.min(start + limit, data.size());
start = Math.min(start, end);
- logger.info("chunk filtered data bounds [start: "+start+" end: " + end+"]");
+ logger.info("chunk filtered data bounds [start: " + start + " end: " + end + "]");
data = data.subList(start, end);
}
- logger.info("Fetching data from search session buffer, size: "+data.size());
+ logger.info("Fetching data from search session buffer, size: " + data.size());
for (TaxonomyRow taxonomyRow : data) {
- //ADD common names
+ // ADD common names
- //return common names?
- if(activeFiltersObject == null || !activeFiltersObject.isLoadCommonName() || !taxonomyRow.existsCommonName()){
+ // return common names?
+ if (activeFiltersObject == null || !activeFiltersObject.isLoadCommonName()
+ || !taxonomyRow.existsCommonName()) {
taxonomyRow.setCommonNames(null);
}
- //return properties?
- if(activeFiltersObject == null || !taxonomyRow.existsProperties() || !activeFiltersObject.isLoadAllProperties()){
+ // return properties?
+ if (activeFiltersObject == null || !taxonomyRow.existsProperties()
+ || !activeFiltersObject.isLoadAllProperties()) {
taxonomyRow.setProperties(null);
}
- logger.info("getSearchTaxonomyRow return on client taxonomy item with id: " +taxonomyRow.getId() + " service id: "+taxonomyRow.getServiceId());
+ logger.info("getSearchTaxonomyRow return on client taxonomy item with id: " + taxonomyRow.getId()
+ + " service id: " + taxonomyRow.getServiceId());
chunk.add(taxonomyRow);
}
Long endTime = System.currentTimeMillis() - startTime;
String time = String.format("%d msc %d sec", endTime, TimeUnit.MILLISECONDS.toSeconds(endTime));
- logger.info("returning "+chunk.size()+" elements in " + time);
+ logger.info("returning " + chunk.size() + " elements in " + time);
} catch (Exception e) {
logger.error("Error in getSearchTaxonomyRow " + e.getMessage(), e);
@@ -500,19 +532,19 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
public TaxonomyRow loadTaxonomyParentByParentId(String parentID) throws Exception {
TaxonomyRow taxonomyRow = null;
- logger.info("loadTaxonomyParentByParentId: "+ parentID);
+ logger.info("loadTaxonomyParentByParentId: " + parentID);
try {
TaxonomyRowPersistence dao = DaoSession.getTaxonomyDAO(getASLSession());
CriteriaBuilder queryBuilder = dao.getCriteriaBuilder();
CriteriaQuery