diff --git a/.classpath b/.classpath index e7f33f3..404bed0 100644 --- a/.classpath +++ b/.classpath @@ -1,13 +1,13 @@ - + - + @@ -41,5 +41,5 @@ - + diff --git a/.settings/com.gwtplugins.gdt.eclipse.core.prefs b/.settings/com.gwtplugins.gdt.eclipse.core.prefs index 680c947..f0559bd 100644 --- a/.settings/com.gwtplugins.gdt.eclipse.core.prefs +++ b/.settings/com.gwtplugins.gdt.eclipse.core.prefs @@ -1,4 +1,4 @@ eclipse.preferences.version=1 -lastWarOutDir=/home/francesco/git/species-discovery/target/species-discovery-3.10.0 +lastWarOutDir=/home/francescomangiacrapa/git/species-discovery/target/species-discovery-3.11.0-SNAPSHOT warSrcDir=src/main/webapp warSrcDirIsOutput=false diff --git a/.settings/org.eclipse.wst.common.component b/.settings/org.eclipse.wst.common.component index e80dfae..4fa362a 100644 --- a/.settings/org.eclipse.wst.common.component +++ b/.settings/org.eclipse.wst.common.component @@ -1,10 +1,19 @@ - + + + - - + + + + + - + + + - + + + diff --git a/changelog.md b/CHANGELOG.md similarity index 63% rename from changelog.md rename to CHANGELOG.md index e9ec1d7..d5cbe14 100644 --- a/changelog.md +++ b/CHANGELOG.md @@ -3,37 +3,40 @@ All notable changes to this project will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -## [v3-10-0] [r4-23-0] - 2020-05-22 +## [v3.11.0-SNAPSHOT] - 27-10-2021 -[#19221] Ported to git/jenkins +- [#21969] Removed HL dependency +- Ported to maven-portal-bom v3.6.3 +- Ported to workspace-explorer v2.X.Y +- Ported to geonetwork [3.4.5,4.0.0-SNAPSHOT) +- Ported to storagehub-client-wrapper [1.0.0, 2.0.0-SNAPSHOT) +- Ported to spd-client-library [4.1.0-SNAPSHOT, 5.0.0-SNAPSHOT) -**Fixes** +## [v3.10.0] - 22-05-2020 -[#19312] the WorkspaceExplorer window appears behind the Job Window (z-index issue) +- [#19221] Migrated to git/jenkins +#### Fixes +- [#19312] the WorkspaceExplorer window appears behind the Job Window (z-index issue) - -## [3-9-1] - [20-03-2017] +## [v3.9.1] - 20-03-2017 [Bug #7568] Managed ASL session expiration during polling on SPD jobs - -## [3-9-0] - [11-01-2016] +## [v3.9.0] - 11-01-2016 [Feature #6313] SPD portlet upgrade: porting to spd-client-library 4.0.0 [Task #7001] Create Gis Layer via job - -## [3-8-1] - [15-09-2016] +## [v3.8.1] - 15-09-2016 Removed Gis-viewer dependency - -## [3-8-0] - [15-05-2015] +## [v3.8.0] - 15-05-2015 Upgraded to gwt 2.6.1 @@ -42,86 +45,73 @@ Integrated with Workspace Explorer Updated to Liferay 6.2 - -## [3-7-2] - [15-05-2015] +## [v3.7.2] - 15-05-2015 Changed date format to dd-MM-yyyy - -## [3-7-1] - [09-12-2014] +## [v3.7.1] - 09-12-2014 Removed openlayers dependency from pom - -## [3-7-0] - [04-06-2014] +## [v3.7.0] - 04-06-2014 Updated pom to support new portal configuration (gcube release 3.2) - -## [3-6-0] - [23-04-2014] +## [v3.6.0] - 23-04-2014 Portlet updated to support service updating - -## [3-5-0] - [24-10-2013] +## [v3.5.0] - 24-10-2013 Portlet updated to support GWT 2.5.1 Ticket 2224 was implemented - -## [3-4-0] - [06-09-2013] +## [v3.4.0] - 06-09-2013 Enhancements on the GUI of the SPD portlet were realized (view last query, new expand button are available) The functionalities were updated in order to fit the changes in the service client. New advanced options are now available: "Expand with synonyms", "Unfold the taxa group by".
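For context on the [#21969] entry above: dropping the Home Library (HL) means every Workspace interaction now goes through storagehub-client-wrapper, which is why the import block of TaxonomySearchServiceImpl below swaps org.gcube.common.homelibrary.home.workspace.Workspace for org.gcube.common.storagehubwrapper.server.tohl.Workspace. A minimal sketch of the swap follows; the StorageHubWrapper bootstrap and its constructor arguments are assumptions for illustration (the patch itself does not show them), so check the wrapper's actual API before relying on it.

```java
// Before [#21969] (home-library, now removed):
// import org.gcube.common.homelibrary.home.HomeLibrary;
// import org.gcube.common.homelibrary.home.workspace.Workspace;
// Workspace ws = HomeLibrary.getUserWorkspace(username);

// After (storagehub-client-wrapper [1.0.0, 2.0.0-SNAPSHOT)):
import org.gcube.common.storagehubwrapper.server.StorageHubWrapper; // assumed entry point
import org.gcube.common.storagehubwrapper.server.tohl.Workspace;    // import actually used in this patch

public class WorkspaceAccessSketch {

	// Placeholder bootstrap: scope and user token come from the portal session;
	// the constructor signature here is illustrative, not authoritative.
	public Workspace workspaceFor(String scope, String userToken) throws Exception {
		StorageHubWrapper wrapper = new StorageHubWrapper(scope, userToken);
		return wrapper.getWorkspace();
	}
}
```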
- -## [3-3-0] - [09-07-2013] +## [v3.3.0] - 09-07-2013 Bug Fixed: on species classification - -## [3-2-0] - [29-05-2013] +## [v3.2.0] - 29-05-2013 Bug Fixed: #612 (SPD: Error when saving csv file in workspace) - -## [3-1-0] - [14-04-2013] +## [v3.1.0] - 14-04-2013 Synch with SPD service changes - -## [3-0-0] - [05-03-2013] +## [v3.0.0] - 05-03-2013 Ticket #1260: This component has been mavenized - -## [2-3-0] - [17-01-2013] +## [v2.3.0] - 17-01-2013 Ticket #986: Species Occurrence jobs / Re-submit job and "info" Ticket #1002: SPD portlet persistence refactoring - -## [2-2-0] - [30-11-2012] +## [v2.2.0] - 30-11-2012 Ticket #508: Implementation of requirements resulting from ticket #508 - -## [2-0-0] - [13-09-2012] +## [v2.0.0] - 13-09-2012 Ticket #81: Implementation of requirements resulting from user feedback @@ -130,15 +120,11 @@ Dynamic clustering of result set items Revised the occurrence points visualization strategy through the GIS Viewer - -## [1-0-1] - [23-07-2012] +## [v1.0.1] - 23-07-2012 Fixed 447: Map generation fails in Species Discovery Portlet - -## [1-0-0] - [04-05-2012] +## [v1.0.0] - 04-05-2012 First release - - diff --git a/SpeciesDiscoveryTest-dev.launch b/SpeciesDiscoveryTest-dev.launch deleted file mode 100644 index 546b1d2..0000000 --- a/SpeciesDiscoveryTest-dev.launch +++ /dev/null @@ -1,22 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - diff --git a/SpeciesDiscoveryTest-prod.launch b/SpeciesDiscoveryTest-prod.launch deleted file mode 100644 index f4cb15a..0000000 --- a/SpeciesDiscoveryTest-prod.launch +++ /dev/null @@ -1,22 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - diff --git a/pom.xml b/pom.xml index 1996104..9e0ec32 100644 --- a/pom.xml +++ b/pom.xml @@ -14,14 +14,14 @@ eclipselink - http://maven.research-infrastructures.eu:8081/nexus/content/repositories/eclipselink/ + https://maven.research-infrastructures.eu/nexus/content/repositories/eclipselink/ org.gcube.portlets.user species-discovery war - 3.10.0 + 3.11.0-SNAPSHOT gCube Species Discovery gCube Species Discovery Portlet lets the users discovery species information from the Species Service @@ -51,7 +51,7 @@ org.gcube.distribution maven-portal-bom - 3.6.0 + 3.6.3 pom import @@ -94,7 +94,7 @@ org.gcube.data.spd spd-client-library - [4.0.0, 5.0.0-SNAPSHOT) + [4.1.0-SNAPSHOT, 5.0.0-SNAPSHOT) compile @@ -170,7 +170,7 @@ org.gcube.portlets.widgets workspace-explorer - [1.0.0, 2.0.0-SNAPSHOT) + [2.0.0, 3.0.0-SNAPSHOT) compile @@ -190,7 +190,7 @@ org.gcube.spatial.data geonetwork - [3.0.0,4.0.0-SNAPSHOT) + [3.4.5,4.0.0-SNAPSHOT) @@ -245,6 +245,7 @@ aslcore provided + org.gcube.portal social-networking-library @@ -252,15 +253,29 @@ + + + + + + + + + + + + - org.gcube.common - home-library-jcr + org.gcube.common.portal + portal-manager provided + org.gcube.common - home-library - provided + storagehub-client-wrapper + [1.0.0, 2.0.0-SNAPSHOT) + compile @@ -280,22 +295,18 @@ provided - - - - log4j - log4j - org.slf4j slf4j-log4j12 - runtime + + provided org.slf4j slf4j-api - runtime + + provided diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/TaxonomySearchServiceImpl.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/TaxonomySearchServiceImpl.java index d6a0028..5acb897 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/TaxonomySearchServiceImpl.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/TaxonomySearchServiceImpl.java @@ -23,9 +23,7 @@ import javax.persistence.criteria.Predicate; import 
org.apache.log4j.Logger; import org.gcube.application.framework.core.session.ASLSession; -import org.gcube.common.homelibrary.home.workspace.Workspace; -import org.gcube.common.homelibrary.home.workspace.WorkspaceFolder; -import org.gcube.common.homelibrary.util.WorkspaceUtil; +import org.gcube.common.storagehubwrapper.server.tohl.Workspace; import org.gcube.data.spd.model.products.OccurrencePoint; import org.gcube.data.spd.model.products.ResultElement; import org.gcube.data.spd.model.products.TaxonomyItem; @@ -110,6 +108,7 @@ import net.sf.csv4j.CSVWriter; /** * The server side implementation of the RPC service. + * * @author "Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it" * @author "Federico De Faveri defaveri@isti.cnr.it" - */ @@ -134,8 +133,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T * * @return the ASL session */ - protected ASLSession getASLSession() - { + protected ASLSession getASLSession() { return SessionUtil.getAslSession(this.getThreadLocalRequest().getSession()); } @@ -145,15 +143,14 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T * @return the species service * @throws SearchServiceException the search service exception */ - protected SpeciesService getSpeciesService() throws SearchServiceException - { + protected SpeciesService getSpeciesService() throws SearchServiceException { try { ASLSession session = getASLSession(); return SessionUtil.getService(session); } catch (Exception e) { e.printStackTrace(); logger.error("An error occurred when contacting the species service", e); - // System.out.println("An error occurred retrieving the service" +e); + // System.out.println("An error occurred retrieving the service" +e); throw new SearchServiceException("contacting the species service."); } } @@ -164,35 +161,31 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T * @return the search session * @throws SearchServiceException the search service exception */ - protected FetchingSession getSearchSession() throws SearchServiceException - { + protected FetchingSession getSearchSession() throws SearchServiceException { ASLSession session = getASLSession(); FetchingSession searchSession = SessionUtil.getCurrentSearchSession(session); if (searchSession == null) { - logger.error("No search session found for user "+session.getUsername()); - throw new SearchServiceException("No search session found for user "+session.getUsername()); + logger.error("No search session found for user " + session.getUsername()); + throw new SearchServiceException("No search session found for user " + session.getUsername()); } return searchSession; } - - /** * Gets the occurrence session. 
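 * <p>
 * Mirrors {@link #getSearchSession()}: the occurrence stream registered through
 * {@code FetchingSessionUtil.createOccurrenceFetchingSession(...)} is kept in the
 * user's ASL session, and this accessor fails fast when no occurrence fetch is in
 * progress.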
* * @return the occurrence session * @throws SearchServiceException the search service exception */ - protected FetchingSession getOccurrenceSession() throws SearchServiceException - { + protected FetchingSession getOccurrenceSession() throws SearchServiceException { ASLSession session = getASLSession(); FetchingSession occurrenceSession = SessionUtil.getCurrentOccurrenceSession(session); if (occurrenceSession == null) { - logger.error("No occurrence session found for user "+session.getUsername()); - throw new SearchServiceException("No occurrence session found for user "+session.getUsername()); + logger.error("No occurrence session found for user " + session.getUsername()); + throw new SearchServiceException("No occurrence session found for user " + session.getUsername()); } return occurrenceSession; @@ -203,7 +196,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T */ @Override public void searchByScientificName(String searchTerm, SearchFilters searchFilters) throws SearchServiceException { - logger.info("searchByScientificName searchTerm: "+searchTerm+" searchFilters: "+searchFilters); + logger.info("searchByScientificName searchTerm: " + searchTerm + " searchFilters: " + searchFilters); stopSearch(); search(searchTerm, SearchType.BY_SCIENTIFIC_NAME, searchFilters); @@ -214,7 +207,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T */ @Override public void searchByCommonName(String searchTerm, SearchFilters searchFilters) throws SearchServiceException { - logger.info("searchByCommonName searchTerm: "+searchTerm+" searchFilters: "+searchFilters); + logger.info("searchByCommonName searchTerm: " + searchTerm + " searchFilters: " + searchFilters); stopSearch(); search(searchTerm, SearchType.BY_COMMON_NAME, searchFilters); @@ -223,20 +216,21 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T /** * Search. 
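 * <p>
 * Flow, as implemented below: reset the local DAO tables, open a
 * {@code CloseableIterator} on the {@code SpeciesService}, wrap it through
 * {@code IteratorChainBuilder.buildChain(...)}, and register the result as the
 * current fetching session; the client then polls progress via
 * {@code getSearchStatus(...)} and pages rows via {@code getSearchResultRows(...)}.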
* - * @param searchTerm the search term - * @param searchType the search type + * @param searchTerm the search term + * @param searchType the search type * @param searchFilters the search filters * @return the search result type * @throws SearchServiceException the search service exception */ - protected SearchResultType search(String searchTerm, SearchType searchType, SearchFilters searchFilters) throws SearchServiceException - { + protected SearchResultType search(String searchTerm, SearchType searchType, SearchFilters searchFilters) + throws SearchServiceException { ASLSession aslSession = getASLSession(); try { - deleteAllRowIntoDaoTable(); //RESET TABLE + deleteAllRowIntoDaoTable(); // RESET TABLE SpeciesService taxonomyService = getSpeciesService(); - CloseableIterator input = taxonomyService.searchByFilters(searchTerm, searchType, searchFilters); + CloseableIterator input = taxonomyService.searchByFilters(searchTerm, searchType, + searchFilters); SessionUtil.setCurrentQuery(aslSession, taxonomyService.getLastQuery()); // System.out.println("returned input stream by service..."); @@ -247,25 +241,36 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T FetchingSessionUtil.createFetchingSession(output, resultType, aslSession); return resultType; } catch (Exception e) { - logger.error("Error starting search "+searchType+" for term \""+searchTerm+"\" with filters "+searchFilters, e); + logger.error("Error starting search " + searchType + " for term \"" + searchTerm + "\" with filters " + + searchFilters, e); SessionUtil.setCurrentQuery(aslSession, "invalid query"); throw new SearchServiceException(e.getMessage()); } } - - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#searchByQuery(java.lang.String) + /** + * Search by query. + * + * @param query the query + * @return the search by query parameter + * @throws SearchServiceException the search service exception + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * searchByQuery(java.lang.String) */ @Override public SearchByQueryParameter searchByQuery(String query) throws SearchServiceException { - logger.info("searchByQuery - query: "+query); + logger.info("searchByQuery - query: " + query); stopSearch(); ASLSession aslSession = getASLSession(); try { - deleteAllRowIntoDaoTable(); //RESET TABLE + deleteAllRowIntoDaoTable(); // RESET TABLE SpeciesService taxonomyService = getSpeciesService(); CloseableIterator input = taxonomyService.searchByQuery(query); @@ -273,7 +278,8 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T logger.info("service return iterator searched..."); SearchByQueryParameter queryParameters = QueryUtil.getQueryResultType(query); - CloseableIterator output = IteratorChainBuilder.buildChain(input, queryParameters.getSearchResultType(), aslSession); + CloseableIterator output = IteratorChainBuilder.buildChain(input, + queryParameters.getSearchResultType(), aslSession); FetchingSessionUtil.createFetchingSession(output, queryParameters.getSearchResultType(), aslSession); logger.info("creating fetching session completed!"); return queryParameters; @@ -285,13 +291,12 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T } } - /** * Delete all row into dao table. 
* * @throws Exception the exception */ - private void deleteAllRowIntoDaoTable() throws Exception{ + private void deleteAllRowIntoDaoTable() throws Exception { logger.info("deleting all row into dao's"); ResultRowPersistence daoResultRow = null; TaxonRowPersistence daoTaxon = null; @@ -304,18 +309,18 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T daoTaxonomyRow = DaoSession.getTaxonomyDAO(session); } catch (Exception e) { - logger.error("Error in delete all row - getDao's " +e.getMessage(), e); + logger.error("Error in delete all row - getDao's " + e.getMessage(), e); throw new Exception("Error in delete all row- getDao's " + e.getMessage(), e); } try { - if(daoResultRow!=null) + if (daoResultRow != null) daoResultRow.removeAll(); - if(daoTaxon!=null) + if (daoTaxon != null) daoTaxon.removeAll(); - if(daoTaxonomyRow!=null) + if (daoTaxonomyRow != null) daoTaxonomyRow.removeAll(); logger.info("delete all row into Dao's - completed"); @@ -330,9 +335,10 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T */ @SuppressWarnings("unchecked") @Override - public SearchResult getSearchResultRows(int start, int limit, ResultFilter activeFiltersObject, boolean onlySelected) throws SessionExpired, SearchServiceException { - logger.info("getSearchResultRows start: "+start+" limit: "+limit+" onlySelected: "+onlySelected); - Long startTime = System.currentTimeMillis(); + public SearchResult getSearchResultRows(int start, int limit, ResultFilter activeFiltersObject, + boolean onlySelected) throws SessionExpired, SearchServiceException { + logger.info("getSearchResultRows start: " + start + " limit: " + limit + " onlySelected: " + onlySelected); + Long startTime = System.currentTimeMillis(); FetchingSession searchSession = (FetchingSession) getSearchSession(); ArrayList chunk = new ArrayList(); @@ -340,53 +346,58 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T try { List data = new ArrayList(); if (onlySelected) { - SelectableFetchingBuffer buffer = (SelectableFetchingBuffer) searchSession.getBuffer(); + SelectableFetchingBuffer buffer = (SelectableFetchingBuffer) searchSession + .getBuffer(); data = buffer.getSelected(); - int end = Math.min(start+limit, data.size()); + int end = Math.min(start + limit, data.size()); start = Math.min(start, end); - logger.info("chunk selected data bounds [start: "+start+" end: " + end+"]"); + logger.info("chunk selected data bounds [start: " + start + " end: " + end + "]"); data = data.subList(start, end); } else if (activeFiltersObject == null || !activeFiltersObject.isActiveFilters()) { - if(limit>0){ - data = searchSession.getBuffer().getList(start,limit); + if (limit > 0) { + data = searchSession.getBuffer().getList(start, limit); } } else { - FilterableFetchingBuffer buffer = (FilterableFetchingBuffer) searchSession.getBuffer(); + FilterableFetchingBuffer buffer = (FilterableFetchingBuffer) searchSession + .getBuffer(); data = buffer.getFilteredList(activeFiltersObject); - int end = Math.min(start+limit, data.size()); + int end = Math.min(start + limit, data.size()); start = Math.min(start, end); - logger.info("chunk filtered data bounds [start: "+start+" end: " + end+"]"); + logger.info("chunk filtered data bounds [start: " + start + " end: " + end + "]"); data = data.subList(start, end); } - logger.info("Fetching data from search session buffer, size: "+data.size()); + logger.info("Fetching data from search session buffer, size: " + data.size()); for (ResultRow 
resultRow : data) { - //return common names? - if(activeFiltersObject == null || !activeFiltersObject.isLoadCommonName() || !resultRow.existsCommonName()){ + // return common names? + if (activeFiltersObject == null || !activeFiltersObject.isLoadCommonName() + || !resultRow.existsCommonName()) { resultRow.setCommonNames(null); } - //return properties? - if(activeFiltersObject == null || !resultRow.existsProperties() || !activeFiltersObject.isLoadAllProperties()){ + // return properties? + if (activeFiltersObject == null || !resultRow.existsProperties() + || !activeFiltersObject.isLoadAllProperties()) { resultRow.setProperties(null); } chunk.add(resultRow); - logger.debug("getSearchResultRows returning on client result item with id: " +resultRow.getId() + " service id: "+resultRow.getServiceId()); + logger.debug("getSearchResultRows returning on client result item with id: " + resultRow.getId() + + " service id: " + resultRow.getServiceId()); } Long endTime = System.currentTimeMillis() - startTime; String time = String.format("%d msc %d sec", endTime, TimeUnit.MILLISECONDS.toSeconds(endTime)); - logger.info("returning "+chunk.size()+" elements in " + time); + logger.info("returning " + chunk.size() + " elements in " + time); } catch (Exception e) { - if(e instanceof SessionExpired) + if (e instanceof SessionExpired) throw new SessionExpired("The session is expired"); logger.error("Error in getSearchResultRows ", e); @@ -401,86 +412,107 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T * * @param properties the properties */ - private void printProperties(List properties){ + private void printProperties(List properties) { for (ItemParameter itemParameter : properties) { - System.out.println("Property "+itemParameter); + System.out.println("Property " + itemParameter); } } - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getSearchTaxonomyRow(int, int, org.gcube.portlets.user.speciesdiscovery.shared.filter.ResultFilter, boolean) + /** + * Gets the search taxonomy row. 
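+ * <p>
+ * Paging bounds are clamped before slicing: with {@code start = 90},
+ * {@code limit = 20} and 95 buffered rows, {@code end = min(90 + 20, 95) = 95}
+ * and rows [90, 95) are returned; an out-of-range {@code start} yields an
+ * empty chunk rather than an exception.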
+ * + * @param start the start + * @param limit the limit + * @param activeFiltersObject the active filters object + * @param onlySelected the only selected + * @return the search taxonomy row + * @throws SearchServiceException the search service exception + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * getSearchTaxonomyRow(int, int, + * org.gcube.portlets.user.speciesdiscovery.shared.filter.ResultFilter, boolean) */ @SuppressWarnings("unchecked") @Override - public SearchResult getSearchTaxonomyRow(int start, int limit, ResultFilter activeFiltersObject, boolean onlySelected) throws SearchServiceException { + public SearchResult getSearchTaxonomyRow(int start, int limit, ResultFilter activeFiltersObject, + boolean onlySelected) throws SearchServiceException { - logger.info("getSearchResultRows start: "+start+" limit: "+limit+" onlySelected: "+onlySelected); - Long startTime = System.currentTimeMillis(); + logger.info("getSearchResultRows start: " + start + " limit: " + limit + " onlySelected: " + onlySelected); + Long startTime = System.currentTimeMillis(); FetchingSession searchSession = (FetchingSession) getSearchSession(); ArrayList chunk = new ArrayList(); try { - logger.info("current buffer size "+searchSession.getBuffer().size()); + logger.info("current buffer size " + searchSession.getBuffer().size()); List data = new ArrayList(); if (onlySelected) { logger.info("getting only selected data"); - SelectableFetchingBuffer buffer = (SelectableFetchingBuffer) searchSession.getBuffer(); + SelectableFetchingBuffer buffer = (SelectableFetchingBuffer) searchSession + .getBuffer(); data = buffer.getSelected(); - int end = Math.min(start+limit, data.size()); + int end = Math.min(start + limit, data.size()); start = Math.min(start, end); - logger.info("chunk selected data bounds [start: "+start+" end: " + end+"]"); + logger.info("chunk selected data bounds [start: " + start + " end: " + end + "]"); data = data.subList(start, end); } else if (activeFiltersObject == null || !activeFiltersObject.isActiveFilters()) { logger.info("getting all available data"); - if(limit>0){ + if (limit > 0) { Map filterAndMap = new HashMap(); filterAndMap.put(TaxonomyRow.IS_PARENT, "false"); - data = searchSession.getBuffer().getList(filterAndMap, start,limit); + data = searchSession.getBuffer().getList(filterAndMap, start, limit); } } else { logger.info("getting filtered data"); - FilterableFetchingBuffer buffer = (FilterableFetchingBuffer) searchSession.getBuffer(); + FilterableFetchingBuffer buffer = (FilterableFetchingBuffer) searchSession + .getBuffer(); data = buffer.getFilteredList(activeFiltersObject); - int end = Math.min(start+limit, data.size()); + int end = Math.min(start + limit, data.size()); start = Math.min(start, end); - logger.info("chunk filtered data bounds [start: "+start+" end: " + end+"]"); + logger.info("chunk filtered data bounds [start: " + start + " end: " + end + "]"); data = data.subList(start, end); } - logger.info("Fetching data from search session buffer, size: "+data.size()); + logger.info("Fetching data from search session buffer, size: " + data.size()); for (TaxonomyRow taxonomyRow : data) { - //ADD common names + // ADD common names - //return common names? - if(activeFiltersObject == null || !activeFiltersObject.isLoadCommonName() || !taxonomyRow.existsCommonName()){ + // return common names? 
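+ // Common names and per-item properties are heavyweight, so they are nulled
+ // out of the payload unless the active filter explicitly requests them.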
+ if (activeFiltersObject == null || !activeFiltersObject.isLoadCommonName() + || !taxonomyRow.existsCommonName()) { taxonomyRow.setCommonNames(null); } - //return properties? - if(activeFiltersObject == null || !taxonomyRow.existsProperties() || !activeFiltersObject.isLoadAllProperties()){ + // return properties? + if (activeFiltersObject == null || !taxonomyRow.existsProperties() + || !activeFiltersObject.isLoadAllProperties()) { taxonomyRow.setProperties(null); } - logger.info("getSearchTaxonomyRow return on client taxonomy item with id: " +taxonomyRow.getId() + " service id: "+taxonomyRow.getServiceId()); + logger.info("getSearchTaxonomyRow return on client taxonomy item with id: " + taxonomyRow.getId() + + " service id: " + taxonomyRow.getServiceId()); chunk.add(taxonomyRow); } Long endTime = System.currentTimeMillis() - startTime; String time = String.format("%d msc %d sec", endTime, TimeUnit.MILLISECONDS.toSeconds(endTime)); - logger.info("returning "+chunk.size()+" elements in " + time); + logger.info("returning " + chunk.size() + " elements in " + time); } catch (Exception e) { logger.error("Error in getSearchTaxonomyRow " + e.getMessage(), e); @@ -500,19 +532,19 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T public TaxonomyRow loadTaxonomyParentByParentId(String parentID) throws Exception { TaxonomyRow taxonomyRow = null; - logger.info("loadTaxonomyParentByParentId: "+ parentID); + logger.info("loadTaxonomyParentByParentId: " + parentID); try { TaxonomyRowPersistence dao = DaoSession.getTaxonomyDAO(getASLSession()); CriteriaBuilder queryBuilder = dao.getCriteriaBuilder(); CriteriaQuery cq = queryBuilder.createQuery(); - Predicate pr1 = queryBuilder.equal(dao.rootFrom(cq).get(TaxonomyRow.PARENT_ID), parentID); + Predicate pr1 = queryBuilder.equal(dao.rootFrom(cq).get(TaxonomyRow.PARENT_ID), parentID); cq.where(pr1); Iterator iterator = dao.executeCriteriaQuery(cq).iterator(); - if(iterator!=null && iterator.hasNext()){ + if (iterator != null && iterator.hasNext()) { taxonomyRow = iterator.next(); } @@ -542,48 +574,61 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T */ @Override public HashMap getFilterCounterById(GridField field) throws Exception { - logger.info("Filter Counter for: "+ field); + logger.info("Filter Counter for: " + field); try { FetchingSession searchSession = getSearchSession(); - FieldAggregator aggregator = (FieldAggregator) searchSession.getAggregator(FieldAggregator.getFieldAggregatorName(field)); - if (aggregator!=null) return aggregator.getAggregation(); - else return new HashMap(); - } catch(Exception e) - { - logger.error("Error in getFilterCounterById "+ field.getId()+" "+field.getName(), e); + FieldAggregator aggregator = (FieldAggregator) searchSession + .getAggregator(FieldAggregator.getFieldAggregatorName(field)); + if (aggregator != null) + return aggregator.getAggregation(); + else + return new HashMap(); + } catch (Exception e) { + logger.error("Error in getFilterCounterById " + field.getId() + " " + field.getName(), e); throw new SearchServiceException(e.getMessage()); } } - - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getFilterCounterForClassification(java.lang.String) + /** + * Gets the filter counter for classification. 
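+ * <p>
+ * The returned map pairs each taxon name found at the given rank with the
+ * number of buffered results under it; a label that does not match a
+ * {@code MainTaxonomicRankEnum} value yields an empty map. For example, a
+ * hypothetical call {@code getFilterCounterForClassification("Family")}
+ * would count results per family.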
+ * + * @param rankLabel the rank label + * @return the filter counter for classification + * @throws Exception the exception + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * getFilterCounterForClassification(java.lang.String) */ @Override public HashMap getFilterCounterForClassification(String rankLabel) throws Exception { - logger.info("Counter for classification: "+ rankLabel); + logger.info("Counter for classification: " + rankLabel); try { MainTaxonomicRankEnum rank = MainTaxonomicRankEnum.valueOfLabel(rankLabel); - if (rank!=null) { + if (rank != null) { FetchingSession searchSession = getSearchSession(); - TaxonomyClassificationAggregator classificationAggregator = (TaxonomyClassificationAggregator) searchSession.getAggregator(TaxonomyClassificationAggregator.NAME); + TaxonomyClassificationAggregator classificationAggregator = (TaxonomyClassificationAggregator) searchSession + .getAggregator(TaxonomyClassificationAggregator.NAME); return classificationAggregator.getAggregation().get(rank); - } else return new HashMap(); - } catch(Exception e) - { - logger.error("Error in getFilterCounterForClassification "+ rankLabel, e); + } else + return new HashMap(); + } catch (Exception e) { + logger.error("Error in getFilterCounterForClassification " + rankLabel, e); throw new SearchServiceException(e.getMessage()); } } - /** * {@inheritDoc} */ @Override - public SearchStatus getSearchStatus(boolean onlySelected, boolean isActiveFilterOnResult) throws SearchServiceException { - logger.info("getSearchStatus onlySelected: "+onlySelected); + public SearchStatus getSearchStatus(boolean onlySelected, boolean isActiveFilterOnResult) + throws SearchServiceException { + logger.info("getSearchStatus onlySelected: " + onlySelected); FetchingSession searchSession = getSearchSession(); @@ -591,31 +636,38 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T int bufferSize = 0; try { - bufferSize = isActiveFilterOnResult?((FilterableFetchingBuffer) searchSession.getBuffer()).getFilteredListSize():searchSession.getBufferSize(); + bufferSize = isActiveFilterOnResult + ? ((FilterableFetchingBuffer) searchSession.getBuffer()) + .getFilteredListSize() + : searchSession.getBufferSize(); } catch (Exception e) { - logger.info("getSearchStatus bufferSize error : "+e.getMessage(), e); + logger.info("getSearchStatus bufferSize error : " + e.getMessage(), e); throw new SearchServiceException(e.getMessage()); } logger.info("getSearchStatus bufferSize " + bufferSize); - //if buffer size is >= the MAXIMUM ELEMENTS. Maximun is reached and the search is stopped - if(bufferSize>=MAX_BUFFERING_ELEMENTS){ + // if buffer size is >= the MAXIMUM ELEMENTS. 
Maximum is reached and the search + // is stopped + if (bufferSize >= MAX_BUFFERING_ELEMENTS) { logger.info("getSearchStatus MAX_BUFFERING_ELEMENTS is reached - stop search"); stopSearchWithoutRemove(); - //CALCULATE NEW BUFFER SIZE AFTER FETCHING IS CLOSED + // CALCULATE NEW BUFFER SIZE AFTER FETCHING IS CLOSED try { // int sleepingTime = 500; // logger.info("sleeping "+sleepingTime+" ms for translate last objets arrived into buffer"); // Thread.sleep(sleepingTime); //SLEEPING 0,5 sec, for translating objects that are inserting in buffer and calculate new size of buffer // logger.info("sleep termined - search status alive"); bufferSize = isActiveFilterOnResult + ? ((FilterableFetchingBuffer) searchSession.getBuffer()) + .getFilteredListSize() + : searchSession.getBufferSize(); } catch (SQLException e) { - logger.info("getSearchStatus bufferSize error : "+e.getMessage(), e); + logger.info("getSearchStatus bufferSize error : " + e.getMessage(), e); throw new SearchServiceException("An error occurred on server in searching status, please retry"); } catch (Exception e) { - logger.info("getSearchStatus bufferSize error : "+e.getMessage(), e); + logger.info("getSearchStatus bufferSize error : " + e.getMessage(), e); throw new SearchServiceException("An error occurred on server in searching status, please retry"); } @@ -628,33 +680,33 @@ if (!onlySelected && !isActiveFilterOnResult) { status.setResultEOF(searchSession.isComplete()); status.setSize(bufferSize); - } else if(isActiveFilterOnResult){ + } else if (isActiveFilterOnResult) { status.setResultEOF(true); try { - status.setSize(((FilterableFetchingBuffer) searchSession.getBuffer()).getFilteredListSize()); + status.setSize(((FilterableFetchingBuffer) searchSession.getBuffer()) + .getFilteredListSize()); } catch (Exception e) { - logger.error("isActiveFilterOnResult - An error occured in getSearchStatus " +e.getMessage(), e); + logger.error("isActiveFilterOnResult - An error occurred in getSearchStatus " + e.getMessage(), e); throw new SearchServiceException(e.getMessage()); - } - else{ + } else { status.setResultEOF(true); try { - status.setSize(((SelectableFetchingBuffer) searchSession.getBuffer()).getSelected().size()); + status.setSize(((SelectableFetchingBuffer) searchSession.getBuffer()) + .getSelected().size()); } catch (Exception e) { - logger.error("An error occured in getSearchStatus " +e.getMessage(), e); + logger.error("An error occurred in getSearchStatus " + e.getMessage(), e); throw new SearchServiceException(e.getMessage()); } } - logger.info("getSearchStatus return status size: "+status.getSize() +" EOF: " + status.isResultEOF()); + logger.info("getSearchStatus return status size: " + status.getSize() + " EOF: " + status.isResultEOF()); return status; } - /** * {@inheritDoc} */ @@ -662,7 +714,7 @@ logger.info("stopSearch without Remove"); ASLSession session = getASLSession(); - //we safely get the session if exists + // we safely get the session, if it exists FetchingSession searchSession = SessionUtil.getCurrentSearchSession(session); if (searchSession != null) { @@ -671,11 +723,10 @@ } catch (IOException e) { throw new
SearchServiceException(e.getMessage()); } - } else logger.warn("Search session not found"); + } else + logger.warn("Search session not found"); } - - /** * Stop search. * @@ -694,13 +745,21 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T throw new SearchServiceException(e.getMessage()); } SessionUtil.setCurrentSearchSession(session, null); - } else logger.warn("Search session not found"); + } else + logger.warn("Search session not found"); } - - - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#userStopSearch() + /** + * User stop search. + * + * @throws SearchServiceException the search service exception + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * userStopSearch() */ public void userStopSearch() throws SearchServiceException { ASLSession session = getASLSession(); @@ -712,57 +771,61 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T } catch (IOException e) { throw new SearchServiceException(e.getMessage()); } - } else logger.warn("Search session not found"); + } else + logger.warn("Search session not found"); } - /** * {@inheritDoc} */ @Override public void updateRowSelection(int rowId, boolean selection) throws SearchServiceException { - logger.info("updateRowSelection rowId: "+rowId+" selection: "+selection); + logger.info("updateRowSelection rowId: " + rowId + " selection: " + selection); try { ASLSession session = getASLSession(); FetchingSession searchSession = SessionUtil.getCurrentSearchSession(session); - SelectableFetchingBuffer buffer = (SelectableFetchingBuffer) searchSession.getBuffer(); + SelectableFetchingBuffer buffer = (SelectableFetchingBuffer) searchSession + .getBuffer(); buffer.updateSelection(rowId, selection); - } catch(Exception e){ - logger.error("Error in updateRowSelection rowId: "+rowId+" selection: "+selection, e); + } catch (Exception e) { + logger.error("Error in updateRowSelection rowId: " + rowId + " selection: " + selection, e); throw new SearchServiceException(e.getMessage()); } } - /** * {@inheritDoc} */ @Override - public Integer updateRowSelections(boolean selection, ResultFilter activeFiltersObject) throws SearchServiceException { - logger.info("updateRowSelections selection: "+selection); + public Integer updateRowSelections(boolean selection, ResultFilter activeFiltersObject) + throws SearchServiceException { + logger.info("updateRowSelections selection: " + selection); List data; FetchingSession searchSession = getSearchSession(); int size = 0; try { if (activeFiltersObject == null || !activeFiltersObject.isActiveFilters()) { - SelectableFetchingBuffer buffer = (SelectableFetchingBuffer) searchSession.getBuffer(); + SelectableFetchingBuffer buffer = (SelectableFetchingBuffer) searchSession + .getBuffer(); buffer.updateAllSelection(selection); size = buffer.size(); } else { @SuppressWarnings("unchecked") - FilterableFetchingBuffer buffer = (FilterableFetchingBuffer) searchSession.getBuffer(); + FilterableFetchingBuffer buffer = (FilterableFetchingBuffer) searchSession + .getBuffer(); data = buffer.getFilteredList(activeFiltersObject); - if(data!=null){ + if (data != null) { List ids = new ArrayList(); - for (FetchingElement fetchingElement : data){ - ids.add(fetchingElement.getId()+""); + for (FetchingElement fetchingElement : data) { + ids.add(fetchingElement.getId() + ""); } - SelectableFetchingBuffer bufferCompleted = (SelectableFetchingBuffer) 
searchSession.getBuffer(); + SelectableFetchingBuffer bufferCompleted = (SelectableFetchingBuffer) searchSession + .getBuffer(); bufferCompleted.updateAllSelectionByIds(selection, ids); size = data.size(); @@ -777,20 +840,21 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T return Integer.valueOf(size); } - /** * {@inheritDoc} + * * @throws SearchServiceException */ @Override - public int countOfSelectedRow() throws SearchServiceException{ + public int countOfSelectedRow() throws SearchServiceException { logger.info("countOfSelectedRow()"); FetchingSession searchSession = getSearchSession(); try { - SelectableFetchingBuffer buffer = (SelectableFetchingBuffer) searchSession.getBuffer(); + SelectableFetchingBuffer buffer = (SelectableFetchingBuffer) searchSession + .getBuffer(); return buffer.sizeSelected(); } catch (Exception e) { @@ -799,7 +863,6 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T } } - /** * {@inheritDoc} */ @@ -812,30 +875,32 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T FetchingSession searchSession = (FetchingSession) getSearchSession(); try { - Collection selectedRows = ((SelectableFetchingBuffer) searchSession.getBuffer()).getSelected(); - logger.info("found "+selectedRows.size()+" selected rows"); + Collection selectedRows = ((SelectableFetchingBuffer) searchSession.getBuffer()) + .getSelected(); + logger.info("found " + selectedRows.size() + " selected rows"); List keys = new ArrayList(selectedRows.size()); - for (ResultRow row:selectedRows) { - //ADD KEY ONLY IF IS NOT EQUAL NULL AND SIZE IS > 0 - if(row.getOccurencesKey()!=null && row.getOccurencesKey().length()>0){ + for (ResultRow row : selectedRows) { + // ADD KEY ONLY IF IS NOT EQUAL NULL AND SIZE IS > 0 + if (row.getOccurencesKey() != null && row.getOccurencesKey().length() > 0) { keys.add(row.getOccurencesKey()); count += row.getOccurencesCount(); } } - logger.info("found "+count+" occurrence points"); + logger.info("found " + count + " occurrence points"); - //TODO remove - if (logger.isInfoEnabled()) logger.info("selected keys: "+keys); + // TODO remove + if (logger.isInfoEnabled()) + logger.info("selected keys: " + keys); SpeciesService taxonomyService = getSpeciesService(); CloseableIterator source = taxonomyService.getOccurrencesByKeys(keys); CloseableIterator input = IteratorChainBuilder.buildOccurrenceConverter(source); - //DELETE ALL ROW INTO DAO OCCURENCES + // DELETE ALL ROW INTO DAO OCCURENCES OccurrenceRowPersistence occurrencesDao = DaoSession.getOccurrenceDAO(getASLSession()); occurrencesDao.removeAll(); FetchingSessionUtil.createOccurrenceFetchingSession(input, getASLSession()); @@ -848,12 +913,24 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T return count; } - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getOccurrencesBatch(int, int) + /** + * Gets the occurrences batch. 
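+ * <p>
+ * The occurrence buffer is fully materialized and then sliced: with
+ * {@code start = 0} and {@code limit = 100} on a 250-element buffer the call
+ * returns rows [0, 100), and two further calls at start 100 and 200 drain
+ * the remainder.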
+ * + * @param start the start + * @param limit the limit + * @return the occurrences batch + * @throws SearchServiceException the search service exception + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * getOccurrencesBatch(int, int) */ @Override public OccurrenceBatch getOccurrencesBatch(int start, int limit) throws SearchServiceException { - logger.info("getOccurrencesBatch: start: "+start+" limit: "+limit); + logger.info("getOccurrencesBatch: start: " + start + " limit: " + limit); FetchingSession occurrenceSession = getOccurrenceSession(); List buffer; @@ -861,13 +938,13 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T try { buffer = occurrenceSession.getBuffer().getList(); - logger.info("Fetching data from occurrence session buffer, size: "+buffer.size()); - int end = Math.min(start+limit, buffer.size()); - logger.info("chunk bounds[start: "+start+" end: " + end+"]"); + logger.info("Fetching data from occurrence session buffer, size: " + buffer.size()); + int end = Math.min(start + limit, buffer.size()); + logger.info("chunk bounds[start: " + start + " end: " + end + "]"); ArrayList data = new ArrayList(buffer.subList(start, end)); - logger.info("returning "+data.size()+" elements"); + logger.info("returning " + data.size() + " elements"); - //DEBUG + // DEBUG // for (Occurrence occurrence : data) { // logger.info("citation :" + occurrence.getCitation()+ // " catalog number: "+occurrence.getCatalogueNumber()+ @@ -891,8 +968,18 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T return result; } - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getCountOfOccurrencesBatch() + /** + * Gets the count of occurrences batch. 
+ * + * @return the count of occurrences batch + * @throws SearchServiceException the search service exception + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * getCountOfOccurrencesBatch() */ @Override public OccurrencesStatus getCountOfOccurrencesBatch() throws SearchServiceException { @@ -903,7 +990,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T try { buffer = occurrenceSession.getBuffer().getList(); - logger.info("Fetching data from occurrence session buffer, size: "+buffer.size()); + logger.info("Fetching data from occurrence session buffer, size: " + buffer.size()); return new OccurrencesStatus(occurrenceSession.isComplete(), buffer.size()); } catch (Exception e) { @@ -930,14 +1017,16 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T throw new SearchServiceException(e.getMessage()); } SessionUtil.setCurrentOccurrenceSession(session, null); - } else logger.warn("Occurrence session not found"); + } else + logger.warn("Occurrence session not found"); } /** * {@inheritDoc} */ @Override - public JobGisLayerModel createGisLayerJobFromSelectedOccurrenceKeys(String layerTitle, String layerDescr, long totalPoints) throws Exception { + public JobGisLayerModel createGisLayerJobFromSelectedOccurrenceKeys(String layerTitle, String layerDescr, + long totalPoints) throws Exception { try { List occurrenceKeys = getSelectedOccurrenceKeys(); @@ -946,37 +1035,36 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T String credits = ""; GisLayerJobPersistence gisLayerJob = DaoSession.getGisLayersJobDAO(getASLSession()); logger.trace("GisLayerJobPersistence found"); - return GisLayerJobUtil.createGisLayerJobByOccurrenceKeys(occurrenceKeys, taxonomyService, layerTitle, layerDescr, author, credits, totalPoints, gisLayerJob); + return GisLayerJobUtil.createGisLayerJobByOccurrenceKeys(occurrenceKeys, taxonomyService, layerTitle, + layerDescr, author, credits, totalPoints, gisLayerJob); } catch (Exception e) { logger.error("An error occurred creating the map", e); throw new Exception(e.getMessage()); + } + + /* + * Iterator iteratorOccurrences = + * getIteratorSelectedOccurrenceIds(); IteratorPointInfo streamKey = new + * IteratorPointInfo(iteratorOccurrences); + * + * try { SpeciesService taxonomyService = getSpeciesService(); String groupName + * = taxonomyService.generateMapFromOccurrencePoints(streamKey); + * logger.info("generated groupName: "+groupName); return groupName; } catch + * (Exception e) { logger.error("An error occurred creating the map", e); throw + * new SearchServiceException(e.getMessage()); } + */ } - - /*Iterator iteratorOccurrences = getIteratorSelectedOccurrenceIds(); - IteratorPointInfo streamKey = new IteratorPointInfo(iteratorOccurrences); - - try { - SpeciesService taxonomyService = getSpeciesService(); - String groupName = taxonomyService.generateMapFromOccurrencePoints(streamKey); - logger.info("generated groupName: "+groupName); - return groupName; - } catch (Exception e) { - logger.error("An error occurred creating the map", e); - throw new SearchServiceException(e.getMessage()); - }*/ - } - - //USED FOR DEBUG + // USED FOR DEBUG /** * Prints the id. 
* * @param listId the list id */ - protected void printId(List listId){ + protected void printId(List listId) { for (String id : listId) { - System.out.println("Found id : " +id); + System.out.println("Found id : " + id); } } @@ -986,7 +1074,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T * @return the selected occurrence ids * @throws SearchServiceException the search service exception */ - protected List getSelectedOccurrenceIds() throws SearchServiceException{ + protected List getSelectedOccurrenceIds() throws SearchServiceException { FetchingSession occurrenceSession = getOccurrenceSession(); List buffer; @@ -1000,7 +1088,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T listId.add(occurrence.getServiceId()); } - return listId; + return listId; } catch (Exception e) { logger.error("An error occurred on getSelectedOccurrenceIds", e); @@ -1014,7 +1102,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T * @return the iterator selected occurrence ids * @throws SearchServiceException the search service exception */ - protected Iterator getIteratorSelectedOccurrenceIds() throws SearchServiceException{ + protected Iterator getIteratorSelectedOccurrenceIds() throws SearchServiceException { FetchingSession occurrenceSession = getOccurrenceSession(); try { @@ -1026,21 +1114,52 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T } - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#saveSelectedOccurrencePoints(java.lang.String, java.lang.String, org.gcube.portlets.user.speciesdiscovery.shared.SaveFileFormat, org.gcube.portlets.user.speciesdiscovery.shared.OccurrencesSaveEnum) + /** + * Save selected occurrence points. + * + * @param destinationFolderId the destination folder id + * @param fileName the file name + * @param fileFormat the file format + * @param typeCSV the type CSV + * @throws SearchServiceException the search service exception + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * saveSelectedOccurrencePoints(java.lang.String, java.lang.String, + * org.gcube.portlets.user.speciesdiscovery.shared.SaveFileFormat, + * org.gcube.portlets.user.speciesdiscovery.shared.OccurrencesSaveEnum) */ @Override @Deprecated - public void saveSelectedOccurrencePoints(String destinationFolderId, String fileName, SaveFileFormat fileFormat, OccurrencesSaveEnum typeCSV) throws SearchServiceException { - logger.info("saveSelectedOccurrencePoints destinationFolderId: "+destinationFolderId+" fileName: "+fileName+" fileFormat: "+fileFormat+" typeCSV: "+typeCSV); + public void saveSelectedOccurrencePoints(String destinationFolderId, String fileName, SaveFileFormat fileFormat, + OccurrencesSaveEnum typeCSV) throws SearchServiceException { + logger.info("saveSelectedOccurrencePoints destinationFolderId: " + destinationFolderId + " fileName: " + + fileName + " fileFormat: " + fileFormat + " typeCSV: " + typeCSV); } - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#saveSelectedTaxonomyPoints(java.lang.String, java.lang.String, org.gcube.portlets.user.speciesdiscovery.shared.SaveFileFormat) + /** + * Save selected taxonomy points. 
+ * + * @param destinationFolderId the destination folder id + * @param fileName the file name + * @param fileFormat the file format + * @throws SearchServiceException the search service exception + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * saveSelectedTaxonomyPoints(java.lang.String, java.lang.String, + * org.gcube.portlets.user.speciesdiscovery.shared.SaveFileFormat) */ @Override - public void saveSelectedTaxonomyPoints(String destinationFolderId, String fileName, SaveFileFormat fileFormat) throws SearchServiceException { - //TODO OLD CALL + public void saveSelectedTaxonomyPoints(String destinationFolderId, String fileName, SaveFileFormat fileFormat) + throws SearchServiceException { + // TODO OLD CALL } /** @@ -1050,8 +1169,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T * @throws SearchServiceException the search service exception */ @SuppressWarnings("unchecked") - protected List getSelectedOccurrenceKeys() throws SearchServiceException - { + protected List getSelectedOccurrenceKeys() throws SearchServiceException { FetchingSession searchSession = (FetchingSession) getSearchSession(); Collection selectedRows; List keys = null; @@ -1059,18 +1177,19 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T try { selectedRows = ((SelectableFetchingBuffer) searchSession.getBuffer()).getSelected(); - logger.info("found "+selectedRows.size()+" selected rows"); + logger.info("found " + selectedRows.size() + " selected rows"); keys = new ArrayList(selectedRows.size()); int count = 0; - for (ResultRow row:selectedRows) { + for (ResultRow row : selectedRows) { keys.add(row.getOccurencesKey()); count += row.getOccurencesCount(); } - logger.info("found "+count+" occurrence points and "+keys.size()+" keys"); + logger.info("found " + count + " occurrence points and " + keys.size() + " keys"); - //TODO remove - if (logger.isInfoEnabled()) logger.info("selected keys: "+keys); + // TODO remove + if (logger.isInfoEnabled()) + logger.info("selected keys: " + keys); } catch (Exception e) { logger.error("An error occured in getSelectedOccurrenceKeys" + e.getMessage()); @@ -1080,7 +1199,6 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T return keys; } - /** * Gets the selected result row id. 
* @@ -1088,8 +1206,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T * @throws SearchServiceException the search service exception */ @SuppressWarnings("unchecked") - protected List getSelectedResultRowId() throws SearchServiceException - { + protected List getSelectedResultRowId() throws SearchServiceException { FetchingSession searchSession = (FetchingSession) getSearchSession(); Collection selectedRows; List listId = null; @@ -1097,17 +1214,18 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T try { selectedRows = ((SelectableFetchingBuffer) searchSession.getBuffer()).getSelected(); - logger.info("found "+selectedRows.size()+" selected rows"); + logger.info("found " + selectedRows.size() + " selected rows"); listId = new ArrayList(selectedRows.size()); - for (ResultRow row:selectedRows) + for (ResultRow row : selectedRows) listId.add(row.getServiceId()); - logger.info("found "+listId.size()+" ids"); + logger.info("found " + listId.size() + " ids"); - //TODO remove - if (logger.isTraceEnabled()) logger.info("selected ids: "+listId); + // TODO remove + if (logger.isTraceEnabled()) + logger.info("selected ids: " + listId); } catch (Exception e) { logger.error("An error occured in getSelectedOccurrenceKeys" + e.getMessage()); @@ -1124,8 +1242,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T * @throws SearchServiceException the search service exception */ @SuppressWarnings("unchecked") - protected Map getSelectedTaxonomyIdAndDataSource() throws SearchServiceException - { + protected Map getSelectedTaxonomyIdAndDataSource() throws SearchServiceException { FetchingSession searchSession = (FetchingSession) getSearchSession(); HashMap hashIdTaxonDataSource = null; Collection selectedRows; @@ -1133,18 +1250,19 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T try { selectedRows = ((SelectableFetchingBuffer) searchSession.getBuffer()).getSelected(); - logger.info("found "+selectedRows.size()+" selected rows"); + logger.info("found " + selectedRows.size() + " selected rows"); hashIdTaxonDataSource = new HashMap(selectedRows.size()); - for (TaxonomyRow row:selectedRows){ + for (TaxonomyRow row : selectedRows) { hashIdTaxonDataSource.put(row.getServiceId(), row.getDataProviderName()); - logger.info("add this id: "+row.getServiceId()+" to list"); + logger.info("add this id: " + row.getServiceId() + " to list"); } - logger.info("found "+hashIdTaxonDataSource.size()+" id"); + logger.info("found " + hashIdTaxonDataSource.size() + " id"); - //TODO remove - if (logger.isInfoEnabled()) logger.info("selected ids: "+hashIdTaxonDataSource); + // TODO remove + if (logger.isInfoEnabled()) + logger.info("selected ids: " + hashIdTaxonDataSource); } catch (Exception e) { logger.error("An error occured in getSelectedOccurrenceKeys" + e.getMessage()); @@ -1157,15 +1275,14 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T /** * Generate csv file. 
* - * @param ids the ids + * @param ids the ids * @param csvType the csv type * @return the file * @throws Exception the exception */ - protected File generateCSVFile(List ids, OccurrencesSaveEnum csvType) throws Exception - { + protected File generateCSVFile(List ids, OccurrencesSaveEnum csvType) throws Exception { File csvFile = File.createTempFile("test", ".csv"); - logger.info("outputfile "+csvFile.getAbsolutePath()); + logger.info("outputfile " + csvFile.getAbsolutePath()); FileWriter fileWriter = new FileWriter(csvFile); CSVWriter writer = new CSVWriter(fileWriter); @@ -1179,30 +1296,43 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T switch (csvType) { - case OPENMODELLER: { + case OPENMODELLER: { - OccurenceCSVConverterOpenModeller converterOpenModeller = new OccurenceCSVConverterOpenModeller(); - csvGenerator = new CSVGenerator(result, converterOpenModeller, OccurenceCSVConverterOpenModeller.HEADER); + OccurenceCSVConverterOpenModeller converterOpenModeller = new OccurenceCSVConverterOpenModeller(); + csvGenerator = new CSVGenerator(result, converterOpenModeller, + OccurenceCSVConverterOpenModeller.HEADER); - }break; + } + break; - case STANDARD:{ + case STANDARD: { - OccurenceCSVConverter converter = new OccurenceCSVConverter(); - csvGenerator = new CSVGenerator(result, converter, OccurenceCSVConverter.HEADER); + OccurenceCSVConverter converter = new OccurenceCSVConverter(); + csvGenerator = new CSVGenerator(result, converter, OccurenceCSVConverter.HEADER); - }break; + } + break; } - while(csvGenerator.hasNext()) writer.writeLine(csvGenerator.next()); + while (csvGenerator.hasNext()) + writer.writeLine(csvGenerator.next()); fileWriter.close(); return csvFile; } - - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#loadDataSourceList() + /** + * Load data source list. + * + * @return the list + * @throws SearchServiceException the search service exception + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * loadDataSourceList() */ @Override public List loadDataSourceList() throws SearchServiceException { @@ -1210,7 +1340,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T List listDS = null; try { - DaoSession.initSessionDaoObjects(getASLSession()); //FIXME temporary? + DaoSession.initSessionDaoObjects(getASLSession()); // FIXME temporary? logger.info("DAOs OK... 
"); System.out.println("DAOs OK"); SpeciesService taxonomyService = getSpeciesService(); @@ -1219,21 +1349,31 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T listDS = taxonomyService.getPlugins(); System.out.println("Plugins OK"); logger.info("Plugins OK"); - logger.info("Return list plugin - size: " +listDS.size()); + logger.info("Return list plugin - size: " + listDS.size()); } catch (DatabaseServiceException e) { - throw new SearchServiceException("Sorry, an error has occurred on the server while "+e.getMessage()); + throw new SearchServiceException("Sorry, an error has occurred on the server while " + e.getMessage()); } catch (Exception e) { - throw new SearchServiceException("Sorry, an error has occurred on the server while "+e.getMessage()); + throw new SearchServiceException("Sorry, an error has occurred on the server while " + e.getMessage()); } return listDS; } - - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#loadListCommonNameByRowId(java.lang.String) + /** + * Load list common name by row id. + * + * @param resultRowId the result row id + * @return the array list + * @throws Exception the exception + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * loadListCommonNameByRowId(java.lang.String) */ @Override @Deprecated @@ -1244,8 +1384,19 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T return listCommonName; } - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getParentsList(org.gcube.portlets.user.speciesdiscovery.shared.Taxon) + /** + * Gets the parents list. + * + * @param taxon the taxon + * @return the parents list + * @throws Exception the exception + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * getParentsList(org.gcube.portlets.user.speciesdiscovery.shared.Taxon) */ @Override @Deprecated @@ -1254,9 +1405,19 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T return null; } - - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#loadListChildrenByParentId(java.lang.String) + /** + * Load list children by parent id. 
 
-
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#loadListChildrenByParentId(java.lang.String)
+	/**
+	 * Load list children by parent id.
+	 *
+	 * @param parentId the parent id
+	 * @return the array list
+	 * @throws Exception the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * loadListChildrenByParentId(java.lang.String)
 	 */
 	@Override
 	public ArrayList loadListChildrenByParentId(String parentId) throws Exception {
@@ -1264,7 +1425,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 		ArrayList listLightTaxonomyRow = new ArrayList();
 
-		if(parentId==null || parentId.isEmpty()){
+		if (parentId == null || parentId.isEmpty()) {
 			logger.warn("parentId is null or empty ");
 			return listLightTaxonomyRow;
 		}
@@ -1278,7 +1439,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 			Map mapChildren = SessionUtil.getHashMapChildrenTaxonomyCache(session);
 
-			if(mapChildren==null){
+			if (mapChildren == null) {
 				logger.info("Cache taxa children doesn't exists into session, creating..");
 				mapChildren = new HashMap();
 			}
@@ -1288,12 +1449,14 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 				TaxonomyRow taxonomy = converter.convert(tax);
 
-				if(mapChildren.get(taxonomy.getServiceId())==null){
-					logger.info("Taxonomy with service id: "+taxonomy.getServiceId()+" doesn't exists into Map Children, adding..");
-					mapChildren.put(taxonomy.getServiceId(),taxonomy);
+				if (mapChildren.get(taxonomy.getServiceId()) == null) {
+					logger.info("Taxonomy with service id: " + taxonomy.getServiceId()
+							+ " doesn't exist in Map Children, adding..");
+					mapChildren.put(taxonomy.getServiceId(), taxonomy);
 				}
 
-				LightTaxonomyRow lightTaxRow = ClusterStructuresForTaxonomyRow.convetTaxonomyRowToLigthTaxonomyRow(taxonomy);
+				LightTaxonomyRow lightTaxRow = ClusterStructuresForTaxonomyRow
+						.convetTaxonomyRowToLigthTaxonomyRow(taxonomy);
 				listLightTaxonomyRow.add(lightTaxRow);
 			}
@@ -1305,76 +1468,87 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 			throw new Exception("Error on loadListChildByParentId", e);
 		}
 
-		logger.info("Return list children By ParentId "+parentId+"- with size: "+ listLightTaxonomyRow.size());
+		logger.info("Return list children By ParentId " + parentId + " - with size: " + listLightTaxonomyRow.size());
 		return listLightTaxonomyRow;
 	}
 
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getListTaxonomyJobs()
+	/**
+	 * Gets the list taxonomy jobs.
+	 *
+	 * @return the list taxonomy jobs
+	 * @throws SessionExpired the session expired
+	 * @throws Exception      the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * getListTaxonomyJobs()
 	 */
 	@Override
 	public List getListTaxonomyJobs() throws SessionExpired, Exception {
 
 		logger.info("getListTaxonomyJobs... ");
 		List listJobs = new ArrayList();
 
-		try{
+		try {
 			TaxonomyJobPersistence taxonomyJobDao = DaoSession.getTaxonomyJobDAO(getASLSession());
 
 			Iterator iterator = taxonomyJobDao.getList().iterator();
-			SpeciesService taxonomyService = getSpeciesService();
+			SpeciesService taxonomyService = getSpeciesService();
 
-			while(iterator!=null && iterator.hasNext()){
-				TaxonomyJob job = iterator.next();
-				logger.info("get taxonomy job "+job.getId()+ " from service");
+			while (iterator != null && iterator.hasNext()) {
+				TaxonomyJob job = iterator.next();
+				logger.info("get taxonomy job " + job.getId() + " from service");
 
-				try{
-					CompleteJobStatus statusResponse = taxonomyService.getTaxonomyJobById(job.getId());
+				try {
+					CompleteJobStatus statusResponse = taxonomyService.getTaxonomyJobById(job.getId());
 
-					if(statusResponse!=null){
-						logger.info("statusResponse is not null..." + job.getId());
-						JobTaxonomyModel jobSpeciesModel = TaxonomyJobUtil.convertJob(job, statusResponse, taxonomyJobDao);
-						logger.info("added list jobTaxonomyId: "+job.getTaxonomyId() + " status "+job.getState());
+					if (statusResponse != null) {
+						logger.info("statusResponse is not null..." + job.getId());
+						JobTaxonomyModel jobSpeciesModel = TaxonomyJobUtil.convertJob(job, statusResponse,
+								taxonomyJobDao);
+						logger.info("added list jobTaxonomyId: " + job.getTaxonomyId() + " status " + job.getState());
 
-						if(jobSpeciesModel!=null)
-							listJobs.add(jobSpeciesModel);
-					}
-					else{
-						logger.info("TaxonomyJob statusResponse is null..." + job.getId());
-						TaxonomyJobUtil.deleteTaxonomyJobById(job.getId(),taxonomyJobDao);
-					}
+						if (jobSpeciesModel != null)
+							listJobs.add(jobSpeciesModel);
+					} else {
+						logger.info("TaxonomyJob statusResponse is null..." + job.getId());
+						TaxonomyJobUtil.deleteTaxonomyJobById(job.getId(), taxonomyJobDao);
+					}
 
-				}catch (Exception e) {
+				} catch (Exception e) {
 					e.printStackTrace();
 					logger.error("Error on getListSpeciesJobs ", e);
 					throw new Exception("Error on getListSpeciesJobs", e);
-				}
+				}
-			}
+			}
 
-		}catch (Exception e) {
+		} catch (Exception e) {
 			logger.error("Error on getListSpeciesJobs ", e);
 			throw new Exception("Error on getListSpeciesJobs", e);
 		}
-		return listJobs;
+		return listJobs;
 	}
 
-
 	/**
 	 * Creates the taxonomy job by children.
 	 *
 	 * @param taxonomyServiceId the taxonomy service id
-	 * @param taxonomyName the taxonomy name
-	 * @param taxonomyRank the taxonomy rank
-	 * @param dataSourceName the data source name
+	 * @param taxonomyName      the taxonomy name
+	 * @param taxonomyRank      the taxonomy rank
+	 * @param dataSourceName    the data source name
 	 * @return the job taxonomy model
 	 * @throws Exception the exception
 	 */
 	@Override
-	public JobTaxonomyModel createTaxonomyJobByChildren(String taxonomyServiceId, String taxonomyName, String taxonomyRank, String dataSourceName) throws Exception {
-		//FIXED 20/05/2013
+	public JobTaxonomyModel createTaxonomyJobByChildren(String taxonomyServiceId, String taxonomyName,
+			String taxonomyRank, String dataSourceName) throws Exception {
+		// FIXED 20/05/2013
 		logger.info("Create job for taxonomy id: " + taxonomyServiceId);
 		// System.out.println("Create job for taxonomy id: " + taxonomy.getServiceId());
@@ -1390,11 +1564,13 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 		String name = NormalizeString.lowerCaseUpFirstChar(taxonomyName) + " group";
 
-		//STORE INTO DAO
-		TaxonomyJob speciesJob = new TaxonomyJob(speciesJobId, DownloadState.PENDING.toString(), name, taxonomyName, dataSourceName, taxonomyRank, 0, submitTime, 0, taxonomyServiceId);
+		// STORE INTO DAO
+		TaxonomyJob speciesJob = new TaxonomyJob(speciesJobId, DownloadState.PENDING.toString(), name, taxonomyName,
+				dataSourceName, taxonomyRank, 0, submitTime, 0, taxonomyServiceId);
 		taxonomyJobDao.insert(speciesJob);
 
-		jobSpeciesModel = new JobTaxonomyModel(speciesJob.getId(), speciesJob.getDescriptiveName(), DownloadState.PENDING, null, taxonomyName, dataSourceName, taxonomyRank);
+		jobSpeciesModel = new JobTaxonomyModel(speciesJob.getId(), speciesJob.getDescriptiveName(),
+				DownloadState.PENDING, null, taxonomyName, dataSourceName, taxonomyRank);
 
 		Date submit = DateUtil.millisecondsToDate(speciesJob.getSubmitTime());
 		// jobSpeciesModel.setStartTime(DateUtil.dateToDateFormatString(start));
@@ -1404,9 +1580,20 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 		return jobSpeciesModel;
 	}
 
-
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#createTaxonomyJobByIds(java.lang.String, java.util.List)
+	/**
+	 * Creates the taxonomy job by ids.
+	 *
+	 * @param search      the search
+	 * @param dataSources the data sources
+	 * @return the job taxonomy model
+	 * @throws Exception the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * createTaxonomyJobByIds(java.lang.String, java.util.List)
 	 */
 	@Override
 	public JobTaxonomyModel createTaxonomyJobByIds(String search, List dataSources) throws Exception {
@@ -1423,23 +1610,25 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 		String name = NormalizeString.lowerCaseUpFirstChar(search) + " - ";
 		name += listId.size() + " ";
-		name += listId.size()>1?"taxa":"taxon";
+		name += listId.size() > 1 ? "taxa" : "taxon";
 
 		String dataSourceName = "";
 		for (String taxonId : listId) {
-			if(!dataSourceName.contains(hashIdDs.get(taxonId))) //remove duplicate
-				dataSourceName+=hashIdDs.get(taxonId) + ", ";
+			if (!dataSourceName.contains(hashIdDs.get(taxonId))) // remove duplicate
+				dataSourceName += hashIdDs.get(taxonId) + ", ";
 		}
 
-		if(dataSourceName.endsWith(", "))
-			dataSourceName = dataSourceName.substring(0, dataSourceName.length()-2);
+		if (dataSourceName.endsWith(", "))
+			dataSourceName = dataSourceName.substring(0, dataSourceName.length() - 2);
 
-		//STORE INTO DAO
-		TaxonomyJob speciesJob = new TaxonomyJob(speciesJobId, DownloadState.PENDING.toString(), name, name, dataSourceName, "", 0, submitTime, 0, speciesJobId);
+		// STORE INTO DAO
+		TaxonomyJob speciesJob = new TaxonomyJob(speciesJobId, DownloadState.PENDING.toString(), name, name,
+				dataSourceName, "", 0, submitTime, 0, speciesJobId);
 		taxonomyJobDao.insert(speciesJob);
 
-		jobSpeciesModel = new JobTaxonomyModel(speciesJob.getId(), speciesJob.getDescriptiveName(), DownloadState.PENDING, null, name, dataSourceName, "");
+		jobSpeciesModel = new JobTaxonomyModel(speciesJob.getId(), speciesJob.getDescriptiveName(),
+				DownloadState.PENDING, null, name, dataSourceName, "");
 
 		Date submit = DateUtil.millisecondsToDate(speciesJob.getSubmitTime());
 		// jobSpeciesModel.setStartTime(DateUtil.dateToDateFormatString(start));
@@ -1449,9 +1638,19 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 		return jobSpeciesModel;
 	}
 
-
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#resubmitTaxonomyJob(java.lang.String)
+	/**
+	 * Resubmit taxonomy job.
+	 *
+	 * @param jobIdentifier the job identifier
+	 * @return the job taxonomy model
+	 * @throws Exception the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
	 * @see
	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
	 * resubmitTaxonomyJob(java.lang.String)
 	 */
 	@Override
 	public JobTaxonomyModel resubmitTaxonomyJob(String jobIdentifier) throws Exception {
@@ -1460,37 +1659,41 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 		JobTaxonomyModel jobSpeciesModel = null;
 
-		//Get Dao with list taxonomy jobs
+		// Get Dao with list taxonomy jobs
 		TaxonomyJobPersistence taxonomyJobDao = DaoSession.getTaxonomyJobDAO(getASLSession());
 
 		CriteriaBuilder queryBuilder = taxonomyJobDao.getCriteriaBuilder();
 		CriteriaQuery cq = queryBuilder.createQuery();
-		Predicate pr1 = queryBuilder.equal(taxonomyJobDao.rootFrom(cq).get(TaxonomyJob.ID_FIELD), jobIdentifier);
+		Predicate pr1 = queryBuilder.equal(taxonomyJobDao.rootFrom(cq).get(TaxonomyJob.ID_FIELD), jobIdentifier);
 		cq.where(pr1);
 
 		Iterator iterator = taxonomyJobDao.executeCriteriaQuery(cq).iterator();
 
 		TaxonomyJob taxonomy;
-		if(iterator.hasNext())
-			taxonomy = iterator.next();
+		if (iterator.hasNext())
+			taxonomy = iterator.next();
 		else
 			return jobSpeciesModel;
 
 		SpeciesService taxonomyService = getSpeciesService();
 
-		//recover taxomyId
+		// recover taxonomyId
 		String speciesJobId = taxonomyService.createTaxonomyJobForDWCAByChildren(taxonomy.getTaxonomyId());
 
 		long submitTime = Calendar.getInstance().getTimeInMillis();
-		String name = RESUBMIT + ": " +NormalizeString.lowerCaseUpFirstChar(taxonomy.getDescriptiveName());
+		String name = RESUBMIT + ": " + NormalizeString.lowerCaseUpFirstChar(taxonomy.getDescriptiveName());
 
-		//STORE INTO DAO
-		TaxonomyJob speciesJob = new TaxonomyJob(speciesJobId, DownloadState.PENDING.toString(), name, taxonomy.getDescriptiveName(), taxonomy.getDataSourceName(), taxonomy.getRank(), 0, submitTime, 0, taxonomy.getTaxonomyId());
+		// STORE INTO DAO
+		TaxonomyJob speciesJob = new TaxonomyJob(speciesJobId, DownloadState.PENDING.toString(), name,
+				taxonomy.getDescriptiveName(), taxonomy.getDataSourceName(), taxonomy.getRank(), 0, submitTime, 0,
+				taxonomy.getTaxonomyId());
 		taxonomyJobDao.insert(speciesJob);
 
-		jobSpeciesModel = new JobTaxonomyModel(speciesJob.getId(), speciesJob.getDescriptiveName(), DownloadState.PENDING, null, taxonomy.getDescriptiveName(), taxonomy.getDataSourceName(), taxonomy.getRank());
+		jobSpeciesModel = new JobTaxonomyModel(speciesJob.getId(), speciesJob.getDescriptiveName(),
+				DownloadState.PENDING, null, taxonomy.getDescriptiveName(), taxonomy.getDataSourceName(),
+				taxonomy.getRank());
 
 		Date submit = DateUtil.millisecondsToDate(speciesJob.getSubmitTime());
 		// jobSpeciesModel.setStartTime(DateUtil.dateToDateFormatString(start));
@@ -1500,27 +1703,38 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 		return jobSpeciesModel;
 	}
 
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#cancelTaxonomyJob(java.lang.String)
+	/**
+	 * Cancel taxonomy job.
+	 *
+	 * @param jobIdentifier the job identifier
+	 * @return true, if successful
+	 * @throws Exception the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * cancelTaxonomyJob(java.lang.String)
 	 */
 	@Override
 	public boolean cancelTaxonomyJob(String jobIdentifier) throws Exception {
 
-		try{
+		try {
 			SpeciesService taxonomyService = getSpeciesService();
 
-			//REMOVE JOB ON THE SERVICE
+			// REMOVE JOB ON THE SERVICE
 			taxonomyService.cancelTaxonomyJobById(jobIdentifier);
 
 			TaxonomyJobPersistence speciesJobDao = DaoSession.getTaxonomyJobDAO(getASLSession());
 
 			int count = TaxonomyJobUtil.deleteTaxonomyJobById(jobIdentifier, speciesJobDao);
 
-			if(count==1)
+			if (count == 1)
 				return true;
 
-		}catch (Exception e) {
+		} catch (Exception e) {
 			logger.error("Error on cancel taxonomy job ", e);
 			throw new Exception("Error on cancel taxonomy job", e);
 		}
@@ -1528,13 +1742,31 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 		return false;
 	}
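`createTaxonomyJobByIds` above deduplicates data-source names with `String.contains`, which can silently drop a name that happens to be a substring of one already appended (for example a source named "GBIF" after "GBIF-backbone"). A set-based sketch that only drops exact duplicates while preserving first-seen order — `idToDataSource` stands in for the method's `hashIdDs` map:

```java
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class DataSourceNameSketch {

    /** Joins distinct data-source names, preserving first-seen order. */
    static String dataSourceLabel(List<String> taxonIds, Map<String, String> idToDataSource) {
        Set<String> distinct = new LinkedHashSet<>();
        for (String taxonId : taxonIds) {
            distinct.add(idToDataSource.get(taxonId)); // the set ignores exact duplicates only
        }
        return String.join(", ", distinct);            // no trailing-", " trimming needed
    }
}
```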
 
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#saveTaxonomyJob(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String)
+	/**
+	 * Save taxonomy job.
+	 *
+	 * @param jobIdentifier       the job identifier
+	 * @param destinationFolderId the destination folder id
+	 * @param fileName            the file name
+	 * @param scientificName      the scientific name
+	 * @param dataSourceName      the data source name
+	 * @return true, if successful
+	 * @throws Exception the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * saveTaxonomyJob(java.lang.String, java.lang.String, java.lang.String,
+	 * java.lang.String, java.lang.String)
 	 */
 	@Override
-	public boolean saveTaxonomyJob(String jobIdentifier, String destinationFolderId, String fileName, String scientificName, String dataSourceName) throws Exception {
+	public boolean saveTaxonomyJob(String jobIdentifier, String destinationFolderId, String fileName,
+			String scientificName, String dataSourceName) throws Exception {
 
-		logger.info("saveSpeciesJob jobId: "+jobIdentifier+" destinationFolderId: "+destinationFolderId+" fileName: "+fileName);
+		logger.info("saveSpeciesJob jobId: " + jobIdentifier + " destinationFolderId: " + destinationFolderId
+				+ " fileName: " + fileName);
 
 		try {
@@ -1542,16 +1774,16 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 			String mimeType = "application/zip";
 			InputStream inputStream = taxonomyService.getTaxonomyJobFileById(jobIdentifier);
 
-			if(inputStream!=null){
-				Workspace workspace = GetWorkspaceUtil.getWorskspace(getASLSession());
+			if (inputStream != null) {
+				Workspace workspace = GetWorkspaceUtil.getWorkspace(getThreadLocalRequest(), getASLSession());
 				logger.info("input stream is not null");
-				WorkspaceFolder folder = (WorkspaceFolder) workspace.getItem(destinationFolderId);
-				fileName = WorkspaceUtil.getUniqueName(fileName, folder);
-				folder.createExternalFileItem(fileName,"Taxonomy job generated files", mimeType, inputStream);
+//				WorkspaceFolder folder = (WorkspaceFolder) workspace.getItem(destinationFolderId);
+//				folder.createExternalFileItem(fileName,"Taxonomy job generated files", mimeType, inputStream);
+//
+				workspace.uploadFile(destinationFolderId, inputStream, fileName, "Taxonomy job generated files");
 				logger.info("Save file with taxonomy was completed");
-			}
-			else{
+			} else {
 				logger.info("input stream is null");
 				return false;
@@ -1561,18 +1793,36 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 			}
 
 			return true;
 
 		} catch (Exception e) {
-			logger.error("An error occurred saving the generated file into the workspace",e);
+			logger.error("An error occurred saving the generated file into the workspace", e);
 			throw new SearchServiceException(e.getMessage());
 		}
 	}
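`saveTaxonomyJob` now uploads through the storagehub-backed wrapper — `workspace.uploadFile(folderId, stream, name, description)`, as used in the hunk above — instead of creating an external file item on a home-library folder; the stream handed out by the SPD service is not closed explicitly. A hedged sketch of the same upload step with try-with-resources; the `Workspace` interface below is a minimal stand-in for the wrapper, not its real API surface:

```java
import java.io.IOException;
import java.io.InputStream;

public class UploadSketch {

    /** Minimal stand-in for the storagehub-client-wrapper Workspace used in the patch. */
    interface Workspace {
        void uploadFile(String folderId, InputStream in, String name, String description)
                throws IOException;
    }

    /** Uploads the job output and always closes the stream, even if the upload fails. */
    static boolean saveJobFile(Workspace workspace, String folderId, String fileName,
            String description, InputStream in) throws IOException {
        if (in == null) {
            return false;                   // the service produced nothing for this job
        }
        try (InputStream stream = in) {     // closed on success and on exception
            workspace.uploadFile(folderId, stream, fileName, description);
        }
        return true;
    }
}
```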
 
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#saveTaxonomyJobError(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String)
+	/**
+	 * Save taxonomy job error.
+	 *
+	 * @param jobIdentifier       the job identifier
+	 * @param destinationFolderId the destination folder id
+	 * @param fileName            the file name
+	 * @param scientificName      the scientific name
+	 * @param dataSourceName      the data source name
+	 * @return true, if successful
+	 * @throws Exception the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * saveTaxonomyJobError(java.lang.String, java.lang.String, java.lang.String,
+	 * java.lang.String, java.lang.String)
 	 */
 	@Override
-	public boolean saveTaxonomyJobError(String jobIdentifier, String destinationFolderId, String fileName, String scientificName, String dataSourceName) throws Exception {
-		logger.info("saveSpeciesJob error jobId: "+jobIdentifier+" destinationFolderId: "+destinationFolderId+" fileName: "+fileName);
+	public boolean saveTaxonomyJobError(String jobIdentifier, String destinationFolderId, String fileName,
+			String scientificName, String dataSourceName) throws Exception {
+		logger.info("saveSpeciesJob error jobId: " + jobIdentifier + " destinationFolderId: " + destinationFolderId
+				+ " fileName: " + fileName);
 
 		try {
@@ -1581,15 +1831,14 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 			InputStream inputStream = taxonomyService.getTaxonomyJobErrorFileById(jobIdentifier);
 
-			if(inputStream!=null){
-				Workspace workspace = GetWorkspaceUtil.getWorskspace(getASLSession());
+			if (inputStream != null) {
+				Workspace workspace = GetWorkspaceUtil.getWorkspace(getThreadLocalRequest(), getASLSession());
 				logger.info("input stream is not null");
-				WorkspaceFolder folder = (WorkspaceFolder) workspace.getItem(destinationFolderId);
-				fileName = WorkspaceUtil.getUniqueName(fileName, folder);
-				folder.createExternalFileItem(fileName,"Report errors on taxonomy job", mimeType, inputStream);
+//				WorkspaceFolder folder = (WorkspaceFolder) workspace.getItem(destinationFolderId);
+//				folder.createExternalFileItem(fileName,"Report errors on taxonomy job", mimeType, inputStream);
+				workspace.uploadFile(destinationFolderId, inputStream, fileName, "Report errors on taxonomy job");
 				logger.info("Save report file with errors occurred was completed");
-			}
-			else{
+			} else {
 				logger.info("input stream is null");
 				return false;
@@ -1599,24 +1848,35 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 			}
 
 			return true;
 
 		} catch (Exception e) {
-			logger.error("An error occurred saving the generated file into the workspace",e);
+			logger.error("An error occurred saving the generated file into the workspace", e);
 			throw new SearchServiceException(e.getMessage());
 		}
 	}
 
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#retrieveTaxonomyByIds(java.util.List)
+	/**
+	 * Retrieve taxonomy by ids.
+	 *
+	 * @param ids the ids
+	 * @return the list
+	 * @throws Exception the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * retrieveTaxonomyByIds(java.util.List)
 	 */
 	@Override
-	public List retrieveTaxonomyByIds(List ids) throws Exception{
+	public List retrieveTaxonomyByIds(List ids) throws Exception {
 
 		logger.info("retrieveTaxonomyByIds ids size: " + ids.size());
 		List listLightTaxonomyRow = new ArrayList();
 
 		try {
 			SpeciesService taxonomyService = getSpeciesService();
-			//StreamExtend convert = new StreamExtend(ids.iterator());
+			// StreamExtend convert = new StreamExtend(ids.iterator());
 
 			CloseableIterator streamIterator = taxonomyService.retrieveTaxonomyById(ids);
@@ -1625,7 +1885,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 			Map mapTaxonomyIds = SessionUtil.getHashMapTaxonomyByIdsCache(session);
 
-			if(mapTaxonomyIds==null){
+			if (mapTaxonomyIds == null) {
 				logger.info("Cache taxa ByIds doesn't exists into session, creating..");
 				mapTaxonomyIds = new HashMap();
 			}
@@ -1635,12 +1895,14 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 				TaxonomyRow taxonomy = taxonomyItemConverter.convert(tax);
 
-				if(mapTaxonomyIds.get(taxonomy.getServiceId())==null){
-					logger.info("Taxonomy with service id: "+taxonomy.getServiceId()+" doesn't exists into Map Taxonomy Ids, adding..");
-					mapTaxonomyIds.put(taxonomy.getServiceId(),taxonomy);
+				if (mapTaxonomyIds.get(taxonomy.getServiceId()) == null) {
+					logger.info("Taxonomy with service id: " + taxonomy.getServiceId()
+							+ " doesn't exist in Map Taxonomy Ids, adding..");
+					mapTaxonomyIds.put(taxonomy.getServiceId(), taxonomy);
 				}
 
-				LightTaxonomyRow lightTaxRow = ClusterStructuresForTaxonomyRow.convetTaxonomyRowToLigthTaxonomyRow(taxonomy);
+				LightTaxonomyRow lightTaxRow = ClusterStructuresForTaxonomyRow
+						.convetTaxonomyRowToLigthTaxonomyRow(taxonomy);
 				listLightTaxonomyRow.add(lightTaxRow);
 			}
@@ -1651,7 +1913,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 		} catch (Exception e) {
 			e.printStackTrace();
-			logger.error("An error retrieve taxonomy by Id",e);
+			logger.error("An error occurred retrieving taxonomy by id", e);
 			// throw new Exception(e.getMessage());
 		}
@@ -1659,11 +1921,22 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 	}
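`retrieveTaxonomyByIds` keeps a per-session `HashMap` of already-converted rows and fills it with get-then-put checks; `Map.computeIfAbsent` expresses the same get-or-create step in a single call. A small sketch of that idiom (with a plain `String` standing in for `TaxonomyRow`):

```java
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

public class SessionCacheSketch {

    /** Returns the cached row for the id, converting and caching it on first access. */
    static <R> R getOrConvert(Map<String, R> cache, String serviceId,
            Function<String, R> converter) {
        return cache.computeIfAbsent(serviceId, converter);
    }

    public static void main(String[] args) {
        Map<String, String> cache = new HashMap<>();
        // First call converts and stores; second call hits the cache.
        System.out.println(getOrConvert(cache, "taxon-1", id -> "row for " + id));
        System.out.println(getOrConvert(cache, "taxon-1", id -> "row for " + id));
    }
}
```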
 
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#retrieveSynonymsByRefId(java.lang.String)
+	/**
+	 * Retrieve synonyms by ref id.
+	 *
+	 * @param refId the ref id
+	 * @return the list
+	 * @throws Exception the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * retrieveSynonymsByRefId(java.lang.String)
 	 */
 	@Override
-	public List retrieveSynonymsByRefId(String refId) throws Exception{
+	public List retrieveSynonymsByRefId(String refId) throws Exception {
 
 		logger.info("retrieveSynonymsById id: " + refId);
 		List listLightTaxonomyRow = new ArrayList();
@@ -1678,7 +1951,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 			Map mapSysnosyms = SessionUtil.getHashMapSynonymsTaxonomyCache(getASLSession());
 
-			if(mapSysnosyms==null){
+			if (mapSysnosyms == null) {
 				logger.info("Cache synonyms doesn't exists into session, creating..");
 				mapSysnosyms = new HashMap();
 			}
@@ -1689,12 +1962,14 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 				TaxonomyRow taxonomy = taxonomyItemConverter.convert(tax);
 
-				if(mapSysnosyms.get(taxonomy.getServiceId())==null){
-					logger.info("Taxonomy with service id: "+taxonomy.getServiceId()+" doesn't exists into Map Synonyms, adding..");
-					mapSysnosyms.put(taxonomy.getServiceId(),taxonomy);
+				if (mapSysnosyms.get(taxonomy.getServiceId()) == null) {
+					logger.info("Taxonomy with service id: " + taxonomy.getServiceId()
+							+ " doesn't exist in Map Synonyms, adding..");
+					mapSysnosyms.put(taxonomy.getServiceId(), taxonomy);
 				}
 
-				LightTaxonomyRow lightTaxRow = ClusterStructuresForTaxonomyRow.convetTaxonomyRowToLigthTaxonomyRow(taxonomy);
+				LightTaxonomyRow lightTaxRow = ClusterStructuresForTaxonomyRow
+						.convetTaxonomyRowToLigthTaxonomyRow(taxonomy);
 				listLightTaxonomyRow.add(lightTaxRow);
 			}
@@ -1705,20 +1980,20 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 		} catch (Exception e) {
 			e.printStackTrace();
-			logger.error("An error retrieve synonyms by Id",e);
+			logger.error("An error occurred retrieving synonyms by id", e);
 		}
 
 		return listLightTaxonomyRow;
 	}
 
-
 	/**
 	 * {@inheritDoc}
 	 */
 	@Override
-	public List createOccurrencesJob(List listJobOccurrenceModel, SaveFileFormat saveFileFormat, OccurrencesSaveEnum csvType, boolean isByDataSource, int expectedOccurrence) throws Exception {
+	public List createOccurrencesJob(List listJobOccurrenceModel,
+			SaveFileFormat saveFileFormat, OccurrencesSaveEnum csvType, boolean isByDataSource, int expectedOccurrence)
+			throws Exception {
 
 		logger.info("createOccurencesJobFromSelection...");
 		List listResultJobModel = new ArrayList();
@@ -1731,43 +2006,50 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 		try {
 			List keys = null;
-			//Get Dao with list occurrences jobs
+			// Get Dao with list occurrences jobs
 			OccurrenceJobPersistence occurrencesJobDao = DaoSession.getOccurrencesJobDAO(aslSession);
 
-			if(!isByDataSource){ //NOT IS BY DATASOURCE - CREATE ONE JOB
+			if (!isByDataSource) { // NOT IS BY DATASOURCE - CREATE ONE JOB
 
 				keys = OccurrenceJobUtil.getListOfSelectedKey(searchSession);
 
-				if(listJobOccurrenceModel!=null && listJobOccurrenceModel.get(0)!=null){ //IN THIS CASE - THERE IS ONE JOBMODEL
+				if (listJobOccurrenceModel != null && listJobOccurrenceModel.get(0) != null) { // IN THIS CASE - THERE
+																								// IS ONE JOBMODEL
 
 					List dataSourceList = listJobOccurrenceModel.get(0).getDataSources();
 
-					//ADDED DATA SOURCE LIST
-					listResultJobModel.add(OccurrenceJobUtil.createOccurrenceJobOnServiceByKeys(listJobOccurrenceModel.get(0), taxonomyService, occurrencesJobDao, keys, dataSourceList, saveFileFormat, csvType, expectedOccurrence));
+					// ADDED DATA SOURCE LIST
+					listResultJobModel.add(OccurrenceJobUtil.createOccurrenceJobOnServiceByKeys(
+							listJobOccurrenceModel.get(0), taxonomyService, occurrencesJobDao, keys, dataSourceList,
+							saveFileFormat, csvType, expectedOccurrence));
 				}
 
-			}else{ //IS BY DATASOURCE - CREATE MORE JOB, ONE FOR EACH DATASOURCE
+			} else { // IS BY DATASOURCE - CREATE MORE JOB, ONE FOR EACH DATASOURCE
 
-				for (JobOccurrencesModel jobModel : listJobOccurrenceModel) { //IN THIS CASE - FOR EACH JOBMODEL THERE IS ONE DATASOURCE
+				for (JobOccurrencesModel jobModel : listJobOccurrenceModel) { // IN THIS CASE - FOR EACH JOBMODEL THERE
																				// IS ONE DATASOURCE
 
 					dataSourceAsXml = "";
 
-					if(jobModel.getDataSources()!=null && jobModel.getDataSources().get(0)!=null){
+					if (jobModel.getDataSources() != null && jobModel.getDataSources().get(0) != null) {
 
-						//recover keys
+						// recover keys
 						DataSource dataSource = jobModel.getDataSources().get(0);
-						OccurrenceKeys occKey = OccurrenceJobUtil.getListOfSelectedKeyByDataSource(dataSource.getName(), aslSession);
+						OccurrenceKeys occKey = OccurrenceJobUtil.getListOfSelectedKeyByDataSource(dataSource.getName(),
								aslSession);
 
 						List dataSourceList = jobModel.getDataSources();
 
-						//ADDED DATA SOURCE LIST
-						listResultJobModel.add(OccurrenceJobUtil.createOccurrenceJobOnServiceByKeys(jobModel, taxonomyService, occurrencesJobDao, occKey.getListKey(), dataSourceList, saveFileFormat, csvType, occKey.getTotalOccurrence()));
+						// ADDED DATA SOURCE LIST
+						listResultJobModel.add(OccurrenceJobUtil.createOccurrenceJobOnServiceByKeys(jobModel,
								taxonomyService, occurrencesJobDao, occKey.getListKey(), dataSourceList, saveFileFormat,
								csvType, occKey.getTotalOccurrence()));
 					}
 				}
 			}
 
-			return listResultJobModel;
+			return listResultJobModel;
 
 		} catch (Exception e) {
 			logger.error("An error occurred in createOccurencesJobFromSelection", e);
@@ -1776,8 +2058,19 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 	}
 
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#resubmitOccurrencesJob(java.lang.String)
+	/**
+	 * Resubmit occurrences job.
+	 *
+	 * @param jobIdentifier the job identifier
+	 * @return the list
+	 * @throws Exception the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * resubmitOccurrencesJob(java.lang.String)
 	 */
 	@Override
 	public List resubmitOccurrencesJob(String jobIdentifier) throws Exception {
@@ -1790,38 +2083,42 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 		try {
 
-			//Get Dao with list occurrences jobs
+			// Get Dao with list occurrences jobs
 			OccurrenceJobPersistence occurrencesJobDao = DaoSession.getOccurrencesJobDAO(aslSession);
 
 			CriteriaBuilder queryBuilder = occurrencesJobDao.getCriteriaBuilder();
 			CriteriaQuery cq = queryBuilder.createQuery();
-			Predicate pr1 = queryBuilder.equal(occurrencesJobDao.rootFrom(cq).get(OccurrencesJob.ID_FIELD), jobIdentifier);
+			Predicate pr1 = queryBuilder.equal(occurrencesJobDao.rootFrom(cq).get(OccurrencesJob.ID_FIELD),
+					jobIdentifier);
 			cq.where(pr1);
 
 			Iterator iterator = occurrencesJobDao.executeCriteriaQuery(cq).iterator();
 
 			OccurrencesJob job;
-			if(iterator.hasNext())
-				job = iterator.next();
+			if (iterator.hasNext())
+				job = iterator.next();
 			else
 				return listResultJobModel;
 
-			//recover keys
+			// recover keys
 			keys = OccurrenceJobUtil.revertListKeyFromStoredXMLString(job.getResultRowKeysAsXml());
 
-			//recover file format
+			// recover file format
 			SaveFileFormat fileFormat = OccurrenceJobUtil.converFileFormat(job.getFileFormat());
 
-			//recover csv type
+			// recover csv type
 			OccurrencesSaveEnum csvType = OccurrenceJobUtil.convertCsvType(job.getCsvType());
 
-			String name = RESUBMIT + ": "+job.getName();
+			String name = RESUBMIT + ": " + job.getName();
 
-			JobOccurrencesModel jobModel = new JobOccurrencesModel("",name, job.getScientificName(), job.getDataSources(), fileFormat, csvType, job.isByDataSource());
+			JobOccurrencesModel jobModel = new JobOccurrencesModel("", name, job.getScientificName(),
					job.getDataSources(), fileFormat, csvType, job.isByDataSource());
 			jobModel.setTotalOccurrences(job.getExpectedOccurrence());
 
-			listResultJobModel.add(OccurrenceJobUtil.createOccurrenceJobOnServiceByKeys(jobModel, taxonomyService, occurrencesJobDao, keys, job.getDataSources(), fileFormat, csvType, jobModel.getTotalOccurrences()));
+			listResultJobModel.add(
					OccurrenceJobUtil.createOccurrenceJobOnServiceByKeys(jobModel, taxonomyService, occurrencesJobDao,
							keys, job.getDataSources(), fileFormat, csvType, jobModel.getTotalOccurrences()));
 
 		} catch (Exception e) {
 			logger.error("An error occurred in createOccurencesJobFromSelection", e);
@@ -1832,11 +2129,22 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 	}
 
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getListOccurrencesJob()
+	/**
+	 * Gets the list occurrences job.
+	 *
+	 * @return the list occurrences job
+	 * @throws SessionExpired the session expired
+	 * @throws Exception      the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * getListOccurrencesJob()
 	 */
 	@Override
-	public List getListOccurrencesJob() throws SessionExpired, Exception{
+	public List getListOccurrencesJob() throws SessionExpired, Exception {
 
 		logger.info("getListOccurencesJob... ");
 		List listJobs = new ArrayList();
@@ -1846,29 +2154,30 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 			Iterator iterator = occurrencesJobDao.getList().iterator();
 			SpeciesService taxonomyService = getSpeciesService();
 
-			while (iterator!=null && iterator.hasNext()) {
+			while (iterator != null && iterator.hasNext()) {
 				OccurrencesJob job = iterator.next();
 				CompleteJobStatus statusResponse = taxonomyService.getOccurrenceJobById(job.getId());
-				logger.info("get occurrence job "+job.getId()+ " from service");
+				logger.info("get occurrence job " + job.getId() + " from service");
 
-				try{
+				try {
 
-					if(statusResponse!=null){
-						logger.info("statusResponse of occurrence job is not null..." + job.getId());
-						JobOccurrencesModel jobOccurrenceModel = OccurrenceJobUtil.convertJob(job, statusResponse, occurrencesJobDao);
+					if (statusResponse != null) {
+						logger.info("statusResponse of occurrence job is not null..." + job.getId());
+						JobOccurrencesModel jobOccurrenceModel = OccurrenceJobUtil.convertJob(job, statusResponse,
								occurrencesJobDao);
 
-						if(jobOccurrenceModel!=null){
-							logger.info("added list jobOccurrenceId: "+jobOccurrenceModel.getJobIdentifier() + " status "+jobOccurrenceModel.getDownloadState());
-							listJobs.add(jobOccurrenceModel);
-						}
+						if (jobOccurrenceModel != null) {
+							logger.info("added list jobOccurrenceId: " + jobOccurrenceModel.getJobIdentifier()
+									+ " status " + jobOccurrenceModel.getDownloadState());
+							listJobs.add(jobOccurrenceModel);
+						}
+					} else {
+						logger.info("statusResponse of occurrence job is null..." + job.getId());
+						logger.info("deleting job ..." + job.getId());
+						OccurrenceJobUtil.deleteOccurrenceJobById(job.getId(), occurrencesJobDao);
 					}
-					else{
-						logger.info("statusResponse of occurrence job is null..." + job.getId());
-						logger.info("deleting job ..." + job.getId());
-						OccurrenceJobUtil.deleteOccurrenceJobById(job.getId(),occurrencesJobDao);
-					}
-				}catch (Exception e) {
+				} catch (Exception e) {
 					e.printStackTrace();
 					logger.error("Error on getListOccurencesJob ", e);
 					throw new Exception("Error on getListOccurencesJob", e);
@@ -1877,25 +2186,34 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 			}
 
 		} catch (Exception e) {
-			if(e instanceof SessionExpired){
+			if (e instanceof SessionExpired) {
 				logger.error("Session is expired");
 				throw new SessionExpired(e.getMessage());
 			}
-
-			logger.error("Error on get iterator "+e, e);
+			logger.error("Error on get iterator " + e, e);
 		}
 
 		return listJobs;
 	}
 
-
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getListGisLayerJob()
+	/**
+	 * Gets the list gis layer job.
+	 *
+	 * @return the list gis layer job
+	 * @throws SessionExpired the session expired
+	 * @throws Exception      the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * getListGisLayerJob()
 	 */
 	@Override
-	public List getListGisLayerJob() throws SessionExpired, Exception{
+	public List getListGisLayerJob() throws SessionExpired, Exception {
 
 		logger.info("getListGisLayerJob... ");
 		List listJobs = new ArrayList();
@@ -1906,61 +2224,82 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 			Iterator iterator = gisLayerJobDao.getList().iterator();
 			SpeciesService taxonomyService = getSpeciesService();
 
-			while (iterator!=null && iterator.hasNext()) {
+			while (iterator != null && iterator.hasNext()) {
 				GisLayerJob job = iterator.next();
 
-				if(job.getId()==null || job.getId().isEmpty()){
+				if (job.getId() == null || job.getId().isEmpty()) {
 					logger.warn("Gis job has an id null or empty, skipping");
-				}else{
+				} else {
 
-					try{
+					try {
 						CompleteJobStatus statusResponse = taxonomyService.getGisLayerByJobId(job.getId());
-						logger.info("get occurrence job "+job.getId()+ " from service");
+						logger.info("get occurrence job " + job.getId() + " from service");
 
-						if(statusResponse!=null){
-							logger.info("statusResponse of gis layer job is not null..." + job.getId());
-							JobGisLayerModel convertJob = GisLayerJobUtil.convertJob(job, statusResponse, gisLayerJobDao, taxonomyService, getASLSession());
+						if (statusResponse != null) {
+							logger.info("statusResponse of gis layer job is not null..." + job.getId());
+							JobGisLayerModel convertJob = GisLayerJobUtil.convertJob(job, statusResponse,
									gisLayerJobDao, taxonomyService, getASLSession());
 
-							if(convertJob!=null){
-								logger.info("added list jobOccurrenceId: "+convertJob.getJobIdentifier() + " status "+convertJob.getDownloadState());
-								listJobs.add(convertJob);
-							}
+							if (convertJob != null) {
+								logger.info("added list jobOccurrenceId: " + convertJob.getJobIdentifier() + " status "
+										+ convertJob.getDownloadState());
+								listJobs.add(convertJob);
+							}
+						} else {
+							logger.info("statusResponse of occurrence job is null..." + job.getId());
+							logger.info("deleting job ..." + job.getId());
+							GisLayerJobUtil.deleteGisLayerJobById(job.getId(), gisLayerJobDao);
 						}
-						else{
-							logger.info("statusResponse of occurrence job is null..." + job.getId());
-							logger.info("deleting job ..." + job.getId());
-							GisLayerJobUtil.deleteGisLayerJobById(job.getId(),gisLayerJobDao);
-						}
-					}catch (Exception e) {
+					} catch (Exception e) {
 
-						if (e instanceof InvalidJobIdException){
-							logger.info("The spd service unkwnowns GIS job id: "+job.getId() +" deleting it from db...");
-							GisLayerJobUtil.deleteGisLayerJobById(job.getId(),gisLayerJobDao);
-						}else{
+						if (e instanceof InvalidJobIdException) {
+							logger.info("The SPD service does not know GIS job id: " + job.getId()
+									+ ", deleting it from db...");
+							GisLayerJobUtil.deleteGisLayerJobById(job.getId(), gisLayerJobDao);
+						} else {
 							logger.error("Error on getListGisLayerJob ", e);
 							throw new Exception("Error on getListGisLayerJob", e);
-						}
+						}
 					}
 				}
 			}
 
 		} catch (Exception e) {
-			logger.error("Error on get iterator "+e, e);
+			logger.error("Error on get iterator " + e, e);
 		}
 
 		return listJobs;
 	}
 
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#saveOccurrenceJob(org.gcube.portlets.user.speciesdiscovery.shared.JobOccurrencesModel, java.lang.String, java.lang.String, java.lang.String, java.lang.String)
+	/**
+	 * Save occurrence job.
+	 *
+	 * @param jobModel            the job model
+	 * @param destinationFolderId the destination folder id
+	 * @param fileName            the file name
+	 * @param scientificName      the scientific name
+	 * @param dataSourceName      the data source name
+	 * @return true, if successful
+	 * @throws Exception the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * saveOccurrenceJob(org.gcube.portlets.user.speciesdiscovery.shared.
+	 * JobOccurrencesModel, java.lang.String, java.lang.String, java.lang.String,
+	 * java.lang.String)
 	 */
 	@Override
-	public boolean saveOccurrenceJob(JobOccurrencesModel jobModel, String destinationFolderId, String fileName, String scientificName, String dataSourceName) throws Exception {
-		logger.info("saveOccurrenceJob jobId: "+jobModel.getJobIdentifier()+" destinationFolderId: "+destinationFolderId+" fileName: "+fileName + " file format: "+jobModel.getFileFormat());
+	public boolean saveOccurrenceJob(JobOccurrencesModel jobModel, String destinationFolderId, String fileName,
+			String scientificName, String dataSourceName) throws Exception {
+		logger.info("saveOccurrenceJob jobId: " + jobModel.getJobIdentifier() + " destinationFolderId: "
+				+ destinationFolderId + " fileName: " + fileName + " file format: " + jobModel.getFileFormat());
 
 		try {
@@ -1968,27 +2307,30 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 			String mimeType = null;
 
 			switch (jobModel.getFileFormat()) {
-			case CSV: {
-				mimeType = "text/csv";
-			} break;
-			case DARWIN_CORE:{
-				mimeType = "application/xhtml+xml";
-			} break;
+			case CSV: {
+				mimeType = "text/csv";
+			}
+				break;
+			case DARWIN_CORE: {
+				mimeType = "application/xhtml+xml";
+			}
+				break;
 			}
 
 			InputStream inputStream = taxonomyService.getOccurrenceJobFileById(jobModel.getJobIdentifier());
 
-			if(inputStream!=null){
-				Workspace workspace = GetWorkspaceUtil.getWorskspace(getASLSession());
+			if (inputStream != null) {
+				// Workspace workspace = GetWorkspaceUtil.getWorskspace(getThreadLocalRequest(),
+				// getASLSession());
+				Workspace workspace = GetWorkspaceUtil.getWorkspace(getThreadLocalRequest(), getASLSession());
 				logger.info("input stream is not null");
 				// System.out.println("input stream is not null");
-				WorkspaceFolder folder = (WorkspaceFolder) workspace.getItem(destinationFolderId);
-				fileName = WorkspaceUtil.getUniqueName(fileName, folder);
-				folder.createExternalFileItem(fileName,"Occurrence job generated files", mimeType, inputStream);
+//				WorkspaceFolder folder = (WorkspaceFolder) workspace.getItem(destinationFolderId);
+//				folder.createExternalFileItem(fileName,"Occurrence job generated files", mimeType, inputStream);
+				workspace.uploadFile(destinationFolderId, inputStream, fileName, "Occurrence job generated files");
 				logger.info("Save file with occurrences was completed");
-			}
-			else{
+			} else {
 				logger.info("input stream is null");
 				return false;
 			}
@@ -1996,19 +2338,38 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 			return true;
 		} catch (Exception e) {
-			logger.error("An error occurred saving the generated file into the workspace",e);
+			logger.error("An error occurred saving the generated file into the workspace", e);
 			throw new SearchServiceException(e.getMessage());
 		}
 	}
 
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#saveOccurrenceJobError(org.gcube.portlets.user.speciesdiscovery.shared.JobOccurrencesModel, java.lang.String, java.lang.String, java.lang.String, java.lang.String)
+	/**
+	 * Save occurrence job error.
+	 *
+	 * @param jobModel            the job model
+	 * @param destinationFolderId the destination folder id
+	 * @param fileName            the file name
+	 * @param scientificName      the scientific name
+	 * @param dataSourceName      the data source name
+	 * @return true, if successful
+	 * @throws Exception the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * saveOccurrenceJobError(org.gcube.portlets.user.speciesdiscovery.shared.
+	 * JobOccurrencesModel, java.lang.String, java.lang.String, java.lang.String,
+	 * java.lang.String)
 	 */
 	@Override
-	public boolean saveOccurrenceJobError(JobOccurrencesModel jobModel, String destinationFolderId, String fileName, String scientificName, String dataSourceName) throws Exception {
+	public boolean saveOccurrenceJobError(JobOccurrencesModel jobModel, String destinationFolderId, String fileName,
+			String scientificName, String dataSourceName) throws Exception {
 
-		logger.info("saveOccurrenceJobError jobId: "+jobModel.getJobIdentifier()+" destinationFolderId: "+destinationFolderId+" fileName: "+fileName + " file format: "+jobModel.getFileFormat());
+		logger.info("saveOccurrenceJobError jobId: " + jobModel.getJobIdentifier() + " destinationFolderId: "
+				+ destinationFolderId + " fileName: " + fileName + " file format: " + jobModel.getFileFormat());
 
 		try {
@@ -2017,17 +2378,17 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 			InputStream inputStream = taxonomyService.getOccurrenceJobErrorFileById(jobModel.getJobIdentifier());
 
-			if(inputStream!=null){
-				Workspace workspace = GetWorkspaceUtil.getWorskspace(getASLSession());
+			if (inputStream != null) {
+				Workspace workspace = GetWorkspaceUtil.getWorkspace(getThreadLocalRequest(), getASLSession());
 				logger.info("input stream is not null");
 				// System.out.println("input stream is not null");
-				WorkspaceFolder folder = (WorkspaceFolder) workspace.getItem(destinationFolderId);
-				fileName = WorkspaceUtil.getUniqueName(fileName, folder);
-				folder.createExternalFileItem(fileName,"Report errors occurred on occurrence job", mimeType, inputStream);
+//				WorkspaceFolder folder = (WorkspaceFolder) workspace.getItem(destinationFolderId);
+//				folder.createExternalFileItem(fileName,"Report errors occurred on occurrence job", mimeType, inputStream);
+				workspace.uploadFile(destinationFolderId, inputStream, fileName,
+						"Report errors occurred on occurrence job");
 				logger.info("Save report file with errors occurred was completed");
-			}
-			else{
+			} else {
 				logger.info("input stream is null");
 				return false;
@@ -2037,30 +2398,41 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 			}
 
 			return true;
 
 		} catch (Exception e) {
-			logger.error("An error occurred saving the generated file into the workspace",e);
+			logger.error("An error occurred saving the generated file into the workspace", e);
 			throw new SearchServiceException(e.getMessage());
 		}
 	}
 
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#cancelOccurrenceJob(java.lang.String)
+	/**
+	 * Cancel occurrence job.
+	 *
+	 * @param jobIdentifier the job identifier
+	 * @return true, if successful
+	 * @throws Exception the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * cancelOccurrenceJob(java.lang.String)
 	 */
 	@Override
 	public boolean cancelOccurrenceJob(String jobIdentifier) throws Exception {
-		logger.info("cancelOccurrenceJob jobIdentifier: "+jobIdentifier);
-		try{
+		logger.info("cancelOccurrenceJob jobIdentifier: " + jobIdentifier);
+		try {
 			SpeciesService taxonomyService = getSpeciesService();
 
-			//REMOVE JOB ON THE SERVICE
+			// REMOVE JOB ON THE SERVICE
 			taxonomyService.cancelTaxonomyJobById(jobIdentifier);
 
 			OccurrenceJobPersistence occurrenceJobDAO = DaoSession.getOccurrencesJobDAO(getASLSession());
 
 			int count = OccurrenceJobUtil.deleteOccurrenceJobById(jobIdentifier, occurrenceJobDAO);
 
-			if(count==1)
+			if (count == 1)
 				return true;
 
-		}catch (Exception e) {
+		} catch (Exception e) {
 			logger.error("Error on cancel occurrence job ", e);
 			throw new Exception("Error on cancel occurrence job", e);
 		}
@@ -2068,8 +2440,18 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 		return false;
 	}
 
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#loadStructuresForResultRowClustering()
+	/**
+	 * Load structures for result row clustering.
+	 *
+	 * @return the cluster structures for result row
+	 * @throws Exception the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * loadStructuresForResultRowClustering()
 	 */
 	@Override
 	public ClusterStructuresForResultRow loadStructuresForResultRowClustering() throws Exception {
@@ -2079,59 +2461,86 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 		int totalRow = countSelectedRow;
 		ASLSession session = getASLSession();
-		HashMap hashCluster = SessionUtil.getCurrentClusterCommonNameForResultRow(session);
+		HashMap hashCluster = SessionUtil
+				.getCurrentClusterCommonNameForResultRow(session);
 
-		//Reset cluster for common name
-		if(hashCluster!=null)
+		// Reset cluster for common name
+		if (hashCluster != null)
 			SessionUtil.setCurrentClusterCommonNameForResultRow(session, null);
-
-		//LIMIT NUMBER OF ITEMS TO ConstantsSpeciesDiscovery.LIMIT_ITEM_DETAILS
-		if(countSelectedRow>ConstantsSpeciesDiscovery.RESULT_ROW_LIMIT_ITEM_DETAILS)
+		// LIMIT NUMBER OF ITEMS TO ConstantsSpeciesDiscovery.LIMIT_ITEM_DETAILS
+		if (countSelectedRow > ConstantsSpeciesDiscovery.RESULT_ROW_LIMIT_ITEM_DETAILS)
 			countSelectedRow = ConstantsSpeciesDiscovery.RESULT_ROW_LIMIT_ITEM_DETAILS;
 
 		ResultFilter filter = new ResultFilter(false, true, true);
 		SearchResult searchResults = getSearchResultRows(0, countSelectedRow, filter, true);
-		ClusterStructuresForResultRow cluster = new ClusterStructuresForResultRow(searchResults,isReduced, totalRow);
+		ClusterStructuresForResultRow cluster = new ClusterStructuresForResultRow(searchResults, isReduced, totalRow);
 
-		//TODO USE THREAD?
-		ManagerClusterCommonNameDataSourceForResultRow manager = new ManagerClusterCommonNameDataSourceForResultRow(cluster.getHashClusterScientificNameResultRowServiceID(), cluster.getHashResult());
+		// TODO USE THREAD?
+		ManagerClusterCommonNameDataSourceForResultRow manager = new ManagerClusterCommonNameDataSourceForResultRow(
+				cluster.getHashClusterScientificNameResultRowServiceID(), cluster.getHashResult());
 
-		SessionUtil.setCurrentClusterCommonNameForResultRow(getASLSession(), manager.getHashClusterCommonNameDataSource());
+		SessionUtil.setCurrentClusterCommonNameForResultRow(getASLSession(),
+				manager.getHashClusterCommonNameDataSource());
 
-		//THIS OBJECT IS NOT USED ON CLIENT
+		// THIS OBJECT IS NOT USED ON CLIENT
 		cluster.setHashResult(null);
 		return cluster;
 	}
 
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#loadClusterCommonNameForResultRowByScientificName(java.lang.String)
+	/**
+	 * Load cluster common name for result row by scientific name.
+	 *
+	 * @param scientificName the scientific name
+	 * @return the cluster common name data source for result row
+	 * @throws Exception the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * loadClusterCommonNameForResultRowByScientificName(java.lang.String)
 	 */
 	@Override
-	public ClusterCommonNameDataSourceForResultRow loadClusterCommonNameForResultRowByScientificName(String scientificName) throws Exception {
+	public ClusterCommonNameDataSourceForResultRow loadClusterCommonNameForResultRowByScientificName(
+			String scientificName) throws Exception {
 
-		logger.info("loadClusterCommonNameForResultRowByScientificName for scientific name: "+scientificName);
-		HashMap hashCluster = SessionUtil.getCurrentClusterCommonNameForResultRow(getASLSession());
+		logger.info("loadClusterCommonNameForResultRowByScientificName for scientific name: " + scientificName);
+		HashMap hashCluster = SessionUtil
+				.getCurrentClusterCommonNameForResultRow(getASLSession());
 
-		if(hashCluster==null){
-			logger.warn("Error in loadClusterCommonNameForResultRowByScientificName, hashCluster was not found in session");
+		if (hashCluster == null) {
+			logger.warn(
+					"Error in loadClusterCommonNameForResultRowByScientificName, hashCluster was not found in session");
 			return null;
 		}
 
 		ClusterCommonNameDataSourceForResultRow cluster = hashCluster.get(scientificName);
 
-		if(cluster==null){
+		if (cluster == null) {
 			logger.warn("Error in loadClusterCommonNameForResultRowByScientificName, cluster was not found in session");
 			return null;
 		}
 
 		return cluster;
 	}
 
-
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#loadDataSourceForResultRow(boolean, boolean)
+	/**
+	 * Load data source for result row.
+	 *
+	 * @param selected the selected
+	 * @param distinct the distinct
+	 * @return the list
+	 * @throws Exception the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * loadDataSourceForResultRow(boolean, boolean)
 	 */
 	@Override
 	public List loadDataSourceForResultRow(boolean selected, boolean distinct) throws Exception {
@@ -2147,10 +2556,11 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 		List listDN = new ArrayList();
 		try {
 			String selectString = "select ";
-			if(distinct)
-				selectString+= "distinct ";
+			if (distinct)
+				selectString += "distinct ";
 
-			Query query = em.createQuery(selectString + "t."+ResultRow.DATASOURCE_NAME+" from ResultRow t where t.selected = "+selected );
+			Query query = em.createQuery(selectString + "t." + ResultRow.DATASOURCE_NAME
+					+ " from ResultRow t where t.selected = " + selected);
 
 			listDN = query.getResultList();
 		} finally {
@@ -2162,7 +2572,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 			}
 
 		} catch (Exception e) {
-			logger.error("Error in loadDataSourceForResultRow " +e.getMessage(), e);
+			logger.error("Error in loadDataSourceForResultRow " + e.getMessage(), e);
 			throw new Exception("Error in loadDataSourceForResultRow " + e.getMessage(), e);
 		}
@@ -2170,9 +2580,18 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 	}
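`loadDataSourceForResultRow` above concatenates the `selected` flag straight into the JPQL string; binding it as a named parameter lets the persistence provider handle the value and keeps the query text constant. A minimal sketch assuming a `ResultRow` entity with `selected` and `dataSourceName` fields (matching the `ResultRow.DATASOURCE_NAME` constant used in the patch):

```java
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;

public class ParameterizedQuerySketch {

    /** Same query as the patch, but with the boolean bound as a named parameter. */
    static List<String> loadDataSourceNames(EntityManager em, boolean selected, boolean distinct) {
        String jpql = "select " + (distinct ? "distinct " : "")
                + "t.dataSourceName from ResultRow t where t.selected = :selected";
        TypedQuery<String> query = em.createQuery(jpql, String.class);
        query.setParameter("selected", selected); // bound, not concatenated
        return query.getResultList();
    }
}
```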
 
-
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#loadStructuresForTaxonomyClustering()
+	/**
+	 * Load structures for taxonomy clustering.
+	 *
+	 * @return the cluster structures for taxonomy row
+	 * @throws Exception the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * loadStructuresForTaxonomyClustering()
 	 */
 	@Override
 	public ClusterStructuresForTaxonomyRow loadStructuresForTaxonomyClustering() throws Exception {
@@ -2181,53 +2600,67 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 		boolean isReduced = false;
 		int totalRow = countSelectedRow;
-
 		ASLSession session = getASLSession();
-		HashMap hashCluster = SessionUtil.getCurrentClusterCommonNameForTaxonomyRow(session);
+		HashMap hashCluster = SessionUtil
+				.getCurrentClusterCommonNameForTaxonomyRow(session);
 
-		//Reset cluster for common name
-		if(hashCluster!=null)
+		// Reset cluster for common name
+		if (hashCluster != null)
 			SessionUtil.setCurrentClusterCommonNameForTaxonomyRow(session, null);
 
-
 		HashMap mapOldChildren = SessionUtil.getHashMapChildrenTaxonomyCache(session);
 
-		//Reset list children to last clustering
-		if(mapOldChildren!=null)
+		// Reset list children to last clustering
+		if (mapOldChildren != null)
 			SessionUtil.setHashMapChildrenTaxonomyCache(session, null);
 
 		HashMap mapSynonyms = SessionUtil.getHashMapSynonymsTaxonomyCache(session);
 
-		//Reset list synonyms to last clustering
-		if(mapSynonyms!=null)
+		// Reset list synonyms to last clustering
+		if (mapSynonyms != null)
 			SessionUtil.setHashMapSynonymsTaxonomyCache(session, null);
 
 		HashMap mapTaxonomyIds = SessionUtil.getHashMapTaxonomyByIdsCache(session);
 
-		//Reset list synonyms to last clustering
-		if(mapTaxonomyIds!=null)
+		// Reset list synonyms to last clustering
+		if (mapTaxonomyIds != null)
 			SessionUtil.setHashMapTaxonomyByIdsCache(session, null);
 
-		//LIMIT NUMBER OF ITEMS TO ConstantsSpeciesDiscovery.LIMIT_ITEM_DETAILS
-		if(countSelectedRow>ConstantsSpeciesDiscovery.TAXONOMY_LIMIT_ITEMS_DETAILS){
+		// LIMIT NUMBER OF ITEMS TO ConstantsSpeciesDiscovery.LIMIT_ITEM_DETAILS
+		if (countSelectedRow > ConstantsSpeciesDiscovery.TAXONOMY_LIMIT_ITEMS_DETAILS) {
 			countSelectedRow = ConstantsSpeciesDiscovery.TAXONOMY_LIMIT_ITEMS_DETAILS;
 			isReduced = true;
 		}
 
 		ResultFilter filter = new ResultFilter(false, true, true);
 		SearchResult searchResults = getSearchTaxonomyRow(0, countSelectedRow, filter, true);
-		ClusterStructuresForTaxonomyRow cluster = new ClusterStructuresForTaxonomyRow(searchResults,isReduced, totalRow);
+		ClusterStructuresForTaxonomyRow cluster = new ClusterStructuresForTaxonomyRow(searchResults, isReduced,
+				totalRow);
 
-		// TODO USE THREAD?
-		ManagerClusterCommonNameDataSourceForTaxonomyRow manager = new ManagerClusterCommonNameDataSourceForTaxonomyRow(cluster.getHashClusterScientificNameTaxonomyRowServiceID(), cluster.getHashResult());
+		// TODO USE THREAD?
+		ManagerClusterCommonNameDataSourceForTaxonomyRow manager = new ManagerClusterCommonNameDataSourceForTaxonomyRow(
+				cluster.getHashClusterScientificNameTaxonomyRowServiceID(), cluster.getHashResult());
 
 		SessionUtil.setCurrentClusterCommonNameForTaxonomyRow(session, manager.getHashClusterCommonNameDataSource());
 
-		//THIS OBJECT IS NOT USED ON CLIENT
+		// THIS OBJECT IS NOT USED ON CLIENT
 		cluster.setHashResult(null);
 		return cluster;
 	}
 
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#changeStatusOccurrenceJob(java.lang.String, org.gcube.portlets.user.speciesdiscovery.shared.DownloadState)
+	/**
+	 * Change status occurrence job.
+	 *
+	 * @param jobIdentifier the job identifier
+	 * @param state         the state
+	 * @return true, if successful
+	 * @throws Exception the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
+	 * @see
+	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
+	 * changeStatusOccurrenceJob(java.lang.String,
+	 * org.gcube.portlets.user.speciesdiscovery.shared.DownloadState)
 	 */
 	@Override
 	public boolean changeStatusOccurrenceJob(String jobIdentifier, DownloadState state) throws Exception {
@@ -2236,15 +2669,28 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 		int count = OccurrenceJobUtil.changeStatusOccurrenceJobById(jobIdentifier, state, occurrenceJobDAO);
 
-		if(count==1)
+		if (count == 1)
 			return true;
 
 		return false;
 	}
 
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#changeStatusTaxonomyJob(java.lang.String, org.gcube.portlets.user.speciesdiscovery.shared.DownloadState)
+	/**
+	 * Change status taxonomy job.
+	 *
+	 * @param jobIdentifier the job identifier
+	 * @param state         the state
+	 * @return true, if successful
+	 * @throws Exception the exception
+	 */
+	/*
+	 * (non-Javadoc)
+	 *
	 * @see
	 * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#
	 * changeStatusTaxonomyJob(java.lang.String,
	 * org.gcube.portlets.user.speciesdiscovery.shared.DownloadState)
 	 */
 	@Override
 	public boolean changeStatusTaxonomyJob(String jobIdentifier, DownloadState state) throws Exception {
@@ -2253,125 +2699,206 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
 
 		int count = TaxonomyJobUtil.changeStatusTaxonomyJobById(jobIdentifier, state, taxonomyJobDAO);
 
-		if(count==1)
+		if (count == 1)
 			return true;
 
 		return false;
 	}
 
-	/* (non-Javadoc)
-	 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#isAvailableTaxonomyJobReportError(java.lang.String)
+	/**
+	 * Checks if is available taxonomy job report error.
+ * + * @param jobIdentifier the job identifier + * @return true, if is available taxonomy job report error + * @throws Exception the exception + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * isAvailableTaxonomyJobReportError(java.lang.String) */ @Override public boolean isAvailableTaxonomyJobReportError(String jobIdentifier) throws Exception { - logger.info("isAvailableTaxonomyJobReportError jobId: "+jobIdentifier); + logger.info("isAvailableTaxonomyJobReportError jobId: " + jobIdentifier); try { SpeciesService taxonomyService = getSpeciesService(); return taxonomyService.isAvailableTaxonomyJobErrorFileById(jobIdentifier); } catch (Exception e) { - logger.error("An error occurred getting error (taxonomy) file for jobid "+jobIdentifier,e); + logger.error("An error occurred getting error (taxonomy) file for jobid " + jobIdentifier, e); return false; } } - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#isAvailableOccurrenceJobReportError(java.lang.String) + /** + * Checks if is available occurrence job report error. + * + * @param jobIdentifier the job identifier + * @return true, if is available occurrence job report error + * @throws Exception the exception + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * isAvailableOccurrenceJobReportError(java.lang.String) */ @Override public boolean isAvailableOccurrenceJobReportError(String jobIdentifier) throws Exception { - logger.info("isAvailableOccurrenceJobReportError jobId: "+jobIdentifier); + logger.info("isAvailableOccurrenceJobReportError jobId: " + jobIdentifier); try { SpeciesService taxonomyService = getSpeciesService(); return taxonomyService.isAvailableOccurrenceJobErrorFileById(jobIdentifier); } catch (Exception e) { - logger.error("An error occurred getting error (occurrence) file for jobid "+jobIdentifier,e); + logger.error("An error occurred getting error (occurrence) file for jobid " + jobIdentifier, e); return false; } } - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getLastQuery() + /** + * Gets the last query. + * + * @return the last query + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * getLastQuery() */ @Override - public String getLastQuery(){ + public String getLastQuery() { logger.info("getLastQuery..."); ASLSession session = getASLSession(); return SessionUtil.getCurrentQuery(session); } - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#loadClusterCommonNameForTaxonomyRowByScientificName(java.lang.String) + /** + * Load cluster common name for taxonomy row by scientific name. 
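+ * + * The cluster is read from the map cached in the ASL session by {@link #loadStructuresForTaxonomyClustering()}; if the cache or the requested entry is missing, a warning is logged and {@code null} is returned.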
+ * + * @param scientificName the scientific name + * @return the cluster common name data source for taxonomy row + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * loadClusterCommonNameForTaxonomyRowByScientificName(java.lang.String) */ @Override - public ClusterCommonNameDataSourceForTaxonomyRow loadClusterCommonNameForTaxonomyRowByScientificName(String scientificName) { + public ClusterCommonNameDataSourceForTaxonomyRow loadClusterCommonNameForTaxonomyRowByScientificName( + String scientificName) { - logger.info("loadClusterCommonNameForTaxonomyRowByScientificName for scientific name: "+scientificName); + logger.info("loadClusterCommonNameForTaxonomyRowByScientificName for scientific name: " + scientificName); - HashMap hashCluster = SessionUtil.getCurrentClusterCommonNameForTaxonomyRow(getASLSession()); + HashMap hashCluster = SessionUtil + .getCurrentClusterCommonNameForTaxonomyRow(getASLSession()); - if(hashCluster==null){ - logger.warn("Error in loadClusterCommonNameForTaxonomyRowByScientificName, hashCluster was not found in session"); + if (hashCluster == null) { + logger.warn( + "Error in loadClusterCommonNameForTaxonomyRowByScientificName, hashCluster was not found in session"); return null; } ClusterCommonNameDataSourceForTaxonomyRow cluster = hashCluster.get(scientificName); - if(cluster==null){ - logger.warn("Error in loadClusterCommonNameForTaxonomyRowByScientificName, cluster was not found in session"); + if (cluster == null) { + logger.warn( + "Error in loadClusterCommonNameForTaxonomyRowByScientificName, cluster was not found in session"); return null; } return cluster; } - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#saveGisLayerAsWsLink(org.gcube.portlets.user.speciesdiscovery.shared.JobGisLayerModel, java.lang.String, java.lang.String) + /** + * Save gis layer as ws link. + * + * @param jobGisLayer the job gis layer + * @param destinationFolderId the destination folder id + * @param fileName the file name + * @return true, if successful + * @throws Exception the exception + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * saveGisLayerAsWsLink(org.gcube.portlets.user.speciesdiscovery.shared. 
+ * JobGisLayerModel, java.lang.String, java.lang.String) */ @Override - public boolean saveGisLayerAsWsLink(JobGisLayerModel jobGisLayer, String destinationFolderId, String fileName) throws Exception{ + public boolean saveGisLayerAsWsLink(JobGisLayerModel jobGisLayer, String destinationFolderId, String fileName) + throws Exception { try { - Workspace workspace = GetWorkspaceUtil.getWorskspace(getASLSession()); - logger.info("input stream is not null"); - WorkspaceFolder folder = (WorkspaceFolder) workspace.getItem(destinationFolderId); - fileName = WorkspaceUtil.getUniqueName(fileName, folder); + Workspace workspace = GetWorkspaceUtil.getWorkspace(getThreadLocalRequest(), getASLSession()); - if(jobGisLayer.getGisViewerAppLink()==null){ - SpeciesService speciesService = getSpeciesService(); - CompleteJobStatus statusResponse = speciesService.getGisLayerByJobId(jobGisLayer.getJobIdentifier()); - GisLayerJobPersistence gisLayerJobDao = DaoSession.getGisLayersJobDAO(getASLSession()); +// Workspace workspace = GetWorkspaceUtil.getWorskspace(getThreadLocalRequest(), getASLSession()); +// logger.info("input stream is not null"); +// WorkspaceFolder folder = (WorkspaceFolder) workspace.getItem(destinationFolderId); - try{ - GisLayerJob gLJ = gisLayerJobDao.getItemByIdField(jobGisLayer.getJobIdentifier()); - if(gLJ!=null){ - jobGisLayer = GisLayerJobUtil.convertJob(gLJ, statusResponse, gisLayerJobDao, speciesService, getASLSession()); - } - }catch(Exception e){ - logger.error("Error on retrieving gis link from DB for job id: "+jobGisLayer.getJobIdentifier(), e); - throw new Exception(e.getMessage()); - } + if (jobGisLayer.getGisViewerAppLink() == null) { + SpeciesService speciesService = getSpeciesService(); + CompleteJobStatus statusResponse = speciesService.getGisLayerByJobId(jobGisLayer.getJobIdentifier()); + GisLayerJobPersistence gisLayerJobDao = DaoSession.getGisLayersJobDAO(getASLSession()); + + try { + GisLayerJob gLJ = gisLayerJobDao.getItemByIdField(jobGisLayer.getJobIdentifier()); + if (gLJ != null) { + jobGisLayer = GisLayerJobUtil.convertJob(gLJ, statusResponse, gisLayerJobDao, speciesService, + getASLSession()); + } + } catch (Exception e) { + logger.error("Error on retrieving gis link from DB for job id: " + jobGisLayer.getJobIdentifier(), + e); + throw new Exception(e.getMessage()); + } } - workspace.createExternalUrl(fileName, jobGisLayer.getLayerDescription() + "- Layer UUID: "+jobGisLayer.getLayerUUID(), jobGisLayer.getGisViewerAppLink(), destinationFolderId); - logger.info("Saving External link "+fileName +" completed"); + workspace.createURL(fileName, + jobGisLayer.getLayerDescription() + "- Layer UUID: " + jobGisLayer.getLayerUUID(), + jobGisLayer.getGisViewerAppLink(), destinationFolderId); + + // workspace.createExternalUrl(fileName, jobGisLayer.getLayerDescription() + "- + // Layer UUID: "+jobGisLayer.getLayerUUID(), jobGisLayer.getGisViewerAppLink(), + // destinationFolderId); + logger.info("Saving External link " + fileName + " completed"); return true; } catch (Exception e) { - logger.error("Sorry, an error occurred saving the file '"+fileName+"' in your Workspace, try again",e); + logger.error("Sorry, an error occurred saving the file '" + fileName + "' in your Workspace, try again", e); throw new Exception(e.getMessage()); } } - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#changeStatusGisLayerJob(java.lang.String, org.gcube.portlets.user.speciesdiscovery.shared.DownloadState) + /** + * Change status gis layer job. 
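+ * + * Persists the new state via {@link GisLayerJobUtil#changetStatusGisLayerJob} on the GIS-layer job DAO; returns {@code true} only when exactly one record is updated, {@code false} otherwise (including on error).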
+ * + * @param jobId the job id + * @param state the state + * @return true, if successful + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * changeStatusGisLayerJob(java.lang.String, + * org.gcube.portlets.user.speciesdiscovery.shared.DownloadState) */ @Override public boolean changeStatusGisLayerJob(String jobId, DownloadState state) { @@ -2381,33 +2908,43 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T gisLayerDAO = DaoSession.getGisLayersJobDAO(getASLSession()); int count = GisLayerJobUtil.changetStatusGisLayerJob(jobId, state, gisLayerDAO); - if(count==1) + if (count == 1) return true; return false; - } - catch (Exception e) { - logger.error("An error occured in changeStatusGisLayerJob for jobId: "+jobId); + } catch (Exception e) { + logger.error("An error occurred in changeStatusGisLayerJob for jobId: " + jobId); return false; } } - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#cancelGisLayerJob(java.lang.String) + /** + * Cancel gis layer job. + * + * @param jobIdentifier the job identifier + * @return true, if successful + * @throws Exception the exception + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * cancelGisLayerJob(java.lang.String) */ @Override public boolean cancelGisLayerJob(String jobIdentifier) throws Exception { - try{ + try { SpeciesService speciesService = getSpeciesService(); - //REMOVE JOB ON THE SERVICE + // REMOVE JOB ON THE SERVICE speciesService.cancelGisLayerByJobId(jobIdentifier); GisLayerJobPersistence gisLayerDao = DaoSession.getGisLayersJobDAO(getASLSession()); int count = GisLayerJobUtil.deleteGisLayerJobById(jobIdentifier, gisLayerDao); - if(count==1) + if (count == 1) return true; - }catch (Exception e) { + } catch (Exception e) { logger.error("Erroron deleting gis layer job ", e); throw new Exception("Sorry, an error occurred deleting gis layer job", e); } @@ -2415,13 +2952,24 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T return false; } - /* (non-Javadoc) - * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#resubmitGisLayerJob(java.lang.String) + /** + * Resubmit gis layer job.
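+ * + * Currently a stub: the method returns {@code null} and the previous resubmit implementation is kept commented out in the body.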
+ * + * @param jobIdentifier the job identifier + * @return the job gis layer model + * @throws Exception the exception + */ + /* + * (non-Javadoc) + * + * @see + * org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService# + * resubmitGisLayerJob(java.lang.String) */ @Override public JobGisLayerModel resubmitGisLayerJob(String jobIdentifier) throws Exception { - //TODO + // TODO return null; // logger.info("Resubmit gis layer job by id: " + jobIdentifier); @@ -2465,5 +3013,4 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T // return jobGisLayerModel; } - } diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/util/GetWorkspaceUtil.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/util/GetWorkspaceUtil.java index 894e917..0981c86 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/util/GetWorkspaceUtil.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/util/GetWorkspaceUtil.java @@ -3,33 +3,99 @@ */ package org.gcube.portlets.user.speciesdiscovery.server.util; +import javax.servlet.http.HttpServletRequest; + import org.apache.log4j.Logger; import org.gcube.application.framework.core.session.ASLSession; -import org.gcube.common.homelibrary.home.HomeLibrary; -import org.gcube.common.homelibrary.home.workspace.Workspace; +import org.gcube.common.portal.PortalContext; +import org.gcube.common.storagehubwrapper.server.StorageHubWrapper; +import org.gcube.common.storagehubwrapper.server.tohl.Workspace; + +import com.liferay.portal.service.UserLocalServiceUtil; /** - * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it - * @Oct 7, 2013 + * The Class GetWorkspaceUtil. * + * @author Francesco Mangiacrapa at ISTI-CNR francesco.mangiacrapa@isti.cnr.it + * + * Oct 27, 2021 */ public class GetWorkspaceUtil { protected static Logger logger = Logger.getLogger(GetWorkspaceUtil.class); - public static Workspace getWorskspace(ASLSession session) throws Exception { + /** + * Gets the workspace. + * + * @param request the request + * @param session the session + * @return the workspace + * @throws Exception the exception + */ + public static Workspace getWorkspace(final HttpServletRequest request, ASLSession session) throws Exception { - if(session==null) + if (session == null) throw new Exception("ASL session is null"); - if(session.getScope()==null) + if (session.getScope() == null) throw new Exception("Scope into ASL session is null"); String scope = session.getScope().toString(); - //logger.trace("Get workspace for scope "+scope); - //ScopeProvider.instance.set(scope); - //logger.trace("ScopeProvider instancied for scope "+scope); - logger.trace("retuning workspace for username "+session.getUsername()); - return HomeLibrary.getUserWorkspace(session.getUsername()); + // logger.trace("Get workspace for scope "+scope); + // ScopeProvider.instance.set(scope); + // logger.trace("ScopeProvider instancied for scope "+scope); + logger.trace("returning workspace for username " + session.getUsername()); + return getStorageHubWrapper(request, null, session.getUsername()).getWorkspace(); } + + /** + * Checks if is within portal. + * + * @return true if running within the portal, false if in development mode + */ + public static boolean isWithinPortal() { + try { + UserLocalServiceUtil.getService(); + return true; + } catch (Exception ex) { + logger.trace("Development Mode ON"); + return false; + } + } + + /** + * Gets the storage hub wrapper.
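+ *
+ * Minimal usage sketch (illustrative; {@code request} and {@code username} are
+ * supplied by the calling servlet):
+ *
+ * <pre>
+ * StorageHubWrapper wrapper = GetWorkspaceUtil.getStorageHubWrapper(request, null, username);
+ * Workspace workspace = wrapper.getWorkspace();
+ * </pre>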
+ * + * @param request the request + * @param scopeGroupId the scope group id. If scopeGroupId is null, the scope is + * read by using the request; otherwise, by using the scopeGroupId + * @param username the username + * @return the storage hub wrapper + * @throws Exception the exception + */ + public static StorageHubWrapper getStorageHubWrapper(final HttpServletRequest request, String scopeGroupId, + String username) throws Exception { + + if (username == null || username.isEmpty()) + throw new Exception("Session expired"); + + try { + String scope; + PortalContext pContext = PortalContext.getConfiguration(); + if (isWithinPortal() && scopeGroupId != null) { + scope = pContext.getCurrentScope(scopeGroupId); + logger.debug(scope + " has been retrieved by using the scopeGroupId=" + scopeGroupId); + } else + scope = pContext.getCurrentScope(request); + + logger.debug("Getting " + StorageHubWrapper.class.getSimpleName() + " for user: " + username + + " by using the scope: " + scope); + String token = pContext.getCurrentUserToken(scope, username); + return new StorageHubWrapper(scope, token, false, false, true); + } catch (Exception e) { + logger.error("Error getting the StorageHub wrapper", e); + throw new Exception("Error getting the StorageHub wrapper for userId: " + username); + } + } + }
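For reference, the workspace-access pattern introduced above, as a minimal sketch: GetWorkspaceUtil.getStorageHubWrapper, StorageHubWrapper#getWorkspace and Workspace#createURL(name, description, url, destinationFolderId) are the APIs used in this patch, while the sketch class, method and parameter values are illustrative.

import javax.servlet.http.HttpServletRequest;

import org.gcube.common.storagehubwrapper.server.StorageHubWrapper;
import org.gcube.common.storagehubwrapper.server.tohl.Workspace;
import org.gcube.portlets.user.speciesdiscovery.server.util.GetWorkspaceUtil;

public class WorkspaceLinkSketch {

	/**
	 * Saves an external URL in the caller's workspace, mirroring the calls made by
	 * saveGisLayerAsWsLink: resolve scope and token from the request, open the
	 * workspace, create the link.
	 */
	public static void saveLink(HttpServletRequest request, String username, String fileName, String description,
			String url, String destinationFolderId) throws Exception {

		// Scope and user-token resolution happens inside getStorageHubWrapper
		// (scopeGroupId null: the scope is read from the request)
		StorageHubWrapper wrapper = GetWorkspaceUtil.getStorageHubWrapper(request, null, username);
		Workspace workspace = wrapper.getWorkspace();

		// Same call used by saveGisLayerAsWsLink in this patch
		workspace.createURL(fileName, description, url, destinationFolderId);
	}
}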