From 76f12eebddf80dbf46f3c22d06c2d8e246c14eb0 Mon Sep 17 00:00:00 2001 From: Francesco Mangiacrapa Date: Wed, 11 Jan 2017 17:39:11 +0000 Subject: [PATCH] Porting to spd-client-library 4.0 Fixed bug #6156 Updated pom version to 3.9.0 git-svn-id: http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/portlets/user/speciesdiscovery@141521 82a268e6-3cf1-43bd-a215-b396298e98cf --- .classpath | 4 +- ...se.wst.common.project.facet.core.prefs.xml | 5 + ....eclipse.wst.common.project.facet.core.xml | 1 + distro/changelog.xml | 7 +- pom.xml | 15 +- .../client/SpeciesDiscovery.java | 2 +- .../server/GisInfoServiceImpl.java | 30 +- .../server/TaxonomyRowTable.java | 177 ++- .../server/TaxonomySearchServiceImpl.java | 540 ++++---- .../server/job/OccurrenceJobUtil.java | 333 +++-- .../server/job/TaxonomyJobUtil.java | 169 ++- .../server/persistence/ResultRowBuffer.java | 139 +-- .../server/persistence/TaxonomyRowBuffer.java | 77 +- .../dao/OccurrenceJobPersistence.java | 40 +- .../dao/OccurrenceRowPersistence.java | 42 +- .../persistence/dao/ResultRowPersistence.java | 56 +- .../persistence/dao/TaxonRowPersistence.java | 28 +- .../dao/TaxonomyJobPersistence.java | 42 +- .../dao/TaxonomyRowPersistence.java | 38 +- .../server/service/ResultItemConverter.java | 84 +- .../server/service/SpeciesService.java | 1104 +++++++++++------ .../server/session/Fetcher.java | 4 +- .../speciesdiscovery/shared/DataSource.java | 81 +- .../shared/DataSourceCapability.java | 54 +- .../shared/DataSourceModel.java | 68 +- .../shared/DataSourceRepositoryInfo.java | 92 +- .../speciesdiscovery/shared/ResultRow.java | 38 +- .../shared/SpeciesCapability.java | 25 +- .../speciesdiscovery/SpeciesDiscovery.gwt.xml | 3 +- src/main/webapp/WEB-INF/web.xml | 32 +- .../speciesdiscovery/client/DBTester.java | 167 ++- .../speciesdiscovery/client/ListPlugins.java | 42 +- .../speciesdiscovery/client/ServiceQuery.java | 69 +- 33 files changed, 2145 insertions(+), 1463 deletions(-) diff --git a/.classpath b/.classpath index 450f33e..5402ac0 100644 --- a/.classpath +++ b/.classpath @@ -1,7 +1,7 @@ - + @@ -31,5 +31,5 @@ - + diff --git a/.settings/org.eclipse.wst.common.project.facet.core.prefs.xml b/.settings/org.eclipse.wst.common.project.facet.core.prefs.xml index 7961132..1f54044 100644 --- a/.settings/org.eclipse.wst.common.project.facet.core.prefs.xml +++ b/.settings/org.eclipse.wst.common.project.facet.core.prefs.xml @@ -4,4 +4,9 @@ + + + + + diff --git a/.settings/org.eclipse.wst.common.project.facet.core.xml b/.settings/org.eclipse.wst.common.project.facet.core.xml index 22270b5..d8b6ee4 100644 --- a/.settings/org.eclipse.wst.common.project.facet.core.xml +++ b/.settings/org.eclipse.wst.common.project.facet.core.xml @@ -6,4 +6,5 @@ + diff --git a/distro/changelog.xml b/distro/changelog.xml index 26c086d..422e9f0 100644 --- a/distro/changelog.xml +++ b/distro/changelog.xml @@ -1,5 +1,10 @@ - + + [Feature #6313] SPD portlet upgrade: porting to spd-client-library 4.0.0 + + Removed Gis-viewer dependency org.gcube.portlets.user species-discovery war - 3.8.1-SNAPSHOT + 3.9.0-SNAPSHOT gCube Species Discovery gCube Species Discovery Portlet lets the users discover species information from the Species Service. 
@@ -31,7 +31,7 @@ - 2.6.1 + 2.7.0 distro 1.7 @@ -82,7 +82,7 @@ org.gcube.data.spd spd-client-library - [3.0.0-SNAPSHOT, 4.0.0-SNAPSHOT) + [4.0.0-SNAPSHOT, 5.0.0-SNAPSHOT) compile @@ -93,6 +93,13 @@ compile + + + + + + + org.gcube.common csv4j @@ -106,8 +113,6 @@ provided - - org.eclipse.persistence diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/client/SpeciesDiscovery.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/client/SpeciesDiscovery.java index 4b69f08..783043c 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/client/SpeciesDiscovery.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/client/SpeciesDiscovery.java @@ -1,7 +1,7 @@ package org.gcube.portlets.user.speciesdiscovery.client; -import org.gcube.portlets.user.speciesdiscovery.client.rpc.GISInfoServiceAsync; import org.gcube.portlets.user.speciesdiscovery.client.rpc.GISInfoService; +import org.gcube.portlets.user.speciesdiscovery.client.rpc.GISInfoServiceAsync; import org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService; import org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchServiceAsync; diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/GisInfoServiceImpl.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/GisInfoServiceImpl.java index 7f52e3a..ab1a3fd 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/GisInfoServiceImpl.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/GisInfoServiceImpl.java @@ -1,5 +1,10 @@ package org.gcube.portlets.user.speciesdiscovery.server; +import it.geosolutions.geonetwork.util.GNSearchRequest; +import it.geosolutions.geonetwork.util.GNSearchRequest.Config; +import it.geosolutions.geonetwork.util.GNSearchRequest.Param; +import it.geosolutions.geonetwork.util.GNSearchResponse; + import java.util.HashMap; import java.util.Map; @@ -16,20 +21,15 @@ import org.gcube.spatial.data.geonetwork.LoginLevel; import com.google.gwt.user.server.rpc.RemoteServiceServlet; -import it.geosolutions.geonetwork.util.GNSearchRequest; -import it.geosolutions.geonetwork.util.GNSearchResponse; -import it.geosolutions.geonetwork.util.GNSearchRequest.Config; -import it.geosolutions.geonetwork.util.GNSearchRequest.Param; - public class GisInfoServiceImpl extends RemoteServiceServlet implements GISInfoService{ /** - * + * */ private static final long serialVersionUID = -1137730151475571288L; - + protected static Logger logger = Logger.getLogger(GisInfoServiceImpl.class); - + @Override public String getGisLinkByLayerName(String layername) throws Exception { try{ @@ -46,19 +46,17 @@ public class GisInfoServiceImpl extends RemoteServiceServlet implements GISInfoS } } - + private static String getPublicLink(String uuid) throws UriResolverMapException, IllegalArgumentException{ UriResolverManager resolver = new UriResolverManager("GIS"); - - - + Map params = new HashMap(); params.put("gis-UUID", uuid); params.put("scope", ScopeProvider.instance.get()); return resolver.getLink(params, true); } - - + + private static String getUUIDbyGSId(String gsID) throws Exception{ GeoNetworkReader reader=GeoNetwork.get(); reader.login(LoginLevel.ADMIN); @@ -67,8 +65,8 @@ public class GisInfoServiceImpl extends RemoteServiceServlet implements GISInfoS GNSearchRequest req=new GNSearchRequest(); req.addParam(Param.any, gsID); req.addConfig(Config.similarity, "1"); - GNSearchResponse resp=reader.query(req); + GNSearchResponse resp=reader.query(req); return 
resp.getMetadata(0).getUUID(); } - + } diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/TaxonomyRowTable.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/TaxonomyRowTable.java index ba50aa8..ea24db0 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/TaxonomyRowTable.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/TaxonomyRowTable.java @@ -27,7 +27,6 @@ import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.TaxonomyR import org.gcube.portlets.user.speciesdiscovery.server.service.SpeciesService; import org.gcube.portlets.user.speciesdiscovery.server.service.TaxonomyItemConverter; import org.gcube.portlets.user.speciesdiscovery.server.stream.CloseableIterator; -import org.gcube.portlets.user.speciesdiscovery.server.stream.StreamExtend; import org.gcube.portlets.user.speciesdiscovery.shared.CommonName; import org.gcube.portlets.user.speciesdiscovery.shared.ItemParameter; import org.gcube.portlets.user.speciesdiscovery.shared.SearchServiceException; @@ -35,7 +34,7 @@ import org.gcube.portlets.user.speciesdiscovery.shared.TaxonomyRow; /** - * + * * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it * @Sep 6, 2013 * @@ -43,37 +42,37 @@ import org.gcube.portlets.user.speciesdiscovery.shared.TaxonomyRow; public class TaxonomyRowTable extends HttpServlet { /** - * + * */ protected static final String UTF_8 = "UTF-8"; /** - * + * */ private static final long serialVersionUID = 9157876104914505028L; /** - * + * */ public static final String TEXT_HTML = "text/html; charset=UTF-8"; - + public static final String DOCTYPE = ""; public static final String HTML = ""; public static final String HTMLCLOSE = ""; public static final String HEAD = ""; public static final String HEADCLOSE = ""; public static final String TITLE = ""; - public static final String TITLECLOSE = ""; + public static final String TITLECLOSE = ""; public static final String BODY = ""; public static final String BODYCLOSE = ""; - + public static String headWithTitle(String title) { // return (DOCTYPE + "\n" + HTML+"\n" + HEAD+TITLE + title + TITLECLOSE+HEADCLOSE+"\n"); - return (DOCTYPE + "\n" + HTML+ "\n"+HEAD + "\n"+TITLE + title + TITLECLOSE+"\n"+HEADCLOSE+"\n"); - + return DOCTYPE + "\n" + HTML+ "\n"+HEAD + "\n"+TITLE + title + TITLECLOSE+"\n"+HEADCLOSE+"\n"; + } - - + + protected Logger logger = Logger.getLogger(TaxonomyRowTable.class); @@ -89,96 +88,92 @@ public class TaxonomyRowTable extends HttpServlet { protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { retrieveTaxonomyRowAsHtmlTable(req, resp); } - + @Override protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { retrieveTaxonomyRowAsHtmlTable(req, resp); } - - + + /** * @param req * @param resp - * @throws IOException + * @throws IOException */ private void retrieveTaxonomyRowAsHtmlTable(HttpServletRequest req, HttpServletResponse resp) { - + String taxonomyServiceRowID = ""; // resp.setCharacterEncoding(UTF_8); - resp.setContentType(TEXT_HTML); + resp.setContentType(TEXT_HTML); TaxonomyRow row = null; PrintWriter out = null; - + try { out = new PrintWriter(new OutputStreamWriter(resp.getOutputStream(), UTF_8), true); out.println(headWithTitle("")); out.println(BODY); // get parameters taxonomyServiceRowID = req.getParameter("oid"); - + ASLSession aslSession = getASLSession(req); - + //IS VALID RR ID? 
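// Usage sketch, kept as comments since this sits inside retrieveTaxonomyRowAsHtmlTable:
// the servlet reads the "oid" request parameter below, so a GWT client would call it as
//   String url = GWT.getModuleBaseURL() + "TaxonomyRowTable?oid=" + serviceRowId;
// "TaxonomyRowTable" is a hypothetical web.xml mapping (not shown in this patch);
// serviceRowId is a TaxonomyRow service id, and the response is the standalone
// HTML page this method assembles.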
if(taxonomyServiceRowID==null || taxonomyServiceRowID.isEmpty()){ out.println(error("Parameter oid not found")); - }else{ + }else{ logger.trace("doGet found oid "+taxonomyServiceRowID); - + try { TaxonomyRowPersistence persistence = SessionUtil.getCurrentEJBTaxonomyItem(aslSession); - + if(persistence==null){ - + logger.trace("TaxonomyRowPersistence not found in session, creating it"); persistence = new TaxonomyRowPersistence(DaoSession.getEntityManagerFactory(aslSession)); } - + CriteriaBuilder queryBuilder = persistence.getCriteriaBuilder(); CriteriaQuery cq = queryBuilder.createQuery(); Predicate pr1 = queryBuilder.equal(persistence.rootFrom(cq).get(TaxonomyRow.SERVICE_ID_FIELD), taxonomyServiceRowID); cq.where(pr1); - + Iterator iterator = persistence.executeCriteriaQuery(cq).iterator(); while(iterator.hasNext()){ row = iterator.next(); break; } - + if(row==null){ - + logger.trace("Taxonomy Row with id "+taxonomyServiceRowID+" not found in database"); - + row = findingTaxonomyInCaches(taxonomyServiceRowID, getASLSession(req)); - + if(row!=null){ logger.trace("Taxonomy Row with id "+taxonomyServiceRowID+" found into hash map caches"); //ROW was found into database or by service logger.trace("converting taxonomy row to html table"); String table = getHTMLTableForTaxonomy(row); - + logger.trace("table for ResultRowPersistence is empty? "+table.isEmpty()); out.println(table); out.close(); return; }else logger.trace("Taxonomy Row with id "+taxonomyServiceRowID+" doesn't found into hash map caches"); - - + + logger.trace("Tentative recovering taxonomy with id "+taxonomyServiceRowID+" from service"); - SpeciesService service = getSpeciesService(req); - - StreamExtend streamIds = new StreamExtend(Arrays.asList(taxonomyServiceRowID).iterator()); - - CloseableIterator streamIterator = service.retrieveTaxonomyById(streamIds); - + //StreamExtend streamIds = new StreamExtend(Arrays.asList(taxonomyServiceRowID).iterator()); + CloseableIterator streamIterator = service.retrieveTaxonomyById(Arrays.asList(taxonomyServiceRowID)); TaxonomyItemConverter taxonomyItemConverter = new TaxonomyItemConverter(aslSession); - + // int i = 1; while (streamIterator.hasNext()) { TaxonomyItem tax = streamIterator.next(); @@ -189,7 +184,7 @@ public class TaxonomyRowTable extends HttpServlet { } streamIterator.close(); - + if(row==null){ out.println(error("Sorry, taxonomy with "+taxonomyServiceRowID+" doesn't found in service")); out.close(); @@ -203,9 +198,9 @@ public class TaxonomyRowTable extends HttpServlet { logger.trace("table for ResultRowPersistence is empty? 
"+table.isEmpty()); out.println(table); - + } catch (Exception e) { - + logger.error("Error in TaxonomyRowTable servlet ",e); throw new Exception("Error in ResultRowTable servlet ", e); } @@ -213,10 +208,10 @@ public class TaxonomyRowTable extends HttpServlet { out.println(BODYCLOSE); out.println(HTMLCLOSE); out.close(); //CLOSE STREAM - + }catch (Exception e) { String error = "Sorry an error occurred when creating the table for taxonomy row with id: "+taxonomyServiceRowID; - + if(out==null){ try{ out = resp.getWriter(); @@ -229,56 +224,52 @@ public class TaxonomyRowTable extends HttpServlet { out.println(HTMLCLOSE); out.close(); //CLOSE STREAM } - + } - - - - protected TaxonomyRow findingTaxonomyInCaches(String taxonomyServiceRowID, ASLSession session){ - + HashMap hashChildrenTaxa = SessionUtil.getHashMapChildrenTaxonomyCache(session); - - + + if(hashChildrenTaxa!=null){ logger.trace("Finding Taxonomy Row with id "+taxonomyServiceRowID+" into hash map children"); TaxonomyRow row = hashChildrenTaxa.get(taxonomyServiceRowID); - + if(row!=null){ logger.trace("Taxonomy Row with id "+taxonomyServiceRowID+" found into hash map children"); return row; } } - + hashChildrenTaxa = SessionUtil.getHashMapSynonymsTaxonomyCache(session); - + if(hashChildrenTaxa!=null){ logger.trace("Finding Taxonomy Row with id "+taxonomyServiceRowID+" into hash map synonyms"); TaxonomyRow row = hashChildrenTaxa.get(taxonomyServiceRowID); - + if(row!=null){ logger.trace("Taxonomy Row with id "+taxonomyServiceRowID+" found into hash map synonyms"); return row; } } - + hashChildrenTaxa = SessionUtil.getHashMapTaxonomyByIdsCache(session); - + if(hashChildrenTaxa!=null){ logger.trace("Finding Taxonomy Row with id "+taxonomyServiceRowID+" into hash map ByIds"); TaxonomyRow row = hashChildrenTaxa.get(taxonomyServiceRowID); - + if(row!=null){ logger.trace("Taxonomy Row with id "+taxonomyServiceRowID+" found into hash map ByIds"); return row; } } - + return null; } - - + + protected SpeciesService getSpeciesService(HttpServletRequest req) throws SearchServiceException { @@ -292,83 +283,83 @@ public class TaxonomyRowTable extends HttpServlet { throw new SearchServiceException("contacting the species service."); } } - + public String error(String message){ String errorPage = ""; - errorPage +=("

Error: "+message+"

"); + errorPage +="

Error: "+message+"

"; return errorPage; } - + public String getHTMLTableForTaxonomy(TaxonomyRow row){ - + //Init values String dataProviderName = ""; String dataSetCitation= ""; String matchingAccordionTo= ""; String rank= ""; // String matchingCredits= ""; - + String statusName = ""; String dateModified = ""; String statusRemark = ""; - + String scientificNameAuthorship = ""; String lsid = ""; String credits = ""; - + String propertiesHtml = ""; if(row.getDataProviderName()!=null) dataProviderName = row.getDataProviderName(); if(row.getStatusName()!=null) statusName = row.getStatusName(); if(row.getDateModified()!=null) dateModified = row.getDateModified(); - + if(row.getDataSetCitation()!=null) dataSetCitation = row.getDataSetCitation(); if(row.getRank()!=null) rank = row.getRank(); if(row.getAccordingTo()!=null) matchingAccordionTo = row.getAccordingTo(); - + if(row.getStatusRemarks()!=null) statusRemark = row.getStatusRemarks(); - + if(row.getScientificNameAuthorship()!=null) scientificNameAuthorship = row.getScientificNameAuthorship(); - + if(row.getLsid()!=null) lsid = row.getLsid(); - + if(row.getCredits()!=null) credits = row.getCredits(); - - + + //BUILD TABLES PROPERTIES if(row.getProperties()!=null){ - + List hashProperties = row.getProperties(); Collections.sort(hashProperties, ItemParameter.COMPARATOR); - + propertiesHtml+=""; - + for (ItemParameter itemParameter : hashProperties) { - + propertiesHtml+= "" + " " + " " + ""; } - + propertiesHtml+="
"+itemParameter.getKey()+""+itemParameter.getValue()+"
"; } - + //Create list common name String commonNames = ""; - + if(row.getCommonNames()!=null){ for (CommonName comName : row.getCommonNames()) { commonNames+= ""+comName.getName()+"" +" ("+comName.getLanguage()+") - "; } } - + String table = ""; - + // if(isNewTab) table+= "" + @@ -385,8 +376,8 @@ public class TaxonomyRowTable extends HttpServlet { // " " + // " " + // "" + - - + + table +="" + " " + " " + @@ -415,19 +406,19 @@ public class TaxonomyRowTable extends HttpServlet { " " + " " + "" + - + "" + " " + " " + "" + "
"+TaxonomyGridField.STATUS_REMARKS.getName()+""+statusRemark+"
"+TaxonomyGridField.DATASOURCE.getName()+""+dataProviderName+""+TaxonomyGridField.CREDITS.getName()+""+credits+"
"+TaxonomyGridField.PROPERTIES.getName()+""+propertiesHtml+"
"; - + //DEBUG // System.out.println("Table: "+table); - + return table; - + } } \ No newline at end of file diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/TaxonomySearchServiceImpl.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/TaxonomySearchServiceImpl.java index 99b524c..bbfed76 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/TaxonomySearchServiceImpl.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/TaxonomySearchServiceImpl.java @@ -31,7 +31,7 @@ import org.gcube.common.homelibrary.util.WorkspaceUtil; import org.gcube.data.spd.model.products.OccurrencePoint; import org.gcube.data.spd.model.products.ResultElement; import org.gcube.data.spd.model.products.TaxonomyItem; -import org.gcube.data.spd.stubs.types.Status; +import org.gcube.data.spd.model.service.types.CompleteJobStatus; import org.gcube.portlets.user.speciesdiscovery.client.ConstantsSpeciesDiscovery; import org.gcube.portlets.user.speciesdiscovery.client.model.ClassificationModel; import org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService; @@ -59,7 +59,6 @@ import org.gcube.portlets.user.speciesdiscovery.server.stream.CloseableIterator; import org.gcube.portlets.user.speciesdiscovery.server.stream.IteratorPointInfo; import org.gcube.portlets.user.speciesdiscovery.server.stream.OccurenceCSVConverter; import org.gcube.portlets.user.speciesdiscovery.server.stream.OccurenceCSVConverterOpenModeller; -import org.gcube.portlets.user.speciesdiscovery.server.stream.StreamExtend; import org.gcube.portlets.user.speciesdiscovery.server.stream.aggregation.FieldAggregator; import org.gcube.portlets.user.speciesdiscovery.server.stream.aggregation.TaxonomyClassificationAggregator; import org.gcube.portlets.user.speciesdiscovery.server.util.DateUtil; @@ -106,14 +105,14 @@ import com.google.gwt.user.server.rpc.RemoteServiceServlet; /** * The server side implementation of the RPC service. 
- * @author "Federico De Faveri defaveri@isti.cnr.it" - + * @author "Federico De Faveri defaveri@isti.cnr.it" - * @author "Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it" */ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements TaxonomySearchService { protected static final String SAVE_CHILDREN_OF = "Save children of "; protected static final String RESUBMIT = "Resubmit"; - + private static final long serialVersionUID = -287193068445844326L; protected static final long MAX_BUFFERING_ELEMENTS = 1000; @@ -182,7 +181,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T @Override public void searchByScientificName(String searchTerm, SearchFilters searchFilters) throws SearchServiceException { logger.info("searchByScientificName searchTerm: "+searchTerm+" searchFilters: "+searchFilters); - + stopSearch(); search(searchTerm, SearchType.BY_SCIENTIFIC_NAME, searchFilters); } @@ -200,7 +199,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T protected SearchResultType search(String searchTerm, SearchType searchType, SearchFilters searchFilters) throws SearchServiceException { - + ASLSession aslSession = getASLSession(); try { deleteAllRowIntoDaoTable(); //RESET TABLE @@ -229,7 +228,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T stopSearch(); ASLSession aslSession = getASLSession(); - + try { deleteAllRowIntoDaoTable(); //RESET TABLE SpeciesService taxonomyService = getSpeciesService(); @@ -237,7 +236,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T SessionUtil.setCurrentQuery(aslSession, query); logger.info("service return iterator searched..."); - + SearchByQueryParameter queryParameters = QueryUtil.getQueryResultType(query); CloseableIterator output = IteratorChainBuilder.buildChain(input, queryParameters.getSearchResultType(), aslSession); FetchingSessionUtil.createFetchingSession(output, queryParameters.getSearchResultType(), aslSession); @@ -275,10 +274,10 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T if(daoTaxon!=null) daoTaxon.removeAll(); - + if(daoTaxonomyRow!=null) daoTaxonomyRow.removeAll(); - + logger.info("delete all row into Dao's - completed"); } catch (Exception e) { logger.error("Error in delete all row"); @@ -297,27 +296,27 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T FetchingSession searchSession = (FetchingSession) getSearchSession(); ArrayList chunk = new ArrayList(); - + try { List data = new ArrayList(); if (onlySelected) { SelectableFetchingBuffer buffer = (SelectableFetchingBuffer) searchSession.getBuffer(); data = buffer.getSelected(); - + int end = Math.min(start+limit, data.size()); start = Math.min(start, end); logger.info("chunk selected data bounds [start: "+start+" end: " + end+"]"); data = data.subList(start, end); - - } else if (activeFiltersObject == null || (!activeFiltersObject.isActiveFilters())) { + + } else if (activeFiltersObject == null || !activeFiltersObject.isActiveFilters()) { if(limit>0){ data = searchSession.getBuffer().getList(start,limit); } } else { FilterableFetchingBuffer buffer = (FilterableFetchingBuffer) searchSession.getBuffer(); data = buffer.getFilteredList(activeFiltersObject); - + int end = Math.min(start+limit, data.size()); start = Math.min(start, end); @@ -326,20 +325,20 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T } logger.info("Fetching data from 
search session buffer, size: "+data.size()); - + for (ResultRow resultRow : data) { //return common names? if(activeFiltersObject == null || !activeFiltersObject.isLoadCommonName() || !resultRow.existsCommonName()){ resultRow.setCommonNames(null); } - + //return properties? if(activeFiltersObject == null || !resultRow.existsProperties() || !activeFiltersObject.isLoadAllProperties()){ resultRow.setProperties(null); } chunk.add(resultRow); - logger.info("getSearchResultRows return on client result item with id: " +resultRow.getId() + " service id: "+resultRow.getServiceId()); + logger.debug("getSearchResultRows returning on client result item with id: " +resultRow.getId() + " service id: "+resultRow.getServiceId()); } Long endTime = System.currentTimeMillis() - startTime; @@ -349,18 +348,18 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T logger.error("Error in getSearchResultRows ", e); throw new SearchServiceException(e.getMessage()); } - + return new SearchResult(chunk); } - + private void printProperties(List properties){ - + for (ItemParameter itemParameter : properties) { System.out.println("Property "+itemParameter); } - + } - + @SuppressWarnings("unchecked") @Override public SearchResult getSearchTaxonomyRow(int start, int limit, ResultFilter activeFiltersObject, boolean onlySelected) throws SearchServiceException { @@ -379,17 +378,17 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T logger.info("getting only selected data"); SelectableFetchingBuffer buffer = (SelectableFetchingBuffer) searchSession.getBuffer(); data = buffer.getSelected(); - + int end = Math.min(start+limit, data.size()); start = Math.min(start, end); logger.info("chunk selected data bounds [start: "+start+" end: " + end+"]"); - + data = data.subList(start, end); - - } else if (activeFiltersObject == null || (!activeFiltersObject.isActiveFilters())) { + + } else if (activeFiltersObject == null || !activeFiltersObject.isActiveFilters()) { logger.info("getting all available data"); - + if(limit>0){ Map filterAndMap = new HashMap(); filterAndMap.put(TaxonomyRow.IS_PARENT, "false"); @@ -399,12 +398,12 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T logger.info("getting filtered data"); FilterableFetchingBuffer buffer = (FilterableFetchingBuffer) searchSession.getBuffer(); data = buffer.getFilteredList(activeFiltersObject); - + int end = Math.min(start+limit, data.size()); start = Math.min(start, end); logger.info("chunk filtered data bounds [start: "+start+" end: " + end+"]"); - + data = data.subList(start, end); } @@ -443,9 +442,9 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T TaxonomyRow taxonomyRow = null; logger.info("loadTaxonomyParentByParentId: "+ parentID); - + try { - + TaxonomyRowPersistence dao = DaoSession.getTaxonomyDAO(getASLSession()); CriteriaBuilder queryBuilder = dao.getCriteriaBuilder(); CriteriaQuery cq = queryBuilder.createQuery(); @@ -457,7 +456,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T if(iterator!=null && iterator.hasNext()){ taxonomyRow = iterator.next(); } - + } catch (Exception e) { logger.error("Error in loadTaxonomyParentsByRowId", e); throw new Exception(e); @@ -476,7 +475,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T // if (taxonomy == null) return null; // // DaoSession.getTaxonomyDAO(getASLSession()).refresh(taxonomy.getParent()); // 
taxonomy.setParent(setParentListOfTaxonomy(taxonomy.getParent())); -// return taxonomy; +// return taxonomy; // } /** @@ -486,7 +485,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T public HashMap getFilterCounterById(GridField field) throws Exception { logger.info("Filter Counter for: "+ field); try { - FetchingSession searchSession = (FetchingSession) getSearchSession(); + FetchingSession searchSession = getSearchSession(); FieldAggregator aggregator = (FieldAggregator) searchSession.getAggregator(FieldAggregator.getFieldAggregatorName(field)); if (aggregator!=null) return aggregator.getAggregation(); else return new HashMap(); @@ -505,7 +504,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T MainTaxonomicRankEnum rank = MainTaxonomicRankEnum.valueOfLabel(rankLabel); if (rank!=null) { - FetchingSession searchSession = (FetchingSession) getSearchSession(); + FetchingSession searchSession = getSearchSession(); TaxonomyClassificationAggregator classificationAggregator = (TaxonomyClassificationAggregator) searchSession.getAggregator(TaxonomyClassificationAggregator.NAME); return classificationAggregator.getAggregation().get(rank); } else return new HashMap(); @@ -541,8 +540,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T if(bufferSize>=MAX_BUFFERING_ELEMENTS){ logger.info("getSearchStatus MAX_BUFFERING_ELEMENTS is reached - stop search"); stopSearchWithoutRemove(); - - + //CALCULATE NEW BUFFER SIZE AFTER FETCHING IS CLOSED try { // int sleepingTime = 500; @@ -550,7 +548,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T // Thread.sleep(sleepingTime); //SLEEPING 0,5 sec, for translating objects that are inserting in buffer and calculate new size of buffer // logger.info("sleep termined - search status alive"); bufferSize = isActiveFilterOnResult?((FilterableFetchingBuffer) searchSession.getBuffer()).getFilteredListSize():searchSession.getBufferSize(); - + } catch (SQLException e) { logger.info("getSearchStatus bufferSize error : "+e.getMessage(), e); throw new SearchServiceException("An error occured on server in searching status, please retry"); @@ -558,7 +556,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T logger.info("getSearchStatus bufferSize error : "+e.getMessage(), e); throw new SearchServiceException("An error occured on server in searching status, please retry"); } - + status.setResultEOF(true); status.setSize(bufferSize); status.setIsMaxSize(true); @@ -665,7 +663,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T int size = 0; try { - if (activeFiltersObject == null || (!activeFiltersObject.isActiveFilters())) { + if (activeFiltersObject == null || !activeFiltersObject.isActiveFilters()) { SelectableFetchingBuffer buffer = (SelectableFetchingBuffer) searchSession.getBuffer(); buffer.updateAllSelection(selection); size = buffer.size(); @@ -673,16 +671,16 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T @SuppressWarnings("unchecked") FilterableFetchingBuffer buffer = (FilterableFetchingBuffer) searchSession.getBuffer(); data = buffer.getFilteredList(activeFiltersObject); - + if(data!=null){ - + List ids = new ArrayList(); for (FetchingElement fetchingElement : data){ ids.add(fetchingElement.getId()+""); } SelectableFetchingBuffer bufferCompleted = (SelectableFetchingBuffer) searchSession.getBuffer(); 
bufferCompleted.updateAllSelectionByIds(selection, ids); - + size = data.size(); } } @@ -691,14 +689,14 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T logger.error("An error occurred in updateRowSelections", e); throw new SearchServiceException(e.getMessage()); } - + return Integer.valueOf(size); } /** * {@inheritDoc} - * @throws SearchServiceException + * @throws SearchServiceException */ @Override public int countOfSelectedRow() throws SearchServiceException{ @@ -926,9 +924,9 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T FetchingSession searchSession = (FetchingSession) getSearchSession(); Collection selectedRows; List keys = null; - + try { - + selectedRows = ((SelectableFetchingBuffer) searchSession.getBuffer()).getSelected(); logger.info("found "+selectedRows.size()+" selected rows"); keys = new ArrayList(selectedRows.size()); @@ -958,7 +956,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T FetchingSession searchSession = (FetchingSession) getSearchSession(); Collection selectedRows; List listId = null; - + try { selectedRows = ((SelectableFetchingBuffer) searchSession.getBuffer()).getSelected(); @@ -986,7 +984,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T protected Map getSelectedTaxonomyIdAndDataSource() throws SearchServiceException { FetchingSession searchSession = (FetchingSession) getSearchSession(); - HashMap hashIdTaxonDataSource = null; + HashMap hashIdTaxonDataSource = null; Collection selectedRows; try { @@ -1071,7 +1069,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T } catch (DatabaseServiceException e) { throw new SearchServiceException("Sorry, an error has occurred on the server while "+e.getMessage()); - + } catch (Exception e) { throw new SearchServiceException("Sorry, an error has occurred on the server while "+e.getMessage()); } @@ -1092,7 +1090,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T @Override @Deprecated public List getParentsList(Taxon taxon) throws Exception { - + return null; } @@ -1100,23 +1098,23 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T @Override public ArrayList loadListChildrenByParentId(String parentId) throws Exception { logger.info("Load List Children By ParentId: " + parentId); - + ArrayList listLightTaxonomyRow = new ArrayList(); - + if(parentId==null || parentId.isEmpty()){ logger.warn("parentId is null or empty "); return listLightTaxonomyRow; } - + try { SpeciesService taxonomyService = getSpeciesService(); CloseableIterator streamIterator = taxonomyService.getTaxonChildrenByParentId(parentId); ASLSession session = getASLSession(); - TaxonomyItemConverter converter = new TaxonomyItemConverter(getASLSession()); - + TaxonomyItemConverter converter = new TaxonomyItemConverter(getASLSession()); + Map mapChildren = SessionUtil.getHashMapChildrenTaxonomyCache(session); - + if(mapChildren==null){ logger.info("Cache taxa children doesn't exists into session, creating.."); mapChildren = new HashMap(); @@ -1124,14 +1122,14 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T while (streamIterator.hasNext()) { TaxonomyItem tax = streamIterator.next(); - + TaxonomyRow taxonomy = converter.convert(tax); - + if(mapChildren.get(taxonomy.getServiceId())==null){ logger.info("Taxonomy with service id: "+taxonomy.getServiceId()+" doesn't exists into Map Children, 
adding.."); mapChildren.put(taxonomy.getServiceId(),taxonomy); } - + LightTaxonomyRow lightTaxRow = ClusterStructuresForTaxonomyRow.convetTaxonomyRowToLigthTaxonomyRow(taxonomy); listLightTaxonomyRow.add(lightTaxRow); } @@ -1155,24 +1153,24 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T List listJobs = new ArrayList(); try{ - + TaxonomyJobPersistence taxonomyJobDao = DaoSession.getTaxonomyJobDAO(getASLSession()); Iterator iterator = taxonomyJobDao.getList().iterator(); - + SpeciesService taxonomyService = getSpeciesService(); - + while(iterator!=null && iterator.hasNext()){ TaxonomyJob job = iterator.next(); logger.info("get taxonomy job "+job.getId()+ " from service"); - + try{ - Status statusResponse = taxonomyService.getTaxonomyJobById(job.getId()); - + CompleteJobStatus statusResponse = taxonomyService.getTaxonomyJobById(job.getId()); + if(statusResponse!=null){ logger.info("statusResponse is not null..." + job.getId()); JobTaxonomyModel jobSpeciesModel = TaxonomyJobUtil.convertJob(job, statusResponse, taxonomyJobDao); logger.info("added list jobTaxonomyId: "+job.getTaxonomyId() + " status "+job.getState()); - + if(jobSpeciesModel!=null) listJobs.add(jobSpeciesModel); } @@ -1180,14 +1178,14 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T logger.info("TaxonomyJob statusResponse is null..." + job.getId()); TaxonomyJobUtil.deleteTaxonomyJobById(job.getId(),taxonomyJobDao); } - + }catch (Exception e) { e.printStackTrace(); logger.error("Error on getListSpeciesJobs ", e); throw new Exception("Error on getListSpeciesJobs", e); } } - + }catch (Exception e) { logger.error("Error on getListSpeciesJobs ", e); throw new Exception("Error on getListSpeciesJobs", e); @@ -1196,10 +1194,10 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T return listJobs; } - - + + /** - * + * * @param taxonomyServiceId * @param taxonomyName * @param taxonomyRank @@ -1212,37 +1210,37 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T //FIXED 20/05/2013 logger.info("Create job for taxonomy id: " + taxonomyServiceId); // System.out.println("Create job for taxonomy id: " + taxonomy.getServiceId()); - + JobTaxonomyModel jobSpeciesModel = null; - + TaxonomyJobPersistence taxonomyJobDao = DaoSession.getTaxonomyJobDAO(getASLSession()); - + SpeciesService taxonomyService = getSpeciesService(); - + String speciesJobId = taxonomyService.createTaxonomyJobForDWCAByChildren(taxonomyServiceId); - + long submitTime = Calendar.getInstance().getTimeInMillis(); String name = NormalizeString.lowerCaseUpFirstChar(taxonomyName) + " group"; - + //STORE INTO DAO TaxonomyJob speciesJob = new TaxonomyJob(speciesJobId, DownloadState.PENDING.toString(), name, taxonomyName, dataSourceName, taxonomyRank, 0, submitTime, 0, taxonomyServiceId); taxonomyJobDao.insert(speciesJob); - + jobSpeciesModel = new JobTaxonomyModel(speciesJob.getId(), speciesJob.getDescriptiveName(), DownloadState.PENDING, null, taxonomyName, dataSourceName, taxonomyRank); - + Date submit = DateUtil.millisecondsToDate(speciesJob.getSubmitTime()); // jobSpeciesModel.setStartTime(DateUtil.dateToDateFormatString(start)); jobSpeciesModel.setSubmitTime(submit); jobSpeciesModel.setEndTime(null); - + return jobSpeciesModel; } - + @Override public JobTaxonomyModel createTaxonomyJobByIds(String search, List dataSources) throws Exception { - + logger.info("Create job ForDWCAByIds for: " + search); Map hashIdDs = getSelectedTaxonomyIdAndDataSource(); 
@@ -1256,84 +1254,84 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T String name = NormalizeString.lowerCaseUpFirstChar(search) + " - "; name += listId.size() + " "; name += listId.size()>1?"taxa":"taxon"; - + String dataSourceName = ""; - + for (String taxonId : listId) { if(!dataSourceName.contains(hashIdDs.get(taxonId))) //remove duplicate dataSourceName+=hashIdDs.get(taxonId) + ", "; } - + if(dataSourceName.endsWith(", ")) dataSourceName = dataSourceName.substring(0, dataSourceName.length()-2); //STORE INTO DAO TaxonomyJob speciesJob = new TaxonomyJob(speciesJobId, DownloadState.PENDING.toString(), name, name, dataSourceName, "", 0, submitTime, 0, speciesJobId); taxonomyJobDao.insert(speciesJob); - + jobSpeciesModel = new JobTaxonomyModel(speciesJob.getId(), speciesJob.getDescriptiveName(), DownloadState.PENDING, null, name, dataSourceName, ""); - + Date submit = DateUtil.millisecondsToDate(speciesJob.getSubmitTime()); // jobSpeciesModel.setStartTime(DateUtil.dateToDateFormatString(start)); jobSpeciesModel.setSubmitTime(submit); jobSpeciesModel.setEndTime(null); - + return jobSpeciesModel; } - - + + @Override public JobTaxonomyModel resubmitTaxonomyJob(String jobIdentifier) throws Exception { - + logger.info("Resubmit taxonomy job for id: " + jobIdentifier); - + JobTaxonomyModel jobSpeciesModel = null; - + //Get Dao with list taxonomy jobs TaxonomyJobPersistence taxonomyJobDao = DaoSession.getTaxonomyJobDAO(getASLSession()); - + CriteriaBuilder queryBuilder = taxonomyJobDao.getCriteriaBuilder(); CriteriaQuery cq = queryBuilder.createQuery(); Predicate pr1 = queryBuilder.equal(taxonomyJobDao.rootFrom(cq).get(TaxonomyJob.ID_FIELD), jobIdentifier); cq.where(pr1); - + Iterator iterator = taxonomyJobDao.executeCriteriaQuery(cq).iterator(); - + TaxonomyJob taxonomy; - + if(iterator.hasNext()) taxonomy = iterator.next(); else return jobSpeciesModel; - + SpeciesService taxonomyService = getSpeciesService(); - + //recover taxomyId String speciesJobId = taxonomyService.createTaxonomyJobForDWCAByChildren(taxonomy.getTaxonomyId()); - + long submitTime = Calendar.getInstance().getTimeInMillis(); String name = RESUBMIT + ": " +NormalizeString.lowerCaseUpFirstChar(taxonomy.getDescriptiveName()); - + //STORE INTO DAO TaxonomyJob speciesJob = new TaxonomyJob(speciesJobId, DownloadState.PENDING.toString(), name, taxonomy.getDescriptiveName(), taxonomy.getDataSourceName(), taxonomy.getRank(), 0, submitTime, 0, taxonomy.getTaxonomyId()); taxonomyJobDao.insert(speciesJob); - + jobSpeciesModel = new JobTaxonomyModel(speciesJob.getId(), speciesJob.getDescriptiveName(), DownloadState.PENDING, null, taxonomy.getDescriptiveName(), taxonomy.getDataSourceName(), taxonomy.getRank()); - + Date submit = DateUtil.millisecondsToDate(speciesJob.getSubmitTime()); // jobSpeciesModel.setStartTime(DateUtil.dateToDateFormatString(start)); jobSpeciesModel.setSubmitTime(submit); jobSpeciesModel.setEndTime(null); - + return jobSpeciesModel; } @Override public boolean cancelTaxonomyJob(String jobIdentifier) throws Exception { - + try{ - + SpeciesService taxonomyService = getSpeciesService(); //REMOVE JOB ON THE SERVICE @@ -1342,7 +1340,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T TaxonomyJobPersistence speciesJobDao = DaoSession.getTaxonomyJobDAO(getASLSession()); int count = TaxonomyJobUtil.deleteTaxonomyJobById(jobIdentifier, speciesJobDao); - + if(count==1) return true; @@ -1350,36 +1348,36 @@ public class TaxonomySearchServiceImpl extends 
RemoteServiceServlet implements T logger.error("Error on cancel taxonomy job ", e); throw new Exception("Error on cancel taxonomy job", e); } - + return false; } @Override public boolean saveTaxonomyJob(String jobIdentifier, String destinationFolderId, String fileName, String scientificName, String dataSourceName) throws Exception { - + logger.info("saveSpeciesJob jobId: "+jobIdentifier+" destinationFolderId: "+destinationFolderId+" fileName: "+fileName); - + try { - + SpeciesService taxonomyService = getSpeciesService(); String mimeType = "application/zip"; InputStream inputStream = taxonomyService.getTaxonomyJobFileById(jobIdentifier); - + if(inputStream!=null){ Workspace workspace = GetWorkspaceUtil.getWorskspace(getASLSession()); logger.info("input stream is not null"); - + WorkspaceFolder folder = (WorkspaceFolder) workspace.getItem(destinationFolderId); fileName = WorkspaceUtil.getUniqueName(fileName, folder); folder.createExternalFileItem(fileName,"Taxonomy job generated files", mimeType, inputStream); logger.info("Save file with taxonomy was completed"); } else{ - + logger.info("input stream is null"); return false; } - + return true; } catch (Exception e) { @@ -1387,20 +1385,20 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T logger.error("An error occurred saving the generated file into the workspace",e); throw new SearchServiceException(e.getMessage()); } - + } - + @Override public boolean saveTaxonomyJobError(String jobIdentifier, String destinationFolderId, String fileName, String scientificName, String dataSourceName) throws Exception { logger.info("saveSpeciesJob error jobId: "+jobIdentifier+" destinationFolderId: "+destinationFolderId+" fileName: "+fileName); - + try { - + SpeciesService taxonomyService = getSpeciesService(); String mimeType = "text/plain"; - + InputStream inputStream = taxonomyService.getTaxonomyJobErrorFileById(jobIdentifier); - + if(inputStream!=null){ Workspace workspace = GetWorkspaceUtil.getWorskspace(getASLSession()); logger.info("input stream is not null"); @@ -1410,11 +1408,11 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T logger.info("Save report file with errors occurred was completed"); } else{ - + logger.info("input stream is null"); return false; } - + return true; } catch (Exception e) { @@ -1422,41 +1420,41 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T logger.error("An error occurred saving the generated file into the workspace",e); throw new SearchServiceException(e.getMessage()); } - + } - + @Override public List retrieveTaxonomyByIds(List ids) throws Exception{ logger.info("retrieveTaxonomyByIds ids size: " + ids.size()); List listLightTaxonomyRow = new ArrayList(); - + try { - + SpeciesService taxonomyService = getSpeciesService(); - StreamExtend convert = new StreamExtend(ids.iterator()); - - CloseableIterator streamIterator = taxonomyService.retrieveTaxonomyById(convert); - + //StreamExtend convert = new StreamExtend(ids.iterator()); + + CloseableIterator streamIterator = taxonomyService.retrieveTaxonomyById(ids); + ASLSession session = getASLSession(); - TaxonomyItemConverter taxonomyItemConverter = new TaxonomyItemConverter(session); - + TaxonomyItemConverter taxonomyItemConverter = new TaxonomyItemConverter(session); + Map mapTaxonomyIds = SessionUtil.getHashMapTaxonomyByIdsCache(session); - + if(mapTaxonomyIds==null){ logger.info("Cache taxa ByIds doesn't exists into session, creating.."); mapTaxonomyIds = new HashMap(); } 
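// Porting note, kept as comments since this sits inside retrieveTaxonomyByIds:
// with spd-client-library 3.x the ids had to be wrapped in a stream first,
//   StreamExtend streamIds = new StreamExtend(ids.iterator());
//   taxonomyService.retrieveTaxonomyById(streamIds);
// whereas 4.0 accepts the plain List of ids directly, which is why the
// StreamExtend import and wrappers are removed throughout this patch:
//   CloseableIterator streamIterator = taxonomyService.retrieveTaxonomyById(ids);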
- + while (streamIterator.hasNext()) { TaxonomyItem tax = streamIterator.next(); - + TaxonomyRow taxonomy = taxonomyItemConverter.convert(tax); - + if(mapTaxonomyIds.get(taxonomy.getServiceId())==null){ logger.info("Taxonomy with service id: "+taxonomy.getServiceId()+" doesn't exists into Map Taxonomy Ids, adding.."); mapTaxonomyIds.put(taxonomy.getServiceId(),taxonomy); } - + LightTaxonomyRow lightTaxRow = ClusterStructuresForTaxonomyRow.convetTaxonomyRowToLigthTaxonomyRow(taxonomy); listLightTaxonomyRow.add(lightTaxRow); } @@ -1471,43 +1469,43 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T logger.error("An error retrieve taxonomy by Id",e); // throw new Exception(e.getMessage()); } - + return listLightTaxonomyRow; - + } - + @Override public List retrieveSynonymsByRefId(String refId) throws Exception{ logger.info("retrieveSynonymsById id: " + refId); List listLightTaxonomyRow = new ArrayList(); - + try { - + SpeciesService taxonomyService = getSpeciesService(); CloseableIterator streamIterator = taxonomyService.retrieveSynonymsById(refId); - + ASLSession session = getASLSession(); TaxonomyItemConverter taxonomyItemConverter = new TaxonomyItemConverter(getASLSession()); - + Map mapSysnosyms = SessionUtil.getHashMapSynonymsTaxonomyCache(getASLSession()); - + if(mapSysnosyms==null){ logger.info("Cache synonyms doesn't exists into session, creating.."); mapSysnosyms = new HashMap(); } - + // int i = 1; while (streamIterator.hasNext()) { TaxonomyItem tax = streamIterator.next(); - + TaxonomyRow taxonomy = taxonomyItemConverter.convert(tax); - + if(mapSysnosyms.get(taxonomy.getServiceId())==null){ logger.info("Taxonomy with service id: "+taxonomy.getServiceId()+" doesn't exists into Map Synonyms, adding.."); mapSysnosyms.put(taxonomy.getServiceId(),taxonomy); } - + LightTaxonomyRow lightTaxRow = ClusterStructuresForTaxonomyRow.convetTaxonomyRowToLigthTaxonomyRow(taxonomy); listLightTaxonomyRow.add(lightTaxRow); } @@ -1521,128 +1519,128 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T e.printStackTrace(); logger.error("An error retrieve synonyms by Id",e); } - + return listLightTaxonomyRow; - + } - - - + + + /** * {@inheritDoc} */ @Override public List createOccurrencesJob(List listJobOccurrenceModel, SaveFileFormat saveFileFormat, OccurrencesSaveEnum csvType, boolean isByDataSource, int expectedOccurrence) throws Exception { logger.info("createOccurencesJobFromSelection..."); - + List listResultJobModel = new ArrayList(); @SuppressWarnings("unchecked") FetchingSession searchSession = (FetchingSession) getSearchSession(); SpeciesService taxonomyService = getSpeciesService(); ASLSession aslSession = getASLSession(); String dataSourceAsXml = ""; - + try { - + List keys = null; //Get Dao with list occurrences jobs OccurrenceJobPersistence occurrencesJobDao = DaoSession.getOccurrencesJobDAO(aslSession); if(!isByDataSource){ //NOT IS BY DATASOURCE - CREATE ONE JOB - + keys = OccurrenceJobUtil.getListOfSelectedKey(searchSession); - + if(listJobOccurrenceModel!=null && listJobOccurrenceModel.get(0)!=null){ //IN THIS CASE - THERE IS ONE JOBMODEL - + List dataSourceList = listJobOccurrenceModel.get(0).getDataSources(); - + //ADDED DATA SOURCE LIST listResultJobModel.add(OccurrenceJobUtil.createOccurrenceJobOnServiceByKeys(listJobOccurrenceModel.get(0), taxonomyService, occurrencesJobDao, keys, dataSourceList, saveFileFormat, csvType, expectedOccurrence)); - } + } }else{ //IS BY DATASOURCE - CREATE MORE JOB, ONE FOR EACH DATASOURCE - 
- for (JobOccurrencesModel jobModel : listJobOccurrenceModel) { //IN THIS CASE - FOR EACH JOBMODEL THERE IS ONE DATASOURCE - + + for (JobOccurrencesModel jobModel : listJobOccurrenceModel) { //IN THIS CASE - FOR EACH JOBMODEL THERE IS ONE DATASOURCE + dataSourceAsXml = ""; - + if(jobModel.getDataSources()!=null && jobModel.getDataSources().get(0)!=null){ - + //recover keys DataSource dataSource = jobModel.getDataSources().get(0); - + OccurrenceKeys occKey = OccurrenceJobUtil.getListOfSelectedKeyByDataSource(dataSource.getName(), aslSession); - + List dataSourceList = jobModel.getDataSources(); - + //ADDED DATA SOURCE LIST listResultJobModel.add(OccurrenceJobUtil.createOccurrenceJobOnServiceByKeys(jobModel, taxonomyService, occurrencesJobDao, occKey.getListKey(), dataSourceList, saveFileFormat, csvType, occKey.getTotalOccurrence())); } } - + } - + return listResultJobModel; - + } catch (Exception e) { logger.error("An error occurred in createOccurencesJobFromSelection", e); throw new Exception(e.getMessage()); } - + } - + @Override public List resubmitOccurrencesJob(String jobIdentifier) throws Exception { logger.info("createOccurencesJobFromSelection..."); - + List listResultJobModel = new ArrayList(); SpeciesService taxonomyService = getSpeciesService(); ASLSession aslSession = getASLSession(); List keys = null; - + try { - + //Get Dao with list occurrences jobs OccurrenceJobPersistence occurrencesJobDao = DaoSession.getOccurrencesJobDAO(aslSession); - + CriteriaBuilder queryBuilder = occurrencesJobDao.getCriteriaBuilder(); CriteriaQuery cq = queryBuilder.createQuery(); Predicate pr1 = queryBuilder.equal(occurrencesJobDao.rootFrom(cq).get(OccurrencesJob.ID_FIELD), jobIdentifier); cq.where(pr1); - + Iterator iterator = occurrencesJobDao.executeCriteriaQuery(cq).iterator(); OccurrencesJob job; - + if(iterator.hasNext()) job = iterator.next(); else return listResultJobModel; - + //recover keys keys = OccurrenceJobUtil.revertListKeyFromStoredXMLString(job.getResultRowKeysAsXml()); - + //recover file format SaveFileFormat fileFormat = OccurrenceJobUtil.converFileFormat(job.getFileFormat()); - + //recover csv type OccurrencesSaveEnum csvType = OccurrenceJobUtil.convertCsvType(job.getCsvType()); - + String name = RESUBMIT + ": "+job.getName(); - + JobOccurrencesModel jobModel = new JobOccurrencesModel("",name, job.getScientificName(), job.getDataSources(), fileFormat, csvType, job.isByDataSource()); jobModel.setTotalOccurrences(job.getExpectedOccurrence()); - + listResultJobModel.add(OccurrenceJobUtil.createOccurrenceJobOnServiceByKeys(jobModel, taxonomyService, occurrencesJobDao, keys, job.getDataSources(), fileFormat, csvType, jobModel.getTotalOccurrences())); - + } catch (Exception e) { logger.error("An error occurred in createOccurencesJobFromSelection", e); throw new Exception(e.getMessage()); } - + return listResultJobModel; - + } - + @Override public List getListOccurrencesJob() throws Exception{ logger.info("getListOccurencesJob... "); @@ -1656,15 +1654,15 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T while (iterator!=null && iterator.hasNext()) { OccurrencesJob job = iterator.next(); - Status statusResponse = taxonomyService.getOccurrenceJobById(job.getId()); + CompleteJobStatus statusResponse = taxonomyService.getOccurrenceJobById(job.getId()); logger.info("get occurrence job "+job.getId()+ " from service"); - + try{ - + if(statusResponse!=null){ logger.info("statusResponse of occurrence job is not null..." 
+ job.getId()); JobOccurrencesModel jobOccurrenceModel = OccurrenceJobUtil.convertJob(job, statusResponse, occurrencesJobDao); - + if(jobOccurrenceModel!=null){ logger.info("added list jobOccurrenceId: "+jobOccurrenceModel.getJobIdentifier() + " status "+jobOccurrenceModel.getDownloadState()); listJobs.add(jobOccurrenceModel); @@ -1675,7 +1673,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T logger.info("delete job ..." + job.getId()); OccurrenceJobUtil.deleteOccurrenceJobById(job.getId(),occurrencesJobDao); } - + }catch (Exception e) { e.printStackTrace(); logger.error("Error on getListOccurencesJob ", e); @@ -1686,20 +1684,20 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T } catch (Exception e) { logger.error("Error on get iterator "+e, e); } - + return listJobs; } - + @Override public boolean saveOccurrenceJob(JobOccurrencesModel jobModel, String destinationFolderId, String fileName, String scientificName, String dataSourceName) throws Exception { logger.info("saveOccurrenceJob jobId: "+jobModel.getJobIdentifier()+" destinationFolderId: "+destinationFolderId+" fileName: "+fileName + " file format: "+jobModel.getFileFormat()); try { - + SpeciesService taxonomyService = getSpeciesService(); String mimeType = null; - + switch (jobModel.getFileFormat()) { case CSV: { mimeType = "text/csv"; @@ -1708,14 +1706,14 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T mimeType = "application/xhtml+xml"; } break; } - + InputStream inputStream = taxonomyService.getOccurrenceJobFileById(jobModel.getJobIdentifier()); - + if(inputStream!=null){ Workspace workspace = GetWorkspaceUtil.getWorskspace(getASLSession()); logger.info("input stream is not null"); // System.out.println("input stream is not null"); - + WorkspaceFolder folder = (WorkspaceFolder) workspace.getItem(destinationFolderId); fileName = WorkspaceUtil.getUniqueName(fileName, folder); folder.createExternalFileItem(fileName,"Occurrence job generated files", mimeType, inputStream); @@ -1732,16 +1730,16 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T logger.error("An error occurred saving the generated file into the workspace",e); throw new SearchServiceException(e.getMessage()); } - + } - + @Override public boolean saveOccurrenceJobError(JobOccurrencesModel jobModel, String destinationFolderId, String fileName, String scientificName, String dataSourceName) throws Exception { - + logger.info("saveOccurrenceJobError jobId: "+jobModel.getJobIdentifier()+" destinationFolderId: "+destinationFolderId+" fileName: "+fileName + " file format: "+jobModel.getFileFormat()); - + try { - + SpeciesService taxonomyService = getSpeciesService(); // String mimeType = null; @@ -1753,27 +1751,27 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T // mimeType = "application/xhtml+xml"; // } break; // } - + String mimeType = "text/plain"; - + InputStream inputStream = taxonomyService.getOccurrenceJobErrorFileById(jobModel.getJobIdentifier()); - + if(inputStream!=null){ Workspace workspace = GetWorkspaceUtil.getWorskspace(getASLSession()); logger.info("input stream is not null"); // System.out.println("input stream is not null"); - + WorkspaceFolder folder = (WorkspaceFolder) workspace.getItem(destinationFolderId); fileName = WorkspaceUtil.getUniqueName(fileName, folder); folder.createExternalFileItem(fileName,"Report errors occurred on occurrence job", mimeType, inputStream); logger.info("Save 
report file with errors occurred was completed"); } else{ - + logger.info("input stream is null"); return false; } - + return true; } catch (Exception e) { @@ -1781,20 +1779,20 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T logger.error("An error occurred saving the generated file into the workspace",e); throw new SearchServiceException(e.getMessage()); } - + } - + @Override public boolean cancelOccurrenceJob(String jobIdentifier) throws Exception { logger.info("cancelOccurrenceJob jobIdentifier: "+jobIdentifier); try{ - + SpeciesService taxonomyService = getSpeciesService(); //REMOVE JOB ON THE SERVICE taxonomyService.cancelTaxonomyJobById(jobIdentifier); OccurrenceJobPersistence occurrenceJobDAO = DaoSession.getOccurrencesJobDAO(getASLSession()); int count = OccurrenceJobUtil.deleteOccurrenceJobById(jobIdentifier, occurrenceJobDAO); - + if(count==1) return true; @@ -1802,7 +1800,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T logger.error("Error on cancel occurrence job ", e); throw new Exception("Error on cancel occurrence job", e); } - + return false; } @@ -1812,47 +1810,47 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T int countSelectedRow = countOfSelectedRow(); boolean isReduced = false; int totalRow = countSelectedRow; - + ASLSession session = getASLSession(); HashMap hashCluster = SessionUtil.getCurrentClusterCommonNameForResultRow(session); - + //Reset cluster for common name if(hashCluster!=null) SessionUtil.setCurrentClusterCommonNameForResultRow(session, null); - - + + //LIMIT NUMBER OF ITEMS TO ConstantsSpeciesDiscovery.LIMIT_ITEM_DETAILS if(countSelectedRow>ConstantsSpeciesDiscovery.RESULT_ROW_LIMIT_ITEM_DETAILS) - countSelectedRow = ConstantsSpeciesDiscovery.RESULT_ROW_LIMIT_ITEM_DETAILS; - + countSelectedRow = ConstantsSpeciesDiscovery.RESULT_ROW_LIMIT_ITEM_DETAILS; + ResultFilter filter = new ResultFilter(false, true, true); - + SearchResult searchResults = getSearchResultRows(0, countSelectedRow, filter, true); ClusterStructuresForResultRow cluster = new ClusterStructuresForResultRow(searchResults,isReduced, totalRow); //TODO USE THREAD? 
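// Flow recap for the clustering step below: the selected rows were capped
// above at RESULT_ROW_LIMIT_ITEM_DETAILS and fetched in one shot via
// getSearchResultRows(0, countSelectedRow, filter, true); the manager then
// groups them by scientific name into common-name/data-source clusters that
// are cached in the ASLSession, and hashResult is nulled before returning
// because the client never reads it.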
ManagerClusterCommonNameDataSourceForResultRow manager = new ManagerClusterCommonNameDataSourceForResultRow(cluster.getHashClusterScientificNameResultRowServiceID(), cluster.getHashResult()); - + SessionUtil.setCurrentClusterCommonNameForResultRow(getASLSession(), manager.getHashClusterCommonNameDataSource()); //THIS OBJECT IS NOT USED ON CLIENT cluster.setHashResult(null); - + return cluster; } - + @Override public ClusterCommonNameDataSourceForResultRow loadClusterCommonNameForResultRowByScientificName(String scientificName) throws Exception { - logger.info("loadClusterCommonNameForResultRowByScientificName for scientific name: "+scientificName); + logger.info("loadClusterCommonNameForResultRowByScientificName for scientific name: "+scientificName); HashMap hashCluster = SessionUtil.getCurrentClusterCommonNameForResultRow(getASLSession()); if(hashCluster==null){ logger.warn("Error in loadClusterCommonNameForResultRowByScientificName, hashCluster was not found in session"); return null; } - + ClusterCommonNameDataSourceForResultRow cluster = hashCluster.get(scientificName); if(cluster==null){ logger.warn("Error in loadClusterCommonNameForResultRowByScientificName, cluster was not found in session"); @@ -1860,8 +1858,8 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T } return cluster; } - - + + @Override public List loadDataSourceForResultRow(boolean selected, boolean distinct) throws Exception { @@ -1871,17 +1869,17 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T try { daoResultRow = DaoSession.getResultRowDAO(session); - + EntityManager em = daoResultRow.createNewManager(); List listDN = new ArrayList(); try { String selectString = "select "; if(distinct) selectString+= "distinct "; - + Query query = em.createQuery(selectString + "t."+ResultRow.DATASOURCE_NAME+" from ResultRow t where t.selected = "+selected ); listDN = query.getResultList(); - + } finally { em.close(); } @@ -1889,17 +1887,17 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T for (String dataSourceName : listDN) { listDataSource.add(new DataSource(dataSourceName, dataSourceName)); } - + } catch (Exception e) { logger.error("Error in loadDataSourceForResultRow " +e.getMessage(), e); throw new Exception("Error in loadDataSourceForResultRow " + e.getMessage(), e); } return listDataSource; - + } - + @Override public ClusterStructuresForTaxonomyRow loadStructuresForTaxonomyClustering() throws Exception { @@ -1907,25 +1905,25 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T boolean isReduced = false; int totalRow = countSelectedRow; - + ASLSession session = getASLSession(); HashMap hashCluster = SessionUtil.getCurrentClusterCommonNameForTaxonomyRow(session); - + //Reset cluster for common name if(hashCluster!=null) SessionUtil.setCurrentClusterCommonNameForTaxonomyRow(session, null); - - + + HashMap mapOldChildren = SessionUtil.getHashMapChildrenTaxonomyCache(session); //Reset list children to last clustering if(mapOldChildren!=null) SessionUtil.setHashMapChildrenTaxonomyCache(session, null); - + HashMap mapSynonyms = SessionUtil.getHashMapSynonymsTaxonomyCache(session); //Reset list synonyms to last clustering if(mapSynonyms!=null) SessionUtil.setHashMapSynonymsTaxonomyCache(session, null); - + HashMap mapTaxonomyIds = SessionUtil.getHashMapTaxonomyByIdsCache(session); //Reset list synonyms to last clustering if(mapTaxonomyIds!=null) @@ -1940,44 +1938,44 @@ public class 
TaxonomySearchServiceImpl extends RemoteServiceServlet implements T ResultFilter filter = new ResultFilter(false, true, true); SearchResult searchResults = getSearchTaxonomyRow(0, countSelectedRow, filter, true); ClusterStructuresForTaxonomyRow cluster = new ClusterStructuresForTaxonomyRow(searchResults,isReduced, totalRow); - + //TODO USE THREAD? ManagerClusterCommonNameDataSourceForTaxonomyRow manager = new ManagerClusterCommonNameDataSourceForTaxonomyRow(cluster.getHashClusterScientificNameTaxonomyRowServiceID(), cluster.getHashResult()); SessionUtil.setCurrentClusterCommonNameForTaxonomyRow(session, manager.getHashClusterCommonNameDataSource()); //THIS OBJECT IS NOT USED ON CLIENT cluster.setHashResult(null); - + return cluster; - + } @Override public boolean changeStatusOccurrenceJob(String jobIdentifier, DownloadState state) throws Exception { OccurrenceJobPersistence occurrenceJobDAO = DaoSession.getOccurrencesJobDAO(getASLSession()); - + int count = OccurrenceJobUtil.changeStatusOccurrenceJobById(jobIdentifier, state, occurrenceJobDAO); - + if(count==1) return true; - + return false; - + } @Override public boolean changeStatusTaxonomyJob(String jobIdentifier, DownloadState state) throws Exception { - + TaxonomyJobPersistence taxonomyJobDAO = DaoSession.getTaxonomyJobDAO(getASLSession()); int count = TaxonomyJobUtil.changeStatusTaxonomyJobById(jobIdentifier, state, taxonomyJobDAO); - + if(count==1) return true; - + return false; - + } /* (non-Javadoc) @@ -1985,13 +1983,13 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T */ @Override public boolean isAvailableTaxonomyJobReportError(String jobIdentifier) throws Exception { - + logger.info("isAvailableTaxonomyJobReportError jobId: "+jobIdentifier); try { - + SpeciesService taxonomyService = getSpeciesService(); return taxonomyService.isAvailableTaxonomyJobErrorFileById(jobIdentifier); - + } catch (Exception e) { logger.error("An error occurred getting error (taxonomy) file for jobid "+jobIdentifier,e); return false; @@ -2003,19 +2001,19 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T */ @Override public boolean isAvailableOccurrenceJobReportError(String jobIdentifier) throws Exception { - + logger.info("isAvailableOccurrenceJobReportError jobId: "+jobIdentifier); try { - + SpeciesService taxonomyService = getSpeciesService(); return taxonomyService.isAvailableOccurrenceJobErrorFileById(jobIdentifier); - + } catch (Exception e) { logger.error("An error occurred getting error (occurrence) file for jobid "+jobIdentifier,e); return false; } } - + @Override public String getLastQuery(){ logger.info("getLastQuery..."); @@ -2028,23 +2026,23 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T */ @Override public ClusterCommonNameDataSourceForTaxonomyRow loadClusterCommonNameForTaxonomyRowByScientificName(String scientificName) { - + logger.info("loadClusterCommonNameForTaxonomyRowByScientificName for scientific name: "+scientificName); - + HashMap hashCluster = SessionUtil.getCurrentClusterCommonNameForTaxonomyRow(getASLSession()); if(hashCluster==null){ logger.warn("Error in loadClusterCommonNameForTaxonomyRowByScientificName, hashCluster was not found in session"); return null; } - + ClusterCommonNameDataSourceForTaxonomyRow cluster = hashCluster.get(scientificName); - + if(cluster==null){ logger.warn("Error in loadClusterCommonNameForTaxonomyRowByScientificName, cluster was not found in session"); return null; } - + return cluster; } } 
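The hunks that follow (OccurrenceJobUtil and TaxonomyJobUtil) replace the old string comparisons against the PENDING/RUNNING/FAILED/COMPLETED constants with a switch over the spd-client-library 4.0 JobStatus enum. The sketch below is a minimal, self-contained illustration of that mapping pattern; the two nested enums are simplified stand-ins for org.gcube.data.spd.model.service.types.JobStatus and the portlet's DownloadState, reduced to the values the portlet actually handles.

public class StatusMappingSketch {

    // Stand-in for org.gcube.data.spd.model.service.types.JobStatus
    // (assumption: only the four values used by the portlet are modeled).
    enum JobStatus { PENDING, RUNNING, COMPLETED, FAILED }

    // Stand-in for the portlet's DownloadState, limited to the mapped values.
    enum DownloadState { PENDING, ONGOING, COMPLETED, FAILED }

    // Mirrors the getDownloadState(JobStatus) methods introduced in this patch:
    // a null or unrecognized status maps to null so callers can bail out early.
    static DownloadState getDownloadState(JobStatus status) {
        if (status == null) {
            return null;
        }
        switch (status) {
            case COMPLETED: return DownloadState.COMPLETED;
            case FAILED:    return DownloadState.FAILED;
            case PENDING:   return DownloadState.PENDING;
            case RUNNING:   return DownloadState.ONGOING;
            default:        return null;
        }
    }

    public static void main(String[] args) {
        System.out.println(getDownloadState(JobStatus.RUNNING)); // prints ONGOING
        System.out.println(getDownloadState(null));              // prints null
    }
}

Switching on the enum instead of comparing strings moves the set of legal states into the type system, so a renamed or removed service state fails at compile time rather than silently falling through to null.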
diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/job/OccurrenceJobUtil.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/job/OccurrenceJobUtil.java index 63dfd4a..82d4f73 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/job/OccurrenceJobUtil.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/job/OccurrenceJobUtil.java @@ -15,7 +15,8 @@ import javax.persistence.criteria.Predicate; import org.apache.log4j.Logger; import org.gcube.application.framework.core.session.ASLSession; -import org.gcube.data.spd.stubs.types.Status; +import org.gcube.data.spd.model.service.types.CompleteJobStatus; +import org.gcube.data.spd.model.service.types.JobStatus; import org.gcube.portlets.user.speciesdiscovery.server.persistence.DaoSession; import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.OccurrenceJobPersistence; import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.ResultRowPersistence; @@ -33,6 +34,13 @@ import org.gcube.portlets.user.speciesdiscovery.shared.OccurrencesSaveEnum; import org.gcube.portlets.user.speciesdiscovery.shared.ResultRow; import org.gcube.portlets.user.speciesdiscovery.shared.SaveFileFormat; + +/** + * The Class OccurrenceJobUtil. + * + * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it + * Jan 10, 2017 + */ public class OccurrenceJobUtil { //STATE RETURNED BY STATUS RESPONSE @@ -40,24 +48,32 @@ public class OccurrenceJobUtil { public static final String FAILED = "FAILED"; public static final String RUNNING = "RUNNING"; public static final String PENDING = "PENDING"; - + private static final String ALIASKEY = "key"; protected static Logger logger = Logger.getLogger(OccurrenceJobUtil.class); - public static JobOccurrencesModel convertJob(OccurrencesJob job, Status statusResponse, OccurrenceJobPersistence occurrencesJobDao) { - //TODO SET END TIME + /** + * Convert job. 
+ * + * @param job the job + * @param statusResponse the status response + * @param occurrencesJobDao the occurrences job dao + * @return the job occurrences model + */ + public static JobOccurrencesModel convertJob(OccurrencesJob job, CompleteJobStatus statusResponse, OccurrenceJobPersistence occurrencesJobDao) { + //TODO SET END TIME JobOccurrencesModel jobOccurrenceModel; DownloadState downloadState = null; long endTime = 0; - String status = statusResponse.getStatus(); + JobStatus status = statusResponse.getStatus(); downloadState = getDownloadState(status); logger.trace("jobId: "+job.getId() +" download state: " + downloadState); - + //FOR DEBUG // System.out.println("jobId: "+job.getId() +" download state: " + downloadState); - + if(downloadState==null) //Case with exception return null; @@ -65,13 +81,13 @@ public class OccurrenceJobUtil { long submitTime = job.getSubmitTime(); Date submit = DateUtil.millisecondsToDate(submitTime); // jobSpeciesModel.setStartTime(DateUtil.dateToDateFormatString(start)); - + int completedEntry = 0; if(statusResponse.getCompletedEntries()>0) completedEntry = statusResponse.getCompletedEntries(); - + boolean changeStatus = false; - + //if status is completed and job was saved, update status as saved if(downloadState.equals(DownloadState.COMPLETED)){ if(job.getState().compareTo(DownloadState.SAVED.toString())==0){ @@ -79,18 +95,18 @@ public class OccurrenceJobUtil { changeStatus = true; } } - + jobOccurrenceModel = new JobOccurrencesModel(job.getId(), job.getName(), job.getDescription(), downloadState, job.getScientificName(), job.getDataSources(), submit, null,completedEntry, job.getExpectedOccurrence()); try{ boolean changeEndTime = false; - + //UPDATE END TIME if(downloadState.equals(DownloadState.FAILED) || downloadState.equals(DownloadState.COMPLETED)){ - + // if(job.getEndTime()==0){ //UPDATE end time first time only -// +// // logger.trace("UPDATE end time first time only - " + downloadState); // endTime = Calendar.getInstance().getTimeInMillis(); // job.setEndTime(endTime); @@ -99,22 +115,22 @@ public class OccurrenceJobUtil { // System.out.println("job "+job); // System.out.println("statusResponse.getEndDate() "+statusResponse.getEndDate()); // System.out.println("job.getEndTime() "+job.getEndTime()); - + if(statusResponse.getEndDate()!=null && job.getEndTime()==0){ //UPDATE end time first time only - + logger.trace("UPDATE end time first time only - " + downloadState); // endTime = Calendar.getInstance().getTimeInMillis(); endTime = statusResponse.getEndDate().getTimeInMillis(); job.setEndTime(endTime); changeEndTime = true; // speciesJobDao.update(job); - + } } - - + + boolean changeStartTime = false; - + //SET START TIME long startTime = job.getStartTime(); if(statusResponse.getStartDate()!=null && startTime==0){ //UPDATE start time first time only @@ -124,7 +140,7 @@ public class OccurrenceJobUtil { changeStartTime = true; } - + //UPDATE DAO if(changeStatus || changeEndTime || changeStartTime){ job.setState(downloadState.toString()); @@ -134,11 +150,11 @@ public class OccurrenceJobUtil { }catch (Exception e) { logger.error("An error occurred on update the occurrencesJobDao ", e); } - - + + endTime = job.getEndTime(); long elapsedTime = 0; - + //SET END TIME, BECAUSE IT IS CHANGED if(endTime!=0){ Date end = DateUtil.millisecondsToDate(endTime); @@ -147,39 +163,54 @@ public class OccurrenceJobUtil { } else elapsedTime = Calendar.getInstance().getTimeInMillis(); - + //SET ELAPSED TIME 
jobOccurrenceModel.setElapsedTime(DateUtil.getDifference(submitTime, elapsedTime)); - - + + //OTHERS SET jobOccurrenceModel.setFileFormat(converFileFormat(job.getFileFormat())); jobOccurrenceModel.setCsvType(convertCsvType(job.getCsvType())); jobOccurrenceModel.setByDataSource(job.isByDataSource()); - + return jobOccurrenceModel; } - - public static DownloadState getDownloadState(String status){ - - if(status!=null){ - if(status.compareToIgnoreCase(PENDING)==0){ - return DownloadState.PENDING; - }else if(status.compareToIgnoreCase(RUNNING)==0){ - return DownloadState.ONGOING; - }else if(status.compareToIgnoreCase(FAILED)==0){ - return DownloadState.FAILED; - }else if(status.compareToIgnoreCase(COMPLETED)==0){ + + /** + * Gets the download state. + * + * @param status the status + * @return the download state + */ + public static DownloadState getDownloadState(JobStatus status){ + + if(status!=null){ + switch (status) { + case COMPLETED: return DownloadState.COMPLETED; + case FAILED: + return DownloadState.FAILED; + case PENDING: + return DownloadState.PENDING; + case RUNNING: + return DownloadState.ONGOING; + default: + return null; } } + return null; - } + /** + * Convert csv type. + * + * @param csvType the csv type + * @return the occurrences save enum + */ public static OccurrencesSaveEnum convertCsvType(String csvType) { - + if(csvType!=null){ if(csvType.compareToIgnoreCase(OccurrencesSaveEnum.STANDARD.toString())==0){ return OccurrencesSaveEnum.STANDARD; @@ -191,8 +222,14 @@ public class OccurrenceJobUtil { } + /** + * Conver file format. + * + * @param fileFormat the file format + * @return the save file format + */ public static SaveFileFormat converFileFormat(String fileFormat) { - + if(fileFormat!=null){ if(fileFormat.compareToIgnoreCase(SaveFileFormat.CSV.toString())==0){ return SaveFileFormat.CSV; @@ -205,6 +242,13 @@ public class OccurrenceJobUtil { } + /** + * Delete occurrence job by id. + * + * @param jobIdentifier the job identifier + * @param occurrencesJobDao the occurrences job dao + * @return the int + */ public static int deleteOccurrenceJobById(String jobIdentifier, OccurrenceJobPersistence occurrencesJobDao){ logger.trace("Delete occurrence job id: " + jobIdentifier); @@ -214,55 +258,70 @@ public class OccurrenceJobUtil { }catch (Exception e) { logger.error("An error occured deleteOccurrenceJobById jobId: " + jobIdentifier + " exception: "+e, e); - + } - + logger.trace("job not exists : " +jobIdentifier); return 0; } - - + + + /** + * Change status occurrence job by id. 
+ * + * @param jobIdentifier the job identifier + * @param state the state + * @param occurrencesJobDao the occurrences job dao + * @return the int + */ public static int changeStatusOccurrenceJobById(String jobIdentifier, DownloadState state, OccurrenceJobPersistence occurrencesJobDao){ logger.trace("Change status occurrence job id: " + jobIdentifier); // System.out.println("Delete job id: " + jobIdentifier); - + int result = 0; - + try{ - + CriteriaBuilder queryBuilder = occurrencesJobDao.getCriteriaBuilder(); CriteriaQuery cq = queryBuilder.createQuery(); Predicate pr1 = queryBuilder.equal(occurrencesJobDao.rootFrom(cq).get(OccurrencesJob.ID_FIELD), jobIdentifier); cq.where(pr1); - + Iterator iterator = occurrencesJobDao.executeCriteriaQuery(cq).iterator(); - + OccurrencesJob job; - + if(iterator.hasNext()) job = iterator.next(); else return 0; - + job.setState(state.toString()); - + occurrencesJobDao.update(job); - + }catch (Exception e) { logger.error("An error occured in change status jobId: " + jobIdentifier + " exception: "+e, e ); } - + return result; } - - + + + /** + * Gets the list of selected key. + * + * @param searchSession the search session + * @return the list of selected key + * @throws Exception the exception + */ public static List getListOfSelectedKey(FetchingSession searchSession) throws Exception{ - + Collection selectedRows = ((SelectableFetchingBuffer) searchSession.getBuffer()).getSelected(); logger.trace("found "+selectedRows.size()+" selected rows"); - + int count = 0; - + List keys = new ArrayList(selectedRows.size()); for (ResultRow row:selectedRows) { @@ -272,45 +331,52 @@ public class OccurrenceJobUtil { count += row.getOccurencesCount(); } } - + logger.trace("found "+count+" occurrence points"); - + return keys; - + } - - + + + /** + * Gets the list of selected key by data source. 
+ * + * @param dataSource the data source + * @param session the session + * @return the list of selected key by data source + */ public static OccurrenceKeys getListOfSelectedKeyByDataSource(String dataSource, ASLSession session) { logger.trace("getListOfSelectedKeyByDataSource..."); - + OccurrenceKeys occurrenceKeys = new OccurrenceKeys(); - + List keys = new ArrayList(); - + Iterator resulRowIt = null; - + int count = 0; - + try{ - + // System.out.println("dasource name: "+dataSource); logger.trace("datasource name: "+dataSource); - + ResultRowPersistence resultRowDao = DaoSession.getResultRowDAO(session); - + /*CriteriaBuilder cb = resultRowDao.getCriteriaBuilder(); - + CriteriaQuery cq = cb.createQuery(); - + Predicate pr1 = cb.equal(resultRowDao.rootFrom(cq).get(ResultRow.DATASOURCE_NAME), dataSource); - + Predicate pr2 = cb.equal(resultRowDao.rootFrom(cq).get(ResultRow.SELECTED), true); - + cq.where(cb.and(pr1,pr2)); - + Iterator resulRowIt = resultRowDao.executeCriteriaQuery(cq).iterator(); */ - + EntityManager em = resultRowDao.createNewManager(); try { @@ -318,7 +384,7 @@ public class OccurrenceJobUtil { resulRowIt = query.getResultList().iterator(); - + } catch (Exception e) { logger.error("Error in update: "+e.getMessage(), e); return null; @@ -328,59 +394,72 @@ public class OccurrenceJobUtil { } while(resulRowIt.hasNext()){ - + ResultRow row = resulRowIt.next(); - + if(row.getOccurencesKey()!=null && row.getOccurencesKey().length()>0){ keys.add(row.getOccurencesKey()); count += row.getOccurencesCount(); } - + } - + occurrenceKeys.setListKey(keys); occurrenceKeys.setTotalOccurrence(count); - + }catch (Exception e) { logger.error("error in getListOfSelectedKeyByDataSource "+ e); } - + logger.trace("found "+count+" occurrence points"); - + return occurrenceKeys; - + } + /** + * Creates the occurrence job on service by keys. 
+ * + * @param jobModel the job model + * @param taxonomyService the taxonomy service + * @param occurrencesJobDao the occurrences job dao + * @param keys the keys + * @param dataSources the data sources + * @param saveFileFormat the save file format + * @param csvType the csv type + * @param expectedOccurrence the expected occurrence + * @return the job occurrences model + */ public static JobOccurrencesModel createOccurrenceJobOnServiceByKeys(JobOccurrencesModel jobModel,SpeciesService taxonomyService, OccurrenceJobPersistence occurrencesJobDao, List keys, List dataSources, SaveFileFormat saveFileFormat, OccurrencesSaveEnum csvType, int expectedOccurrence) { String serviceJobId = null; - + StreamExtend streamKeys = new StreamExtend(keys.iterator()); //convert - + String csvTypeString = null; - + try { - + switch (saveFileFormat) { - + case CSV: - + if(csvType.equals(OccurrencesSaveEnum.STANDARD)) serviceJobId = taxonomyService.createOccurrenceCSVJob(streamKeys); else if(csvType.equals(OccurrencesSaveEnum.OPENMODELLER)) serviceJobId = taxonomyService.createOccurrenceCSVOpenModellerJob(streamKeys); - + if(jobModel.getCsvType()!=null) csvTypeString = jobModel.getCsvType().toString(); //CASE CSV - + break; - + case DARWIN_CORE: - + serviceJobId = taxonomyService.createOccurrenceDARWINCOREJob(streamKeys); - + csvTypeString = ""; - + break; default: @@ -391,23 +470,23 @@ public class OccurrenceJobUtil { logger.error("An error occured in create new occurrences job on server ",e); return null; } - + long submitTimeInMillis = Calendar.getInstance().getTimeInMillis(); - + try { - + //STORE INTO DAO OccurrencesJob occurrenceJob = new OccurrencesJob(serviceJobId, jobModel.getJobName(), jobModel.getDescription(), jobModel.getScientificName(), dataSources, DownloadState.PENDING.toString(), "", submitTimeInMillis, 0, 0, jobModel.getFileFormat().toString(),csvTypeString, jobModel.isByDataSource(), convertListKeyIntoStoreXMLString(keys), expectedOccurrence); - + //for debug // System.out.println("INTO createOccurrenceJobOnServiceByKeys " + occurrenceJob); - + occurrencesJobDao.insert(occurrenceJob); - + Date start = DateUtil.millisecondsToDate(submitTimeInMillis); jobModel.setSubmitTime(start); - + //FILL MODEL WITH OTHER DATA jobModel.setId(serviceJobId); jobModel.setState(DownloadState.PENDING); @@ -416,25 +495,31 @@ public class OccurrenceJobUtil { }catch (Exception e) { logger.error("An error occured in create new occurrences job on dao object " +e,e); } - + return jobModel; - + } - - + + + /** + * Convert list key into store xml string. + * + * @param keys the keys + * @return the string + */ public static String convertListKeyIntoStoreXMLString(List keys){ String storeKeys = ""; - + KeyStringList keyStringList = new KeyStringList(); - + XStreamUtil xstreamUtil = new XStreamUtil(ALIASKEY,KeyStringList.class); - + for (String key : keys) { // System.out.println("key :"+ key); logger.info("key converted: "+key); keyStringList.addKey(key); } - + storeKeys = xstreamUtil.toXML(keyStringList); //FOR DEBUG @@ -442,15 +527,21 @@ public class OccurrenceJobUtil { return storeKeys; } - + + /** + * Revert list key from stored xml string. 
+ * + * @param storedKeysAsXml the stored keys as xml + * @return the list + */ public static List revertListKeyFromStoredXMLString(String storedKeysAsXml){ - + List listKey = new ArrayList(); - + XStreamUtil xstreamUtil = new XStreamUtil(ALIASKEY,KeyStringList.class); - + KeyStringList keyStringList = (KeyStringList) xstreamUtil.fromXML(storedKeysAsXml); - + for (String key : keyStringList.getListKeys()) { // for debug // System.out.println("key :"+ key); diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/job/TaxonomyJobUtil.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/job/TaxonomyJobUtil.java index 027d5c0..7af7f28 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/job/TaxonomyJobUtil.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/job/TaxonomyJobUtil.java @@ -12,14 +12,21 @@ import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Predicate; import org.apache.log4j.Logger; -import org.gcube.data.spd.stubs.types.NodeStatus; -import org.gcube.data.spd.stubs.types.Status; +import org.gcube.data.spd.model.service.types.CompleteJobStatus; +import org.gcube.data.spd.model.service.types.JobStatus; +import org.gcube.data.spd.model.service.types.NodeStatus; import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.TaxonomyJobPersistence; import org.gcube.portlets.user.speciesdiscovery.server.util.DateUtil; import org.gcube.portlets.user.speciesdiscovery.shared.DownloadState; import org.gcube.portlets.user.speciesdiscovery.shared.JobTaxonomyModel; import org.gcube.portlets.user.speciesdiscovery.shared.TaxonomyJob; +/** + * The Class TaxonomyJobUtil. + * + * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it + * Jan 10, 2017 + */ public class TaxonomyJobUtil { //STATE RETURNED BY STATUS RESPONSE @@ -31,40 +38,50 @@ public class TaxonomyJobUtil { protected static Logger logger = Logger.getLogger(TaxonomyJobUtil.class); - public static JobTaxonomyModel convertJob(TaxonomyJob job, Status statusResponse, TaxonomyJobPersistence speciesJobDao) throws SQLException{ - - //TODO SET END TIME + + /** + * Convert job. + * + * @param job the job + * @param statusResponse the status response + * @param speciesJobDao the species job dao + * @return the job taxonomy model + * @throws SQLException the SQL exception + */ + public static JobTaxonomyModel convertJob(TaxonomyJob job, CompleteJobStatus statusResponse, TaxonomyJobPersistence speciesJobDao) throws SQLException{ + + //TODO SET END TIME JobTaxonomyModel jobSpeciesModel; - String status = statusResponse.getStatus(); + JobStatus status = statusResponse.getStatus(); DownloadState downloadState = null; long endTime = 0; - + downloadState = getDownloadState(status); logger.trace("download state: " + downloadState); - - + + if(downloadState==null){ //Case with exception logger.warn("download state is null, returning"); return null; } - + boolean onSaving = true; - + //GET CHILDREN ArrayList listChildJob = new ArrayList(); logger.trace("status response subnodes is != null? 
" + (statusResponse.getSubNodes()!=null)); if(statusResponse.getSubNodes()!=null){ logger.trace("subNodes size is: " + statusResponse.getSubNodes().size()); - + for (NodeStatus nodeStatus : statusResponse.getSubNodes()){ - + logger.trace("node status " + nodeStatus); - + DownloadState downloadStateChildren = getDownloadState(nodeStatus.getStatus()); if(!downloadStateChildren.equals(DownloadState.COMPLETED)) onSaving=false; - + listChildJob.add(new JobTaxonomyModel(UUID.randomUUID().toString(), nodeStatus.getScientificName(),downloadStateChildren)); } }else{ @@ -72,12 +89,12 @@ public class TaxonomyJobUtil { onSaving=false; } boolean changeStatus = false; - + //If status of children is completed and job status is not completed (the file is generated) or failed, set download state on saving - if(onSaving &&(!downloadState.equals(DownloadState.COMPLETED) && !downloadState.equals(DownloadState.FAILED))){ + if(onSaving &&!downloadState.equals(DownloadState.COMPLETED) && !downloadState.equals(DownloadState.FAILED)){ downloadState = DownloadState.SAVING; changeStatus = true; - + //if status is completed and job was saved, update status as saved }else if(downloadState.equals(DownloadState.COMPLETED)){ if(job.getState().compareTo(DownloadState.SAVED.toString())==0){ @@ -85,65 +102,65 @@ public class TaxonomyJobUtil { changeStatus = true; } } - + jobSpeciesModel = new JobTaxonomyModel(job.getId(), job.getDescriptiveName(), downloadState, null, job.getScientificName(), job.getDataSourceName(), job.getRank()); - + jobSpeciesModel.setListChildJobs(listChildJob); - + boolean changeEndTime = false; - + //UPDATE END TIME if(downloadState.equals(DownloadState.FAILED) || downloadState.equals(DownloadState.COMPLETED)){ // if(job.getEndTime()==0){ //UPDATE end time first time only -// +// // logger.trace("UPDATE end time first time only - " + downloadState); // endTime = Calendar.getInstance().getTimeInMillis(); // job.setEndTime(endTime); // changeEndTime = true; //// speciesJobDao.update(job); -// +// // } - + // System.out.println("job "+job); // System.out.println("statusResponse.getEndDate() "+statusResponse.getEndDate()); // System.out.println("job.getEndTime() "+job.getEndTime()); - + if(statusResponse.getEndDate()!=null && job.getEndTime()==0){ //UPDATE end time first time only - + logger.trace("UPDATE end time first time only - " + downloadState); // endTime = Calendar.getInstance().getTimeInMillis(); endTime = statusResponse.getEndDate().getTimeInMillis(); job.setEndTime(endTime); changeEndTime = true; // speciesJobDao.update(job); - + } - + } - + boolean changeStartTime = false; - + //SET START TIME long startTime = job.getStartTime(); - + // System.out.println("statusResponse.getStartDate(): "+statusResponse.getStartDate()); // System.out.println("startTime: "+startTime); - + if(statusResponse.getStartDate()!=null && startTime==0){ //UPDATE start time first time only Date start = DateUtil.millisecondsToDate(statusResponse.getStartDate().getTimeInMillis()); // jobSpeciesModel.setStartTime(DateUtil.dateToDateFormatString(start)); jobSpeciesModel.setStartTime(start); changeStartTime = true; } - - + + try{ //UPDATE DAO if(changeStatus || changeEndTime || changeStartTime){ job.setState(downloadState.toString()); speciesJobDao.update(job); - } + } }catch (Exception e) { logger.trace("An error occurred when update dao: ",e); } @@ -154,10 +171,10 @@ public class TaxonomyJobUtil { // jobSpeciesModel.setStartTime(DateUtil.dateToDateFormatString(start)); jobSpeciesModel.setSubmitTime(submit); - + endTime = 
job.getEndTime(); long elapsedTime = 0; - + //SET END TIME, BECAUSE IT IS CHANGED if(endTime!=0){ Date end = DateUtil.millisecondsToDate(endTime); @@ -167,31 +184,49 @@ public class TaxonomyJobUtil { } else elapsedTime = Calendar.getInstance().getTimeInMillis(); - + //SET ELAPSED TIME jobSpeciesModel.setElapsedTime(DateUtil.getDifference(submitTime, elapsedTime)); - + return jobSpeciesModel; } - - public static DownloadState getDownloadState(String status){ - + + + /** + * Gets the download state. + * + * @param status the status + * @return the download state + */ + public static DownloadState getDownloadState(JobStatus status){ + if(status!=null){ - if(status.compareToIgnoreCase(PENDING)==0){ - return DownloadState.PENDING; - }else if(status.compareToIgnoreCase(RUNNING)==0){ - return DownloadState.ONGOING; - }else if(status.compareToIgnoreCase(FAILED)==0){ - return DownloadState.FAILED; - }else if(status.compareToIgnoreCase(COMPLETED)==0){ + switch (status) { + case COMPLETED: return DownloadState.COMPLETED; + case FAILED: + return DownloadState.FAILED; + case PENDING: + return DownloadState.PENDING; + case RUNNING: + return DownloadState.ONGOING; + default: + return null; } } return null; - + } - + + /** + * Delete taxonomy job by id. + * + * @param jobIdentifier the job identifier + * @param taxonomyJobDao the taxonomy job dao + * @return the int + * @throws SQLException the SQL exception + */ public static int deleteTaxonomyJobById(String jobIdentifier, TaxonomyJobPersistence taxonomyJobDao) throws SQLException{ logger.trace("Delete taxonomy job id: " + jobIdentifier); try{ @@ -203,41 +238,49 @@ public class TaxonomyJobUtil { logger.error("An error occured deleteTaxonomyJobById " + jobIdentifier + " exception: "+e, e); e.printStackTrace(); } - + return 0; } + /** + * Change status taxonomy job by id. 
+ * + * @param jobIdentifier the job identifier + * @param state the state + * @param taxonomyJobDAO the taxonomy job dao + * @return the int + */ public static int changeStatusTaxonomyJobById(String jobIdentifier,DownloadState state, TaxonomyJobPersistence taxonomyJobDAO) { logger.trace("Change status taxonomy job id: " + jobIdentifier); // System.out.println("Delete job id: " + jobIdentifier); - + int result = 0; - + try{ - + CriteriaBuilder queryBuilder = taxonomyJobDAO.getCriteriaBuilder(); CriteriaQuery cq = queryBuilder.createQuery(); Predicate pr1 = queryBuilder.equal(taxonomyJobDAO.rootFrom(cq).get(TaxonomyJob.ID_FIELD), jobIdentifier); cq.where(pr1); - + Iterator iterator = taxonomyJobDAO.executeCriteriaQuery(cq).iterator(); - + TaxonomyJob job; - + if(iterator.hasNext()) job = iterator.next(); else return 0; - + job.setState(state.toString()); - + taxonomyJobDAO.update(job); - + }catch (Exception e) { logger.error("An error occured in change status jobId: " + jobIdentifier + " exception: "+e, e ); } - + return result; } } diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/ResultRowBuffer.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/ResultRowBuffer.java index d28ef1c..dc824fe 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/ResultRowBuffer.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/ResultRowBuffer.java @@ -1,5 +1,5 @@ /** - * + * */ package org.gcube.portlets.user.speciesdiscovery.server.persistence; @@ -27,41 +27,41 @@ import org.gcube.portlets.user.speciesdiscovery.shared.util.NormalizeString; /** - * + * * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it * @May 20, 2013 * */ public class ResultRowBuffer extends AbstractSelectableDaoBuffer implements FilterableFetchingBuffer { - + protected Logger logger = Logger.getLogger(ResultRowBuffer.class); - + private AbstractPersistence taxonDao; private int filteredListSize = 0; - + public ResultRowBuffer(AbstractPersistence dao, AbstractPersistence taxonDao) { super(dao, ResultRow.ID_FIELD, ResultRow.SELECTED); this.taxonDao = taxonDao; } - + /** * {@inheritDoc} - * @throws Exception + * @throws Exception */ @Override public void add(ResultRow row) throws Exception{ //FOR DEBUG // logger.trace("Add item "+ row.getId() + " service id: " +row.getServiceId()); - + super.add(row); } - + public int getFilteredListSize() throws SQLException { return filteredListSize; } - + //TODO MODIFIED @Override @@ -71,22 +71,18 @@ public class ResultRowBuffer extends AbstractSelectableDaoBuffer impl List list = new ArrayList(); Iterator iterator = null; String value; - + if(activeFiltersObject!=null){ - //FILTER BY CLASSIFICATION if(activeFiltersObject.isByClassification()){ -// List listId = activeFiltersObject.getListByClassification(); - int counter = activeFiltersObject.getNumberOfData(); - logger.trace("in classification filter - counter: "+counter); logger.trace("in classification filter - rank: "+activeFiltersObject.getRankClassification()); logger.trace("in classification filter - classification id: "+activeFiltersObject.getClassificationId()); - + String columName = null; - + if(activeFiltersObject.getRankClassification().compareTo(MainTaxonomicRankEnum.KINGDOM.getLabel())==0) columName = ResultRow.KINGDOM_ID; else if(activeFiltersObject.getRankClassification().compareTo(MainTaxonomicRankEnum.FAMILY.getLabel())==0) @@ -101,121 +97,103 @@ public class ResultRowBuffer extends 
AbstractSelectableDaoBuffer impl columName = ResultRow.PHYLUM_ID; else if(activeFiltersObject.getRankClassification().compareTo(MainTaxonomicRankEnum.SPECIES.getLabel())==0) columName = ResultRow.SPECIES_ID; - + // logger.trace("in classification filter - columName: "+columName); try { - CriteriaBuilder queryBuilder = dao.getCriteriaBuilder(); - Query query = dao.createNewManager().createQuery("select r FROM ResultRow r where r."+columName+ "='"+activeFiltersObject.getClassificationId()+"'"); -// query.setMaxResults(counter); - iterator = query.getResultList().iterator(); // logger.trace("in classification filter - statement: "+queryBuilder.where().eq(columName, activeFiltersObject.getClassificationId()).getStatement()); } catch (Exception e) { logger.error("Error in activeFiltersObject.isByClassification(): "+e, e); } - + //FILTER BY DATA PROVIDER }else if(activeFiltersObject.isByDataProvider()){ - + try { CriteriaBuilder queryBuilder = dao.getCriteriaBuilder(); value = activeFiltersObject.getDataProviderName(); CriteriaQuery cq = queryBuilder.createQuery(); Predicate pr1 = queryBuilder.equal(dao.rootFrom(cq).get(ResultRow.DATAPROVIDER_NAME), value); cq.where(pr1); - + //TODO FIXME empty value logger.trace("FILTER BY DATA PROVIDER: "+ value ); - + iterator = dao.executeCriteriaQuery(cq).iterator(); - + } catch (Exception e) { logger.error("Error in activeFiltersObject.isByDataProvider(): "+e, e); e.printStackTrace(); } - + //FILTER BY DATA SOURCE }else if(activeFiltersObject.isByDataSourceName()){ - + try { CriteriaBuilder queryBuilder = dao.getCriteriaBuilder(); value = activeFiltersObject.getDataSourceName(); CriteriaQuery cq = queryBuilder.createQuery(); Predicate pr1 = queryBuilder.equal(dao.rootFrom(cq).get(ResultRow.DATASOURCE_NAME), NormalizeString.validateUndefined(value)); cq.where(pr1); - + logger.trace("FILTER BY DATA DATA SOURCE NAME: "+ value ); - + iterator = dao.executeCriteriaQuery(cq).iterator(); - + } catch (Exception e) { logger.error("Error in activeFiltersObject.isByDataSourceName(): "+e, e); e.printStackTrace(); } - + //FILTER BY RANK }else if(activeFiltersObject.isByRank()){ - - try { - - CriteriaBuilder queryBuilder = taxonDao.getCriteriaBuilder(); - value = activeFiltersObject.getRankName(); -// value = NormalizeString.lowerCaseUpFirstChar(activeFiltersObject.getRankName()); - CriteriaQuery cq = queryBuilder.createQuery(); - Predicate pr1 = queryBuilder.equal(taxonDao.rootFrom(cq).get(Taxon.RANK), NormalizeString.validateUndefined(value)); - cq.where(pr1); - - Iterator iteratorTaxon = taxonDao.executeCriteriaQuery(cq).iterator(); - - logger.trace("in rank filter - value: "+value); - List listTaxonId = new ArrayList(); - - if(iteratorTaxon!=null){ - - while(iteratorTaxon.hasNext()){ - - Taxon tax = iteratorTaxon.next(); - listTaxonId.add(tax.getId()); - } + try { + + value = activeFiltersObject.getRankName(); + EntityManager em = dao.createNewManager(); + + String queryString = "select *" + + " FROM "+ResultRow.class.getSimpleName()+" r" + + " INNER JOIN RESULTROW_TAXON rt on r.ID=rt.RESULTROW_ID" + + " INNER JOIN "+Taxon.class.getSimpleName()+" t on t.INTERNALID=rt.MATCHINGTAXON_INTERNALID" + + " where t.RANK = '"+value+"' and t.ID IN" + + " (select MIN(tax.ID) from TAXON tax)"; + + Query query = em.createNativeQuery(queryString, ResultRow.class); + List listResultRow = new ArrayList(); + try { + + listResultRow = query.getResultList(); + } catch (Exception e) { + logger.error("Error in ResultRow - executeCriteriaQuery: " + e.getMessage(), e); + } finally { + 
em.close(); } - logger.trace("in rank filter - listTaxonId size: "+listTaxonId.size()); - - - queryBuilder = dao.getCriteriaBuilder(); - cq = queryBuilder.createQuery(); - pr1 = dao.rootFrom(cq).get(ResultRow.PARENT_FOREIGN_KEY_TAXON).in(listTaxonId); - cq.where(pr1); - - iterator = dao.executeCriteriaQuery(cq).iterator(); + iterator = listResultRow.iterator(); } catch (Exception e) { logger.error("Error in activeFiltersObject.isByRank(): "+e, e); e.printStackTrace(); } } - + if(iterator!=null){ - while(iterator.hasNext()){ - ResultRow row = iterator.next(); list.add(row); } - - filteredListSize = list.size(); - + filteredListSize = list.size(); } } - + logger.trace("RETURNED List size " + list.size()); - return list; } @@ -225,14 +203,11 @@ public class ResultRowBuffer extends AbstractSelectableDaoBuffer impl EntityManager em = dao.createNewManager(); //TODO use filterMap - + try { em.getTransaction().begin(); - int updateCount = em.createQuery("UPDATE ResultRow SET " + ResultRow.SELECTED + " = "+ selection).executeUpdate(); - logger.trace("Updated " + updateCount + " item"); - em.getTransaction().commit(); } finally { if (em.getTransaction().isActive()) @@ -248,22 +223,22 @@ public class ResultRowBuffer extends AbstractSelectableDaoBuffer impl @Override public void updateAllSelectionByIds(boolean selection, List listIds) throws Exception { - + EntityManager em = dao.createNewManager(); - String queryString = "UPDATE ResultRow t SET " + String queryString = "UPDATE ResultRow t SET " + ResultRow.SELECTED + " = "+ selection +" where " + ResultRow.ID_FIELD+" IN :inclList"; - + try { em.getTransaction().begin(); TypedQuery query = em.createQuery(queryString, ResultRow.class); - + query.setParameter("inclList", listIds); - + int updateCount = query.executeUpdate(); - + logger.trace("Updated " + updateCount + " item"); em.getTransaction().commit(); diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/TaxonomyRowBuffer.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/TaxonomyRowBuffer.java index d6d7a09..64a3a92 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/TaxonomyRowBuffer.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/TaxonomyRowBuffer.java @@ -1,5 +1,5 @@ /** - * + * */ package org.gcube.portlets.user.speciesdiscovery.server.persistence; @@ -25,7 +25,7 @@ import org.gcube.portlets.user.speciesdiscovery.shared.filter.ResultFilter; import org.gcube.portlets.user.speciesdiscovery.shared.util.NormalizeString; /** - * + * * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it * @May 20, 2013 * @@ -37,17 +37,17 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer public TaxonomyRowBuffer(AbstractPersistence dao) { super(dao, TaxonomyRow.ID_FIELD, TaxonomyRow.SELECTED); } - + /** * {@inheritDoc} - * @throws Exception + * @throws Exception */ @Override public void add(TaxonomyRow row) throws Exception { //DEBUG // logger.trace("Add item "+ row.getId() + " service id: " +row.getServiceId()); - + super.add(row); } @@ -58,14 +58,14 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer Iterator iterator = null; // QueryBuilder queryBuilder = dao.queryBuilder(); String value; - - if(activeFiltersObject!=null){ + + if(activeFiltersObject!=null){ //FILTER BY CLASSIFICATION if(activeFiltersObject.isByClassification()){ int counter = activeFiltersObject.getNumberOfData(); String columName = null; - + 
if(activeFiltersObject.getRankClassification().compareTo(MainTaxonomicRankEnum.KINGDOM.getLabel())==0) columName = TaxonomyRow.KINGDOM_ID; else if(activeFiltersObject.getRankClassification().compareTo(MainTaxonomicRankEnum.FAMILY.getLabel())==0) @@ -80,17 +80,14 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer columName = TaxonomyRow.PHYLUM_ID; else if(activeFiltersObject.getRankClassification().compareTo(MainTaxonomicRankEnum.SPECIES.getLabel())==0) columName = TaxonomyRow.SPECIES_ID; -// +// // logger.trace("in classification filter - columName: "+columName); try { - + CriteriaBuilder queryBuilder = dao.getCriteriaBuilder(); - Query query = dao.createNewManager().createQuery("select r FROM TaxonomyRow r where r."+columName+ "='"+activeFiltersObject.getClassificationId()+"'"); -// query.setMaxResults(counter); - iterator = query.getResultList().iterator(); // logger.trace("in classification filter - statement: "+queryBuilder.where().eq(columName, activeFiltersObject.getClassificationId()).getStatement()); @@ -98,22 +95,21 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer logger.error("Error in activeFiltersObject.isByClassification(): "+e, e); e.printStackTrace(); } - + //FILTER BY DATA PROVIDER }else if(activeFiltersObject.isByDataProvider()){ - + try { CriteriaBuilder queryBuilder = dao.getCriteriaBuilder(); value = activeFiltersObject.getDataProviderName(); CriteriaQuery cq = queryBuilder.createQuery(); Predicate pr1 = queryBuilder.equal(dao.rootFrom(cq).get(TaxonomyRow.DATAPROVIDER_NAME), value); cq.where(pr1); - + //TODO FIXME empty value logger.trace("FILTER BY DATA PROVIDER: "+ value ); - iterator = dao.executeCriteriaQuery(cq).iterator(); - + } catch (Exception e) { logger.error("Error in activeFiltersObject.isByDataProvider(): "+e, e); e.printStackTrace(); @@ -121,30 +117,27 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer //FILTER BY DATA SOURCE }else if(activeFiltersObject.isByDataSourceName()){ - + try { CriteriaBuilder queryBuilder = dao.getCriteriaBuilder(); value = activeFiltersObject.getDataSourceName(); CriteriaQuery cq = queryBuilder.createQuery(); Predicate pr1 = queryBuilder.equal(dao.rootFrom(cq).get(TaxonomyRow.DATAPROVIDER_NAME), value); cq.where(pr1); - //TODO FIXME empty value logger.trace("FILTER BY DATA SOURCE: "+ value ); - iterator = dao.executeCriteriaQuery(cq).iterator(); - + } catch (Exception e) { logger.error("Error in activeFiltersObject.isByDataProvider(): "+e, e); e.printStackTrace(); } - + // //FILTER BY RANK }else if(activeFiltersObject.isByRank()){ - + try { - CriteriaBuilder queryBuilder = dao.getCriteriaBuilder(); value = activeFiltersObject.getRankName(); // value = NormalizeString.lowerCaseUpFirstChar(activeFiltersObject.getRankName()); @@ -152,33 +145,25 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer CriteriaQuery cq = queryBuilder.createQuery(); Predicate pr1 = queryBuilder.equal(dao.rootFrom(cq).get(TaxonomyRow.RANK), NormalizeString.validateUndefined(value)); cq.where(pr1); - logger.trace("FILTER BY RANK: "+ value ); - iterator = dao.executeCriteriaQuery(cq).iterator(); - } catch (Exception e) { logger.error("Error in activeFiltersObject.isByRank(): "+e, e); } - + } if(iterator!=null){ - while(iterator.hasNext()){ - TaxonomyRow row = iterator.next(); list.add(row); } - - filteredListSize = list.size(); - + filteredListSize = list.size(); } } - + logger.trace("RETURNED List size " + list.size()); - return list; } @@ -194,8 +179,8 @@ public class 
TaxonomyRowBuffer extends AbstractSelectableDaoBuffer EntityManager em = dao.createNewManager(); //TODO generalize? - String queryString = "UPDATE TaxonomyRow SET " - + ResultRow.SELECTED + " = "+ selection + String queryString = "UPDATE TaxonomyRow SET " + + ResultRow.SELECTED + " = "+ selection +" where "+TaxonomyRow.IS_PARENT +"=false"; try { @@ -210,7 +195,7 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer if (em.getTransaction().isActive()) em.getTransaction().rollback(); em.close(); - } + } } /* (non-Javadoc) @@ -218,12 +203,12 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer */ @Override public void updateAllSelectionByIds(boolean selection, List listIds) throws Exception { - + EntityManager em = dao.createNewManager(); //TODO generalize? - String queryString = "UPDATE TaxonomyRow SET " - + ResultRow.SELECTED + " = "+ selection + String queryString = "UPDATE TaxonomyRow SET " + + ResultRow.SELECTED + " = "+ selection +" where "+TaxonomyRow.IS_PARENT +"=false AND " +ResultRow.ID_FIELD+" IN :inclList"; @@ -231,9 +216,9 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer em.getTransaction().begin(); TypedQuery query = em.createQuery(queryString, TaxonomyRow.class); - + query.setParameter("inclList", listIds); - + int updateCount = query.executeUpdate(); logger.trace("Updated " + updateCount + " item"); @@ -243,8 +228,8 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer if (em.getTransaction().isActive()) em.getTransaction().rollback(); em.close(); - } - + } + } diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/OccurrenceJobPersistence.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/OccurrenceJobPersistence.java index 0f0b4c6..15d187c 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/OccurrenceJobPersistence.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/OccurrenceJobPersistence.java @@ -23,7 +23,7 @@ public class OccurrenceJobPersistence extends AbstractPersistence getList(Map filterMap, int startIndex, int offset) throws DatabaseServiceException{ - + EntityManager em = super.createNewManager(); List listOJ = new ArrayList(); try { String queryString = "select t from OccurrencesJob t"; - + if(filterMap!=null && filterMap.size()>0){ queryString+=" where "; for (String param : filterMap.keySet()) { @@ -145,11 +145,11 @@ public class OccurrenceJobPersistence extends AbstractPersistence{ protected CriteriaQuery criteriaQuery; protected Root rootFrom; protected Logger logger = Logger.getLogger(OccurrenceRowPersistence.class); - + public OccurrenceRowPersistence(EntityManagerFactory factory) throws DatabaseServiceException{ super(factory); criteriaBuilder = super.createNewManager().getCriteriaBuilder(); } - + @Override public Root rootFrom(CriteriaQuery cq){ return cq.from(Occurrence.class); @@ -78,7 +78,7 @@ public class OccurrenceRowPersistence extends AbstractPersistence{ Occurrence row = null; try { row = em.getReference(Occurrence.class, id); - + } finally { em.close(); } @@ -86,10 +86,10 @@ public class OccurrenceRowPersistence extends AbstractPersistence{ logger.trace("getItemByKey return row: "+row.getId() + ", service id: " + row.getServiceId()); else logger.trace("getItemByKey return null"); - + //FOR DEBUG // System.out.println("getItemByKey return: "+row ); - + return row; } @@ -99,7 +99,7 @@ public class OccurrenceRowPersistence extends 
AbstractPersistence{ } /** - * + * * @return */ public int removeAll() throws DatabaseServiceException{ @@ -110,7 +110,7 @@ public class OccurrenceRowPersistence extends AbstractPersistence{ em.getTransaction().begin(); removed = em.createQuery("DELETE FROM Occurrence").executeUpdate(); em.getTransaction().commit(); - logger.trace("DELETE FROM Occurrence " + removed +" items"); + logger.trace("DELETED FROM Occurrence " + removed +" items"); } catch (Exception e) { logger.error("Error in removeAll: " + e.getMessage(), e); @@ -124,7 +124,7 @@ public class OccurrenceRowPersistence extends AbstractPersistence{ @Override public List getList(int startIndex, int offset) throws DatabaseServiceException { - + EntityManager em = super.createNewManager(); List listOccurrence = new ArrayList(); try { @@ -132,7 +132,7 @@ public class OccurrenceRowPersistence extends AbstractPersistence{ query.setFirstResult(startIndex); query.setMaxResults(offset); listOccurrence = query.getResultList(); - + } finally { em.close(); } @@ -141,12 +141,12 @@ public class OccurrenceRowPersistence extends AbstractPersistence{ @Override public List getList(Map filterMap, int startIndex, int offset) throws DatabaseServiceException{ - + EntityManager em = super.createNewManager(); List listOccurrence = new ArrayList(); try { String queryString = "select t from Occurrence t"; - + if(filterMap!=null && filterMap.size()>0){ queryString+=" where "; for (String param : filterMap.keySet()) { @@ -154,11 +154,11 @@ public class OccurrenceRowPersistence extends AbstractPersistence{ queryString+=" t."+param+"="+value; queryString+=AND; } - + queryString = queryString.substring(0, queryString.lastIndexOf(AND)); } Query query = em.createQuery(queryString); - + if(startIndex>-1) query.setFirstResult(startIndex); if(offset>-1) @@ -170,28 +170,28 @@ public class OccurrenceRowPersistence extends AbstractPersistence{ } return listOccurrence; } - - + + @Override public List executeTypedQuery(CriteriaQuery cq, int startIndex, int offset) throws DatabaseServiceException{ - + EntityManager em = super.createNewManager(); List listOJ = new ArrayList(); try { TypedQuery typedQuery = em.createQuery(cq); - + if(startIndex>-1) typedQuery.setFirstResult(startIndex); if(offset>-1) typedQuery.setMaxResults(offset); listOJ = typedQuery.getResultList(); - + } finally { em.close(); } - + return listOJ; } @@ -200,13 +200,13 @@ public class OccurrenceRowPersistence extends AbstractPersistence{ public int deleteItemByIdField(String idField) throws DatabaseServiceException{ EntityManager em = super.createNewManager(); int removed = 0; - + try { em.getTransaction().begin(); removed = em.createQuery("DELETE FROM Occurrence t WHERE t."+Occurrence.ID_FIELD+"='"+idField+"'").executeUpdate(); em.getTransaction().commit(); logger.trace("Item "+ idField + " was deleted from Occurrence"); - + } catch (Exception e) { logger.error("Error in Occurrence deleteJobById: " + e.getMessage(), e); diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/ResultRowPersistence.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/ResultRowPersistence.java index c5bc5cb..854655b 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/ResultRowPersistence.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/ResultRowPersistence.java @@ -23,14 +23,14 @@ public class ResultRowPersistence extends AbstractPersistence{ protected CriteriaQuery criteriaQuery; 
protected Root rootFrom; protected Logger logger = Logger.getLogger(ResultRowPersistence.class); - + public ResultRowPersistence(EntityManagerFactory factory) throws DatabaseServiceException{ super(factory); criteriaBuilder = super.createNewManager().getCriteriaBuilder(); // CriteriaQuery cq = criteriaBuilder.createQuery(); // Root rootFrom = cq.from(ResultRow.class); } - + @Override public Root rootFrom(CriteriaQuery cq){ return cq.from(ResultRow.class); @@ -46,7 +46,7 @@ public class ResultRowPersistence extends AbstractPersistence{ listResultRow = query.getResultList(); } catch (Exception e) { - logger.error("Error in ResultRow - getList: " + e.getMessage(), e); + logger.error("Error in ResultRow - getList: " + e.getMessage(), e); } finally { em.close(); } @@ -70,7 +70,7 @@ public class ResultRowPersistence extends AbstractPersistence{ listResultRow = query.getResultList(); } catch (Exception e) { - logger.error("Error in ResultRow - executeCriteriaQuery: " + e.getMessage(), e); + logger.error("Error in ResultRow - executeCriteriaQuery: " + e.getMessage(), e); } finally { em.close(); } @@ -85,9 +85,9 @@ public class ResultRowPersistence extends AbstractPersistence{ ResultRow row = null; try { row = em.getReference(ResultRow.class, id); - + } catch (Exception e) { - logger.error("Error in ResultRow - getItemByKey: " + e.getMessage(), e); + logger.error("Error in ResultRow - getItemByKey: " + e.getMessage(), e); } finally { em.close(); } @@ -95,21 +95,21 @@ public class ResultRowPersistence extends AbstractPersistence{ logger.trace("getItemByKey return row: "+row.getId() + ", service id: " + row.getServiceId()); else logger.trace("getItemByKey return null"); - + //FOR DEBUG // System.out.println("getItemByKey return: "+row ); - + return row; } - + @Override public CriteriaBuilder getCriteriaBuilder() throws DatabaseServiceException{ return createNewManager().getCriteriaBuilder(); } /** - * + * * @return */ public int removeAll() throws DatabaseServiceException{ @@ -120,10 +120,10 @@ public class ResultRowPersistence extends AbstractPersistence{ em.getTransaction().begin(); removed = em.createQuery("DELETE FROM ResultRow").executeUpdate(); em.getTransaction().commit(); - logger.trace("DELETE FROM ResultRow " + removed +" items"); + logger.trace("DELETED FROM ResultRow " + removed +" items"); } catch (Exception e) { - logger.error("Error in ResultRow - removeAll: " + e.getMessage(), e); + logger.error("Error in ResultRow - removeAll: " + e.getMessage(), e); } finally { em.close(); } @@ -133,7 +133,7 @@ public class ResultRowPersistence extends AbstractPersistence{ @Override public List getList(int startIndex, int offset) throws DatabaseServiceException{ - + EntityManager em = super.createNewManager(); List listResultRow = new ArrayList(); try { @@ -141,23 +141,23 @@ public class ResultRowPersistence extends AbstractPersistence{ query.setFirstResult(startIndex); query.setMaxResults(offset); listResultRow = query.getResultList(); - + } catch (Exception e) { - logger.error("Error in ResultRow - getList: " + e.getMessage(), e); + logger.error("Error in ResultRow - getList: " + e.getMessage(), e); } finally { em.close(); } return listResultRow; } - + @Override public List getList(Map filterMap, int startIndex, int offset) throws DatabaseServiceException{ - + EntityManager em = super.createNewManager(); List listResultRow = new ArrayList(); try { String queryString = "select t from ResultRow t"; - + if(filterMap!=null && filterMap.size()>0){ queryString+=" where "; for (String param : 
filterMap.keySet()) { @@ -165,42 +165,42 @@ public class ResultRowPersistence extends AbstractPersistence{ queryString+=" t."+param+"="+value; queryString+=AND; } - + queryString = queryString.substring(0, queryString.lastIndexOf(AND)); } Query query = em.createQuery(queryString); listResultRow = query.getResultList(); } catch (Exception e) { - logger.error("Error in ResultRow - getList: " + e.getMessage(), e); + logger.error("Error in ResultRow - getList: " + e.getMessage(), e); } finally { em.close(); } return listResultRow; } - + @Override public List executeTypedQuery(CriteriaQuery cq, int startIndex, int offset) throws DatabaseServiceException{ - + EntityManager em = super.createNewManager(); List listOJ = new ArrayList(); try { TypedQuery typedQuery = em.createQuery(cq); - + if(startIndex>-1) typedQuery.setFirstResult(startIndex); if(offset>-1) typedQuery.setMaxResults(offset); listOJ = typedQuery.getResultList(); - + } catch (Exception e) { - logger.error("Error in ResultRow - executeTypedQuery: " + e.getMessage(), e); + logger.error("Error in ResultRow - executeTypedQuery: " + e.getMessage(), e); } finally { em.close(); } - + return listOJ; } @@ -209,13 +209,13 @@ public class ResultRowPersistence extends AbstractPersistence{ public int deleteItemByIdField(String idField) throws DatabaseServiceException{ EntityManager em = super.createNewManager(); int removed = 0; - + try { em.getTransaction().begin(); removed = em.createQuery("DELETE FROM ResultRow t WHERE t."+ResultRow.ID_FIELD+"='"+idField+"'").executeUpdate(); em.getTransaction().commit(); logger.trace("Item "+ idField + " was deleted from ResultRow"); - + } catch (Exception e) { logger.error("Error in ResultRow - deleteJobById: " + e.getMessage(), e); diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/TaxonRowPersistence.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/TaxonRowPersistence.java index 80704f9..5fcfa35 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/TaxonRowPersistence.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/TaxonRowPersistence.java @@ -31,7 +31,7 @@ public class TaxonRowPersistence extends AbstractPersistence{ em.getTransaction().begin(); removed = em.createQuery("DELETE FROM Taxon").executeUpdate(); em.getTransaction().commit(); - logger.trace("DELETE FROM Taxon " + removed +" items"); + logger.trace("DELETED FROM Taxon " + removed +" items"); } catch (Exception e) { logger.error("Error in removeAll: " + e.getMessage(), e); @@ -51,7 +51,7 @@ public class TaxonRowPersistence extends AbstractPersistence{ Query query = em.createQuery("select t from Taxon t"); listTaxon = query.getResultList(); - + } catch (Exception e) { logger.error("Error in Taxon - removeAll: " + e.getMessage(), e); @@ -63,7 +63,7 @@ public class TaxonRowPersistence extends AbstractPersistence{ @Override public List getList(int startIndex, int offset) throws DatabaseServiceException { - + EntityManager em = super.createNewManager(); List listTaxon = new ArrayList(); try { @@ -71,7 +71,7 @@ public class TaxonRowPersistence extends AbstractPersistence{ query.setFirstResult(startIndex); query.setMaxResults(offset); listTaxon = query.getResultList(); - + } catch (Exception e) { logger.error("Error in Taxon - getList: " + e.getMessage(), e); @@ -93,7 +93,7 @@ public class TaxonRowPersistence extends AbstractPersistence{ Taxon row = null; try { row = em.getReference(Taxon.class, id); 
- + } catch (Exception e) { logger.error("Error in Taxon - getItemByKey: " + e.getMessage(), e); @@ -104,10 +104,10 @@ public class TaxonRowPersistence extends AbstractPersistence{ logger.trace("getItemByKey return row id: "+row.getId()); else logger.trace("getItemByKey return null"); - + //FOR DEBUG // System.out.println("getItemByKey return: "+row ); - + return row; } @@ -146,7 +146,7 @@ public class TaxonRowPersistence extends AbstractPersistence{ List listTaxon = new ArrayList(); try { String queryString = "select t from Taxon t"; - + if(filterMap!=null && filterMap.size()>0){ queryString+=" where "; for (String param : filterMap.keySet()) { @@ -154,7 +154,7 @@ public class TaxonRowPersistence extends AbstractPersistence{ queryString+=" t."+param+"="+value; queryString+=AND; } - + queryString = queryString.substring(0, queryString.lastIndexOf(AND)); } Query query = em.createQuery(queryString); @@ -176,21 +176,21 @@ public class TaxonRowPersistence extends AbstractPersistence{ try { TypedQuery typedQuery = em.createQuery(cq); - + if(startIndex>-1) typedQuery.setFirstResult(startIndex); if(offset>-1) typedQuery.setMaxResults(offset); listTaxon = typedQuery.getResultList(); - + } catch (Exception e) { logger.error("Error in Taxon - executeTypedQuery: " + e.getMessage(), e); }finally { em.close(); } - + return listTaxon; } @@ -198,13 +198,13 @@ public class TaxonRowPersistence extends AbstractPersistence{ public int deleteItemByIdField(String idField) throws DatabaseServiceException{ EntityManager em = super.createNewManager(); int removed = 0; - + try { em.getTransaction().begin(); removed = em.createQuery("DELETE FROM Taxon t WHERE t."+Taxon.ID_FIELD+"='"+idField+"'").executeUpdate(); em.getTransaction().commit(); logger.trace("Item "+ idField + " was deleted from Taxon"); - + } catch (Exception e) { logger.error("Error in Taxon deleteJobById: " + e.getMessage(), e); diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/TaxonomyJobPersistence.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/TaxonomyJobPersistence.java index 111aaec..b9b4413 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/TaxonomyJobPersistence.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/TaxonomyJobPersistence.java @@ -30,7 +30,7 @@ public class TaxonomyJobPersistence extends AbstractPersistence{ em.getTransaction().begin(); removed = em.createQuery("DELETE FROM TaxonomyJob").executeUpdate(); em.getTransaction().commit(); - logger.trace("DELETE FROM TaxonomyJob " + removed +" items"); + logger.trace("DELETED FROM TaxonomyJob " + removed +" items"); } catch (Exception e) { logger.error("Error in removeAll: " + e.getMessage(), e); @@ -41,19 +41,19 @@ public class TaxonomyJobPersistence extends AbstractPersistence{ return removed; } - + @Override public int deleteItemByIdField(String idField) throws DatabaseServiceException{ EntityManager em = super.createNewManager(); int removed = 0; - + try { em.getTransaction().begin(); removed = em.createQuery("DELETE FROM TaxonomyJob t WHERE t."+TaxonomyJob.ID_FIELD+"='"+idField+"'").executeUpdate(); em.getTransaction().commit(); - logger.trace("Item "+ idField + " was deleted from TaxonomyJob. removed "+ removed + " item" ); + logger.trace("Item "+ idField + " was deleted from TaxonomyJob. removed "+ removed + " item" ); // System.out.println("Item "+ idField + " was deleted from TaxonomyJob. 
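For reference, the executeTypedQuery implementations above treat a negative startIndex or offset as "no paging" and otherwise map them onto setFirstResult/setMaxResults. A caller-side sketch with a criteria query, assuming an EntityManager obtained the same way the DAOs do via createNewManager() (the 25-row page size is illustrative):

    import javax.persistence.criteria.CriteriaBuilder;
    import javax.persistence.criteria.CriteriaQuery;
    import javax.persistence.criteria.Root;

    CriteriaBuilder cb = em.getCriteriaBuilder();
    CriteriaQuery<Taxon> cq = cb.createQuery(Taxon.class);
    Root<Taxon> root = cq.from(Taxon.class);
    cq.select(root);
    // First page of 25 rows; pass -1 for either argument to disable paging.
    List<Taxon> page = taxonRowPersistence.executeTypedQuery(cq, 0, 25);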
removed "+ removed + " item" ); - + } catch (Exception e) { logger.error("Error in TaxonomyJob deleteJobById: " + e.getMessage(), e); e.printStackTrace(); @@ -73,7 +73,7 @@ public class TaxonomyJobPersistence extends AbstractPersistence{ Query query = em.createQuery("select t from TaxonomyJob t"); listTaxJob = query.getResultList(); - + } catch (Exception e) { logger.error("Error in TaxonomyJob - getList: " + e.getMessage(), e); @@ -95,7 +95,7 @@ public class TaxonomyJobPersistence extends AbstractPersistence{ TaxonomyJob taxJob = null; try { taxJob = em.getReference(TaxonomyJob.class, id); - + } catch (Exception e) { logger.error("Error in TaxonomyJob - getItemByKey: " + e.getMessage(), e); @@ -106,10 +106,10 @@ public class TaxonomyJobPersistence extends AbstractPersistence{ logger.trace("getItemByKey return row: "+taxJob.getId()); else logger.trace("getItemByKey return null"); - + //FOR DEBUG // System.out.println("getItemByKey return: "+row ); - + return taxJob; } @@ -152,7 +152,7 @@ public class TaxonomyJobPersistence extends AbstractPersistence{ query.setFirstResult(startIndex); query.setMaxResults(offset); listTaxJob = query.getResultList(); - + } catch (Exception e) { logger.error("Error in TaxonomyJob - getList: " + e.getMessage(), e); @@ -161,15 +161,15 @@ public class TaxonomyJobPersistence extends AbstractPersistence{ } return listTaxJob; } - + @Override public List getList(Map filterMap, int startIndex, int offset) throws DatabaseServiceException { - + EntityManager em = super.createNewManager(); List listTaxJob = new ArrayList(); try { String queryString = "select t from TaxonomyJob t"; - + if(filterMap!=null && filterMap.size()>0){ queryString+=" where "; for (String param : filterMap.keySet()) { @@ -177,11 +177,11 @@ public class TaxonomyJobPersistence extends AbstractPersistence{ queryString+=" t."+param+"="+value; queryString+=AND; } - + queryString = queryString.substring(0, queryString.lastIndexOf(AND)); } Query query = em.createQuery(queryString); - + if(startIndex>-1) query.setFirstResult(startIndex); if(offset>-1) @@ -196,33 +196,33 @@ public class TaxonomyJobPersistence extends AbstractPersistence{ } return listTaxJob; } - + @Override public List executeTypedQuery(CriteriaQuery cq, int startIndex, int offset) throws DatabaseServiceException { - + EntityManager em = super.createNewManager(); List listOJ = new ArrayList(); try { TypedQuery typedQuery = em.createQuery(cq); - + if(startIndex>-1) typedQuery.setFirstResult(startIndex); if(offset>-1) typedQuery.setMaxResults(offset); listOJ = typedQuery.getResultList(); - + } catch (Exception e) { logger.error("Error in TaxonomyJob - executeTypedQuery: " + e.getMessage(), e); } finally { em.close(); } - + return listOJ; } - + } \ No newline at end of file diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/TaxonomyRowPersistence.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/TaxonomyRowPersistence.java index d976785..fe389c9 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/TaxonomyRowPersistence.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/persistence/dao/TaxonomyRowPersistence.java @@ -31,7 +31,7 @@ public class TaxonomyRowPersistence extends AbstractPersistence{ em.getTransaction().begin(); removed = em.createQuery("DELETE FROM TaxonomyRow").executeUpdate(); em.getTransaction().commit(); - logger.trace("DELETE FROM TaxonomyRow " + removed +" items"); + logger.trace("DELETED 
FROM TaxonomyRow " + removed +" items"); } catch (Exception e) { logger.error("Error in TaxonomyRow - removeAll: " + e.getMessage(), e); @@ -51,7 +51,7 @@ public class TaxonomyRowPersistence extends AbstractPersistence{ Query query = em.createQuery("select t from TaxonomyRow t"); listTaxonomy = query.getResultList(); - + } catch (Exception e) { logger.error("Error in TaxonomyRow - getList: " + e.getMessage(), e); @@ -75,10 +75,10 @@ public class TaxonomyRowPersistence extends AbstractPersistence{ TaxonomyRow row = null; try { row = em.getReference(TaxonomyRow.class, id); - + }catch (Exception e) { logger.error("An error occurred in TaxonomyRow - getItemByKey ",e); - + } finally { em.close(); } @@ -86,10 +86,10 @@ public class TaxonomyRowPersistence extends AbstractPersistence{ logger.trace("getItemByKey return row: "+row.getId() + ", service id: " + row.getServiceId()); else logger.trace("getItemByKey return null"); - + //FOR DEBUG // System.out.println("getItemByKey return: "+row ); - + return row; } @@ -131,7 +131,7 @@ public class TaxonomyRowPersistence extends AbstractPersistence{ query.setFirstResult(startIndex); query.setMaxResults(offset); listTaxonomyRow = query.getResultList(); - + }catch (Exception e) { logger.error("An error occurred in TaxonomyRow - get List ",e); } @@ -140,15 +140,15 @@ public class TaxonomyRowPersistence extends AbstractPersistence{ } return listTaxonomyRow; } - + @Override public List getList(Map filterMap, int startIndex, int offset) throws DatabaseServiceException{ - + EntityManager em = super.createNewManager(); List listTaxonomyRow = new ArrayList(); try { String queryString = "select t from TaxonomyRow t"; - + if(filterMap!=null && filterMap.size()>0){ queryString+=" where "; for (String param : filterMap.keySet()) { @@ -156,11 +156,11 @@ public class TaxonomyRowPersistence extends AbstractPersistence{ queryString+=" t."+param+"="+value; queryString+=AND; } - + queryString = queryString.substring(0, queryString.lastIndexOf(AND)); } Query query = em.createQuery(queryString); - + if(startIndex>-1) query.setFirstResult(startIndex); if(offset>-1) @@ -174,16 +174,16 @@ public class TaxonomyRowPersistence extends AbstractPersistence{ } return listTaxonomyRow; } - + @Override public List executeTypedQuery(CriteriaQuery cq, int startIndex, int offset) throws DatabaseServiceException{ - + EntityManager em = super.createNewManager(); List listOJ = new ArrayList(); try { TypedQuery typedQuery = em.createQuery(cq); - + if(startIndex>-1) typedQuery.setFirstResult(startIndex); if(offset>-1) @@ -195,7 +195,7 @@ public class TaxonomyRowPersistence extends AbstractPersistence{ } finally { em.close(); } - + return listOJ; } @@ -204,13 +204,13 @@ public class TaxonomyRowPersistence extends AbstractPersistence{ public int deleteItemByIdField(String idField) throws DatabaseServiceException{ EntityManager em = super.createNewManager(); int removed = 0; - + try { em.getTransaction().begin(); removed = em.createQuery("DELETE FROM TaxonomyRow t WHERE t."+TaxonomyRow.ID_FIELD+"='"+idField+"'").executeUpdate(); em.getTransaction().commit(); logger.trace("Item "+ idField + " was deleted from TaxonomyRow"); - + } catch (Exception e) { logger.error("Error in TaxonomyRow deleteJobById: " + e.getMessage(), e); @@ -221,5 +221,5 @@ public class TaxonomyRowPersistence extends AbstractPersistence{ return removed; } - + } \ No newline at end of file diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/service/ResultItemConverter.java 
b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/service/ResultItemConverter.java index 6db9d94..81a8f91 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/service/ResultItemConverter.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/service/ResultItemConverter.java @@ -1,5 +1,5 @@ /** - * + * */ package org.gcube.portlets.user.speciesdiscovery.server.service; @@ -23,37 +23,47 @@ import org.gcube.portlets.user.speciesdiscovery.shared.ResultRow; import org.gcube.portlets.user.speciesdiscovery.shared.Taxon; import org.gcube.portlets.user.speciesdiscovery.shared.util.NormalizeString; + /** - * - * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it + * The Class ResultItemConverter. * + * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it + * Jan 11, 2017 */ public class ResultItemConverter implements Converter { protected Logger logger = Logger.getLogger(ResultItemConverter.class); protected int id = 0; protected ASLSession session; - + + /** + * Instantiates a new result item converter. + * + * @param session the session + */ public ResultItemConverter(ASLSession session) { this.session = session; } + /* (non-Javadoc) + * @see org.gcube.portlets.user.speciesdiscovery.server.stream.Converter#convert(java.lang.Object) + */ @Override public ResultRow convert(ResultItem input) throws Exception { ResultRow row = new ResultRow(id++); - + row.setServiceId(input.getId()); - + //Retrieve Properties - List listProperties = input.getProperties(); + List listProperties = input.getProperties(); //Fill properties if(listProperties!=null){ for (ElementProperty elementProperty : listProperties) // row.getProperties().add(new ItemParameter(StringEscapeUtils.escapeSql(elementProperty.getName()), StringEscapeUtils.escapeSql(elementProperty.getValue()))); row.getProperties().add(new ItemParameter(elementProperty.getName(), elementProperty.getValue())); - + row.setExistsProperties(true); } @@ -64,7 +74,7 @@ public class ResultItemConverter implements Converter { } else row.setScientificNameAuthorship(ConstantsSpeciesDiscovery.NOT_FOUND); - + //set credits if(input.getCredits()!=null && !input.getCredits().isEmpty()){ // row.setCredits(StringEscapeUtils.escapeSql(input.getCredits())); @@ -72,7 +82,7 @@ public class ResultItemConverter implements Converter { } else row.setCredits(ConstantsSpeciesDiscovery.NOT_FOUND); - + //set lsid if(input.getLsid()!=null && !input.getLsid().isEmpty()){ // row.setLsid(StringEscapeUtils.escapeSql(input.getLsid())); @@ -80,7 +90,7 @@ public class ResultItemConverter implements Converter { } else row.setLsid(ConstantsSpeciesDiscovery.NOT_FOUND); - + if(input.getProvider()!=null && !input.getProvider().isEmpty()){ // row.setDataSourceId(StringEscapeUtils.escapeSql(input.getProvider())); @@ -94,34 +104,34 @@ public class ResultItemConverter implements Converter { if (input.getDataSet()!=null) { DataSet dataSet = input.getDataSet(); - + if(dataSet.getCitation()==null || dataSet.getCitation().isEmpty()) row.setDataSetCitation("Citation Id not found"); else // row.setDataSetCitation(StringEscapeUtils.escapeSql(dataSet.getCitation())); row.setDataSetCitation(dataSet.getCitation()); - + if(dataSet.getId()==null || dataSet.getId().isEmpty()) row.setDataSetId("Data Set Id not found"); else row.setDataSetId(dataSet.getId()); - + if(dataSet.getName()==null || dataSet.getName().isEmpty()) row.setDataSetName("Data Set Name not found"); else // 
row.setDataSetName(StringEscapeUtils.escapeSql(dataSet.getName())); row.setDataSetName(dataSet.getName()); - + if (input.getDataSet().getDataProvider()!=null) { DataProvider dataProvider = dataSet.getDataProvider(); - + if(dataProvider.getId() == null || dataProvider.getId().isEmpty()) row.setDataProviderId("Data Provider Id not found"); else // row.setDataProviderId(StringEscapeUtils.escapeSql(dataProvider.getId())); row.setDataProviderId(dataProvider.getId()); - + if(dataProvider.getName()==null || dataProvider.getName().isEmpty()) row.setDataProviderName("Data Provider not found"); else @@ -129,17 +139,17 @@ public class ResultItemConverter implements Converter { row.setDataProviderName(dataProvider.getName()); } } - + if(input.getCommonNames()!=null){ for (org.gcube.data.spd.model.CommonName commonName : input.getCommonNames()){ - + CommonName com = new CommonName(commonName.getName(), commonName.getLanguage(), row.getId()); // DaoSession.createOrUpdateCommonName(com, session); row.getCommonNames().add(com); row.setExistsCommonName(true); } } - + if (input.getProducts()!=null) { for (Product product:input.getProducts()) { @@ -152,35 +162,39 @@ public class ResultItemConverter implements Converter { } } - //DEBUG // System.out.println("Insert row id: "+row.getId()); - row.setMatchingTaxon(convertTaxon(input)); - row.setBaseTaxonValue(NormalizeString.lowerCaseUpFirstChar(getBaseTaxonValue(TaxonomySearchServiceImpl.BASETAXONOMY,input))); -// row.setMatchingCredits(StringEscapeUtils.escapeSql(input.getCredits())); row.setMatchingCredits(input.getCredits()); - -// logger.trace("convert completed: " +row); - return row; } + /** + * Gets the base taxon value. + * + * @param rank the rank + * @param taxon the taxon + * @return the base taxon value + */ private String getBaseTaxonValue(String rank, TaxonomyInterface taxon){ - + while(taxon!=null){ - if(taxon.getRank()!=null && taxon.getRank().equalsIgnoreCase(rank)) return taxon.getRank(); - + taxon = taxon.getParent(); } - return TaxonomySearchServiceImpl.TAXONOMYUNKNOWN; } - - + + + /** + * Convert taxon. 
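ResultItemConverter is one plug-in of the portlet's small Converter abstraction, which the fetch pipeline applies row by row while draining a service stream. A toy implementation, assuming the two-parameter generic shape implied by the @see reference above (the class itself is hypothetical; the real pipeline converts ResultItem to ResultRow):

    import org.gcube.data.spd.model.products.ResultItem;
    import org.gcube.portlets.user.speciesdiscovery.server.stream.Converter;

    // Illustrative only: extracts just the scientific name from each result.
    public class ScientificNameConverter implements Converter<ResultItem, String> {
        @Override
        public String convert(ResultItem input) throws Exception {
            return input == null ? null : input.getScientificName();
        }
    }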
+ * + * @param taxon the taxon + * @return the list + */ protected List convertTaxon(TaxonomyInterface taxon) { List listTaxon = new ArrayList(); @@ -190,9 +204,7 @@ public class ResultItemConverter implements Converter { Taxon tax = new Taxon(count++, taxon.getScientificName(), taxon.getCitation(), NormalizeString.lowerCaseUpFirstChar(taxon.getRank())); listTaxon.add(tax); taxon = taxon.getParent(); - -// System.out.println("Insert tax parent id: "+tax.getId()); } - return listTaxon; + return listTaxon; } } diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/service/SpeciesService.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/service/SpeciesService.java index 5109ca3..583edd7 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/service/SpeciesService.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/service/SpeciesService.java @@ -1,9 +1,10 @@ + package org.gcube.portlets.user.speciesdiscovery.server.service; import static org.gcube.data.spd.client.plugins.AbstractPlugin.classification; import static org.gcube.data.spd.client.plugins.AbstractPlugin.executor; import static org.gcube.data.spd.client.plugins.AbstractPlugin.manager; -import static org.gcube.data.spd.client.plugins.AbstractPlugin.occurrence; +import static org.gcube.data.spd.client.plugins.AbstractPlugin.occurrences; import static org.gcube.data.streams.dsl.Streams.convert; import java.io.InputStream; @@ -19,10 +20,10 @@ import java.util.concurrent.TimeUnit; import org.apache.log4j.Logger; import org.gcube.application.framework.core.session.ASLSession; import org.gcube.common.scope.api.ScopeProvider; -import org.gcube.data.spd.client.proxies.Classification; -import org.gcube.data.spd.client.proxies.Executor; -import org.gcube.data.spd.client.proxies.Manager; -import org.gcube.data.spd.client.proxies.Occurrence; +import org.gcube.data.spd.client.proxies.ClassificationClient; +import org.gcube.data.spd.client.proxies.ExecutorClient; +import org.gcube.data.spd.client.proxies.ManagerClient; +import org.gcube.data.spd.client.proxies.OccurrenceClient; import org.gcube.data.spd.model.Conditions; import org.gcube.data.spd.model.PluginDescription; import org.gcube.data.spd.model.RepositoryInfo; @@ -30,10 +31,10 @@ import org.gcube.data.spd.model.exceptions.InvalidQueryException; import org.gcube.data.spd.model.products.OccurrencePoint; import org.gcube.data.spd.model.products.ResultElement; import org.gcube.data.spd.model.products.TaxonomyItem; +import org.gcube.data.spd.model.service.exceptions.InvalidIdentifierException; +import org.gcube.data.spd.model.service.exceptions.UnsupportedPluginException; +import org.gcube.data.spd.model.service.types.CompleteJobStatus; import org.gcube.data.spd.model.util.Capabilities; -import org.gcube.data.spd.stubs.exceptions.InvalidIdentifierException; -import org.gcube.data.spd.stubs.exceptions.UnsupportedPluginException; -import org.gcube.data.spd.stubs.types.Status; import org.gcube.data.streams.Stream; import org.gcube.portlets.user.speciesdiscovery.server.stream.CloseableIterator; import org.gcube.portlets.user.speciesdiscovery.server.stream.IteratorPointInfo; @@ -47,635 +48,938 @@ import org.gcube.portlets.user.speciesdiscovery.shared.SearchServiceException; import org.gcube.portlets.user.speciesdiscovery.shared.SearchType; import org.gcube.portlets.user.speciesdiscovery.shared.SpeciesCapability; -//import org.gcube.contentmanager.storageclient.model.protocol.smp.Handler; /** - * @author "Federico De 
Faveri defaveri@isti.cnr.it" + * The Class SpeciesService. * + * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it + * Jan 10, 2017 */ public class SpeciesService { protected Logger logger = Logger.getLogger(SpeciesService.class); - protected String scope; protected ASLSession session; - - protected Manager call; - protected Occurrence occurrencesCall; - protected Classification classificationCall; - protected Executor executorCall; - + protected ManagerClient call; + protected OccurrenceClient occurrencesCall; + protected ClassificationClient classificationCall; + protected ExecutorClient executorCall; protected String lastQuery = ""; + /** + * Instantiates a new species service. + * + * @param scope + * the scope + * @param session + * the session + * @throws Exception + * the exception + */ + public SpeciesService(String scope, ASLSession session) + throws Exception { - public SpeciesService(String scope, ASLSession session) throws Exception - { this(scope); this.session = session; } - - - public SpeciesService(String scope) throws Exception - { + + /** + * Instantiates a new species service. + * + * @param scope + * the scope + * @throws Exception + * the exception + */ + public SpeciesService(String scope) + throws Exception { + this.scope = scope; ScopeProvider.instance.set(scope); -// this.call = manager().at( URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build(); -// this.occurrencesCall = occurrences().at( URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build(); -// this.classificationCall = classification().at( URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build(); + // this.call = manager().at( + // URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, + // TimeUnit.MINUTES).build(); + // this.occurrencesCall = occurrences().at( + // URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, + // TimeUnit.MINUTES).build(); + // this.classificationCall = classification().at( + // URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, + // TimeUnit.MINUTES).build(); System.out.println("CALLING MANAGER "); this.call = manager().withTimeout(3, TimeUnit.MINUTES).build(); this.executorCall = executor().withTimeout(3, TimeUnit.MINUTES).build(); - this.occurrencesCall = occurrence().withTimeout(3, TimeUnit.MINUTES).build(); - this.classificationCall = classification().withTimeout(3, TimeUnit.MINUTES).build(); + this.occurrencesCall = + occurrences().withTimeout(3, TimeUnit.MINUTES).build(); + this.classificationCall = + classification().withTimeout(3, TimeUnit.MINUTES).build(); } - - - public SpeciesService(String scope, boolean instanceOnlyOccurrence) throws Exception - { + + /** + * Instantiates a new species service. 
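The constructor above is the heart of the port: the proxies are still built through the static factories of AbstractPlugin, but the returned types are now the *Client interfaces and the occurrence factory is the plural occurrences(). Note the fix applied here as well: the occurrence proxy must be assigned to the occurrencesCall field, not to a throwaway local, or every later occurrence call fails with a NullPointerException. Condensed, the 4.0 wiring is (the scope string is illustrative):

    import static org.gcube.data.spd.client.plugins.AbstractPlugin.*;
    import java.util.concurrent.TimeUnit;

    ScopeProvider.instance.set("/gcube/devsec");  // illustrative scope
    ManagerClient call = manager().withTimeout(3, TimeUnit.MINUTES).build();
    ExecutorClient executorCall = executor().withTimeout(3, TimeUnit.MINUTES).build();
    OccurrenceClient occurrencesCall = occurrences().withTimeout(3, TimeUnit.MINUTES).build();
    ClassificationClient classificationCall = classification().withTimeout(3, TimeUnit.MINUTES).build();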
+ * + * @param scope + * the scope + * @param instanceOnlyOccurrence + * the instance only occurrence + * @throws Exception + * the exception + */ + public SpeciesService(String scope, boolean instanceOnlyOccurrence) + throws Exception { + this.scope = scope; - - if(instanceOnlyOccurrence){ + if (instanceOnlyOccurrence) { ScopeProvider.instance.set(scope); System.out.println("CALLING OCCURRENCE MANAGER "); - this.occurrencesCall = occurrence().withTimeout(3, TimeUnit.MINUTES).build(); + this.occurrencesCall = occurrences().withTimeout(3, TimeUnit.MINUTES).build(); } } + /** + * Search by filters. + * + * @param searchTerm + * the search term + * @param searchType + * the search type + * @param searchFilters + * the search filters + * @return the closeable iterator + * @throws SearchServiceException + * the search service exception + */ + public CloseableIterator searchByFilters( + String searchTerm, SearchType searchType, SearchFilters searchFilters) + throws SearchServiceException { - public CloseableIterator searchByFilters(String searchTerm, SearchType searchType, SearchFilters searchFilters) throws SearchServiceException { - logger.trace("searchByFilters searchTerm: "+searchTerm+ " usearchFilters: "+searchFilters); - + logger.trace("searchByFilters searchTerm: " + searchTerm + + " usearchFilters: " + searchFilters); try { logger.trace("query building..."); - String query = QueryBuilder.buildQuery(searchTerm, searchType, searchFilters); -// System.out.println("query build - OK " + query); + String query = + QueryBuilder.buildQuery(searchTerm, searchType, searchFilters); + // System.out.println("query build - OK " + query); logger.trace("query build - OK " + query); -// System.out.println("query: "+query); + // System.out.println("query: "+query); return searchByQuery(query); - } catch (Exception e) { - logger.error("Error calling the Species Service: " + e.getMessage(), e); - throw new SearchServiceException("Error calling the Species Service: "+e.getMessage()); + } + catch (Exception e) { + logger.error( + "Error calling the Species Service: " + e.getMessage(), e); + throw new SearchServiceException( + "Error calling the Species Service: " + e.getMessage()); } } - - - public CloseableIterator retrieveTaxonomyById(Stream streamIds) throws SearchServiceException { + + /** + * Retrieve taxonomy by id. + * + * @param ids + * the ids + * @return the closeable iterator + * @throws SearchServiceException + * the search service exception + */ + public CloseableIterator retrieveTaxonomyById(List ids) + throws SearchServiceException { + logger.trace("retrieveTaxonomyById..."); - try { ScopeProvider.instance.set(scope); - Stream stream = classificationCall.getTaxaByIds(streamIds); + Stream stream = classificationCall.getTaxaByIds(ids); return new StreamIterator(stream); - } catch (Exception e) { - logger.error("Error calling the Species Service: " + e.getMessage(), e); - throw new SearchServiceException("Error calling the Species Service: "+e.getMessage()); + } + catch (Exception e) { + logger.error( + "Error calling the Species Service: " + e.getMessage(), e); + throw new SearchServiceException( + "Error calling the Species Service: " + e.getMessage()); } } - - - public CloseableIterator retrieveSynonymsById(String id) throws SearchServiceException { + + /** + * Retrieve synonyms by id. 
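Callers consume these lookups through the StreamIterator/CloseableIterator wrappers returned above. A usage sketch, assuming CloseableIterator exposes the usual Iterator contract plus close(), as its uses in the Fetcher suggest (the ids are made up):

    CloseableIterator<TaxonomyItem> it =
            service.retrieveTaxonomyById(Arrays.asList("id-1", "id-2"));  // hypothetical ids
    try {
        while (it.hasNext()) {
            TaxonomyItem item = it.next();
            // process the taxon, e.g. hand it to the taxonomy row buffer
        }
    }
    finally {
        it.close();  // releases the underlying service stream
    }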
+ * + * @param id + * the id + * @return the closeable iterator + * @throws SearchServiceException + * the search service exception + */ + public CloseableIterator retrieveSynonymsById(String id) + throws SearchServiceException { + logger.trace("retrieveSynonymsById..."); - try { ScopeProvider.instance.set(scope); - Stream stream = classificationCall.getSynonymsById(id); + Stream stream = + classificationCall.getSynonymsById(id); return new StreamIterator(stream); - } catch (Exception e) { - logger.error("Error calling the Species Service: " + e.getMessage(), e); - throw new SearchServiceException("Error calling the Species Service: "+e.getMessage()); + } + catch (Exception e) { + logger.error( + "Error calling the Species Service: " + e.getMessage(), e); + throw new SearchServiceException( + "Error calling the Species Service: " + e.getMessage()); } } + /** + * Search by query. + * + * @param query + * the query + * @return the closeable iterator + * @throws SearchServiceException + * the search service exception + */ + public CloseableIterator searchByQuery(String query) + throws SearchServiceException { - public CloseableIterator searchByQuery(String query) throws SearchServiceException { - logger.trace("search by Query - query is: "+query); -// System.out.println("searchByQuery query: "+query); - + logger.trace("search by Query - query is: " + query); + // System.out.println("searchByQuery query: "+query); try { ScopeProvider.instance.set(scope); lastQuery = query; -// System.err.println("ScopeProvider SCOPE "+ScopeProvider.instance.get()); + // System.err.println("ScopeProvider SCOPE "+ScopeProvider.instance.get()); logger.trace("call species service search..."); -// System.out.println("call species service search..."); + // System.out.println("call species service search..."); Stream stream = call.search(query); return new StreamIterator(stream); - - }catch (UnsupportedPluginException e2) { + } + catch (UnsupportedPluginException e2) { lastQuery = "Invalid query"; - String error = "Error calling the Species Service: plugin usupported"; - logger.error("Error calling the Species Service: " + e2.getMessage(), e2); + String error = + "Error calling the Species Service: plugin unsupported"; + logger.error( + "Error calling the Species Service: " + e2.getMessage(), e2); throw new SearchServiceException(error); - }catch (InvalidQueryException e1) { + } + catch (InvalidQueryException e1) { lastQuery = "Invalid query"; - String error = "Error calling the Species Service: query syntax is not valid"; - logger.error("Error calling the Species Service: " + e1.getMessage(), e1); + String error = + "Error calling the Species Service: query syntax is not valid"; + logger.error( + "Error calling the Species Service: " + e1.getMessage(), e1); throw new SearchServiceException(error); - } catch (Exception e) { + } + catch (Exception e) { lastQuery = "Invalid query"; - String error = "Error calling the Species Service: an error occurred contacting the service"; - logger.error("Error calling the Species Service: " + e.getMessage(), e); + String error = + "Error calling the Species Service: an error occurred contacting the service"; + logger.error( + "Error calling the Species Service: " + e.getMessage(), e); throw new SearchServiceException(error); } } - - - public Stream searchByQuery2(String query) throws SearchServiceException { - logger.trace("searchByQuery query: "+query); -// System.out.println("searchByQuery query: "+query); + /** + * Search by query2. 
+ * + * @param query + * the query + * @return the stream + * @throws SearchServiceException + * the search service exception + */ + public Stream searchByQuery2(String query) + throws SearchServiceException { + + logger.trace("searchByQuery query: " + query); + // System.out.println("searchByQuery query: "+query); try { ScopeProvider.instance.set(scope); -// System.err.println("ScopeProvider SCOPE "+ScopeProvider.instance.get()); + // System.err.println("ScopeProvider SCOPE "+ScopeProvider.instance.get()); logger.trace("call species service search..."); System.out.println("call species service search..."); return call.search(query); - } catch (Exception e) { - logger.error("Error calling the Species Service: " + e.getMessage(), e); - throw new SearchServiceException("Error calling the Species Service: "+e.getMessage()); + } + catch (Exception e) { + logger.error( + "Error calling the Species Service: " + e.getMessage(), e); + throw new SearchServiceException( + "Error calling the Species Service: " + e.getMessage()); } } - - public List getPlugins() throws SearchServiceException { + /** + * Gets the plugins. + * + * @return the plugins + * @throws SearchServiceException + * the search service exception + */ + public List getPlugins() + throws SearchServiceException { + logger.trace("getPlugins..."); try { - - List listDsModel = new ArrayList(); - + List listDsModel = + new ArrayList(); ScopeProvider.instance.set(scope); - System.out.println("setting scope "+scope); + System.out.println("setting scope " + scope); List plugin = call.getPluginsDescription(); - - if(plugin!=null){ - - logger.trace("*****PluginDescription is NOT null - length: " + plugin.size()); - + if (plugin != null) { + logger.trace("*****PluginDescription is NOT null - length: "+plugin.size()); for (int i = 0; i < plugin.size(); i++) { - PluginDescription pluginDescription = plugin.get(i); - - ArrayList datasourceCapabilities = new ArrayList(); - - Map> pluginCapabilities = pluginDescription.getSupportedCapabilities(); - - logger.trace("getCapabilities for..." 
+ pluginDescription.getName()); - for (Entry> pluginCapability:pluginCapabilities.entrySet()) { - + ArrayList datasourceCapabilities = + new ArrayList(); + Map> pluginCapabilities = + pluginDescription.getSupportedCapabilities(); + logger.info("getCapabilities for plugin: " +pluginDescription.getName()); + for (Entry> pluginCapability : pluginCapabilities.entrySet()) { Capabilities capability = pluginCapability.getKey(); - + logger.info("capability name: " +capability.name()); ArrayList datasourceProperties = new ArrayList(); - for (Conditions condition:pluginCapability.getValue()) datasourceProperties.addAll(getFilterCapabilityFromProperties(condition)); - + logger.info("capability value: " +pluginCapability.getValue()); + for (Conditions condition : pluginCapability.getValue()) + datasourceProperties.addAll(getFilterCapabilityFromProperties(condition)); + datasourceCapabilities.add(new DataSourceCapability(getGxtCapabilityValueFromCapability(capability), datasourceProperties)); } - RepositoryInfo rep = pluginDescription.getInfo(); - - //CREATE DataSourceRepositoryInfo - DataSourceRepositoryInfo dsInfo = new DataSourceRepositoryInfo(); - - if(rep!=null){ -// System.out.println("DESCRIPTION REPOSITORY: " + rep.getDescription()); + // CREATE DataSourceRepositoryInfo + DataSourceRepositoryInfo dsInfo = + new DataSourceRepositoryInfo(); + if (rep != null) { + // System.out.println("DESCRIPTION REPOSITORY: " + + // rep.getDescription()); dsInfo.setLogoUrl(rep.getLogoUrl()); dsInfo.setPageUrl(rep.getPageReferenceUrl()); dsInfo.setProperties(getPropertiesFromRepositoryInfoType(rep)); dsInfo.setDescription(rep.getDescription()); -// dsInfo = new DataSourceRepositoryInfo(rep.getLogoUrl(), rep.getReferencePageUrl(),getPropertiesFromRepositoryInfoType(rep), rep.getDescription()); - logger.trace("DataSourceRepositoryInfo :"+dsInfo); -// logger.trace("Repository description size: " + rep.getDescription().length()); + // dsInfo = new + // DataSourceRepositoryInfo(rep.getLogoUrl(), + // rep.getReferencePageUrl(),getPropertiesFromRepositoryInfoType(rep), + // rep.getDescription()); + logger.trace("DataSourceRepositoryInfo :" + dsInfo); + // logger.trace("Repository description size: " + + // rep.getDescription().length()); } - - listDsModel.add(new DataSourceModel(pluginDescription.getName(), pluginDescription.getName(), pluginDescription.getDescription(), datasourceCapabilities, dsInfo)); - + listDsModel.add(new DataSourceModel( + pluginDescription.getName(), + pluginDescription.getName(), + pluginDescription.getDescription(), + datasourceCapabilities, dsInfo)); } } else logger.trace("*****PluginDescription is null"); - return listDsModel; - - } catch (Exception e) { - logger.error("Error calling the Species Service: " + e.getMessage(), e); -// System.out.println("Error calling the Species Service: " + e); + } + catch (Exception e) { + logger.error( + "Error calling the Species Service: " + e.getMessage(), e); + // System.out.println("Error calling the Species Service: " + e); e.printStackTrace(); throw new SearchServiceException("loading the data sources"); } } - - private Map getPropertiesFromRepositoryInfoType(RepositoryInfo rep){ - + + /** + * Gets the properties from repository info type. 
+ * + * @param rep + * the rep + * @return the properties from repository info type + */ + private Map getPropertiesFromRepositoryInfoType( + RepositoryInfo rep) { + Map mapProperties = new HashMap(); - - if(rep.getProperties()==null){ + if (rep.getProperties() == null) { logger.trace("*****Properties From RepositoryInfoType is null"); return mapProperties; } - - - for (Entry property : rep.getProperties().entrySet()) mapProperties.put(property.getKey(), property.getValue()); - + for (Entry property : rep.getProperties().entrySet()) + mapProperties.put(property.getKey(), property.getValue()); return mapProperties; } - private List getFilterCapabilityFromProperties(Conditions property){ + /** + * Gets the filter capability from properties. + * + * @param property + * the property + * @return the filter capability from properties + */ + private List getFilterCapabilityFromProperties(Conditions property) { + + if(property==null){ + logger.info("Conditions is null"); + return Collections.singletonList(SpeciesCapability.UNKNOWN); + } switch (property) { - case DATE: return Arrays.asList(SpeciesCapability.FROMDATE, SpeciesCapability.TODATE); - case COORDINATE: return Arrays.asList(SpeciesCapability.UPPERBOUND, SpeciesCapability.LOWERBOUND); - default: return Collections.singletonList(SpeciesCapability.UNKNOWN); - + case DATE: + return Arrays.asList(SpeciesCapability.FROMDATE, SpeciesCapability.TODATE); + case COORDINATE: + return Arrays.asList(SpeciesCapability.UPPERBOUND, SpeciesCapability.LOWERBOUND); + default: + return Collections.singletonList(SpeciesCapability.UNKNOWN); } } - private SpeciesCapability getGxtCapabilityValueFromCapability(Capabilities capability){ + /** + * Gets the gxt capability value from capability. + * + * @param capability + * the capability + * @return the gxt capability value from capability + */ + private SpeciesCapability getGxtCapabilityValueFromCapability( + Capabilities capability) { switch (capability) { - case Classification: return SpeciesCapability.TAXONOMYITEM; - case NamesMapping: return SpeciesCapability.NAMESMAPPING; - case Occurrence: return SpeciesCapability.RESULTITEM; - case Expansion: return SpeciesCapability.SYNONYMS; - case Unfold: return SpeciesCapability.UNFOLD; - default: return SpeciesCapability.UNKNOWN; + case Classification: + return SpeciesCapability.TAXONOMYITEM; + case NamesMapping: + return SpeciesCapability.NAMESMAPPING; + case Occurrence: + return SpeciesCapability.RESULTITEM; + case Expansion: + return SpeciesCapability.SYNONYMS; + case Unfold: + return SpeciesCapability.UNFOLD; + default: + return SpeciesCapability.UNKNOWN; } } - /*protected List createFilterProperties(SearchFilters searchFilters) - { - List properties = new ArrayList(); - if (searchFilters.getUpperBound()!=null) properties.add(new Property(Properties.CoordinateTo, convertCoordinate(searchFilters.getUpperBound()))); - if (searchFilters.getLowerBound()!=null) properties.add(new Property(Properties.CoordinateFrom, convertCoordinate(searchFilters.getLowerBound()))); + /** + * Convert coordinate. + * + * @param coordinate + * the coordinate + * @return the org.gcube.data.spd.model. 
coordinate + */ + protected org.gcube.data.spd.model.Coordinate convertCoordinate( + Coordinate coordinate) { - if (searchFilters.getFromDate()!=null) { - Calendar fromDate = Calendar.getInstance(); - fromDate.setTime(searchFilters.getFromDate()); - properties.add(new Property(Properties.DateFrom, fromDate)); - } - if (searchFilters.getToDate()!=null) { - Calendar toDate = Calendar.getInstance(); - toDate.setTime(searchFilters.getToDate()); - properties.add(new Property(Properties.DateTo, toDate)); - } - - return properties; - }*/ - - protected org.gcube.data.spd.model.Coordinate convertCoordinate(Coordinate coordinate) - { - return new org.gcube.data.spd.model.Coordinate(coordinate.getLatitude(), coordinate.getLongitude()); + return new org.gcube.data.spd.model.Coordinate( + coordinate.getLatitude(), coordinate.getLongitude()); } - public CloseableIterator getOccurrencesByKeys(List keys) throws SearchServiceException { + /** + * Gets the occurrences by keys. + * + * @param keys + * the keys + * @return the occurrences by keys + * @throws SearchServiceException + * the search service exception + */ + public CloseableIterator getOccurrencesByKeys( + List keys) + throws SearchServiceException { + try { - Stream keysStream = convert(keys); + // Stream keysStream = convert(keys); ScopeProvider.instance.set(scope); - Stream stream = occurrencesCall.getByKeys(keysStream); + logger.trace("Instancing scope: "+scope); + logger.trace("Calling occurrencesCall passing keys: "+keys); + logger.trace("occurrencesCall != null? "+(occurrencesCall!=null)); + Stream stream = occurrencesCall.getByKeys(keys); return new StreamIterator(stream); - } catch (Exception e) { - logger.error("Error calling the Species Service: " + e.getMessage(), e); - throw new SearchServiceException("Error calling the Species Service: "+e.getMessage()); } - } - - public CloseableIterator getOccurrencesByIds(List ids) throws SearchServiceException { - try { - Stream idsStream = convert(ids); - ScopeProvider.instance.set(scope); - Stream stream = occurrencesCall.getByIds(idsStream); - return new StreamIterator(stream); - } catch (Exception e) { + catch (Exception e) { logger.error("Error calling the Species Service: " + e.getMessage(), e); - throw new SearchServiceException("Error calling the Species Service: "+e.getMessage()); + throw new SearchServiceException("Error calling the Species Service: " + e.getMessage()); } } -// public String generateMapFromOccurrencePoints(List keys) throws SearchServiceException { -// try { -// Stream keysStream = convert(keys); -// ScopeProvider.instance.set(scope); -// return occurrencesCall.getLayerByIds(keysStream); -// } catch (Exception e) { -// logger.error("Error calling the Species Service: " + e.getMessage(), e); -// throw new SearchServiceException("Error calling the Species Service: "+e.getMessage()); -// } -// } - - public String generateMapFromOccurrencePoints(IteratorPointInfo streamKey) throws SearchServiceException { + /** + * Gets the occurrences by ids. 
+ * + * @param ids + * the ids + * @return the occurrences by ids + * @throws SearchServiceException + * the search service exception + */ + public CloseableIterator getOccurrencesByIds( + List ids) + throws SearchServiceException { + try { -// Stream keysStream = convert(keys); + // Stream idsStream = convert(ids); + ScopeProvider.instance.set(scope); + Stream stream = occurrencesCall.getByIds(ids); + return new StreamIterator(stream); + } + catch (Exception e) { + logger.error( + "Error calling the Species Service: " + e.getMessage(), e); + throw new SearchServiceException( + "Error calling the Species Service: " + e.getMessage()); + } + } + + /** + * Generate map from occurrence points. + * + * @param streamKey + * the stream key + * @return the string + * @throws SearchServiceException + * the search service exception + */ + public String generateMapFromOccurrencePoints(IteratorPointInfo streamKey) + throws SearchServiceException { + + try { + // Stream keysStream = convert(keys); ScopeProvider.instance.set(scope); - return occurrencesCall.createLayer(streamKey); - } catch (Exception e) { - logger.error("Error calling the Species Service: " + e.getMessage(), e); - throw new SearchServiceException("Error calling the Species Service: "+e.getMessage()); + } + catch (Exception e) { + logger.error( + "Error calling the Species Service: " + e.getMessage(), e); + throw new SearchServiceException( + "Error calling the Species Service: " + e.getMessage()); } } - -// public File getOccurrencesAsDarwinCoreByIds(List ids) throws SearchServiceException { -// try { -// Stream keysStream = convert(ids); -// ScopeProvider.instance.set(scope); -// File occurrenceFile = occurrencesCall.getDarwinCoreByIds(keysStream); -// return occurrenceFile; -// } catch (Exception e) { -// logger.error("Error calling the Species Service: " + e.getMessage(), e); -// throw new SearchServiceException("Error calling the Species Service: "+e.getMessage()); -// } -// } -// -// public File getOccurrencesAsDarwinCoreArchive(List ids) throws SearchServiceException { -// try { -// Stream keysStream = convert(ids); -// ScopeProvider.instance.set(scope); -// File occurrenceFile = classificationCall.getDarwinCoreArchive(keysStream); -// return occurrenceFile; -// } catch (Exception e) { -// logger.error("Error calling the Species Service: " + e.getMessage(), e); -// throw new SearchServiceException("Error calling the Species Service: "+e.getMessage()); -// } -// } - - public StreamIterator getTaxonChildrenByParentId(String parentId) throws Exception{ - + /** + * Gets the taxon children by parent id. + * + * @param parentId + * the parent id + * @return the taxon children by parent id + * @throws Exception + * the exception + */ + public StreamIterator getTaxonChildrenByParentId( + String parentId) + throws Exception { + try { ScopeProvider.instance.set(scope); - Stream items = classificationCall.getTaxonChildrenById(parentId); + Stream items = + classificationCall.getTaxonChildrenById(parentId); return new StreamIterator(items); - } catch (Exception e) { -// e.printStackTrace(); - logger.error("Error calling the Species Service: " + e.getMessage(), e); - throw new Exception("Error calling the Species Service: "+e.getMessage()); + } + catch (Exception e) { + // e.printStackTrace(); + logger.error( + "Error calling the Species Service: " + e.getMessage(), e); + throw new Exception("Error calling the Species Service: " + + e.getMessage()); } } - public Status getTaxonomyJobById(String jobId) { + /** + * Gets the taxonomy job by id. 
+ * + * @param jobId + * the job id + * @return the taxonomy job by id + */ + public CompleteJobStatus getTaxonomyJobById(String jobId) { ScopeProvider.instance.set(scope); - Status status = null; - - try{ - + CompleteJobStatus status = null; + try { status = this.executorCall.getStatus(jobId); - - }catch (InvalidIdentifierException e) { + } + catch (InvalidIdentifierException e) { logger.error("Error on service for get job by Id - InvalidIdentifierException"); status = null; - - }catch (Exception e) { - logger.error("Error on service for get job by Id: " + e.getMessage(), e); - status = null; -// return new Status(); } - + catch (Exception e) { + logger.error( + "Error on service for get job by Id: " + e.getMessage(), e); + status = null; + // return new Status(); + } return status; } - public InputStream getTaxonomyJobFileById(String jobIdentifier) throws Exception { + /** + * Gets the taxonomy job file by id. + * + * @param jobIdentifier + * the job identifier + * @return the taxonomy job file by id + * @throws Exception + * the exception + */ + public InputStream getTaxonomyJobFileById(String jobIdentifier) + throws Exception { InputStream is = null; ScopeProvider.instance.set(scope); - try { String url = this.executorCall.getResultLink(jobIdentifier); - - if(url==null || url.isEmpty()){ - logger.error("URL returned by species service is: "+url); + if (url == null || url.isEmpty()) { + logger.error("URL returned by species service is: " + url); throw new StorageUrlIsEmpty(); } - - logger.trace("URL returned by species service is: "+url); + logger.trace("URL returned by species service is: " + url); is = StorageUtil.getInputStreamByStorageClient(url); - - } catch (Exception e) { -// e.printStackTrace(); - logger.error("Error saving file: "+e.getMessage(), e); - throw new Exception("Error saving file: "+e.getMessage()); } - + catch (Exception e) { + // e.printStackTrace(); + logger.error("Error saving file: " + e.getMessage(), e); + throw new Exception("Error saving file: " + e.getMessage()); + } return is; } - - - public InputStream getTaxonomyJobErrorFileById(String jobIdentifier) throws Exception { + + /** + * Gets the taxonomy job error file by id. + * + * @param jobIdentifier + * the job identifier + * @return the taxonomy job error file by id + * @throws Exception + * the exception + */ + public InputStream getTaxonomyJobErrorFileById(String jobIdentifier) + throws Exception { InputStream is = null; ScopeProvider.instance.set(scope); - try { String url = this.executorCall.getErrorLink(jobIdentifier); - - if(url==null || url.isEmpty()){ - logger.error("URL returned by species service is: "+url); + if (url == null || url.isEmpty()) { + logger.error("URL returned by species service is: " + url); throw new StorageUrlIsEmpty(); } - - logger.trace("URL returned by species service is: "+url); + logger.trace("URL returned by species service is: " + url); is = StorageUtil.getInputStreamByStorageClient(url); - - } catch (Exception e) { -// e.printStackTrace(); - logger.error("Error saving error file: "+e.getMessage(), e); - throw new Exception("Error saving file: "+e.getMessage()); } - + catch (Exception e) { + // e.printStackTrace(); + logger.error("Error saving error file: " + e.getMessage(), e); + throw new Exception("Error saving file: " + e.getMessage()); + } return is; } - - public boolean isAvailableTaxonomyJobErrorFileById(String jobIdentifier) throws Exception { + + /** + * Checks if is available taxonomy job error file by id. 
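All of the job-file methods above share one protocol: ask the executor for a storage link, treat a null or empty link as "nothing published" (StorageUrlIsEmpty), and open anything else through the storage client. Reduced to its core (jobId is illustrative):

    String url = executorCall.getResultLink(jobId);  // or getErrorLink(jobId)
    if (url == null || url.isEmpty())
        throw new StorageUrlIsEmpty();               // job not finished, or nothing to fetch
    InputStream is = StorageUtil.getInputStreamByStorageClient(url);
    try {
        // stream the generated archive back to the user
    }
    finally {
        is.close();
    }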
+ * + * @param jobIdentifier + * the job identifier + * @return true, if is available taxonomy job error file by id + * @throws Exception + * the exception + */ + public boolean isAvailableTaxonomyJobErrorFileById(String jobIdentifier) + throws Exception { ScopeProvider.instance.set(scope); - try { String url = this.executorCall.getErrorLink(jobIdentifier); - - if(url==null || url.isEmpty()){ + if (url == null || url.isEmpty()) { return false; } - return true; - - } catch (Exception e) { - logger.error("Error in is Available Taxonomy JobError File: "+e.getMessage(), e); - throw new Exception("Error in is Available Taxonomy JobError File: "+e.getMessage()); + } + catch (Exception e) { + logger.error( + "Error in is Available Taxonomy JobError File: " + + e.getMessage(), e); + throw new Exception( + "Error in is Available Taxonomy JobError File: " + + e.getMessage()); } } - public String createTaxonomyJobForDWCAByChildren(String taxonomyId) throws Exception { - + /** + * Creates the taxonomy job for dwca by children. + * + * @param taxonomyId + * the taxonomy id + * @return the string + * @throws Exception + * the exception + */ + public String createTaxonomyJobForDWCAByChildren(String taxonomyId) + throws Exception { + ScopeProvider.instance.set(scope); - try { return this.executorCall.createDwCAByChildren(taxonomyId); - - } catch (Exception e) { - logger.error("Error in createTaxonomyJob: "+e.getMessage(), e); - throw new Exception("Error in createTaxonomyJob: "+e.getMessage()); + } + catch (Exception e) { + logger.error("Error in createTaxonomyJob: " + e.getMessage(), e); + throw new Exception("Error in createTaxonomyJob: " + e.getMessage()); } } - - public String createTaxonomyJobForDWCAByIds(List ids) throws Exception { - + + /** + * Creates the taxonomy job for dwca by ids. + * + * @param ids + * the ids + * @return the string + * @throws Exception + * the exception + */ + public String createTaxonomyJobForDWCAByIds(List ids) + throws Exception { + ScopeProvider.instance.set(scope); - try { Stream keysStream = convert(ids); return executorCall.createDwCAByIds(keysStream); - } catch (Exception e) { - logger.error("Error in createTaxonomyJobForDWCA: "+e.getMessage(), e); - throw new Exception("Error in createTaxonomyJobForDWCA: "+e.getMessage()); + } + catch (Exception e) { + logger.error( + "Error in createTaxonomyJobForDWCA: " + e.getMessage(), e); + throw new Exception("Error in createTaxonomyJobForDWCA: " + + e.getMessage()); } } - public void cancelTaxonomyJobById(String jobIdentifier){ - - try{ - ScopeProvider.instance.set(scope); - this.executorCall.removeJob(jobIdentifier); - }catch (Exception e) { - logger.error("Error on service for remove job: " + e.getMessage(), e); - } - } + /** + * Cancel taxonomy job by id. + * + * @param jobIdentifier + * the job identifier + */ + public void cancelTaxonomyJobById(String jobIdentifier) { - - public Status getOccurrenceJobById(String jobId) { - try{ + try { ScopeProvider.instance.set(scope); - return this.executorCall.getStatus(jobId); //CHANGE INTO OCCURRENCE JOB ************************************************************************************** //TODO - - }catch (InvalidIdentifierException e) { + this.executorCall.removeJob(jobIdentifier); + } + catch (Exception e) { + logger.error( + "Error on service for remove job: " + e.getMessage(), e); + } + } + + /** + * Gets the occurrence job by id. 
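Note the asymmetry the port leaves behind: the occurrence and classification lookups now accept plain lists, while the executor's job-creation calls still expect gCube streams, hence the convert(...) from the streams DSL that is already statically imported at the top of this class. For example:

    import static org.gcube.data.streams.dsl.Streams.convert;

    List<String> ids = Arrays.asList("key-1", "key-2");  // illustrative keys
    Stream<String> keyStream = convert(ids);             // in-memory list -> gCube stream
    String jobId = executorCall.createDwCAByIds(keyStream);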
+ * + * @param jobId + * the job id + * @return the occurrence job by id + */ + public CompleteJobStatus getOccurrenceJobById(String jobId) { + + try { + ScopeProvider.instance.set(scope); + return this.executorCall.getStatus(jobId); // CHANGE INTO OCCURRENCE + // JOB + // ************************************************************************************** + // //TODO + } + catch (InvalidIdentifierException e) { logger.error("Error on service for get job by Id - InvalidIdentifierException"); return null; - - }catch (Exception e) { - logger.error("Error on service for get job by Id: " + e.getMessage()); + } + catch (Exception e) { + logger.error("Error on service for get job by Id: " + + e.getMessage()); return null; } } - - - public String createOccurrenceCSVJob(Stream streamKey) throws Exception{ - + + /** + * Creates the occurrence csv job. + * + * @param streamKey + * the stream key + * @return the string + * @throws Exception + * the exception + */ + public String createOccurrenceCSVJob(Stream streamKey) + throws Exception { + try { ScopeProvider.instance.set(scope); return this.executorCall.createCSV(streamKey); - - } catch (Exception e) { - logger.error("Error in createOccurrenceCSVJob: "+e.getMessage(), e); - throw new Exception("Error in createOccurrenceCSVJob: "+e.getMessage()); + } + catch (Exception e) { + logger.error( + "Error in createOccurrenceCSVJob: " + e.getMessage(), e); + throw new Exception("Error in createOccurrenceCSVJob: " + + e.getMessage()); } } - - - public String createOccurrenceDARWINCOREJob(Stream streamKey) throws Exception{ - + + /** + * Creates the occurrence darwincore job. + * + * @param streamKey + * the stream key + * @return the string + * @throws Exception + * the exception + */ + public String createOccurrenceDARWINCOREJob(Stream streamKey) + throws Exception { + try { ScopeProvider.instance.set(scope); return this.executorCall.createDarwincoreFromOccurrenceKeys(streamKey); - - } catch (Exception e) { - logger.error("Error in createOccurrenceDARWINCOREJob: "+e.getMessage(), e); - throw new Exception("Error in createOccurrenceDARWINCOREJob: "+e.getMessage()); + } + catch (Exception e) { + logger.error( + "Error in createOccurrenceDARWINCOREJob: " + e.getMessage(), e); + throw new Exception("Error in createOccurrenceDARWINCOREJob: " + + e.getMessage()); } } - - - public String createOccurrenceCSVOpenModellerJob(Stream streamKey) throws Exception{ - + + /** + * Creates the occurrence csv open modeller job. + * + * @param streamKey + * the stream key + * @return the string + * @throws Exception + * the exception + */ + public String createOccurrenceCSVOpenModellerJob(Stream streamKey) + throws Exception { + try { ScopeProvider.instance.set(scope); return this.executorCall.createCSVforOM(streamKey); - - } catch (Exception e) { - logger.error("Error in createOccurrenceCSVOpenModellerJob: "+e.getMessage(), e); - throw new Exception("Error in createOccurrenceCSVOpenModellerJob: "+e.getMessage()); + } + catch (Exception e) { + logger.error( + "Error in createOccurrenceCSVOpenModellerJob: " + + e.getMessage(), e); + throw new Exception( + "Error in createOccurrenceCSVOpenModellerJob: " + + e.getMessage()); } } + /** + * Cancel occurrence job by id. 
+ * + * @param jobIdentifier + * the job identifier + */ + public void cancelOccurrenceJobById(String jobIdentifier) { - public void cancelOccurrenceJobById(String jobIdentifier){ - - try{ - ScopeProvider.instance.set(scope); - this.executorCall.removeJob(jobIdentifier); //CHANGE INTO OCCURRENCE JOB ************************************************************************************** ** //TODO - }catch (Exception e) { - logger.error("Error on service for remove job: " + e.getMessage(), e); - } - } - - - public InputStream getOccurrenceJobFileById(String jobIdentifier) throws Exception { - InputStream is = null; - ScopeProvider.instance.set(scope); - try { - String url = this.executorCall.getResultLink(jobIdentifier); //CHANGE INTO OCCURRENCE JOB ************************************************************************************** ** //TODO - logger.trace("URL returned by species service is: "+url); - - if(url==null || url.isEmpty()){ - logger.error("URL returned by species service is: "+url); - throw new StorageUrlIsEmpty(); - } - - is = StorageUtil.getInputStreamByStorageClient(url); - - } catch (Exception e) { -// e.printStackTrace(); - logger.error("Error saving file: "+e.getMessage(), e); - throw new Exception("Error saving file: "+e.getMessage()); + ScopeProvider.instance.set(scope); + this.executorCall.removeJob(jobIdentifier); // CHANGE INTO + // OCCURRENCE JOB + // ************************************************************************************** + // ** //TODO + } + catch (Exception e) { + logger.error( + "Error on service for remove job: " + e.getMessage(), e); } - - return is; } - - public InputStream getOccurrenceJobErrorFileById(String jobIdentifier) throws Exception { + + /** + * Gets the occurrence job file by id. + * + * @param jobIdentifier + * the job identifier + * @return the occurrence job file by id + * @throws Exception + * the exception + */ + public InputStream getOccurrenceJobFileById(String jobIdentifier) + throws Exception { InputStream is = null; ScopeProvider.instance.set(scope); - try { - String url = this.executorCall.getErrorLink(jobIdentifier); - logger.trace("URL returned by species service is: "+url); - - if(url==null || url.isEmpty()){ - logger.error("URL returned by species service is: "+url); + String url = this.executorCall.getResultLink(jobIdentifier); // CHANGE + // INTO + // OCCURRENCE + // JOB + // ************************************************************************************** + // ** + // //TODO + logger.trace("URL returned by species service is: " + url); + if (url == null || url.isEmpty()) { + logger.error("URL returned by species service is: " + url); throw new StorageUrlIsEmpty(); } - is = StorageUtil.getInputStreamByStorageClient(url); - - } catch (Exception e) { -// e.printStackTrace(); - logger.error("Error saving file: "+e.getMessage(), e); - throw new Exception("Error saving file: "+e.getMessage()); } - + catch (Exception e) { + // e.printStackTrace(); + logger.error("Error saving file: " + e.getMessage(), e); + throw new Exception("Error saving file: " + e.getMessage()); + } return is; } - - public boolean isAvailableOccurrenceJobErrorFileById(String jobIdentifier) throws Exception { + /** + * Gets the occurrence job error file by id. 
+ * + * @param jobIdentifier + * the job identifier + * @return the occurrence job error file by id + * @throws Exception + * the exception + */ + public InputStream getOccurrenceJobErrorFileById(String jobIdentifier) + throws Exception { + + InputStream is = null; ScopeProvider.instance.set(scope); - try { String url = this.executorCall.getErrorLink(jobIdentifier); - - if(url==null || url.isEmpty()){ + logger.trace("URL returned by species service is: " + url); + if (url == null || url.isEmpty()) { + logger.error("URL returned by species service is: " + url); + throw new StorageUrlIsEmpty(); + } + is = StorageUtil.getInputStreamByStorageClient(url); + } + catch (Exception e) { + // e.printStackTrace(); + logger.error("Error saving file: " + e.getMessage(), e); + throw new Exception("Error saving file: " + e.getMessage()); + } + return is; + } + + /** + * Checks if is available occurrence job error file by id. + * + * @param jobIdentifier + * the job identifier + * @return true, if is available occurrence job error file by id + * @throws Exception + * the exception + */ + public boolean isAvailableOccurrenceJobErrorFileById(String jobIdentifier) + throws Exception { + + ScopeProvider.instance.set(scope); + try { + String url = this.executorCall.getErrorLink(jobIdentifier); + if (url == null || url.isEmpty()) { return false; } - return true; - - } catch (Exception e) { - logger.error("Error in is Available Occurrence JobError File: "+e.getMessage(), e); - throw new Exception("Error in is Available Occurrence JobError File: "+e.getMessage()); + } + catch (Exception e) { + logger.error( + "Error in is Available Occurrence JobError File: " + + e.getMessage(), e); + throw new Exception( + "Error in is Available Occurrence JobError File: " + + e.getMessage()); } } - + /** + * Gets the last query. + * + * @return the last query + */ public String getLastQuery() { + return lastQuery; } - - } diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/session/Fetcher.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/session/Fetcher.java index 1a08a89..e988df2 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/session/Fetcher.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/server/session/Fetcher.java @@ -79,7 +79,7 @@ public class Fetcher implements Runnable, Closeable { } else{ countNullItems++; - logger.warn("fetch new row is null!! It is the number: "+countNullItems); + logger.warn("fetch new row is null!! 
Number of null value/s: "+countNullItems); if(MAX_CONSECUTIVE_ATTEMPTS_ON_NULL==countNullItems){ logger.warn("Fetched "+MAX_CONSECUTIVE_ATTEMPTS_ON_NULL+ " null rows, MAX ATTEMPTS reached, complete fetch true and closing stream!!"); silentClose(); @@ -93,7 +93,7 @@ public class Fetcher implements Runnable, Closeable { } } catch (Exception e) { - logger.error("Error in add row " + e.getMessage()); + logger.error("Error in add row " + e.getMessage(), e); silentClose(); } diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/DataSource.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/DataSource.java index 455b35f..25cec0c 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/DataSource.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/DataSource.java @@ -1,41 +1,41 @@ package org.gcube.portlets.user.speciesdiscovery.shared; -import java.io.Serializable; - import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; +import com.google.gwt.user.client.rpc.IsSerializable; + + /** - * - * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it + * The Class DataSource. * + * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it + * Jan 10, 2017 */ - @Entity -public class DataSource implements Serializable{ - - /** - * - */ - private static final long serialVersionUID = 3373136869904925484L; +public class DataSource implements IsSerializable{ @Id @GeneratedValue(strategy = GenerationType.IDENTITY) protected int internalId; - + private String id; private String name; private String description; - - public DataSource() {} - + /** - * - * @param id - * @param name - * @param description + * Instantiates a new data source. + */ + public DataSource() {} + + /** + * Instantiates a new data source. + * + * @param id the id + * @param name the name + * @param description the description */ public DataSource(String id, String name, String description) { setId(id); @@ -44,38 +44,75 @@ public class DataSource implements Serializable{ } //Used in Data Source advanced option to create the check list + /** + * Instantiates a new data source. + * + * @param id the id + * @param name the name + */ public DataSource(String id, String name){ setId(id); setName(name); } - - - + + /** + * Gets the id. + * + * @return the id + */ public String getId() { return id; } + /** + * Sets the id. + * + * @param id the new id + */ public void setId(String id) { this.id = id; } + /** + * Gets the name. + * + * @return the name + */ public String getName() { return name; } + /** + * Sets the name. + * + * @param name the new name + */ public void setName(String name) { this.name = name; } + /** + * Gets the description. + * + * @return the description + */ public String getDescription() { return description; } + /** + * Sets the description. 
+ * + * @param description the new description + */ public void setDescription(String description) { this.description = description; } + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ @Override public String toString() { StringBuilder builder = new StringBuilder(); diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/DataSourceCapability.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/DataSourceCapability.java index fe8b1b7..6ddffc9 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/DataSourceCapability.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/DataSourceCapability.java @@ -1,23 +1,33 @@ package org.gcube.portlets.user.speciesdiscovery.shared; -import java.io.Serializable; import java.util.ArrayList; -/** - * - * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it - * - */ -public class DataSourceCapability implements Serializable{ +import com.google.gwt.user.client.rpc.IsSerializable; - private static final long serialVersionUID = -9083819206898794333L; + + +/** + * The Class DataSourceCapability. + * + * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it + * Jan 10, 2017 + */ +public class DataSourceCapability implements IsSerializable{ private ArrayList listFilters; - private SpeciesCapability capability; - + + /** + * Instantiates a new data source capability. + */ public DataSourceCapability() {} - + + /** + * Instantiates a new data source capability. + * + * @param capability the capability + * @param listFilters the list filters + */ public DataSourceCapability(SpeciesCapability capability, ArrayList listFilters) { super(); this.capability = capability; @@ -25,20 +35,40 @@ public class DataSourceCapability implements Serializable{ } + /** + * Gets the list filters. + * + * @return the list filters + */ public ArrayList getListFilters() { return listFilters; } + /** + * Sets the list filters. + * + * @param listFilters the new list filters + */ public void setListFilters(ArrayList listFilters) { this.listFilters = listFilters; } + /** + * Gets the capability. + * + * @return the capability + */ public SpeciesCapability getCapability() { return capability; } + /** + * Sets the capability. + * + * @param capability the new capability + */ public void setCapability(SpeciesCapability capability) { this.capability = capability; } - + } diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/DataSourceModel.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/DataSourceModel.java index b32f5be..d39b7df 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/DataSourceModel.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/DataSourceModel.java @@ -1,57 +1,85 @@ package org.gcube.portlets.user.speciesdiscovery.shared; -import java.io.Serializable; import java.util.ArrayList; + /** - * - * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it + * The Class DataSourceModel. 
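+ * <p>
+ * Serialization now comes from the {@code DataSource} superclass, which
+ * implements GWT's {@code IsSerializable}, so the explicit
+ * {@code serialVersionUID} was dropped. A construction sketch (all field
+ * values are invented for illustration):
+ * <pre>
+ * DataSourceModel model = new DataSourceModel("gbif", "GBIF",
+ * 	"Occurrence data provider", capabilities, repositoryInfo);
+ * </pre>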
* + * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it + * Jan 10, 2017 */ +public class DataSourceModel extends DataSource { -public class DataSourceModel extends DataSource implements Serializable{ - - - private static final long serialVersionUID = 7399231525793036218L; - private ArrayList listCapabilities; private DataSourceRepositoryInfo dataSourceRepositoryInfo; - - public DataSourceModel() {} + /** - * - * @param id - * @param name - * @param description - * @param listCapabilities - * @param dsInfo + * Instantiates a new data source model. + */ + public DataSourceModel() {} + + /** + * Instantiates a new data source model. + * + * @param id the id + * @param name the name + * @param description the description + * @param listCapabilities the list capabilities + * @param dsInfo the ds info */ public DataSourceModel(String id, String name, String description, ArrayList listCapabilities, DataSourceRepositoryInfo dsInfo) { super(id,name,description); setListCapabilities(listCapabilities); setDataSourceRepositoryInfo(dsInfo); } - + + /** + * Sets the data source repository info. + * + * @param dsInfo the new data source repository info + */ private void setDataSourceRepositoryInfo(DataSourceRepositoryInfo dsInfo) { this.dataSourceRepositoryInfo = dsInfo; - + } //Used in Data Source advanced option to create the check list + /** + * Instantiates a new data source model. + * + * @param id the id + * @param name the name + */ public DataSourceModel(String id, String name){ super(id,name); } - - + + + /** + * Gets the list capabilities. + * + * @return the list capabilities + */ public ArrayList getListCapabilities() { return listCapabilities; } + /** + * Sets the list capabilities. + * + * @param listCapabilities the new list capabilities + */ public void setListCapabilities(ArrayList listCapabilities) { this.listCapabilities = listCapabilities; } + /** + * Gets the data source repository info. + * + * @return the data source repository info + */ public DataSourceRepositoryInfo getDataSourceRepositoryInfo() { return dataSourceRepositoryInfo; } diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/DataSourceRepositoryInfo.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/DataSourceRepositoryInfo.java index db8b3ea..7ac4d25 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/DataSourceRepositoryInfo.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/DataSourceRepositoryInfo.java @@ -1,30 +1,37 @@ package org.gcube.portlets.user.speciesdiscovery.shared; -import java.io.Serializable; import java.util.Map; +import com.google.gwt.user.client.rpc.IsSerializable; + + /** - * - * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it + * The Class DataSourceRepositoryInfo. * + * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it + * Jan 10, 2017 */ +public class DataSourceRepositoryInfo implements IsSerializable{ -public class DataSourceRepositoryInfo implements Serializable{ - - - /** - * - */ - private static final long serialVersionUID = -4557351371954637191L; - private String logoUrl; private String pageUrl; private Map properties; private String name; private String description; - + + /** + * Instantiates a new data source repository info. + */ public DataSourceRepositoryInfo() {} - + + /** + * Instantiates a new data source repository info. 
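+ * <p>
+ * A sketch with purely illustrative values (the properties map is typically
+ * filled from the service's repository metadata):
+ * <pre>
+ * DataSourceRepositoryInfo info = new DataSourceRepositoryInfo(
+ * 	"http://example.org/logo.png", "http://example.org", properties,
+ * 	"An example biodiversity repository");
+ * </pre>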
+ * + * @param logoUrl the logo url + * @param pageUrl the page url + * @param properties the properties + * @param description the description + */ public DataSourceRepositoryInfo(String logoUrl, String pageUrl, Map properties, String description) { this.logoUrl = logoUrl; this.pageUrl = pageUrl; @@ -32,33 +39,90 @@ public class DataSourceRepositoryInfo implements Serializable{ this.description = description; } + /** + * Gets the logo url. + * + * @return the logo url + */ public String getLogoUrl() { return logoUrl; } + + /** + * Sets the logo url. + * + * @param logoUrl the new logo url + */ public void setLogoUrl(String logoUrl) { this.logoUrl = logoUrl; } + + /** + * Gets the page url. + * + * @return the page url + */ public String getPageUrl() { return pageUrl; } + + /** + * Sets the page url. + * + * @param pageUrl the new page url + */ public void setPageUrl(String pageUrl) { this.pageUrl = pageUrl; } + + /** + * Gets the properties. + * + * @return the properties + */ public Map getProperties() { return properties; } + + /** + * Sets the properties. + * + * @param properties the properties + */ public void setProperties(Map properties) { this.properties = properties; } + + /** + * Gets the name. + * + * @return the name + */ public String getName() { return name; } + + /** + * Gets the description. + * + * @return the description + */ public String getDescription() { return description; } + + /** + * Sets the description. + * + * @param description the new description + */ public void setDescription(String description) { this.description = description; } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ @Override public String toString() { StringBuilder builder = new StringBuilder(); @@ -73,5 +137,5 @@ public class DataSourceRepositoryInfo implements Serializable{ builder.append("]"); return builder.toString(); } - + } diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/ResultRow.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/ResultRow.java index bafad19..5253ee5 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/ResultRow.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/ResultRow.java @@ -1,5 +1,5 @@ /** - * + * */ package org.gcube.portlets.user.speciesdiscovery.shared; @@ -17,7 +17,7 @@ import javax.persistence.Id; import javax.persistence.OneToMany; /** - * + * * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it * */ @@ -48,7 +48,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab public static final String BASE_TAXON_VALUE = "baseTaxonValue"; public static final String DATAPROVIDER_ID = "dataProviderId"; public static final String DATASOURCE_ID = "dataSourceId"; - + public static final String KINGDOM_ID = "kingdomID"; public static final String PHYLUM_ID = "phylumID"; public static final String CLASS_ID = "classID"; @@ -56,7 +56,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab public static final String FAMILY_ID = "familyID"; public static final String GENUS_ID = "genusID"; public static final String SPECIES_ID = "speciesID"; - + public final static String SCIENTIFICNAMEAUTHORSHIP = "scientificNameAuthorship"; //USED public final static String CREDITS = "credits"; //USED public final static String LSID = "lsid"; //USED @@ -67,7 +67,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab @Id @GeneratedValue(strategy = GenerationType.IDENTITY) protected int 
id; - + protected boolean selected = false; protected String dataSourceId; protected String dataSourceName; @@ -82,10 +82,10 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab @OneToMany(cascade=CascadeType.PERSIST, fetch=FetchType.EAGER) protected List commonNames = new ArrayList(); - + @OneToMany(cascade=CascadeType.PERSIST, fetch=FetchType.EAGER) protected List matchingTaxon = new ArrayList(); - + @OneToMany(cascade=CascadeType.PERSIST, fetch=FetchType.EAGER) protected List properties = new ArrayList(); @@ -108,17 +108,13 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab protected String scientificNameAuthorship; protected String credits; protected String lsid; -// protected String propertiesAsXml; - -// protected ItemParameterList properties; - + protected boolean existsProperties = false; - protected ResultRow() { } - - + + /** * @param id */ @@ -140,7 +136,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab return id+""; } - + /** * @return the selected */ @@ -162,7 +158,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab public String getDataSourceId() { return dataSourceId; } - + /** * @param dataSourceId @@ -407,7 +403,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab } /** - * + * * @return true if there are common names */ public boolean existsCommonName() { @@ -415,7 +411,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab } /** - * + * * @param the * boolean to set */ @@ -486,7 +482,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab public void setSpeciesID(String speciesID) { this.speciesID = speciesID; } - + public String getServiceId() { return serviceIdField; } @@ -513,7 +509,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab } public void setExistsProperties(boolean b) { - this.existsProperties = b; + this.existsProperties = b; } public boolean existsProperties() { @@ -522,7 +518,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab public List getParents() { - + Collections.sort(matchingTaxon); // for (Taxon t : matchingTaxon) { // System.out.println("+++ Parent :" +t.getId() + ", name: "+t.getName() +", rank: "+t.getRank()); diff --git a/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/SpeciesCapability.java b/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/SpeciesCapability.java index 2c9bc68..dab8010 100644 --- a/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/SpeciesCapability.java +++ b/src/main/java/org/gcube/portlets/user/speciesdiscovery/shared/SpeciesCapability.java @@ -1,30 +1,31 @@ package org.gcube.portlets.user.speciesdiscovery.shared; +import com.google.gwt.user.client.rpc.IsSerializable; + + +public enum SpeciesCapability implements IsSerializable{ -public enum SpeciesCapability { - //Filters - FROMDATE("FROMDATE", "Date From"), - TODATE("DATETO", "Date To"), - LOWERBOUND("LOWERBOUND", "Lower Bound"), + FROMDATE("FROMDATE", "Date From"), + TODATE("DATETO", "Date To"), + LOWERBOUND("LOWERBOUND", "Lower Bound"), UPPERBOUND("UPPERBOUND", "Upper Bound"), - + //Capabilities RESULTITEM("RESULTITEM", "Occurrence"), TAXONOMYITEM("TAXONOMYITEM", "Taxon"), OCCURRENCESPOINTS("OCCURRENCESPOINTS", "OccurrencesPoints"), - - + SYNONYMS("SYNONYMS", "Synonyms"), UNFOLD("UNFOLD", "Unfold"), NAMESMAPPING("NAMESMAPPING", "Names 
Mapping"), - + UNKNOWN("UNKNOWN", "unknown"); - + private String id; private String name; - - private SpeciesCapability(){ + + private SpeciesCapability(){ } diff --git a/src/main/resources/org/gcube/portlets/user/speciesdiscovery/SpeciesDiscovery.gwt.xml b/src/main/resources/org/gcube/portlets/user/speciesdiscovery/SpeciesDiscovery.gwt.xml index cd8dcb2..72dafc0 100644 --- a/src/main/resources/org/gcube/portlets/user/speciesdiscovery/SpeciesDiscovery.gwt.xml +++ b/src/main/resources/org/gcube/portlets/user/speciesdiscovery/SpeciesDiscovery.gwt.xml @@ -7,7 +7,8 @@ - + + diff --git a/src/main/webapp/WEB-INF/web.xml b/src/main/webapp/WEB-INF/web.xml index 3b125ec..b3c99c1 100644 --- a/src/main/webapp/WEB-INF/web.xml +++ b/src/main/webapp/WEB-INF/web.xml @@ -36,26 +36,26 @@ - - GisViewerService - org.gcube.portlets.user.gcubegisviewer.server.GCubeGisViewerServletImpl - + + + + - - GisViewerService - /speciesdiscovery/GisViewerService - + + + + - - MapGenerator - org.gcube.portlets.user.gisviewer.server.MapGenerator - + + + + - - MapGenerator - /speciesdiscovery/MapGenerator - + + + + diff --git a/src/test/java/org/gcube/portlets/user/speciesdiscovery/client/DBTester.java b/src/test/java/org/gcube/portlets/user/speciesdiscovery/client/DBTester.java index 83cefbe..b59599b 100644 --- a/src/test/java/org/gcube/portlets/user/speciesdiscovery/client/DBTester.java +++ b/src/test/java/org/gcube/portlets/user/speciesdiscovery/client/DBTester.java @@ -1,30 +1,35 @@ /** - * + * */ package org.gcube.portlets.user.speciesdiscovery.client; import java.sql.Connection; -import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; -import java.sql.Statement; +import java.util.ArrayList; import java.util.HashMap; +import java.util.List; import java.util.Map; import javax.naming.Context; import javax.naming.InitialContext; import javax.naming.NamingException; +import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.Persistence; +import javax.persistence.Query; +import javax.persistence.TypedQuery; +import javax.persistence.metamodel.EntityType; import org.apache.log4j.Logger; +import org.gcube.portlets.user.speciesdiscovery.shared.ResultRow; import org.h2.jdbcx.JdbcDataSource; /** * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it * @Nov 11, 2013 - * + * */ public class DBTester { @@ -33,74 +38,172 @@ public class DBTester { public static Logger logger = Logger.getLogger(DBTester.class); - + public static void main(String[] a) throws Exception { - /* - Class.forName("org.h2.Driver"); - Connection conn = DriverManager.getConnection("jdbc:h2:/home/francesco-mangiacrapa/Portal-Bundle2.2/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;create=true", "",""); + + //Class.forName("org.h2.Driver"); + //Connection conn = DriverManager.getConnection("jdbc:h2:/home/francesco-mangiacrapa/Portal-Bundle-3.0.0-3.2.0/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;create=true", "",""); // add application code here - - Statement stat = conn.createStatement(); - ResultSet rs = stat.executeQuery("select * from TaxonomyRow"); - + + /* Statement stat = conn.createStatement(); + ResultSet rs = stat.executeQuery("select * from ResultRow"); + ResultSetMetaData meta = rs.getMetaData(); int columnCount = meta.getColumnCount(); - - while (rs.next()) { System.out.println("New row"); for (int i = 1; i <= columnCount; i++) { - + // System.out.println("ColumName: "+ 
meta.getColumnName(i)); System.out.println("ColumLabel: "+meta.getColumnLabel(i)); System.out.println(rs.getString(meta.getColumnLabel(i))); - + } - + System.out.println("\n\n"); - } - conn.close(); - */ - - testJdbcDataSource(); + }*/ + + //testJdbcDataSource(); + + //String queryString = "SELECT MIN(tax.id) from Taxon tax"; + + +// +// testTypedQuery(queryString, Taxon.class); + + getAllEntities(); + + + String queryString = "SELECT *" + + " FROM "+ResultRow.class.getSimpleName()+" r" + + " LEFT OUTER JOIN RESULTROW_TAXON rt"; +// " INNER JOIN "+Taxon.class.getSimpleName()+" t"; + + queryString = "select *" + + " from RESULTROW r JOIN RESULTROW_TAXON rt on r.ID=rt.RESULTROW_ID JOIN TAXON t on t.INTERNALID=rt.MATCHINGTAXON_INTERNALID" + + " where t.RANK = 'Genus' and t.ID IN" + + " (select MIN(tax.ID) from TAXON tax)"; +////// +// +// testTypedQuery(queryString, ResultRow.class); + + //testQuery(queryString); + + + testNativeQuery(queryString, ResultRow.class); + } - - + /** + * @param queryString + * @param class1 + */ + private static void testNativeQuery(String queryString, Class className) { + + EntityManagerFactory emF = createEntityManagerFactory("/home/francesco-mangiacrapa/Portal-Bundle-3.0.0-3.2.0/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;"); + EntityManager em = emF.createEntityManager(); + Query query = em.createNativeQuery(queryString, className); + + List listResult = new ArrayList(); + try { + listResult = query.getResultList(); + for (Object object : listResult) { + System.out.println(object.toString()); + } + } catch (Exception e) { + logger.error("Error in TypedQuery: " + e.getMessage(), e); + } finally { + em.close(); + } + + + } + + public static void getAllEntities(){ + EntityManagerFactory emF = createEntityManagerFactory("/home/francesco-mangiacrapa/Portal-Bundle-3.0.0-3.2.0/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;"); + EntityManager em = emF.createEntityManager(); + for (EntityType entity : em.getMetamodel().getEntities()) { + final String className = entity.getName(); + System.out.println("Trying select * from: " + className); + Query q = em.createQuery("SELECT c from " + className + " c"); + q.getResultList().iterator(); + System.out.println("ok: " + className); + } + + + } + + public static void testTypedQuery(String queryString, Class classToReturn){ + EntityManagerFactory emF = createEntityManagerFactory("/home/francesco-mangiacrapa/Portal-Bundle-3.0.0-3.2.0/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;"); + EntityManager em = emF.createEntityManager(); + TypedQuery tQuery = em.createQuery(queryString, classToReturn); + + List listResult = new ArrayList(); + try { + listResult = tQuery.getResultList(); + System.out.println(listResult.toString()); + } catch (Exception e) { + logger.error("Error in TypedQuery: " + e.getMessage(), e); + } finally { + em.close(); + } + + } + + public static void testQuery(String queryString){ + EntityManagerFactory emF = createEntityManagerFactory("/home/francesco-mangiacrapa/Portal-Bundle-3.0.0-3.2.0/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;"); + EntityManager em = emF.createEntityManager(); + Query query = em.createQuery(queryString); + + List listResult = new ArrayList(); + try { + listResult = query.getResultList(); + System.out.println(listResult.toString()); + } catch (Exception e) { + logger.error("Error in TypedQuery: " + e.getMessage(), e); + } finally { + em.close(); + } + + } + + + + public static void testJdbcDataSource() throws NamingException { 
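 		// This test binds a file-based H2 JdbcDataSource into JNDI
 		// (java:global/jpa-eclipselink/TaxonomyRow) and dumps every column of
 		// every TaxonomyRow record; the absolute DB path below is
 		// developer-specific and must be adapted before running.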
JdbcDataSource ds = new JdbcDataSource(); ds.setURL("jdbc:h2:/home/francesco-mangiacrapa/Portal-Bundle2.2/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;create=true"); Context ctx = new InitialContext(); ctx.bind("java:global/jpa-eclipselink/TaxonomyRow", ds); - + // final Context context = EJBContainer.createEJBContainer(p).getContext(); -// +// // Movies movies = (Movies) context.lookup("java:global/jpa-eclipselink/Movies"); try { Connection conn = ds.getConnection(); ResultSet rs = conn.createStatement().executeQuery("select * from TaxonomyRow"); ResultSetMetaData meta = rs.getMetaData(); int columnCount = meta.getColumnCount(); - - + + while (rs.next()) { System.out.println("New row"); for (int i = 1; i <= columnCount; i++) { - + // System.out.println("ColumName: "+ meta.getColumnName(i)); System.out.println("ColumLabel: "+meta.getColumnLabel(i)); System.out.println(rs.getString(meta.getColumnLabel(i))); - + } - + System.out.println("\n\n"); } conn.close(); - + } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); diff --git a/src/test/java/org/gcube/portlets/user/speciesdiscovery/client/ListPlugins.java b/src/test/java/org/gcube/portlets/user/speciesdiscovery/client/ListPlugins.java index 0275e36..a839aa4 100644 --- a/src/test/java/org/gcube/portlets/user/speciesdiscovery/client/ListPlugins.java +++ b/src/test/java/org/gcube/portlets/user/speciesdiscovery/client/ListPlugins.java @@ -1,5 +1,5 @@ /** - * + * */ package org.gcube.portlets.user.speciesdiscovery.client; @@ -9,44 +9,48 @@ import java.util.List; import java.util.concurrent.TimeUnit; import org.gcube.common.scope.api.ScopeProvider; -import org.gcube.data.spd.client.proxies.Classification; -import org.gcube.data.spd.client.proxies.Executor; -import org.gcube.data.spd.client.proxies.Manager; -import org.gcube.data.spd.client.proxies.Occurrence; +import org.gcube.data.spd.client.proxies.ClassificationClient; +import org.gcube.data.spd.client.proxies.ExecutorClient; +import org.gcube.data.spd.client.proxies.ManagerClient; +import org.gcube.data.spd.client.proxies.OccurrenceClient; import org.gcube.data.spd.model.PluginDescription; + /** - * @author "Federico De Faveri defaveri@isti.cnr.it" + * The Class ListPlugins. * + * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it + * Jan 10, 2017 */ public class ListPlugins { - - protected static Manager call; - protected static Occurrence occurrencesCall; - protected static Classification classificationCall; - protected static Executor executorCall; - + + protected static ManagerClient call; + protected static OccurrenceClient occurrencesCall; + protected static ClassificationClient classificationCall; + protected static ExecutorClient executorCall; + /** - * @param args + * The main method. 
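+	 * <p>
+	 * Proxy construction under spd-client-library 4.0, mirroring the code
+	 * below (the commented alternatives target a specific service endpoint):
+	 * <pre>{@code
+	 * ManagerClient call = manager().withTimeout(3, TimeUnit.MINUTES).build();
+	 * List<PluginDescription> plugins = call.getPluginsDescription();
+	 * }</pre>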
+ * + * @param args the arguments */ public static void main(String[] args) { - + String scope = "/gcube/devsec"; ScopeProvider.instance.set(scope); - + // this.call = manager().at( URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build(); // this.occurrencesCall = occurrences().at( URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build(); // this.classificationCall = classification().at( URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build(); - - + call = manager().withTimeout(3, TimeUnit.MINUTES).build(); // executorCall = executor().withTimeout(3, TimeUnit.MINUTES).build(); // occurrencesCall = occurrence().withTimeout(3, TimeUnit.MINUTES).build(); // classificationCall = classification().withTimeout(3, TimeUnit.MINUTES).build(); - + // call = manager().at(URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build(); - + //Manager call = manager().withTimeout(3, TimeUnit.MINUTES).build(); List plugins = call.getPluginsDescription(); diff --git a/src/test/java/org/gcube/portlets/user/speciesdiscovery/client/ServiceQuery.java b/src/test/java/org/gcube/portlets/user/speciesdiscovery/client/ServiceQuery.java index 994bd0e..3911133 100644 --- a/src/test/java/org/gcube/portlets/user/speciesdiscovery/client/ServiceQuery.java +++ b/src/test/java/org/gcube/portlets/user/speciesdiscovery/client/ServiceQuery.java @@ -1,5 +1,5 @@ /** - * + * */ package org.gcube.portlets.user.speciesdiscovery.client; @@ -10,12 +10,12 @@ import java.util.concurrent.TimeUnit; import org.gcube.application.framework.core.session.ASLSession; import org.gcube.application.framework.core.session.SessionManager; import org.gcube.common.scope.api.ScopeProvider; -import org.gcube.data.spd.client.proxies.Manager; +import org.gcube.data.spd.client.proxies.ManagerClient; import org.gcube.data.spd.model.exceptions.InvalidQueryException; import org.gcube.data.spd.model.products.ResultElement; import org.gcube.data.spd.model.products.ResultItem; -import org.gcube.data.spd.stubs.exceptions.UnsupportedCapabilityException; -import org.gcube.data.spd.stubs.exceptions.UnsupportedPluginException; +import org.gcube.data.spd.model.service.exceptions.UnsupportedCapabilityException; +import org.gcube.data.spd.model.service.exceptions.UnsupportedPluginException; import org.gcube.data.streams.Stream; import org.gcube.portlets.user.speciesdiscovery.server.service.ResultItemConverter; import org.gcube.portlets.user.speciesdiscovery.server.service.StreamIterator; @@ -24,76 +24,81 @@ import org.gcube.portlets.user.speciesdiscovery.server.stream.CloseableIterator; import org.gcube.portlets.user.speciesdiscovery.server.stream.ConversionIterator; import org.gcube.portlets.user.speciesdiscovery.shared.ResultRow; + /** - * @author "Federico De Faveri defaveri@isti.cnr.it" + * The Class ServiceQuery. * + * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it + * Jan 10, 2017 */ public class ServiceQuery { /** * @param args - * @throws UnsupportedPluginException - * @throws InvalidQueryException - * @throws UnsupportedCapabilityException + * @throws UnsupportedPluginException + * @throws InvalidQueryException + * @throws UnsupportedCapabilityException */ - + private static String username = "test.user"; - + + /** + * The main method. 
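+	 * <p>
+	 * The conversion pipeline exercised below, in sketch form (generic
+	 * parameters restored here for readability; they are assumptions, as the
+	 * surrounding code uses raw types):
+	 * <pre>{@code
+	 * Stream<ResultElement> results = call.search("SEARCH BY SN 'Latimeria chalumnae' IN GBIF RETURN Product ...");
+	 * StreamIterator<ResultElement> input = new StreamIterator<ResultElement>(results);
+	 * ConversionIterator<ResultElement, ResultItem> caster = buildCaster(input);
+	 * ConversionIterator<ResultItem, ResultRow> rows =
+	 * 	new ConversionIterator<ResultItem, ResultRow>(caster, new ResultItemConverter(session));
+	 * }</pre>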
+ * + * @param args the arguments + * @throws InvalidQueryException the invalid query exception + * @throws UnsupportedPluginException the unsupported plugin exception + * @throws UnsupportedCapabilityException the unsupported capability exception + */ public static void main(String[] args) throws InvalidQueryException, UnsupportedPluginException, UnsupportedCapabilityException { String scope = "/gcube/devsec"; // String scope = "/d4science.research-infrastructures.eu/gCubeApps/BiodiversityResearchEnvironment"; //Production ScopeProvider.instance.set(scope); - ASLSession session = SessionManager.getInstance().getASLSession("123", username); - - Manager call = manager().withTimeout(3, TimeUnit.MINUTES).build(); - + ManagerClient call = manager().withTimeout(3, TimeUnit.MINUTES).build(); // Manager call = manager().at(URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build(); - //Stream results = call.search("SEARCH BY CN 'shark' RESOLVE WITH OBIS EXPAND WITH ITIS RETURN Product"); // Stream results = call.search("SEARCH BY CN 'shark' RESOLVE WITH OBIS EXPAND WITH ITIS WHERE coordinate <= 15.12, 16.12 RETURN Product"); - - // Stream results = call.search("SEARCH BY SN 'sarda sarda' RESOLVE WITH OBIS EXPAND WITH ITIS WHERE coordinate <= 15.12, 16.12 RETURN Product"); - System.out.println("start query..."); - Stream results = call.search("SEARCH BY SN 'Latimeria chalumnae' IN GBIF RETURN Product HAVING xpath(\"//product[type='Occurrence' and count>0]\")"); - // Stream results = call.search("SEARCH BY SN 'Palinurus elephas' IN WoRMS RETURN Taxon"); - StreamIterator input = new StreamIterator(results); - - + + System.out.println("Results from service..."); int i=0; while(results.hasNext()) { ResultElement elem = results.next(); System.out.println(++i +") el: "+elem.getId() +" type: "+elem.getType().name()); } - - + System.out.println("Results from conversion..."); ConversionIterator caster = buildCaster(input); - + //from ResultItem to ResultRow ResultItemConverter converter = new ResultItemConverter(session); ConversionIterator inputConverter = new ConversionIterator(caster, converter); while (inputConverter.hasNext()) { ResultRow row = inputConverter.next(); - System.out.println(++i +") row: "+row); - } - - + results.close(); System.out.println("DONE"); } - + + /** + * Builds the caster. + * + * @param the generic type + * @param the generic type + * @param input the input + * @return the conversion iterator + */ protected static ConversionIterator buildCaster(CloseableIterator input) - { + { CastConverter elementConverter = new CastConverter(); ConversionIterator caster = new ConversionIterator(input, elementConverter); return caster;
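 		// Note: in the usage above, CastConverter simply narrows each
 		// ResultElement to ResultItem, so this helper assumes the query's
 		// RETURN clause yields only ResultItem instances; a mismatched
 		// element would fail at cast time.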