2013-03-04 14:38:25 +01:00
|
|
|
package org.gcube.portlets.user.speciesdiscovery.server;
|
|
|
|
|
|
|
|
import java.io.File;
|
|
|
|
import java.io.FileWriter;
|
|
|
|
import java.io.IOException;
|
|
|
|
import java.io.InputStream;
|
2013-05-20 18:50:24 +02:00
|
|
|
import java.sql.SQLException;
|
2013-03-04 14:38:25 +01:00
|
|
|
import java.util.ArrayList;
|
|
|
|
import java.util.Calendar;
|
|
|
|
import java.util.Collection;
|
|
|
|
import java.util.Date;
|
|
|
|
import java.util.HashMap;
|
|
|
|
import java.util.Iterator;
|
|
|
|
import java.util.List;
|
|
|
|
import java.util.Map;
|
|
|
|
import java.util.concurrent.TimeUnit;
|
|
|
|
|
|
|
|
import javax.persistence.EntityManager;
|
|
|
|
import javax.persistence.Query;
|
|
|
|
import javax.persistence.criteria.CriteriaBuilder;
|
|
|
|
import javax.persistence.criteria.CriteriaQuery;
|
|
|
|
import javax.persistence.criteria.Predicate;
|
|
|
|
|
|
|
|
import net.sf.csv4j.CSVWriter;
|
|
|
|
|
|
|
|
import org.apache.log4j.Logger;
|
|
|
|
import org.gcube.application.framework.core.session.ASLSession;
|
2013-10-07 14:40:14 +02:00
|
|
|
import org.gcube.common.homelibrary.home.workspace.Workspace;
|
|
|
|
import org.gcube.common.homelibrary.home.workspace.WorkspaceFolder;
|
|
|
|
import org.gcube.common.homelibrary.util.WorkspaceUtil;
|
2013-04-30 17:47:00 +02:00
|
|
|
import org.gcube.data.spd.model.products.OccurrencePoint;
|
|
|
|
import org.gcube.data.spd.model.products.ResultElement;
|
|
|
|
import org.gcube.data.spd.model.products.TaxonomyItem;
|
2017-01-11 18:39:11 +01:00
|
|
|
import org.gcube.data.spd.model.service.types.CompleteJobStatus;
|
2013-03-04 14:38:25 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.client.ConstantsSpeciesDiscovery;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.client.model.ClassificationModel;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.client.util.GridField;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.asl.SessionUtil;
|
2017-02-10 15:18:57 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.job.GisLayerJobUtil;
|
2013-03-04 14:38:25 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.job.OccurrenceJobUtil;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.job.OccurrenceKeys;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.job.TaxonomyJobUtil;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.persistence.DaoSession;
|
2017-02-09 18:23:28 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.GisLayerJobPersistence;
|
2013-03-04 14:38:25 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.OccurrenceJobPersistence;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.OccurrenceRowPersistence;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.ResultRowPersistence;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.TaxonRowPersistence;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.TaxonomyJobPersistence;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.TaxonomyRowPersistence;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.service.IteratorChainBuilder;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.service.SpeciesService;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.service.TaxonomyItemConverter;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.session.FetchingSession;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.session.FetchingSessionUtil;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.session.FilterableFetchingBuffer;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.session.SelectableFetchingBuffer;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.stream.CSVGenerator;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.stream.CloseableIterator;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.stream.OccurenceCSVConverter;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.stream.OccurenceCSVConverterOpenModeller;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.stream.aggregation.FieldAggregator;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.stream.aggregation.TaxonomyClassificationAggregator;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.util.DateUtil;
|
2013-10-24 12:52:34 +02:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.util.GetWorkspaceUtil;
|
2013-03-04 14:38:25 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.server.util.QueryUtil;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.CommonName;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.DataSource;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.DataSourceModel;
|
2013-03-04 16:47:57 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.DatabaseServiceException;
|
2013-03-04 14:38:25 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.DownloadState;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.FetchingElement;
|
2017-02-10 15:18:57 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.GisLayerJob;
|
2017-02-15 16:45:21 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.InvalidJobIdException;
|
2013-03-04 14:38:25 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.ItemParameter;
|
2017-02-09 18:23:28 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.JobGisLayerModel;
|
2013-03-04 14:38:25 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.JobOccurrencesModel;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.JobTaxonomyModel;
|
2013-10-24 12:52:34 +02:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.LightTaxonomyRow;
|
2013-03-04 14:38:25 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.MainTaxonomicRankEnum;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.Occurrence;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.OccurrenceBatch;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.OccurrencesJob;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.OccurrencesSaveEnum;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.OccurrencesStatus;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.ResultRow;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.SaveFileFormat;
|
2013-03-15 19:18:12 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.SearchByQueryParameter;
|
2013-03-04 14:38:25 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.SearchFilters;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.SearchResult;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.SearchResultType;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.SearchServiceException;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.SearchStatus;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.SearchType;
|
2017-03-20 14:43:11 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.SessionExpired;
|
2013-03-04 14:38:25 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.Taxon;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.TaxonomyJob;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.TaxonomyRow;
|
2013-10-24 12:52:34 +02:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.cluster.ClusterCommonNameDataSourceForResultRow;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.cluster.ClusterCommonNameDataSourceForTaxonomyRow;
|
2013-03-04 14:38:25 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.cluster.ClusterStructuresForResultRow;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.cluster.ClusterStructuresForTaxonomyRow;
|
2013-10-24 12:52:34 +02:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.cluster.ManagerClusterCommonNameDataSourceForResultRow;
|
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.cluster.ManagerClusterCommonNameDataSourceForTaxonomyRow;
|
2013-03-04 14:38:25 +01:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.filter.ResultFilter;
|
2013-07-18 18:32:28 +02:00
|
|
|
import org.gcube.portlets.user.speciesdiscovery.shared.util.NormalizeString;
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
import com.google.gwt.user.server.rpc.RemoteServiceServlet;
|
|
|
|
|
|
|
|
/**
|
|
|
|
* The server side implementation of the RPC service.
|
|
|
|
* @author "Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it"
|
2017-02-16 12:44:41 +01:00
|
|
|
* @author "Federico De Faveri defaveri@isti.cnr.it" -
|
2013-03-04 14:38:25 +01:00
|
|
|
*/
|
|
|
|
public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements TaxonomySearchService {
|
|
|
|
|
|
|
|
protected static final String SAVE_CHILDREN_OF = "Save children of ";
|
|
|
|
protected static final String RESUBMIT = "Resubmit";
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
private static final long serialVersionUID = -287193068445844326L;
|
|
|
|
|
|
|
|
protected static final long MAX_BUFFERING_ELEMENTS = 1000;
|
|
|
|
protected static final long BUFFER_LIMIT = 10;
|
|
|
|
|
|
|
|
protected Logger logger = Logger.getLogger(TaxonomySearchService.class);
|
|
|
|
|
|
|
|
public static final String TAXONOMYUNKNOWN = "Unknown";
|
|
|
|
public static final String BASETAXONOMY = "Kingdom";
|
|
|
|
public static final String UNK = "Unk";
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/**
|
2017-03-20 14:43:11 +01:00
|
|
|
* Gets the ASL session.
|
|
|
|
*
|
|
|
|
* @return the ASL session
|
|
|
|
*/
|
|
|
|
protected ASLSession getASLSession()
|
2013-03-04 14:38:25 +01:00
|
|
|
{
|
|
|
|
return SessionUtil.getAslSession(this.getThreadLocalRequest().getSession());
|
|
|
|
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/**
|
|
|
|
* Gets the species service.
|
|
|
|
*
|
|
|
|
* @return the species service
|
|
|
|
* @throws SearchServiceException the search service exception
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
protected SpeciesService getSpeciesService() throws SearchServiceException
|
|
|
|
{
|
|
|
|
try {
|
|
|
|
ASLSession session = getASLSession();
|
|
|
|
return SessionUtil.getService(session);
|
|
|
|
} catch (Exception e) {
|
|
|
|
e.printStackTrace();
|
2013-03-04 16:47:57 +01:00
|
|
|
logger.error("An error occurred when contacting the species service", e);
|
2013-03-04 14:38:25 +01:00
|
|
|
// System.out.println("An error occurred retrieving the service" +e);
|
2013-03-04 16:47:57 +01:00
|
|
|
throw new SearchServiceException("contacting the species service.");
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/**
|
|
|
|
* Gets the search session.
|
|
|
|
*
|
|
|
|
* @return the search session
|
|
|
|
* @throws SearchServiceException the search service exception
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
protected FetchingSession<? extends FetchingElement> getSearchSession() throws SearchServiceException
|
|
|
|
{
|
|
|
|
ASLSession session = getASLSession();
|
|
|
|
FetchingSession<? extends FetchingElement> searchSession = SessionUtil.getCurrentSearchSession(session);
|
|
|
|
|
|
|
|
if (searchSession == null) {
|
|
|
|
logger.error("No search session found for user "+session.getUsername());
|
|
|
|
throw new SearchServiceException("No search session found for user "+session.getUsername());
|
|
|
|
}
|
|
|
|
|
|
|
|
return searchSession;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/**
|
|
|
|
* Gets the occurrence session.
|
|
|
|
*
|
|
|
|
* @return the occurrence session
|
|
|
|
* @throws SearchServiceException the search service exception
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
protected FetchingSession<Occurrence> getOccurrenceSession() throws SearchServiceException
|
|
|
|
{
|
|
|
|
ASLSession session = getASLSession();
|
|
|
|
FetchingSession<Occurrence> occurrenceSession = SessionUtil.getCurrentOccurrenceSession(session);
|
|
|
|
|
|
|
|
if (occurrenceSession == null) {
|
|
|
|
logger.error("No occurrence session found for user "+session.getUsername());
|
|
|
|
throw new SearchServiceException("No occurrence session found for user "+session.getUsername());
|
|
|
|
}
|
|
|
|
|
|
|
|
return occurrenceSession;
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* {@inheritDoc}
|
|
|
|
*/
|
|
|
|
@Override
|
|
|
|
public void searchByScientificName(String searchTerm, SearchFilters searchFilters) throws SearchServiceException {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("searchByScientificName searchTerm: "+searchTerm+" searchFilters: "+searchFilters);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
stopSearch();
|
|
|
|
search(searchTerm, SearchType.BY_SCIENTIFIC_NAME, searchFilters);
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* {@inheritDoc}
|
|
|
|
*/
|
|
|
|
@Override
|
|
|
|
public void searchByCommonName(String searchTerm, SearchFilters searchFilters) throws SearchServiceException {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("searchByCommonName searchTerm: "+searchTerm+" searchFilters: "+searchFilters);
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
stopSearch();
|
|
|
|
search(searchTerm, SearchType.BY_COMMON_NAME, searchFilters);
|
|
|
|
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/**
|
|
|
|
* Search.
|
|
|
|
*
|
|
|
|
* @param searchTerm the search term
|
|
|
|
* @param searchType the search type
|
|
|
|
* @param searchFilters the search filters
|
|
|
|
* @return the search result type
|
|
|
|
* @throws SearchServiceException the search service exception
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
protected SearchResultType search(String searchTerm, SearchType searchType, SearchFilters searchFilters) throws SearchServiceException
|
|
|
|
{
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-18 18:32:28 +02:00
|
|
|
ASLSession aslSession = getASLSession();
|
2013-03-04 14:38:25 +01:00
|
|
|
try {
|
|
|
|
deleteAllRowIntoDaoTable(); //RESET TABLE
|
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
|
|
|
CloseableIterator<ResultElement> input = taxonomyService.searchByFilters(searchTerm, searchType, searchFilters);
|
2015-07-22 17:19:38 +02:00
|
|
|
|
2013-07-18 18:32:28 +02:00
|
|
|
SessionUtil.setCurrentQuery(aslSession, taxonomyService.getLastQuery());
|
2013-03-04 14:38:25 +01:00
|
|
|
// System.out.println("returned input stream by service...");
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("returned input stream by service...");
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
SearchResultType resultType = QueryUtil.getResultType(searchFilters);
|
|
|
|
CloseableIterator<FetchingElement> output = IteratorChainBuilder.buildChain(input, resultType, aslSession);
|
|
|
|
FetchingSessionUtil.createFetchingSession(output, resultType, aslSession);
|
|
|
|
return resultType;
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("Error starting search "+searchType+" for term \""+searchTerm+"\" with filters "+searchFilters, e);
|
2013-07-18 18:32:28 +02:00
|
|
|
SessionUtil.setCurrentQuery(aslSession, "invalid query");
|
2013-03-04 14:38:25 +01:00
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#searchByQuery(java.lang.String)
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
2013-03-15 19:18:12 +01:00
|
|
|
public SearchByQueryParameter searchByQuery(String query) throws SearchServiceException {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("searchByQuery - query: "+query);
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
stopSearch();
|
2013-07-18 18:32:28 +02:00
|
|
|
ASLSession aslSession = getASLSession();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
try {
|
|
|
|
deleteAllRowIntoDaoTable(); //RESET TABLE
|
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
|
|
|
CloseableIterator<ResultElement> input = taxonomyService.searchByQuery(query);
|
2013-07-18 18:32:28 +02:00
|
|
|
|
|
|
|
SessionUtil.setCurrentQuery(aslSession, query);
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("service return iterator searched...");
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-15 19:18:12 +01:00
|
|
|
SearchByQueryParameter queryParameters = QueryUtil.getQueryResultType(query);
|
|
|
|
CloseableIterator<FetchingElement> output = IteratorChainBuilder.buildChain(input, queryParameters.getSearchResultType(), aslSession);
|
|
|
|
FetchingSessionUtil.createFetchingSession(output, queryParameters.getSearchResultType(), aslSession);
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("creating fetching session completed!");
|
2013-03-15 19:18:12 +01:00
|
|
|
return queryParameters;
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("Error starting search by query", e);
|
2013-07-18 18:32:28 +02:00
|
|
|
SessionUtil.setCurrentQuery(aslSession, "invalid query");
|
2013-03-04 14:38:25 +01:00
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/**
|
|
|
|
* Delete all row into dao table.
|
|
|
|
*
|
|
|
|
* @throws Exception the exception
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
private void deleteAllRowIntoDaoTable() throws Exception{
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("deleting all row into dao's");
|
2013-03-04 14:38:25 +01:00
|
|
|
ResultRowPersistence daoResultRow = null;
|
|
|
|
TaxonRowPersistence daoTaxon = null;
|
|
|
|
TaxonomyRowPersistence daoTaxonomyRow = null;
|
|
|
|
ASLSession session = getASLSession();
|
|
|
|
|
|
|
|
try {
|
|
|
|
daoResultRow = DaoSession.getResultRowDAO(session);
|
|
|
|
daoTaxon = DaoSession.getTaxonDAO(session);
|
|
|
|
daoTaxonomyRow = DaoSession.getTaxonomyDAO(session);
|
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("Error in delete all row - getDao's " +e.getMessage(), e);
|
|
|
|
throw new Exception("Error in delete all row- getDao's " + e.getMessage(), e);
|
|
|
|
}
|
|
|
|
|
|
|
|
try {
|
2015-07-22 17:19:38 +02:00
|
|
|
if(daoResultRow!=null)
|
2013-03-04 14:38:25 +01:00
|
|
|
daoResultRow.removeAll();
|
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
if(daoTaxon!=null)
|
2013-03-04 14:38:25 +01:00
|
|
|
daoTaxon.removeAll();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
if(daoTaxonomyRow!=null)
|
|
|
|
daoTaxonomyRow.removeAll();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("delete all row into Dao's - completed");
|
2013-03-04 14:38:25 +01:00
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("Error in delete all row");
|
|
|
|
throw new Exception("Error in delete all row" + e.getCause(), e);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* {@inheritDoc}
|
|
|
|
*/
|
|
|
|
@SuppressWarnings("unchecked")
|
|
|
|
@Override
|
2017-03-20 14:43:11 +01:00
|
|
|
public SearchResult<ResultRow> getSearchResultRows(int start, int limit, ResultFilter activeFiltersObject, boolean onlySelected) throws SessionExpired, SearchServiceException {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("getSearchResultRows start: "+start+" limit: "+limit+" onlySelected: "+onlySelected);
|
2013-03-04 14:38:25 +01:00
|
|
|
Long startTime = System.currentTimeMillis();
|
|
|
|
|
|
|
|
FetchingSession<ResultRow> searchSession = (FetchingSession<ResultRow>) getSearchSession();
|
|
|
|
ArrayList<ResultRow> chunk = new ArrayList<ResultRow>();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
try {
|
|
|
|
List<ResultRow> data = new ArrayList<ResultRow>();
|
|
|
|
if (onlySelected) {
|
|
|
|
SelectableFetchingBuffer<ResultRow> buffer = (SelectableFetchingBuffer<ResultRow>) searchSession.getBuffer();
|
|
|
|
data = buffer.getSelected();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
int end = Math.min(start+limit, data.size());
|
|
|
|
start = Math.min(start, end);
|
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("chunk selected data bounds [start: "+start+" end: " + end+"]");
|
2013-03-04 14:38:25 +01:00
|
|
|
data = data.subList(start, end);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
|
|
|
} else if (activeFiltersObject == null || !activeFiltersObject.isActiveFilters()) {
|
2013-03-04 14:38:25 +01:00
|
|
|
if(limit>0){
|
|
|
|
data = searchSession.getBuffer().getList(start,limit);
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
FilterableFetchingBuffer<ResultRow> buffer = (FilterableFetchingBuffer<ResultRow>) searchSession.getBuffer();
|
|
|
|
data = buffer.getFilteredList(activeFiltersObject);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
int end = Math.min(start+limit, data.size());
|
|
|
|
start = Math.min(start, end);
|
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("chunk filtered data bounds [start: "+start+" end: " + end+"]");
|
2013-03-04 14:38:25 +01:00
|
|
|
data = data.subList(start, end);
|
|
|
|
}
|
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Fetching data from search session buffer, size: "+data.size());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
for (ResultRow resultRow : data) {
|
|
|
|
|
|
|
|
//return common names?
|
|
|
|
if(activeFiltersObject == null || !activeFiltersObject.isLoadCommonName() || !resultRow.existsCommonName()){
|
|
|
|
resultRow.setCommonNames(null);
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
//return properties?
|
|
|
|
if(activeFiltersObject == null || !resultRow.existsProperties() || !activeFiltersObject.isLoadAllProperties()){
|
|
|
|
resultRow.setProperties(null);
|
|
|
|
}
|
|
|
|
chunk.add(resultRow);
|
2017-01-11 18:39:11 +01:00
|
|
|
logger.debug("getSearchResultRows returning on client result item with id: " +resultRow.getId() + " service id: "+resultRow.getServiceId());
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
Long endTime = System.currentTimeMillis() - startTime;
|
|
|
|
String time = String.format("%d msc %d sec", endTime, TimeUnit.MILLISECONDS.toSeconds(endTime));
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("returning "+chunk.size()+" elements in " + time);
|
2013-03-04 14:38:25 +01:00
|
|
|
} catch (Exception e) {
|
2017-03-20 14:43:11 +01:00
|
|
|
|
|
|
|
if(e instanceof SessionExpired)
|
|
|
|
throw new SessionExpired("The session is expired");
|
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.error("Error in getSearchResultRows ", e);
|
2013-03-04 14:38:25 +01:00
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
return new SearchResult<ResultRow>(chunk);
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/**
|
|
|
|
* Prints the properties.
|
|
|
|
*
|
|
|
|
* @param properties the properties
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
private void printProperties(List<ItemParameter> properties){
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
for (ItemParameter itemParameter : properties) {
|
|
|
|
System.out.println("Property "+itemParameter);
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getSearchTaxonomyRow(int, int, org.gcube.portlets.user.speciesdiscovery.shared.filter.ResultFilter, boolean)
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@SuppressWarnings("unchecked")
|
|
|
|
@Override
|
|
|
|
public SearchResult<TaxonomyRow> getSearchTaxonomyRow(int start, int limit, ResultFilter activeFiltersObject, boolean onlySelected) throws SearchServiceException {
|
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("getSearchResultRows start: "+start+" limit: "+limit+" onlySelected: "+onlySelected);
|
2013-03-04 14:38:25 +01:00
|
|
|
Long startTime = System.currentTimeMillis();
|
|
|
|
FetchingSession<TaxonomyRow> searchSession = (FetchingSession<TaxonomyRow>) getSearchSession();
|
|
|
|
ArrayList<TaxonomyRow> chunk = new ArrayList<TaxonomyRow>();
|
|
|
|
|
|
|
|
try {
|
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("current buffer size "+searchSession.getBuffer().size());
|
2013-03-04 14:38:25 +01:00
|
|
|
List<TaxonomyRow> data = new ArrayList<TaxonomyRow>();
|
|
|
|
|
|
|
|
if (onlySelected) {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("getting only selected data");
|
2013-03-04 14:38:25 +01:00
|
|
|
SelectableFetchingBuffer<TaxonomyRow> buffer = (SelectableFetchingBuffer<TaxonomyRow>) searchSession.getBuffer();
|
|
|
|
data = buffer.getSelected();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
int end = Math.min(start+limit, data.size());
|
|
|
|
start = Math.min(start, end);
|
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("chunk selected data bounds [start: "+start+" end: " + end+"]");
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
data = data.subList(start, end);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
|
|
|
} else if (activeFiltersObject == null || !activeFiltersObject.isActiveFilters()) {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("getting all available data");
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
if(limit>0){
|
|
|
|
Map<String, String> filterAndMap = new HashMap<String, String>();
|
|
|
|
filterAndMap.put(TaxonomyRow.IS_PARENT, "false");
|
|
|
|
data = searchSession.getBuffer().getList(filterAndMap, start,limit);
|
|
|
|
}
|
|
|
|
} else {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("getting filtered data");
|
2013-03-04 14:38:25 +01:00
|
|
|
FilterableFetchingBuffer<TaxonomyRow> buffer = (FilterableFetchingBuffer<TaxonomyRow>) searchSession.getBuffer();
|
|
|
|
data = buffer.getFilteredList(activeFiltersObject);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
int end = Math.min(start+limit, data.size());
|
|
|
|
start = Math.min(start, end);
|
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("chunk filtered data bounds [start: "+start+" end: " + end+"]");
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
data = data.subList(start, end);
|
|
|
|
}
|
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Fetching data from search session buffer, size: "+data.size());
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
for (TaxonomyRow taxonomyRow : data) {
|
|
|
|
//ADD common names
|
|
|
|
|
|
|
|
//return common names?
|
|
|
|
if(activeFiltersObject == null || !activeFiltersObject.isLoadCommonName() || !taxonomyRow.existsCommonName()){
|
|
|
|
taxonomyRow.setCommonNames(null);
|
|
|
|
}
|
|
|
|
|
|
|
|
//return properties?
|
|
|
|
if(activeFiltersObject == null || !taxonomyRow.existsProperties() || !activeFiltersObject.isLoadAllProperties()){
|
|
|
|
taxonomyRow.setProperties(null);
|
|
|
|
}
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("getSearchTaxonomyRow return on client taxonomy item with id: " +taxonomyRow.getId() + " service id: "+taxonomyRow.getServiceId());
|
2013-03-04 14:38:25 +01:00
|
|
|
chunk.add(taxonomyRow);
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
Long endTime = System.currentTimeMillis() - startTime;
|
|
|
|
String time = String.format("%d msc %d sec", endTime, TimeUnit.MILLISECONDS.toSeconds(endTime));
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("returning "+chunk.size()+" elements in " + time);
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("Error in getSearchTaxonomyRow " + e.getMessage(), e);
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
|
|
|
|
|
|
|
return new SearchResult<TaxonomyRow>(chunk);
|
|
|
|
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/**
|
|
|
|
* Load taxonomy parent by parent id.
|
|
|
|
*
|
|
|
|
* @param parentID the parent id
|
|
|
|
* @return the taxonomy row
|
|
|
|
* @throws Exception the exception
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
public TaxonomyRow loadTaxonomyParentByParentId(String parentID) throws Exception {
|
|
|
|
|
|
|
|
TaxonomyRow taxonomyRow = null;
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("loadTaxonomyParentByParentId: "+ parentID);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
try {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
TaxonomyRowPersistence dao = DaoSession.getTaxonomyDAO(getASLSession());
|
|
|
|
CriteriaBuilder queryBuilder = dao.getCriteriaBuilder();
|
|
|
|
CriteriaQuery<Object> cq = queryBuilder.createQuery();
|
|
|
|
Predicate pr1 = queryBuilder.equal(dao.rootFrom(cq).get(TaxonomyRow.PARENT_ID), parentID);
|
|
|
|
cq.where(pr1);
|
|
|
|
|
|
|
|
Iterator<TaxonomyRow> iterator = dao.executeCriteriaQuery(cq).iterator();
|
|
|
|
|
|
|
|
if(iterator!=null && iterator.hasNext()){
|
|
|
|
taxonomyRow = iterator.next();
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("Error in loadTaxonomyParentsByRowId", e);
|
|
|
|
throw new Exception(e);
|
|
|
|
}
|
|
|
|
|
|
|
|
return taxonomyRow;
|
|
|
|
}
|
|
|
|
|
|
|
|
// public void loadParentsListOfTaxonomy(TaxonomyRow taxonomy) throws Exception {
|
|
|
|
//
|
|
|
|
// taxonomy.setParent(setParentListOfTaxonomy(taxonomy.getParent()));
|
|
|
|
// }
|
|
|
|
//
|
|
|
|
// private TaxonomyRow setParentListOfTaxonomy(TaxonomyRow taxonomy) throws Exception{
|
|
|
|
//
|
|
|
|
// if (taxonomy == null) return null;
|
|
|
|
// // DaoSession.getTaxonomyDAO(getASLSession()).refresh(taxonomy.getParent());
|
|
|
|
// taxonomy.setParent(setParentListOfTaxonomy(taxonomy.getParent()));
|
2017-01-11 18:39:11 +01:00
|
|
|
// return taxonomy;
|
2013-03-04 14:38:25 +01:00
|
|
|
// }
|
|
|
|
|
|
|
|
/**
|
|
|
|
* {@inheritDoc}
|
|
|
|
*/
|
|
|
|
@Override
|
|
|
|
public HashMap<String, Integer> getFilterCounterById(GridField field) throws Exception {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Filter Counter for: "+ field);
|
2013-03-04 14:38:25 +01:00
|
|
|
try {
|
2017-01-11 18:39:11 +01:00
|
|
|
FetchingSession<? extends FetchingElement> searchSession = getSearchSession();
|
2013-03-04 14:38:25 +01:00
|
|
|
FieldAggregator<?,?> aggregator = (FieldAggregator<?,?>) searchSession.getAggregator(FieldAggregator.getFieldAggregatorName(field));
|
|
|
|
if (aggregator!=null) return aggregator.getAggregation();
|
|
|
|
else return new HashMap<String, Integer>();
|
|
|
|
} catch(Exception e)
|
|
|
|
{
|
|
|
|
logger.error("Error in getFilterCounterById "+ field.getId()+" "+field.getName(), e);
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getFilterCounterForClassification(java.lang.String)
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
|
|
|
public HashMap<String, ClassificationModel> getFilterCounterForClassification(String rankLabel) throws Exception {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Counter for classification: "+ rankLabel);
|
2013-03-04 14:38:25 +01:00
|
|
|
try {
|
|
|
|
MainTaxonomicRankEnum rank = MainTaxonomicRankEnum.valueOfLabel(rankLabel);
|
|
|
|
|
|
|
|
if (rank!=null) {
|
2017-01-11 18:39:11 +01:00
|
|
|
FetchingSession<? extends FetchingElement> searchSession = getSearchSession();
|
2013-03-04 14:38:25 +01:00
|
|
|
TaxonomyClassificationAggregator<?> classificationAggregator = (TaxonomyClassificationAggregator<?>) searchSession.getAggregator(TaxonomyClassificationAggregator.NAME);
|
|
|
|
return classificationAggregator.getAggregation().get(rank);
|
|
|
|
} else return new HashMap<String, ClassificationModel>();
|
|
|
|
} catch(Exception e)
|
|
|
|
{
|
|
|
|
logger.error("Error in getFilterCounterForClassification "+ rankLabel, e);
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * Reports the current status (size and end-of-fetch flag) of the running search.
 * <p>
 * The reported size depends on the flags: with {@code isActiveFilterOnResult} the
 * filtered-list size is used, with {@code onlySelected} the number of selected rows,
 * otherwise the raw buffer size. When the buffer reaches {@code MAX_BUFFERING_ELEMENTS}
 * the search is stopped (session kept) and the status is returned as max-size/EOF.
 *
 * @param onlySelected count only the selected rows instead of the whole buffer
 * @param isActiveFilterOnResult count the filtered view of the buffer
 * @return the current {@link SearchStatus}
 * @throws SearchServiceException if the buffer cannot be read
 */
@Override
public SearchStatus getSearchStatus(boolean onlySelected, boolean isActiveFilterOnResult) throws SearchServiceException {
    logger.info("getSearchStatus onlySelected: "+onlySelected);

    FetchingSession<? extends FetchingElement> searchSession = getSearchSession();

    SearchStatus status = new SearchStatus();
    int bufferSize = 0;

    // Initial size: filtered view when a result filter is active, raw buffer otherwise.
    try {
        bufferSize = isActiveFilterOnResult?((FilterableFetchingBuffer<? extends FetchingElement>) searchSession.getBuffer()).getFilteredListSize():searchSession.getBufferSize();
    } catch (Exception e) {
        logger.info("getSearchStatus bufferSize error : "+e.getMessage(), e);
        throw new SearchServiceException(e.getMessage());
    }

    logger.info("getSearchStatus bufferSize " + bufferSize);

    //if buffer size is >= the MAXIMUM ELEMENTS. Maximun is reached and the search is stopped
    if(bufferSize>=MAX_BUFFERING_ELEMENTS){
        logger.info("getSearchStatus MAX_BUFFERING_ELEMENTS is reached - stop search");
        // Stops fetching but keeps the session so already-buffered rows remain readable.
        stopSearchWithoutRemove();

        //CALCULATE NEW BUFFER SIZE AFTER FETCHING IS CLOSED
        // Re-read the size: elements may have landed in the buffer while stopping.
        try {
            bufferSize = isActiveFilterOnResult?((FilterableFetchingBuffer<? extends FetchingElement>) searchSession.getBuffer()).getFilteredListSize():searchSession.getBufferSize();
        } catch (SQLException e) {
            logger.info("getSearchStatus bufferSize error : "+e.getMessage(), e);
            throw new SearchServiceException("An error occured on server in searching status, please retry");
        } catch (Exception e) {
            logger.info("getSearchStatus bufferSize error : "+e.getMessage(), e);
            throw new SearchServiceException("An error occured on server in searching status, please retry");
        }

        // Max reached: report EOF and flag the max-size condition to the client.
        status.setResultEOF(true);
        status.setSize(bufferSize);
        status.setIsMaxSize(true);
        return status;
    }

    if (!onlySelected && !isActiveFilterOnResult) {
        // Plain status: EOF follows the fetching session completeness.
        status.setResultEOF(searchSession.isComplete());
        status.setSize(bufferSize);
    } else if(isActiveFilterOnResult){
        // Filtered view is computed over what is already buffered, so it is final (EOF).
        status.setResultEOF(true);
        try {
            status.setSize(((FilterableFetchingBuffer<? extends FetchingElement>) searchSession.getBuffer()).getFilteredListSize());
        } catch (Exception e) {
            logger.error("isActiveFilterOnResult - An error occured in getSearchStatus " +e.getMessage(), e);
            throw new SearchServiceException(e.getMessage());
        }
    }
    else{
        // onlySelected: report the number of currently selected rows.
        status.setResultEOF(true);
        try {
            status.setSize(((SelectableFetchingBuffer<? extends FetchingElement>) searchSession.getBuffer()).getSelected().size());
        } catch (Exception e) {
            logger.error("An error occured in getSearchStatus " +e.getMessage(), e);
            throw new SearchServiceException(e.getMessage());
        }
    }

    logger.info("getSearchStatus return status size: "+status.getSize() +" EOF: " + status.isResultEOF());
    return status;
}
|
|
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
* {@inheritDoc}
|
|
|
|
*/
|
|
|
|
public void stopSearchWithoutRemove() throws SearchServiceException {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("stopSearch without Remove");
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
ASLSession session = getASLSession();
|
|
|
|
//we safely get the session if exists
|
|
|
|
FetchingSession<? extends FetchingElement> searchSession = SessionUtil.getCurrentSearchSession(session);
|
|
|
|
|
|
|
|
if (searchSession != null) {
|
|
|
|
try {
|
|
|
|
searchSession.close();
|
|
|
|
} catch (IOException e) {
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
|
|
|
} else logger.warn("Search session not found");
|
|
|
|
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
|
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
/**
|
2017-02-16 17:15:45 +01:00
|
|
|
* Stop search.
|
|
|
|
*
|
|
|
|
* @throws SearchServiceException the search service exception
|
2013-03-04 14:38:25 +01:00
|
|
|
*/
|
2017-02-16 17:15:45 +01:00
|
|
|
private void stopSearch() throws SearchServiceException {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("stopSearch");
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
ASLSession session = getASLSession();
|
|
|
|
FetchingSession<? extends FetchingElement> searchSession = SessionUtil.getCurrentSearchSession(session);
|
|
|
|
|
|
|
|
if (searchSession != null) {
|
|
|
|
try {
|
|
|
|
searchSession.close();
|
|
|
|
} catch (IOException e) {
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
|
|
|
SessionUtil.setCurrentSearchSession(session, null);
|
|
|
|
} else logger.warn("Search session not found");
|
|
|
|
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
|
|
|
|
|
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#userStopSearch()
|
|
|
|
*/
|
|
|
|
public void userStopSearch() throws SearchServiceException {
|
|
|
|
ASLSession session = getASLSession();
|
|
|
|
FetchingSession<? extends FetchingElement> searchSession = SessionUtil.getCurrentSearchSession(session);
|
|
|
|
|
|
|
|
if (searchSession != null) {
|
|
|
|
try {
|
|
|
|
searchSession.close();
|
|
|
|
} catch (IOException e) {
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
|
|
|
} else logger.warn("Search session not found");
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
/**
|
|
|
|
* {@inheritDoc}
|
|
|
|
*/
|
|
|
|
@Override
|
|
|
|
public void updateRowSelection(int rowId, boolean selection) throws SearchServiceException {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("updateRowSelection rowId: "+rowId+" selection: "+selection);
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
try {
|
|
|
|
ASLSession session = getASLSession();
|
|
|
|
FetchingSession<? extends FetchingElement> searchSession = SessionUtil.getCurrentSearchSession(session);
|
|
|
|
SelectableFetchingBuffer<? extends FetchingElement> buffer = (SelectableFetchingBuffer<? extends FetchingElement>) searchSession.getBuffer();
|
|
|
|
buffer.updateSelection(rowId, selection);
|
|
|
|
|
|
|
|
} catch(Exception e){
|
|
|
|
logger.error("Error in updateRowSelection rowId: "+rowId+" selection: "+selection, e);
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
* {@inheritDoc}
|
|
|
|
*/
|
|
|
|
@Override
|
|
|
|
public Integer updateRowSelections(boolean selection, ResultFilter activeFiltersObject) throws SearchServiceException {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("updateRowSelections selection: "+selection);
|
2013-03-04 14:38:25 +01:00
|
|
|
List<? extends FetchingElement> data;
|
|
|
|
FetchingSession<? extends FetchingElement> searchSession = getSearchSession();
|
|
|
|
int size = 0;
|
|
|
|
try {
|
|
|
|
|
2017-01-11 18:39:11 +01:00
|
|
|
if (activeFiltersObject == null || !activeFiltersObject.isActiveFilters()) {
|
2013-03-04 14:38:25 +01:00
|
|
|
SelectableFetchingBuffer<? extends FetchingElement> buffer = (SelectableFetchingBuffer<? extends FetchingElement>) searchSession.getBuffer();
|
|
|
|
buffer.updateAllSelection(selection);
|
|
|
|
size = buffer.size();
|
|
|
|
} else {
|
|
|
|
@SuppressWarnings("unchecked")
|
|
|
|
FilterableFetchingBuffer<ResultRow> buffer = (FilterableFetchingBuffer<ResultRow>) searchSession.getBuffer();
|
|
|
|
data = buffer.getFilteredList(activeFiltersObject);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
if(data!=null){
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-05-20 18:50:24 +02:00
|
|
|
List<String> ids = new ArrayList<String>();
|
|
|
|
for (FetchingElement fetchingElement : data){
|
|
|
|
ids.add(fetchingElement.getId()+"");
|
|
|
|
}
|
|
|
|
SelectableFetchingBuffer<? extends FetchingElement> bufferCompleted = (SelectableFetchingBuffer<? extends FetchingElement>) searchSession.getBuffer();
|
|
|
|
bufferCompleted.updateAllSelectionByIds(selection, ids);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
size = data.size();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("An error occurred in updateRowSelections", e);
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
return Integer.valueOf(size);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
* {@inheritDoc}
|
2017-01-11 18:39:11 +01:00
|
|
|
* @throws SearchServiceException
|
2013-03-04 14:38:25 +01:00
|
|
|
*/
|
|
|
|
@Override
|
|
|
|
public int countOfSelectedRow() throws SearchServiceException{
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("countOfSelectedRow()");
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
FetchingSession<? extends FetchingElement> searchSession = getSearchSession();
|
|
|
|
|
|
|
|
try {
|
|
|
|
|
|
|
|
SelectableFetchingBuffer<? extends FetchingElement> buffer = (SelectableFetchingBuffer<? extends FetchingElement>) searchSession.getBuffer();
|
|
|
|
return buffer.sizeSelected();
|
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("An error occurred in updateRowSelections", e);
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * Starts fetching the occurrence points of the currently selected result rows.
 * <p>
 * Side effects: clears ALL rows previously stored in the occurrence DAO and
 * replaces the current occurrence fetching session with a new one feeding from
 * the species service. Rows with a null/empty occurrence key are skipped.
 *
 * @return the total declared occurrence count summed over the selected rows
 *         (not the number actually fetched)
 * @throws SearchServiceException on any failure while reading the selection or
 *         starting the fetch
 */
@SuppressWarnings("unchecked")
@Override
public int retrieveOccurencesFromSelection() throws SearchServiceException {
    logger.info("retrieveOccurencesFromSelection()");

    int count = 0;
    FetchingSession<ResultRow> searchSession = (FetchingSession<ResultRow>) getSearchSession();

    try {
        Collection<ResultRow> selectedRows = ((SelectableFetchingBuffer<ResultRow>) searchSession.getBuffer()).getSelected();
        logger.info("found "+selectedRows.size()+" selected rows");

        List<String> keys = new ArrayList<String>(selectedRows.size());

        for (ResultRow row:selectedRows) {
            //ADD KEY ONLY IF IS NOT EQUAL NULL AND SIZE IS > 0
            if(row.getOccurencesKey()!=null && row.getOccurencesKey().length()>0){
                keys.add(row.getOccurencesKey());
                count += row.getOccurencesCount();
            }
        }

        logger.info("found "+count+" occurrence points");

        //TODO remove
        if (logger.isInfoEnabled()) logger.info("selected keys: "+keys);

        SpeciesService taxonomyService = getSpeciesService();

        // Stream of raw occurrence points, converted lazily to local Occurrence objects.
        CloseableIterator<OccurrencePoint> source = taxonomyService.getOccurrencesByKeys(keys);
        CloseableIterator<Occurrence> input = IteratorChainBuilder.buildOccurrenceConverter(source);

        //DELETE ALL ROW INTO DAO OCCURENCES
        // Wipe previous occurrence rows BEFORE the new fetching session starts filling the DAO.
        OccurrenceRowPersistence occurrencesDao = DaoSession.getOccurrenceDAO(getASLSession());
        occurrencesDao.removeAll();
        FetchingSessionUtil.createOccurrenceFetchingSession(input, getASLSession());

    } catch (Exception e) {
        logger.error("An error occurred getting the number of occurrence points", e);
        throw new SearchServiceException(e.getMessage());
    }

    return count;
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getOccurrencesBatch(int, int)
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
|
|
|
public OccurrenceBatch getOccurrencesBatch(int start, int limit) throws SearchServiceException {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("getOccurrencesBatch: start: "+start+" limit: "+limit);
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
FetchingSession<Occurrence> occurrenceSession = getOccurrenceSession();
|
|
|
|
List<Occurrence> buffer;
|
|
|
|
OccurrenceBatch result = null;
|
|
|
|
|
|
|
|
try {
|
|
|
|
buffer = occurrenceSession.getBuffer().getList();
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Fetching data from occurrence session buffer, size: "+buffer.size());
|
2013-03-04 14:38:25 +01:00
|
|
|
int end = Math.min(start+limit, buffer.size());
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("chunk bounds[start: "+start+" end: " + end+"]");
|
2013-03-04 14:38:25 +01:00
|
|
|
ArrayList<Occurrence> data = new ArrayList<Occurrence>(buffer.subList(start, end));
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("returning "+data.size()+" elements");
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
//DEBUG
|
|
|
|
// for (Occurrence occurrence : data) {
|
2015-07-22 17:19:38 +02:00
|
|
|
// logger.info("citation :" + occurrence.getCitation()+
|
2013-03-04 14:38:25 +01:00
|
|
|
// " catalog number: "+occurrence.getCatalogueNumber()+
|
|
|
|
// " country: " +occurrence.getCountry()+
|
|
|
|
// " family: "+occurrence.getFamily()+
|
|
|
|
// " id: "+ occurrence.getId() +
|
|
|
|
// " institute code: " +occurrence.getInstitutionCode() +
|
|
|
|
// " kingdom: " + occurrence.getKingdom()+
|
|
|
|
// " scientific name: "+ occurrence.getScientificName()+
|
|
|
|
// " basis of record: "+occurrence.getBasisOfRecord());
|
|
|
|
//
|
|
|
|
// }
|
|
|
|
result = new OccurrenceBatch(data);
|
|
|
|
result.setResultEOF(occurrenceSession.isComplete());
|
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("An error occurred getting the occurrence points", e);
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
|
|
|
|
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getCountOfOccurrencesBatch()
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
|
|
|
public OccurrencesStatus getCountOfOccurrencesBatch() throws SearchServiceException {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("get CountOf Occurrences Batch");
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
FetchingSession<Occurrence> occurrenceSession = getOccurrenceSession();
|
|
|
|
List<Occurrence> buffer;
|
|
|
|
|
|
|
|
try {
|
|
|
|
buffer = occurrenceSession.getBuffer().getList();
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Fetching data from occurrence session buffer, size: "+buffer.size());
|
2013-03-04 14:38:25 +01:00
|
|
|
return new OccurrencesStatus(occurrenceSession.isComplete(), buffer.size());
|
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("An error occurred getting the occurrence points", e);
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* {@inheritDoc}
|
|
|
|
*/
|
|
|
|
@Override
|
|
|
|
public void stopRetrievingOccurrences() throws SearchServiceException {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("stop Retrieving Occurrences ");
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
ASLSession session = getASLSession();
|
|
|
|
FetchingSession<Occurrence> occurrenceSearchSession = SessionUtil.getCurrentOccurrenceSession(session);
|
|
|
|
|
|
|
|
if (occurrenceSearchSession != null) {
|
|
|
|
try {
|
|
|
|
occurrenceSearchSession.close();
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Occurrence session removed");
|
2013-03-04 14:38:25 +01:00
|
|
|
} catch (IOException e) {
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
|
|
|
SessionUtil.setCurrentOccurrenceSession(session, null);
|
|
|
|
} else logger.warn("Occurrence session not found");
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* {@inheritDoc}
|
|
|
|
*/
|
|
|
|
@Override
|
2017-02-10 15:18:57 +01:00
|
|
|
public JobGisLayerModel createGisLayerJobFromSelectedOccurrenceKeys(String layerTitle, String layerDescr, long totalPoints) throws Exception {
|
2017-02-09 18:23:28 +01:00
|
|
|
try {
|
|
|
|
|
|
|
|
List<String> occurrenceKeys = getSelectedOccurrenceKeys();
|
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
|
|
|
String author = getASLSession().getUsername();
|
|
|
|
String credits = "";
|
|
|
|
GisLayerJobPersistence gisLayerJob = DaoSession.getGisLayersJobDAO(getASLSession());
|
2017-02-10 18:04:01 +01:00
|
|
|
logger.trace("GisLayerJobPersistence found");
|
2017-02-10 15:18:57 +01:00
|
|
|
return GisLayerJobUtil.createGisLayerJobByOccurrenceKeys(occurrenceKeys, taxonomyService, layerTitle, layerDescr, author, credits, totalPoints, gisLayerJob);
|
2017-02-09 18:23:28 +01:00
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("An error occurred creating the map", e);
|
|
|
|
throw new Exception(e.getMessage());
|
|
|
|
}
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
|
2017-02-09 18:23:28 +01:00
|
|
|
/*Iterator<Occurrence> iteratorOccurrences = getIteratorSelectedOccurrenceIds();
|
2013-03-04 14:38:25 +01:00
|
|
|
IteratorPointInfo streamKey = new IteratorPointInfo(iteratorOccurrences);
|
|
|
|
|
|
|
|
try {
|
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
|
|
|
String groupName = taxonomyService.generateMapFromOccurrencePoints(streamKey);
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("generated groupName: "+groupName);
|
2013-03-04 14:38:25 +01:00
|
|
|
return groupName;
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("An error occurred creating the map", e);
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
2017-02-09 18:23:28 +01:00
|
|
|
}*/
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
//USED FOR DEBUG
|
2017-02-16 17:15:45 +01:00
|
|
|
/**
|
|
|
|
* Prints the id.
|
|
|
|
*
|
|
|
|
* @param listId the list id
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
protected void printId(List<String> listId){
|
|
|
|
|
|
|
|
for (String id : listId) {
|
|
|
|
System.out.println("Found id : " +id);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/**
|
|
|
|
* Gets the selected occurrence ids.
|
|
|
|
*
|
|
|
|
* @return the selected occurrence ids
|
|
|
|
* @throws SearchServiceException the search service exception
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
protected List<String> getSelectedOccurrenceIds() throws SearchServiceException{
|
|
|
|
|
|
|
|
FetchingSession<Occurrence> occurrenceSession = getOccurrenceSession();
|
|
|
|
List<Occurrence> buffer;
|
|
|
|
List<String> listId = new ArrayList<String>();
|
|
|
|
|
|
|
|
try {
|
|
|
|
|
|
|
|
buffer = occurrenceSession.getBuffer().getList();
|
|
|
|
|
|
|
|
for (Occurrence occurrence : buffer) {
|
|
|
|
listId.add(occurrence.getServiceId());
|
|
|
|
}
|
|
|
|
|
|
|
|
return listId;
|
|
|
|
} catch (Exception e) {
|
|
|
|
|
|
|
|
logger.error("An error occurred on getSelectedOccurrenceIds", e);
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/**
|
|
|
|
* Gets the iterator selected occurrence ids.
|
|
|
|
*
|
|
|
|
* @return the iterator selected occurrence ids
|
|
|
|
* @throws SearchServiceException the search service exception
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
protected Iterator<Occurrence> getIteratorSelectedOccurrenceIds() throws SearchServiceException{
|
|
|
|
|
|
|
|
FetchingSession<Occurrence> occurrenceSession = getOccurrenceSession();
|
|
|
|
try {
|
|
|
|
return occurrenceSession.getBuffer().getList().iterator();
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("An error occurred on getIteratorSelectedOccurrenceIds", e);
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#saveSelectedOccurrencePoints(java.lang.String, java.lang.String, org.gcube.portlets.user.speciesdiscovery.shared.SaveFileFormat, org.gcube.portlets.user.speciesdiscovery.shared.OccurrencesSaveEnum)
 */
@Override
@Deprecated
public void saveSelectedOccurrencePoints(String destinationFolderId, String fileName, SaveFileFormat fileFormat, OccurrencesSaveEnum typeCSV) throws SearchServiceException {
    // Deprecated no-op: only logs the request. Kept solely to satisfy the RPC
    // interface; no occurrence points are saved by this implementation.
    logger.info("saveSelectedOccurrencePoints destinationFolderId: "+destinationFolderId+" fileName: "+fileName+" fileFormat: "+fileFormat+" typeCSV: "+typeCSV);
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#saveSelectedTaxonomyPoints(java.lang.String, java.lang.String, org.gcube.portlets.user.speciesdiscovery.shared.SaveFileFormat)
 */
@Override
public void saveSelectedTaxonomyPoints(String destinationFolderId, String fileName, SaveFileFormat fileFormat) throws SearchServiceException {
    //TODO OLD CALL
    // Intentionally empty: the old save-taxonomy-points call is no longer
    // implemented here; kept only to satisfy the RPC interface.
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/**
|
|
|
|
* Gets the selected occurrence keys.
|
|
|
|
*
|
|
|
|
* @return the selected occurrence keys
|
|
|
|
* @throws SearchServiceException the search service exception
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@SuppressWarnings("unchecked")
|
|
|
|
protected List<String> getSelectedOccurrenceKeys() throws SearchServiceException
|
|
|
|
{
|
|
|
|
FetchingSession<ResultRow> searchSession = (FetchingSession<ResultRow>) getSearchSession();
|
|
|
|
Collection<ResultRow> selectedRows;
|
|
|
|
List<String> keys = null;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
try {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
selectedRows = ((SelectableFetchingBuffer<ResultRow>) searchSession.getBuffer()).getSelected();
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("found "+selectedRows.size()+" selected rows");
|
2013-03-04 14:38:25 +01:00
|
|
|
keys = new ArrayList<String>(selectedRows.size());
|
|
|
|
int count = 0;
|
|
|
|
|
|
|
|
for (ResultRow row:selectedRows) {
|
|
|
|
keys.add(row.getOccurencesKey());
|
|
|
|
count += row.getOccurencesCount();
|
|
|
|
}
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("found "+count+" occurrence points and "+keys.size()+" keys");
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
//TODO remove
|
2015-07-22 17:19:38 +02:00
|
|
|
if (logger.isInfoEnabled()) logger.info("selected keys: "+keys);
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("An error occured in getSelectedOccurrenceKeys" + e.getMessage());
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
|
|
|
|
|
|
|
return keys;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/**
|
|
|
|
* Gets the selected result row id.
|
|
|
|
*
|
|
|
|
* @return the selected result row id
|
|
|
|
* @throws SearchServiceException the search service exception
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@SuppressWarnings("unchecked")
|
|
|
|
protected List<String> getSelectedResultRowId() throws SearchServiceException
|
|
|
|
{
|
|
|
|
FetchingSession<ResultRow> searchSession = (FetchingSession<ResultRow>) getSearchSession();
|
|
|
|
Collection<ResultRow> selectedRows;
|
|
|
|
List<String> listId = null;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
try {
|
|
|
|
selectedRows = ((SelectableFetchingBuffer<ResultRow>) searchSession.getBuffer()).getSelected();
|
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("found "+selectedRows.size()+" selected rows");
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
listId = new ArrayList<String>(selectedRows.size());
|
|
|
|
|
|
|
|
for (ResultRow row:selectedRows)
|
|
|
|
listId.add(row.getServiceId());
|
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("found "+listId.size()+" ids");
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
//TODO remove
|
2015-07-22 17:19:38 +02:00
|
|
|
if (logger.isTraceEnabled()) logger.info("selected ids: "+listId);
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("An error occured in getSelectedOccurrenceKeys" + e.getMessage());
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
|
|
|
|
|
|
|
return listId;
|
|
|
|
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/**
|
|
|
|
* Gets the selected taxonomy id and data source.
|
|
|
|
*
|
|
|
|
* @return the selected taxonomy id and data source
|
|
|
|
* @throws SearchServiceException the search service exception
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@SuppressWarnings("unchecked")
|
2013-07-18 18:32:28 +02:00
|
|
|
protected Map<String, String> getSelectedTaxonomyIdAndDataSource() throws SearchServiceException
|
2013-03-04 14:38:25 +01:00
|
|
|
{
|
|
|
|
FetchingSession<TaxonomyRow> searchSession = (FetchingSession<TaxonomyRow>) getSearchSession();
|
2017-01-11 18:39:11 +01:00
|
|
|
HashMap<String, String> hashIdTaxonDataSource = null;
|
2013-03-04 14:38:25 +01:00
|
|
|
Collection<TaxonomyRow> selectedRows;
|
|
|
|
|
|
|
|
try {
|
|
|
|
selectedRows = ((SelectableFetchingBuffer<TaxonomyRow>) searchSession.getBuffer()).getSelected();
|
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("found "+selectedRows.size()+" selected rows");
|
2013-07-18 18:32:28 +02:00
|
|
|
hashIdTaxonDataSource = new HashMap<String, String>(selectedRows.size());
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
for (TaxonomyRow row:selectedRows){
|
2013-07-18 18:32:28 +02:00
|
|
|
hashIdTaxonDataSource.put(row.getServiceId(), row.getDataProviderName());
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("add this id: "+row.getServiceId()+" to list");
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("found "+hashIdTaxonDataSource.size()+" id");
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
//TODO remove
|
2015-07-22 17:19:38 +02:00
|
|
|
if (logger.isInfoEnabled()) logger.info("selected ids: "+hashIdTaxonDataSource);
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("An error occured in getSelectedOccurrenceKeys" + e.getMessage());
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
|
|
|
|
2013-07-18 18:32:28 +02:00
|
|
|
return hashIdTaxonDataSource;
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/**
|
|
|
|
* Generate csv file.
|
|
|
|
*
|
|
|
|
* @param ids the ids
|
|
|
|
* @param csvType the csv type
|
|
|
|
* @return the file
|
|
|
|
* @throws Exception the exception
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
protected File generateCSVFile(List<String> ids, OccurrencesSaveEnum csvType) throws Exception
|
|
|
|
{
|
|
|
|
File csvFile = File.createTempFile("test", ".csv");
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("outputfile "+csvFile.getAbsolutePath());
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
FileWriter fileWriter = new FileWriter(csvFile);
|
|
|
|
CSVWriter writer = new CSVWriter(fileWriter);
|
|
|
|
|
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
|
|
|
|
|
|
|
CloseableIterator<OccurrencePoint> source = taxonomyService.getOccurrencesByIds(ids);
|
|
|
|
CloseableIterator<Occurrence> result = IteratorChainBuilder.buildOccurrenceConverter(source);
|
|
|
|
|
|
|
|
CSVGenerator<Occurrence> csvGenerator = null;
|
|
|
|
|
|
|
|
switch (csvType) {
|
|
|
|
|
|
|
|
case OPENMODELLER: {
|
|
|
|
|
|
|
|
OccurenceCSVConverterOpenModeller converterOpenModeller = new OccurenceCSVConverterOpenModeller();
|
|
|
|
csvGenerator = new CSVGenerator<Occurrence>(result, converterOpenModeller, OccurenceCSVConverterOpenModeller.HEADER);
|
|
|
|
|
|
|
|
}break;
|
|
|
|
|
|
|
|
case STANDARD:{
|
|
|
|
|
|
|
|
OccurenceCSVConverter converter = new OccurenceCSVConverter();
|
|
|
|
csvGenerator = new CSVGenerator<Occurrence>(result, converter, OccurenceCSVConverter.HEADER);
|
|
|
|
|
|
|
|
}break;
|
|
|
|
}
|
|
|
|
|
|
|
|
while(csvGenerator.hasNext()) writer.writeLine(csvGenerator.next());
|
|
|
|
|
|
|
|
fileWriter.close();
|
|
|
|
return csvFile;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#loadDataSourceList()
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
2013-03-04 16:47:57 +01:00
|
|
|
public List<DataSourceModel> loadDataSourceList() throws SearchServiceException {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("loadDataSourceList... ");
|
2013-03-04 14:38:25 +01:00
|
|
|
List<DataSourceModel> listDS = null;
|
|
|
|
|
|
|
|
try {
|
|
|
|
DaoSession.initSessionDaoObjects(getASLSession()); //FIXME temporary?
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("DAOs OK... ");
|
2013-03-04 14:38:25 +01:00
|
|
|
System.out.println("DAOs OK");
|
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
|
|
|
System.out.println("Species Services OK");
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Species Services OK... ");
|
2013-03-04 14:38:25 +01:00
|
|
|
listDS = taxonomyService.getPlugins();
|
|
|
|
System.out.println("Plugins OK");
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Plugins OK");
|
|
|
|
logger.info("Return list plugin - size: " +listDS.size());
|
2013-03-04 14:38:25 +01:00
|
|
|
|
2013-03-04 16:47:57 +01:00
|
|
|
} catch (DatabaseServiceException e) {
|
|
|
|
throw new SearchServiceException("Sorry, an error has occurred on the server while "+e.getMessage());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
} catch (Exception e) {
|
2013-03-04 16:47:57 +01:00
|
|
|
throw new SearchServiceException("Sorry, an error has occurred on the server while "+e.getMessage());
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
return listDS;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#loadListCommonNameByRowId(java.lang.String)
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
2015-07-22 17:19:38 +02:00
|
|
|
@Deprecated
|
2013-03-04 14:38:25 +01:00
|
|
|
public ArrayList<CommonName> loadListCommonNameByRowId(String resultRowId) throws Exception {
|
|
|
|
|
|
|
|
ArrayList<CommonName> listCommonName = new ArrayList<CommonName>();
|
|
|
|
|
|
|
|
return listCommonName;
|
|
|
|
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getParentsList(org.gcube.portlets.user.speciesdiscovery.shared.Taxon)
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
@Deprecated
public List<Taxon> getParentsList(Taxon taxon) throws Exception {
	// Deprecated stub: always returns null — callers must null-check.
	return null;
}
|
|
|
|
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#loadListChildrenByParentId(java.lang.String)
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
2013-10-24 12:52:34 +02:00
|
|
|
public ArrayList<LightTaxonomyRow> loadListChildrenByParentId(String parentId) throws Exception {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Load List Children By ParentId: " + parentId);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
ArrayList<LightTaxonomyRow> listLightTaxonomyRow = new ArrayList<LightTaxonomyRow>();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
if(parentId==null || parentId.isEmpty()){
|
|
|
|
logger.warn("parentId is null or empty ");
|
|
|
|
return listLightTaxonomyRow;
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
try {
|
|
|
|
|
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
|
|
|
CloseableIterator<TaxonomyItem> streamIterator = taxonomyService.getTaxonChildrenByParentId(parentId);
|
2013-10-24 12:52:34 +02:00
|
|
|
ASLSession session = getASLSession();
|
2017-01-11 18:39:11 +01:00
|
|
|
TaxonomyItemConverter converter = new TaxonomyItemConverter(getASLSession());
|
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
Map<String, TaxonomyRow> mapChildren = SessionUtil.getHashMapChildrenTaxonomyCache(session);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
if(mapChildren==null){
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Cache taxa children doesn't exists into session, creating..");
|
2013-10-24 12:52:34 +02:00
|
|
|
mapChildren = new HashMap<String, TaxonomyRow>();
|
|
|
|
}
|
2015-07-22 17:19:38 +02:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
while (streamIterator.hasNext()) {
|
|
|
|
TaxonomyItem tax = streamIterator.next();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
TaxonomyRow taxonomy = converter.convert(tax);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
if(mapChildren.get(taxonomy.getServiceId())==null){
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Taxonomy with service id: "+taxonomy.getServiceId()+" doesn't exists into Map Children, adding..");
|
2013-10-24 12:52:34 +02:00
|
|
|
mapChildren.put(taxonomy.getServiceId(),taxonomy);
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
LightTaxonomyRow lightTaxRow = ClusterStructuresForTaxonomyRow.convetTaxonomyRowToLigthTaxonomyRow(taxonomy);
|
|
|
|
listLightTaxonomyRow.add(lightTaxRow);
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
SessionUtil.setHashMapChildrenTaxonomyCache(session, mapChildren);
|
2013-03-04 14:38:25 +01:00
|
|
|
streamIterator.close();
|
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("Error on loadListChildByParentId ", e);
|
|
|
|
throw new Exception("Error on loadListChildByParentId", e);
|
|
|
|
}
|
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Return list children By ParentId "+parentId+"- with size: "+ listLightTaxonomyRow.size());
|
2013-03-04 14:38:25 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
return listLightTaxonomyRow;
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getListTaxonomyJobs()
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
2017-03-20 14:43:11 +01:00
|
|
|
public List<JobTaxonomyModel> getListTaxonomyJobs() throws SessionExpired, Exception {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("getListTaxonomyJobs... ");
|
2013-03-04 16:47:57 +01:00
|
|
|
List<JobTaxonomyModel> listJobs = new ArrayList<JobTaxonomyModel>();
|
|
|
|
|
|
|
|
try{
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 16:47:57 +01:00
|
|
|
TaxonomyJobPersistence taxonomyJobDao = DaoSession.getTaxonomyJobDAO(getASLSession());
|
|
|
|
Iterator<TaxonomyJob> iterator = taxonomyJobDao.getList().iterator();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 16:47:57 +01:00
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 16:47:57 +01:00
|
|
|
while(iterator!=null && iterator.hasNext()){
|
|
|
|
TaxonomyJob job = iterator.next();
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("get taxonomy job "+job.getId()+ " from service");
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 16:47:57 +01:00
|
|
|
try{
|
2017-01-11 18:39:11 +01:00
|
|
|
CompleteJobStatus statusResponse = taxonomyService.getTaxonomyJobById(job.getId());
|
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
if(statusResponse!=null){
|
|
|
|
logger.info("statusResponse is not null..." + job.getId());
|
|
|
|
JobTaxonomyModel jobSpeciesModel = TaxonomyJobUtil.convertJob(job, statusResponse, taxonomyJobDao);
|
|
|
|
logger.info("added list jobTaxonomyId: "+job.getTaxonomyId() + " status "+job.getState());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
if(jobSpeciesModel!=null)
|
|
|
|
listJobs.add(jobSpeciesModel);
|
|
|
|
}
|
|
|
|
else{
|
|
|
|
logger.info("TaxonomyJob statusResponse is null..." + job.getId());
|
|
|
|
TaxonomyJobUtil.deleteTaxonomyJobById(job.getId(),taxonomyJobDao);
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 16:47:57 +01:00
|
|
|
}catch (Exception e) {
|
|
|
|
e.printStackTrace();
|
|
|
|
logger.error("Error on getListSpeciesJobs ", e);
|
|
|
|
throw new Exception("Error on getListSpeciesJobs", e);
|
|
|
|
}
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 16:47:57 +01:00
|
|
|
}catch (Exception e) {
|
|
|
|
logger.error("Error on getListSpeciesJobs ", e);
|
|
|
|
throw new Exception("Error on getListSpeciesJobs", e);
|
|
|
|
}
|
2013-03-04 14:38:25 +01:00
|
|
|
|
|
|
|
return listJobs;
|
|
|
|
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
|
|
|
|
2013-05-20 18:50:24 +02:00
|
|
|
/**
|
2017-02-16 17:15:45 +01:00
|
|
|
* Creates the taxonomy job by children.
|
2017-01-11 18:39:11 +01:00
|
|
|
*
|
2017-02-16 17:15:45 +01:00
|
|
|
* @param taxonomyServiceId the taxonomy service id
|
|
|
|
* @param taxonomyName the taxonomy name
|
|
|
|
* @param taxonomyRank the taxonomy rank
|
|
|
|
* @param dataSourceName the data source name
|
|
|
|
* @return the job taxonomy model
|
|
|
|
* @throws Exception the exception
|
2013-05-20 18:50:24 +02:00
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
2013-05-20 18:50:24 +02:00
|
|
|
public JobTaxonomyModel createTaxonomyJobByChildren(String taxonomyServiceId, String taxonomyName, String taxonomyRank, String dataSourceName) throws Exception {
|
|
|
|
//FIXED 20/05/2013
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Create job for taxonomy id: " + taxonomyServiceId);
|
2013-03-04 14:38:25 +01:00
|
|
|
// System.out.println("Create job for taxonomy id: " + taxonomy.getServiceId());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
JobTaxonomyModel jobSpeciesModel = null;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
TaxonomyJobPersistence taxonomyJobDao = DaoSession.getTaxonomyJobDAO(getASLSession());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-05-20 18:50:24 +02:00
|
|
|
String speciesJobId = taxonomyService.createTaxonomyJobForDWCAByChildren(taxonomyServiceId);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 11:43:25 +02:00
|
|
|
long submitTime = Calendar.getInstance().getTimeInMillis();
|
2013-03-04 14:38:25 +01:00
|
|
|
|
2013-07-18 18:32:28 +02:00
|
|
|
String name = NormalizeString.lowerCaseUpFirstChar(taxonomyName) + " group";
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
//STORE INTO DAO
|
2013-07-18 18:32:28 +02:00
|
|
|
TaxonomyJob speciesJob = new TaxonomyJob(speciesJobId, DownloadState.PENDING.toString(), name, taxonomyName, dataSourceName, taxonomyRank, 0, submitTime, 0, taxonomyServiceId);
|
2013-03-04 14:38:25 +01:00
|
|
|
taxonomyJobDao.insert(speciesJob);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-05-20 18:50:24 +02:00
|
|
|
jobSpeciesModel = new JobTaxonomyModel(speciesJob.getId(), speciesJob.getDescriptiveName(), DownloadState.PENDING, null, taxonomyName, dataSourceName, taxonomyRank);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 11:43:25 +02:00
|
|
|
Date submit = DateUtil.millisecondsToDate(speciesJob.getSubmitTime());
|
2013-03-04 14:38:25 +01:00
|
|
|
// jobSpeciesModel.setStartTime(DateUtil.dateToDateFormatString(start));
|
2013-07-16 11:43:25 +02:00
|
|
|
jobSpeciesModel.setSubmitTime(submit);
|
2013-03-04 14:38:25 +01:00
|
|
|
jobSpeciesModel.setEndTime(null);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
return jobSpeciesModel;
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-18 18:32:28 +02:00
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#createTaxonomyJobByIds(java.lang.String, java.util.List)
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
|
|
|
public JobTaxonomyModel createTaxonomyJobByIds(String search, List<DataSourceModel> dataSources) throws Exception {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Create job ForDWCAByIds for: " + search);
|
2013-03-04 14:38:25 +01:00
|
|
|
|
2013-07-18 18:32:28 +02:00
|
|
|
Map<String, String> hashIdDs = getSelectedTaxonomyIdAndDataSource();
|
2013-03-04 14:38:25 +01:00
|
|
|
JobTaxonomyModel jobSpeciesModel = null;
|
|
|
|
TaxonomyJobPersistence taxonomyJobDao = DaoSession.getTaxonomyJobDAO(getASLSession());
|
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
2013-07-18 18:32:28 +02:00
|
|
|
List<String> listId = new ArrayList<String>(hashIdDs.keySet());
|
2013-03-04 14:38:25 +01:00
|
|
|
String speciesJobId = taxonomyService.createTaxonomyJobForDWCAByIds(listId);
|
2013-07-16 11:43:25 +02:00
|
|
|
long submitTime = Calendar.getInstance().getTimeInMillis();
|
2013-03-04 14:38:25 +01:00
|
|
|
|
2013-07-18 18:32:28 +02:00
|
|
|
String name = NormalizeString.lowerCaseUpFirstChar(search) + " - ";
|
|
|
|
name += listId.size() + " ";
|
|
|
|
name += listId.size()>1?"taxa":"taxon";
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-18 18:32:28 +02:00
|
|
|
String dataSourceName = "";
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-18 18:32:28 +02:00
|
|
|
for (String taxonId : listId) {
|
|
|
|
if(!dataSourceName.contains(hashIdDs.get(taxonId))) //remove duplicate
|
|
|
|
dataSourceName+=hashIdDs.get(taxonId) + ", ";
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-18 18:32:28 +02:00
|
|
|
if(dataSourceName.endsWith(", "))
|
|
|
|
dataSourceName = dataSourceName.substring(0, dataSourceName.length()-2);
|
2015-07-22 17:19:38 +02:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
//STORE INTO DAO
|
2013-07-16 11:43:25 +02:00
|
|
|
TaxonomyJob speciesJob = new TaxonomyJob(speciesJobId, DownloadState.PENDING.toString(), name, name, dataSourceName, "", 0, submitTime, 0, speciesJobId);
|
2013-03-04 14:38:25 +01:00
|
|
|
taxonomyJobDao.insert(speciesJob);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
jobSpeciesModel = new JobTaxonomyModel(speciesJob.getId(), speciesJob.getDescriptiveName(), DownloadState.PENDING, null, name, dataSourceName, "");
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 11:43:25 +02:00
|
|
|
Date submit = DateUtil.millisecondsToDate(speciesJob.getSubmitTime());
|
2013-03-04 14:38:25 +01:00
|
|
|
// jobSpeciesModel.setStartTime(DateUtil.dateToDateFormatString(start));
|
2013-07-16 11:43:25 +02:00
|
|
|
jobSpeciesModel.setSubmitTime(submit);
|
2013-03-04 14:38:25 +01:00
|
|
|
jobSpeciesModel.setEndTime(null);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
return jobSpeciesModel;
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#resubmitTaxonomyJob(java.lang.String)
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
|
|
|
public JobTaxonomyModel resubmitTaxonomyJob(String jobIdentifier) throws Exception {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Resubmit taxonomy job for id: " + jobIdentifier);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
JobTaxonomyModel jobSpeciesModel = null;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
//Get Dao with list taxonomy jobs
|
|
|
|
TaxonomyJobPersistence taxonomyJobDao = DaoSession.getTaxonomyJobDAO(getASLSession());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
CriteriaBuilder queryBuilder = taxonomyJobDao.getCriteriaBuilder();
|
|
|
|
CriteriaQuery<Object> cq = queryBuilder.createQuery();
|
|
|
|
Predicate pr1 = queryBuilder.equal(taxonomyJobDao.rootFrom(cq).get(TaxonomyJob.ID_FIELD), jobIdentifier);
|
|
|
|
cq.where(pr1);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
Iterator<TaxonomyJob> iterator = taxonomyJobDao.executeCriteriaQuery(cq).iterator();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
TaxonomyJob taxonomy;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
if(iterator.hasNext())
|
|
|
|
taxonomy = iterator.next();
|
|
|
|
else
|
|
|
|
return jobSpeciesModel;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
//recover taxomyId
|
|
|
|
String speciesJobId = taxonomyService.createTaxonomyJobForDWCAByChildren(taxonomy.getTaxonomyId());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 11:43:25 +02:00
|
|
|
long submitTime = Calendar.getInstance().getTimeInMillis();
|
2013-03-04 14:38:25 +01:00
|
|
|
|
2013-07-18 18:32:28 +02:00
|
|
|
String name = RESUBMIT + ": " +NormalizeString.lowerCaseUpFirstChar(taxonomy.getDescriptiveName());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
//STORE INTO DAO
|
2013-07-18 18:32:28 +02:00
|
|
|
TaxonomyJob speciesJob = new TaxonomyJob(speciesJobId, DownloadState.PENDING.toString(), name, taxonomy.getDescriptiveName(), taxonomy.getDataSourceName(), taxonomy.getRank(), 0, submitTime, 0, taxonomy.getTaxonomyId());
|
2013-03-04 14:38:25 +01:00
|
|
|
taxonomyJobDao.insert(speciesJob);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
jobSpeciesModel = new JobTaxonomyModel(speciesJob.getId(), speciesJob.getDescriptiveName(), DownloadState.PENDING, null, taxonomy.getDescriptiveName(), taxonomy.getDataSourceName(), taxonomy.getRank());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 11:43:25 +02:00
|
|
|
Date submit = DateUtil.millisecondsToDate(speciesJob.getSubmitTime());
|
2013-03-04 14:38:25 +01:00
|
|
|
// jobSpeciesModel.setStartTime(DateUtil.dateToDateFormatString(start));
|
2013-07-16 11:43:25 +02:00
|
|
|
jobSpeciesModel.setSubmitTime(submit);
|
2013-03-04 14:38:25 +01:00
|
|
|
jobSpeciesModel.setEndTime(null);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
return jobSpeciesModel;
|
|
|
|
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#cancelTaxonomyJob(java.lang.String)
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
|
|
|
public boolean cancelTaxonomyJob(String jobIdentifier) throws Exception {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
try{
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
|
|
|
|
|
|
|
//REMOVE JOB ON THE SERVICE
|
|
|
|
taxonomyService.cancelTaxonomyJobById(jobIdentifier);
|
|
|
|
|
|
|
|
TaxonomyJobPersistence speciesJobDao = DaoSession.getTaxonomyJobDAO(getASLSession());
|
|
|
|
|
|
|
|
int count = TaxonomyJobUtil.deleteTaxonomyJobById(jobIdentifier, speciesJobDao);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
if(count==1)
|
|
|
|
return true;
|
|
|
|
|
|
|
|
}catch (Exception e) {
|
|
|
|
logger.error("Error on cancel taxonomy job ", e);
|
|
|
|
throw new Exception("Error on cancel taxonomy job", e);
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#saveTaxonomyJob(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String)
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
|
|
|
public boolean saveTaxonomyJob(String jobIdentifier, String destinationFolderId, String fileName, String scientificName, String dataSourceName) throws Exception {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("saveSpeciesJob jobId: "+jobIdentifier+" destinationFolderId: "+destinationFolderId+" fileName: "+fileName);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
try {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
|
|
|
String mimeType = "application/zip";
|
|
|
|
InputStream inputStream = taxonomyService.getTaxonomyJobFileById(jobIdentifier);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
if(inputStream!=null){
|
2013-10-24 12:52:34 +02:00
|
|
|
Workspace workspace = GetWorkspaceUtil.getWorskspace(getASLSession());
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("input stream is not null");
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
WorkspaceFolder folder = (WorkspaceFolder) workspace.getItem(destinationFolderId);
|
|
|
|
fileName = WorkspaceUtil.getUniqueName(fileName, folder);
|
|
|
|
folder.createExternalFileItem(fileName,"Taxonomy job generated files", mimeType, inputStream);
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Save file with taxonomy was completed");
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
|
|
|
else{
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("input stream is null");
|
2013-03-04 14:38:25 +01:00
|
|
|
return false;
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
return true;
|
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
|
|
|
|
logger.error("An error occurred saving the generated file into the workspace",e);
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#saveTaxonomyJobError(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String)
|
|
|
|
*/
|
2013-07-16 19:36:40 +02:00
|
|
|
@Override
|
|
|
|
public boolean saveTaxonomyJobError(String jobIdentifier, String destinationFolderId, String fileName, String scientificName, String dataSourceName) throws Exception {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("saveSpeciesJob error jobId: "+jobIdentifier+" destinationFolderId: "+destinationFolderId+" fileName: "+fileName);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 19:36:40 +02:00
|
|
|
try {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 19:36:40 +02:00
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
2013-07-19 14:42:33 +02:00
|
|
|
String mimeType = "text/plain";
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 19:36:40 +02:00
|
|
|
InputStream inputStream = taxonomyService.getTaxonomyJobErrorFileById(jobIdentifier);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 19:36:40 +02:00
|
|
|
if(inputStream!=null){
|
2013-10-24 12:52:34 +02:00
|
|
|
Workspace workspace = GetWorkspaceUtil.getWorskspace(getASLSession());
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("input stream is not null");
|
2013-07-16 19:36:40 +02:00
|
|
|
WorkspaceFolder folder = (WorkspaceFolder) workspace.getItem(destinationFolderId);
|
|
|
|
fileName = WorkspaceUtil.getUniqueName(fileName, folder);
|
|
|
|
folder.createExternalFileItem(fileName,"Report errors on taxonomy job", mimeType, inputStream);
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Save report file with errors occurred was completed");
|
2013-07-16 19:36:40 +02:00
|
|
|
}
|
|
|
|
else{
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("input stream is null");
|
2013-07-16 19:36:40 +02:00
|
|
|
return false;
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 19:36:40 +02:00
|
|
|
return true;
|
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
|
|
|
|
logger.error("An error occurred saving the generated file into the workspace",e);
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 19:36:40 +02:00
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#retrieveTaxonomyByIds(java.util.List)
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
2013-10-24 12:52:34 +02:00
|
|
|
public List<LightTaxonomyRow> retrieveTaxonomyByIds(List<String> ids) throws Exception{
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("retrieveTaxonomyByIds ids size: " + ids.size());
|
2013-10-24 12:52:34 +02:00
|
|
|
List<LightTaxonomyRow> listLightTaxonomyRow = new ArrayList<LightTaxonomyRow>();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
try {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
2017-01-11 18:39:11 +01:00
|
|
|
//StreamExtend<String> convert = new StreamExtend<String>(ids.iterator());
|
|
|
|
|
|
|
|
CloseableIterator<TaxonomyItem> streamIterator = taxonomyService.retrieveTaxonomyById(ids);
|
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
ASLSession session = getASLSession();
|
2017-01-11 18:39:11 +01:00
|
|
|
TaxonomyItemConverter taxonomyItemConverter = new TaxonomyItemConverter(session);
|
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
Map<String, TaxonomyRow> mapTaxonomyIds = SessionUtil.getHashMapTaxonomyByIdsCache(session);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
if(mapTaxonomyIds==null){
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Cache taxa ByIds doesn't exists into session, creating..");
|
2013-10-24 12:52:34 +02:00
|
|
|
mapTaxonomyIds = new HashMap<String, TaxonomyRow>();
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
while (streamIterator.hasNext()) {
|
|
|
|
TaxonomyItem tax = streamIterator.next();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
TaxonomyRow taxonomy = taxonomyItemConverter.convert(tax);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
if(mapTaxonomyIds.get(taxonomy.getServiceId())==null){
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Taxonomy with service id: "+taxonomy.getServiceId()+" doesn't exists into Map Taxonomy Ids, adding..");
|
2013-10-24 12:52:34 +02:00
|
|
|
mapTaxonomyIds.put(taxonomy.getServiceId(),taxonomy);
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
LightTaxonomyRow lightTaxRow = ClusterStructuresForTaxonomyRow.convetTaxonomyRowToLigthTaxonomyRow(taxonomy);
|
|
|
|
listLightTaxonomyRow.add(lightTaxRow);
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
SessionUtil.setHashMapTaxonomyByIdsCache(session, mapTaxonomyIds);
|
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
streamIterator.close();
|
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
|
|
|
|
e.printStackTrace();
|
|
|
|
logger.error("An error retrieve taxonomy by Id",e);
|
|
|
|
// throw new Exception(e.getMessage());
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
return listLightTaxonomyRow;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#retrieveSynonymsByRefId(java.lang.String)
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
2013-10-24 12:52:34 +02:00
|
|
|
public List<LightTaxonomyRow> retrieveSynonymsByRefId(String refId) throws Exception{
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("retrieveSynonymsById id: " + refId);
|
2013-10-24 12:52:34 +02:00
|
|
|
List<LightTaxonomyRow> listLightTaxonomyRow = new ArrayList<LightTaxonomyRow>();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
try {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
|
|
|
|
|
|
|
CloseableIterator<TaxonomyItem> streamIterator = taxonomyService.retrieveSynonymsById(refId);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
ASLSession session = getASLSession();
|
2013-03-04 14:38:25 +01:00
|
|
|
TaxonomyItemConverter taxonomyItemConverter = new TaxonomyItemConverter(getASLSession());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
Map<String, TaxonomyRow> mapSysnosyms = SessionUtil.getHashMapSynonymsTaxonomyCache(getASLSession());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
if(mapSysnosyms==null){
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Cache synonyms doesn't exists into session, creating..");
|
2013-10-24 12:52:34 +02:00
|
|
|
mapSysnosyms = new HashMap<String, TaxonomyRow>();
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
// int i = 1;
|
2013-03-04 14:38:25 +01:00
|
|
|
while (streamIterator.hasNext()) {
|
|
|
|
TaxonomyItem tax = streamIterator.next();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
TaxonomyRow taxonomy = taxonomyItemConverter.convert(tax);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
if(mapSysnosyms.get(taxonomy.getServiceId())==null){
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Taxonomy with service id: "+taxonomy.getServiceId()+" doesn't exists into Map Synonyms, adding..");
|
2013-10-24 12:52:34 +02:00
|
|
|
mapSysnosyms.put(taxonomy.getServiceId(),taxonomy);
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
LightTaxonomyRow lightTaxRow = ClusterStructuresForTaxonomyRow.convetTaxonomyRowToLigthTaxonomyRow(taxonomy);
|
|
|
|
listLightTaxonomyRow.add(lightTaxRow);
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
SessionUtil.setHashMapSynonymsTaxonomyCache(session, mapSysnosyms);
|
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
streamIterator.close();
|
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
|
|
|
|
e.printStackTrace();
|
|
|
|
logger.error("An error retrieve synonyms by Id",e);
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
return listLightTaxonomyRow;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
/**
|
|
|
|
* {@inheritDoc}
|
|
|
|
*/
|
|
|
|
@Override
|
|
|
|
public List<JobOccurrencesModel> createOccurrencesJob(List<JobOccurrencesModel> listJobOccurrenceModel, SaveFileFormat saveFileFormat, OccurrencesSaveEnum csvType, boolean isByDataSource, int expectedOccurrence) throws Exception {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("createOccurencesJobFromSelection...");
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
List<JobOccurrencesModel> listResultJobModel = new ArrayList<JobOccurrencesModel>();
|
|
|
|
@SuppressWarnings("unchecked")
|
|
|
|
FetchingSession<ResultRow> searchSession = (FetchingSession<ResultRow>) getSearchSession();
|
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
|
|
|
ASLSession aslSession = getASLSession();
|
|
|
|
String dataSourceAsXml = "";
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
try {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
List<String> keys = null;
|
|
|
|
//Get Dao with list occurrences jobs
|
|
|
|
OccurrenceJobPersistence occurrencesJobDao = DaoSession.getOccurrencesJobDAO(aslSession);
|
|
|
|
|
|
|
|
if(!isByDataSource){ //NOT IS BY DATASOURCE - CREATE ONE JOB
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
keys = OccurrenceJobUtil.getListOfSelectedKey(searchSession);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
if(listJobOccurrenceModel!=null && listJobOccurrenceModel.get(0)!=null){ //IN THIS CASE - THERE IS ONE JOBMODEL
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
List<DataSource> dataSourceList = listJobOccurrenceModel.get(0).getDataSources();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
//ADDED DATA SOURCE LIST
|
|
|
|
listResultJobModel.add(OccurrenceJobUtil.createOccurrenceJobOnServiceByKeys(listJobOccurrenceModel.get(0), taxonomyService, occurrencesJobDao, keys, dataSourceList, saveFileFormat, csvType, expectedOccurrence));
|
2017-01-11 18:39:11 +01:00
|
|
|
}
|
2015-07-22 17:19:38 +02:00
|
|
|
}else{ //IS BY DATASOURCE - CREATE MORE JOB, ONE FOR EACH DATASOURCE
|
2017-01-11 18:39:11 +01:00
|
|
|
|
|
|
|
for (JobOccurrencesModel jobModel : listJobOccurrenceModel) { //IN THIS CASE - FOR EACH JOBMODEL THERE IS ONE DATASOURCE
|
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
dataSourceAsXml = "";
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
if(jobModel.getDataSources()!=null && jobModel.getDataSources().get(0)!=null){
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
//recover keys
|
|
|
|
DataSource dataSource = jobModel.getDataSources().get(0);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
OccurrenceKeys occKey = OccurrenceJobUtil.getListOfSelectedKeyByDataSource(dataSource.getName(), aslSession);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
List<DataSource> dataSourceList = jobModel.getDataSources();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
//ADDED DATA SOURCE LIST
|
|
|
|
listResultJobModel.add(OccurrenceJobUtil.createOccurrenceJobOnServiceByKeys(jobModel, taxonomyService, occurrencesJobDao, occKey.getListKey(), dataSourceList, saveFileFormat, csvType, occKey.getTotalOccurrence()));
|
|
|
|
}
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
return listResultJobModel;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("An error occurred in createOccurencesJobFromSelection", e);
|
|
|
|
throw new Exception(e.getMessage());
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#resubmitOccurrencesJob(java.lang.String)
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
|
|
|
public List<JobOccurrencesModel> resubmitOccurrencesJob(String jobIdentifier) throws Exception {
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("createOccurencesJobFromSelection...");
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
List<JobOccurrencesModel> listResultJobModel = new ArrayList<JobOccurrencesModel>();
|
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
|
|
|
ASLSession aslSession = getASLSession();
|
|
|
|
List<String> keys = null;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
try {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
//Get Dao with list occurrences jobs
|
|
|
|
OccurrenceJobPersistence occurrencesJobDao = DaoSession.getOccurrencesJobDAO(aslSession);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
CriteriaBuilder queryBuilder = occurrencesJobDao.getCriteriaBuilder();
|
|
|
|
CriteriaQuery<Object> cq = queryBuilder.createQuery();
|
|
|
|
Predicate pr1 = queryBuilder.equal(occurrencesJobDao.rootFrom(cq).get(OccurrencesJob.ID_FIELD), jobIdentifier);
|
|
|
|
cq.where(pr1);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
Iterator<OccurrencesJob> iterator = occurrencesJobDao.executeCriteriaQuery(cq).iterator();
|
|
|
|
|
|
|
|
OccurrencesJob job;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
if(iterator.hasNext())
|
|
|
|
job = iterator.next();
|
|
|
|
else
|
|
|
|
return listResultJobModel;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
//recover keys
|
|
|
|
keys = OccurrenceJobUtil.revertListKeyFromStoredXMLString(job.getResultRowKeysAsXml());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
//recover file format
|
|
|
|
SaveFileFormat fileFormat = OccurrenceJobUtil.converFileFormat(job.getFileFormat());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
//recover csv type
|
|
|
|
OccurrencesSaveEnum csvType = OccurrenceJobUtil.convertCsvType(job.getCsvType());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-18 18:32:28 +02:00
|
|
|
String name = RESUBMIT + ": "+job.getName();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-18 18:32:28 +02:00
|
|
|
JobOccurrencesModel jobModel = new JobOccurrencesModel("",name, job.getScientificName(), job.getDataSources(), fileFormat, csvType, job.isByDataSource());
|
2013-03-04 14:38:25 +01:00
|
|
|
jobModel.setTotalOccurrences(job.getExpectedOccurrence());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
listResultJobModel.add(OccurrenceJobUtil.createOccurrenceJobOnServiceByKeys(jobModel, taxonomyService, occurrencesJobDao, keys, job.getDataSources(), fileFormat, csvType, jobModel.getTotalOccurrences()));
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("An error occurred in createOccurencesJobFromSelection", e);
|
|
|
|
throw new Exception(e.getMessage());
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
return listResultJobModel;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getListOccurrencesJob()
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
2017-03-20 14:43:11 +01:00
|
|
|
public List<JobOccurrencesModel> getListOccurrencesJob() throws SessionExpired, Exception{
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("getListOccurencesJob... ");
|
2013-03-04 14:38:25 +01:00
|
|
|
List<JobOccurrencesModel> listJobs = new ArrayList<JobOccurrencesModel>();
|
|
|
|
|
|
|
|
try {
|
|
|
|
|
|
|
|
OccurrenceJobPersistence occurrencesJobDao = DaoSession.getOccurrencesJobDAO(getASLSession());
|
|
|
|
Iterator<OccurrencesJob> iterator = occurrencesJobDao.getList().iterator();
|
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
|
|
|
|
|
|
|
while (iterator!=null && iterator.hasNext()) {
|
|
|
|
OccurrencesJob job = iterator.next();
|
2017-01-11 18:39:11 +01:00
|
|
|
CompleteJobStatus statusResponse = taxonomyService.getOccurrenceJobById(job.getId());
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("get occurrence job "+job.getId()+ " from service");
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
try{
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
if(statusResponse!=null){
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("statusResponse of occurrence job is not null..." + job.getId());
|
2013-03-04 14:38:25 +01:00
|
|
|
JobOccurrencesModel jobOccurrenceModel = OccurrenceJobUtil.convertJob(job, statusResponse, occurrencesJobDao);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
if(jobOccurrenceModel!=null){
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("added list jobOccurrenceId: "+jobOccurrenceModel.getJobIdentifier() + " status "+jobOccurrenceModel.getDownloadState());
|
2013-03-04 14:38:25 +01:00
|
|
|
listJobs.add(jobOccurrenceModel);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else{
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("statusResponse of occurrence job is null..." + job.getId());
|
2017-02-09 18:23:28 +01:00
|
|
|
logger.info("deleting job ..." + job.getId());
|
2013-03-04 14:38:25 +01:00
|
|
|
OccurrenceJobUtil.deleteOccurrenceJobById(job.getId(),occurrencesJobDao);
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
}catch (Exception e) {
|
|
|
|
e.printStackTrace();
|
|
|
|
logger.error("Error on getListOccurencesJob ", e);
|
|
|
|
throw new Exception("Error on getListOccurencesJob", e);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
} catch (Exception e) {
|
2017-03-20 14:43:11 +01:00
|
|
|
|
|
|
|
if(e instanceof SessionExpired){
|
|
|
|
logger.error("Session is expired");
|
|
|
|
throw new SessionExpired(e.getMessage());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
logger.error("Error on get iterator "+e, e);
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
return listJobs;
|
|
|
|
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2017-02-10 15:18:57 +01:00
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getListGisLayerJob()
|
|
|
|
*/
|
2017-02-10 15:18:57 +01:00
|
|
|
@Override
|
2017-03-20 14:43:11 +01:00
|
|
|
public List<JobGisLayerModel> getListGisLayerJob() throws SessionExpired, Exception{
|
2017-02-10 15:18:57 +01:00
|
|
|
logger.info("getListGisLayerJob... ");
|
|
|
|
List<JobGisLayerModel> listJobs = new ArrayList<JobGisLayerModel>();
|
|
|
|
|
|
|
|
try {
|
|
|
|
|
|
|
|
GisLayerJobPersistence gisLayerJobDao = DaoSession.getGisLayersJobDAO(getASLSession());
|
2017-02-10 18:04:01 +01:00
|
|
|
|
2017-02-10 15:18:57 +01:00
|
|
|
Iterator<GisLayerJob> iterator = gisLayerJobDao.getList().iterator();
|
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
|
|
|
|
|
|
|
while (iterator!=null && iterator.hasNext()) {
|
|
|
|
GisLayerJob job = iterator.next();
|
|
|
|
|
2017-02-10 18:04:01 +01:00
|
|
|
if(job.getId()==null || job.getId().isEmpty()){
|
|
|
|
logger.warn("Gis job has an id null or empty, skipping");
|
|
|
|
}else{
|
2017-02-15 16:45:21 +01:00
|
|
|
|
2017-02-10 18:04:01 +01:00
|
|
|
try{
|
|
|
|
|
2017-02-22 14:44:46 +01:00
|
|
|
CompleteJobStatus statusResponse = taxonomyService.getGisLayerByJobId(job.getId());
|
|
|
|
logger.info("get occurrence job "+job.getId()+ " from service");
|
|
|
|
|
2017-02-10 18:04:01 +01:00
|
|
|
if(statusResponse!=null){
|
|
|
|
logger.info("statusResponse of gis layer job is not null..." + job.getId());
|
2017-02-14 18:20:44 +01:00
|
|
|
JobGisLayerModel convertJob = GisLayerJobUtil.convertJob(job, statusResponse, gisLayerJobDao, taxonomyService, getASLSession());
|
2017-02-10 18:04:01 +01:00
|
|
|
|
|
|
|
if(convertJob!=null){
|
|
|
|
logger.info("added list jobOccurrenceId: "+convertJob.getJobIdentifier() + " status "+convertJob.getDownloadState());
|
|
|
|
listJobs.add(convertJob);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else{
|
|
|
|
logger.info("statusResponse of occurrence job is null..." + job.getId());
|
|
|
|
logger.info("deleting job ..." + job.getId());
|
|
|
|
GisLayerJobUtil.deleteGisLayerJobById(job.getId(),gisLayerJobDao);
|
2017-02-10 15:18:57 +01:00
|
|
|
}
|
|
|
|
|
2017-02-10 18:04:01 +01:00
|
|
|
}catch (Exception e) {
|
2017-02-15 16:45:21 +01:00
|
|
|
|
|
|
|
if (e instanceof InvalidJobIdException){
|
|
|
|
logger.info("The spd service unkwnowns GIS job id: "+job.getId() +" deleting it from db...");
|
|
|
|
GisLayerJobUtil.deleteGisLayerJobById(job.getId(),gisLayerJobDao);
|
|
|
|
}else{
|
|
|
|
|
|
|
|
logger.error("Error on getListGisLayerJob ", e);
|
|
|
|
throw new Exception("Error on getListGisLayerJob", e);
|
|
|
|
}
|
2017-02-10 18:04:01 +01:00
|
|
|
}
|
2017-02-10 15:18:57 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("Error on get iterator "+e, e);
|
|
|
|
}
|
|
|
|
|
|
|
|
return listJobs;
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#saveOccurrenceJob(org.gcube.portlets.user.speciesdiscovery.shared.JobOccurrencesModel, java.lang.String, java.lang.String, java.lang.String, java.lang.String)
 */
// Streams the generated file of a completed occurrence job from the SPD service into a
// workspace folder. Returns true on success, false when the service returns no stream.
// NOTE(review): mimeType stays null for file formats other than CSV/DARWIN_CORE — confirm
// those are the only possible values of jobModel.getFileFormat().
@Override
public boolean saveOccurrenceJob(JobOccurrencesModel jobModel, String destinationFolderId, String fileName, String scientificName, String dataSourceName) throws Exception {
	logger.info("saveOccurrenceJob jobId: "+jobModel.getJobIdentifier()+" destinationFolderId: "+destinationFolderId+" fileName: "+fileName + " file format: "+jobModel.getFileFormat());

	try {
		SpeciesService taxonomyService = getSpeciesService();
		// Choose the workspace item MIME type from the job's output format.
		String mimeType = null;

		switch (jobModel.getFileFormat()) {
		case CSV: {
			mimeType = "text/csv";
		} break;
		case DARWIN_CORE:{
			mimeType = "application/xhtml+xml";
		} break;
		}

		// Stream produced by the SPD service for this job id; null means no file is available.
		InputStream inputStream = taxonomyService.getOccurrenceJobFileById(jobModel.getJobIdentifier());

		if(inputStream!=null){
			Workspace workspace = GetWorkspaceUtil.getWorskspace(getASLSession());
			logger.info("input stream is not null");
			// System.out.println("input stream is not null");

			WorkspaceFolder folder = (WorkspaceFolder) workspace.getItem(destinationFolderId);
			// Avoid name clashes inside the destination folder.
			fileName = WorkspaceUtil.getUniqueName(fileName, folder);
			// The external file item consumes the input stream.
			folder.createExternalFileItem(fileName,"Occurrence job generated files", mimeType, inputStream);
			logger.info("Save file with occurrences was completed");
		}
		else{
			logger.info("input stream is null");
			return false;
		}

		return true;

	} catch (Exception e) {
		logger.error("An error occurred saving the generated file into the workspace",e);
		// NOTE(review): only the message is propagated; the cause is dropped — confirm
		// SearchServiceException has no (String, Throwable) constructor.
		throw new SearchServiceException(e.getMessage());
	}
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#saveOccurrenceJobError(org.gcube.portlets.user.speciesdiscovery.shared.JobOccurrencesModel, java.lang.String, java.lang.String, java.lang.String, java.lang.String)
|
|
|
|
*/
|
2013-07-16 19:36:40 +02:00
|
|
|
@Override
|
|
|
|
public boolean saveOccurrenceJobError(JobOccurrencesModel jobModel, String destinationFolderId, String fileName, String scientificName, String dataSourceName) throws Exception {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("saveOccurrenceJobError jobId: "+jobModel.getJobIdentifier()+" destinationFolderId: "+destinationFolderId+" fileName: "+fileName + " file format: "+jobModel.getFileFormat());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 19:36:40 +02:00
|
|
|
try {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 19:36:40 +02:00
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
2013-07-19 14:47:44 +02:00
|
|
|
String mimeType = "text/plain";
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 19:36:40 +02:00
|
|
|
InputStream inputStream = taxonomyService.getOccurrenceJobErrorFileById(jobModel.getJobIdentifier());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 19:36:40 +02:00
|
|
|
if(inputStream!=null){
|
2013-10-24 12:52:34 +02:00
|
|
|
Workspace workspace = GetWorkspaceUtil.getWorskspace(getASLSession());
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("input stream is not null");
|
2013-07-16 19:36:40 +02:00
|
|
|
// System.out.println("input stream is not null");
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 19:36:40 +02:00
|
|
|
WorkspaceFolder folder = (WorkspaceFolder) workspace.getItem(destinationFolderId);
|
|
|
|
fileName = WorkspaceUtil.getUniqueName(fileName, folder);
|
|
|
|
folder.createExternalFileItem(fileName,"Report errors occurred on occurrence job", mimeType, inputStream);
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("Save report file with errors occurred was completed");
|
2013-07-16 19:36:40 +02:00
|
|
|
}
|
|
|
|
else{
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("input stream is null");
|
2013-07-16 19:36:40 +02:00
|
|
|
return false;
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 19:36:40 +02:00
|
|
|
return true;
|
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
|
|
|
|
logger.error("An error occurred saving the generated file into the workspace",e);
|
|
|
|
throw new SearchServiceException(e.getMessage());
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 19:36:40 +02:00
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#cancelOccurrenceJob(java.lang.String)
 */
// Cancels a job on the SPD service, then deletes the matching local record.
// Returns true only when exactly one local record was deleted.
@Override
public boolean cancelOccurrenceJob(String jobIdentifier) throws Exception {
	logger.info("cancelOccurrenceJob jobIdentifier: "+jobIdentifier);
	try{
		SpeciesService taxonomyService = getSpeciesService();
		//REMOVE JOB ON THE SERVICE
		// NOTE(review): this calls cancelTaxonomyJobById from an *occurrence*-job method;
		// verify against the SPD client API whether an occurrence-specific cancel exists.
		taxonomyService.cancelTaxonomyJobById(jobIdentifier);
		OccurrenceJobPersistence occurrenceJobDAO = DaoSession.getOccurrencesJobDAO(getASLSession());
		int count = OccurrenceJobUtil.deleteOccurrenceJobById(jobIdentifier, occurrenceJobDAO);

		if(count==1)
			return true;

	}catch (Exception e) {
		logger.error("Error on cancel occurrence job ", e);
		throw new Exception("Error on cancel occurrence job", e);
	}

	// Reached when the local delete did not affect exactly one row.
	return false;
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#loadStructuresForResultRowClustering()
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
|
|
|
public ClusterStructuresForResultRow loadStructuresForResultRowClustering() throws Exception {
|
|
|
|
|
|
|
|
int countSelectedRow = countOfSelectedRow();
|
|
|
|
boolean isReduced = false;
|
|
|
|
int totalRow = countSelectedRow;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
ASLSession session = getASLSession();
|
|
|
|
HashMap<String, ClusterCommonNameDataSourceForResultRow> hashCluster = SessionUtil.getCurrentClusterCommonNameForResultRow(session);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
//Reset cluster for common name
|
|
|
|
if(hashCluster!=null)
|
|
|
|
SessionUtil.setCurrentClusterCommonNameForResultRow(session, null);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
//LIMIT NUMBER OF ITEMS TO ConstantsSpeciesDiscovery.LIMIT_ITEM_DETAILS
|
2013-10-24 12:52:34 +02:00
|
|
|
if(countSelectedRow>ConstantsSpeciesDiscovery.RESULT_ROW_LIMIT_ITEM_DETAILS)
|
2017-01-11 18:39:11 +01:00
|
|
|
countSelectedRow = ConstantsSpeciesDiscovery.RESULT_ROW_LIMIT_ITEM_DETAILS;
|
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
ResultFilter filter = new ResultFilter(false, true, true);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
SearchResult<ResultRow> searchResults = getSearchResultRows(0, countSelectedRow, filter, true);
|
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
ClusterStructuresForResultRow cluster = new ClusterStructuresForResultRow(searchResults,isReduced, totalRow);
|
2015-07-22 17:19:38 +02:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
//TODO USE THREAD?
|
|
|
|
ManagerClusterCommonNameDataSourceForResultRow manager = new ManagerClusterCommonNameDataSourceForResultRow(cluster.getHashClusterScientificNameResultRowServiceID(), cluster.getHashResult());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
SessionUtil.setCurrentClusterCommonNameForResultRow(getASLSession(), manager.getHashClusterCommonNameDataSource());
|
|
|
|
|
|
|
|
//THIS OBJECT IS NOT USED ON CLIENT
|
|
|
|
cluster.setHashResult(null);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
return cluster;
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#loadClusterCommonNameForResultRowByScientificName(java.lang.String)
|
|
|
|
*/
|
2013-10-24 12:52:34 +02:00
|
|
|
@Override
|
|
|
|
public ClusterCommonNameDataSourceForResultRow loadClusterCommonNameForResultRowByScientificName(String scientificName) throws Exception {
|
|
|
|
|
2017-01-11 18:39:11 +01:00
|
|
|
logger.info("loadClusterCommonNameForResultRowByScientificName for scientific name: "+scientificName);
|
2013-10-24 12:52:34 +02:00
|
|
|
HashMap<String, ClusterCommonNameDataSourceForResultRow> hashCluster = SessionUtil.getCurrentClusterCommonNameForResultRow(getASLSession());
|
2015-07-22 17:19:38 +02:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
if(hashCluster==null){
|
|
|
|
logger.warn("Error in loadClusterCommonNameForResultRowByScientificName, hashCluster was not found in session");
|
|
|
|
return null;
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
ClusterCommonNameDataSourceForResultRow cluster = hashCluster.get(scientificName);
|
|
|
|
if(cluster==null){
|
|
|
|
logger.warn("Error in loadClusterCommonNameForResultRowByScientificName, cluster was not found in session");
|
|
|
|
return null;
|
|
|
|
}
|
|
|
|
return cluster;
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#loadDataSourceForResultRow(boolean, boolean)
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
|
|
|
public List<DataSource> loadDataSourceForResultRow(boolean selected, boolean distinct) throws Exception {
|
|
|
|
|
|
|
|
ResultRowPersistence daoResultRow = null;
|
|
|
|
ASLSession session = getASLSession();
|
|
|
|
List<DataSource> listDataSource = new ArrayList<DataSource>();
|
|
|
|
|
|
|
|
try {
|
|
|
|
daoResultRow = DaoSession.getResultRowDAO(session);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
EntityManager em = daoResultRow.createNewManager();
|
|
|
|
List<String> listDN = new ArrayList<String>();
|
|
|
|
try {
|
|
|
|
String selectString = "select ";
|
|
|
|
if(distinct)
|
|
|
|
selectString+= "distinct ";
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
Query query = em.createQuery(selectString + "t."+ResultRow.DATASOURCE_NAME+" from ResultRow t where t.selected = "+selected );
|
|
|
|
listDN = query.getResultList();
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
} finally {
|
|
|
|
em.close();
|
|
|
|
}
|
|
|
|
|
|
|
|
for (String dataSourceName : listDN) {
|
|
|
|
listDataSource.add(new DataSource(dataSourceName, dataSourceName));
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("Error in loadDataSourceForResultRow " +e.getMessage(), e);
|
|
|
|
throw new Exception("Error in loadDataSourceForResultRow " + e.getMessage(), e);
|
|
|
|
}
|
|
|
|
|
|
|
|
return listDataSource;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
|
|
|
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
 * @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#loadStructuresForTaxonomyClustering()
 */
// Builds clustering structures for the selected taxonomy rows. First clears every
// taxonomy-related cache in the session (common-name clusters, children, synonyms,
// by-id map), then loads at most TAXONOMY_LIMIT_ITEMS_DETAILS rows and caches the new
// common-name cluster map. isReduced tells the client the selection was truncated.
@Override
public ClusterStructuresForTaxonomyRow loadStructuresForTaxonomyClustering() throws Exception {

	int countSelectedRow = countOfSelectedRow();
	boolean isReduced = false;
	// Keep the untruncated count so the client can report the real total.
	int totalRow = countSelectedRow;

	ASLSession session = getASLSession();
	HashMap<String, ClusterCommonNameDataSourceForTaxonomyRow> hashCluster = SessionUtil.getCurrentClusterCommonNameForTaxonomyRow(session);

	//Reset cluster for common name
	if(hashCluster!=null)
		SessionUtil.setCurrentClusterCommonNameForTaxonomyRow(session, null);

	HashMap<String, TaxonomyRow> mapOldChildren = SessionUtil.getHashMapChildrenTaxonomyCache(session);
	//Reset list children to last clustering
	if(mapOldChildren!=null)
		SessionUtil.setHashMapChildrenTaxonomyCache(session, null);

	HashMap<String, TaxonomyRow> mapSynonyms = SessionUtil.getHashMapSynonymsTaxonomyCache(session);
	//Reset list synonyms to last clustering
	if(mapSynonyms!=null)
		SessionUtil.setHashMapSynonymsTaxonomyCache(session, null);

	HashMap<String, TaxonomyRow> mapTaxonomyIds = SessionUtil.getHashMapTaxonomyByIdsCache(session);
	//Reset list synonyms to last clustering
	if(mapTaxonomyIds!=null)
		SessionUtil.setHashMapTaxonomyByIdsCache(session, null);

	//LIMIT NUMBER OF ITEMS TO ConstantsSpeciesDiscovery.LIMIT_ITEM_DETAILS
	if(countSelectedRow>ConstantsSpeciesDiscovery.TAXONOMY_LIMIT_ITEMS_DETAILS){
		countSelectedRow = ConstantsSpeciesDiscovery.TAXONOMY_LIMIT_ITEMS_DETAILS;
		isReduced = true;
	}

	ResultFilter filter = new ResultFilter(false, true, true);
	SearchResult<TaxonomyRow> searchResults = getSearchTaxonomyRow(0, countSelectedRow, filter, true);
	ClusterStructuresForTaxonomyRow cluster = new ClusterStructuresForTaxonomyRow(searchResults,isReduced, totalRow);

	//TODO USE THREAD?
	ManagerClusterCommonNameDataSourceForTaxonomyRow manager = new ManagerClusterCommonNameDataSourceForTaxonomyRow(cluster.getHashClusterScientificNameTaxonomyRowServiceID(), cluster.getHashResult());
	SessionUtil.setCurrentClusterCommonNameForTaxonomyRow(session, manager.getHashClusterCommonNameDataSource());

	//THIS OBJECT IS NOT USED ON CLIENT
	cluster.setHashResult(null);

	return cluster;
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#changeStatusOccurrenceJob(java.lang.String, org.gcube.portlets.user.speciesdiscovery.shared.DownloadState)
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
|
|
|
public boolean changeStatusOccurrenceJob(String jobIdentifier, DownloadState state) throws Exception {
|
|
|
|
|
|
|
|
OccurrenceJobPersistence occurrenceJobDAO = DaoSession.getOccurrencesJobDAO(getASLSession());
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
int count = OccurrenceJobUtil.changeStatusOccurrenceJobById(jobIdentifier, state, occurrenceJobDAO);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
if(count==1)
|
|
|
|
return true;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
return false;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#changeStatusTaxonomyJob(java.lang.String, org.gcube.portlets.user.speciesdiscovery.shared.DownloadState)
|
|
|
|
*/
|
2013-03-04 14:38:25 +01:00
|
|
|
@Override
|
|
|
|
public boolean changeStatusTaxonomyJob(String jobIdentifier, DownloadState state) throws Exception {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
TaxonomyJobPersistence taxonomyJobDAO = DaoSession.getTaxonomyJobDAO(getASLSession());
|
|
|
|
|
|
|
|
int count = TaxonomyJobUtil.changeStatusTaxonomyJobById(jobIdentifier, state, taxonomyJobDAO);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
if(count==1)
|
|
|
|
return true;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
return false;
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|
2013-07-16 19:36:40 +02:00
|
|
|
|
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#isAvailableTaxonomyJobReportError(java.lang.String)
|
|
|
|
*/
|
|
|
|
@Override
|
|
|
|
public boolean isAvailableTaxonomyJobReportError(String jobIdentifier) throws Exception {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("isAvailableTaxonomyJobReportError jobId: "+jobIdentifier);
|
2013-07-16 19:36:40 +02:00
|
|
|
try {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 19:36:40 +02:00
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
|
|
|
return taxonomyService.isAvailableTaxonomyJobErrorFileById(jobIdentifier);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 19:36:40 +02:00
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("An error occurred getting error (taxonomy) file for jobid "+jobIdentifier,e);
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#isAvailableOccurrenceJobReportError(java.lang.String)
|
|
|
|
*/
|
|
|
|
@Override
|
|
|
|
public boolean isAvailableOccurrenceJobReportError(String jobIdentifier) throws Exception {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("isAvailableOccurrenceJobReportError jobId: "+jobIdentifier);
|
2013-07-16 19:36:40 +02:00
|
|
|
try {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 19:36:40 +02:00
|
|
|
SpeciesService taxonomyService = getSpeciesService();
|
|
|
|
return taxonomyService.isAvailableOccurrenceJobErrorFileById(jobIdentifier);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-07-16 19:36:40 +02:00
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("An error occurred getting error (occurrence) file for jobid "+jobIdentifier,e);
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2017-02-16 17:15:45 +01:00
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#getLastQuery()
|
|
|
|
*/
|
2013-07-18 18:32:28 +02:00
|
|
|
@Override
|
|
|
|
public String getLastQuery(){
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("getLastQuery...");
|
2013-07-18 18:32:28 +02:00
|
|
|
ASLSession session = getASLSession();
|
|
|
|
return SessionUtil.getCurrentQuery(session);
|
|
|
|
}
|
2013-10-24 12:52:34 +02:00
|
|
|
|
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#loadClusterCommonNameForTaxonomyRowByScientificName(java.lang.String)
|
|
|
|
*/
|
|
|
|
@Override
|
|
|
|
public ClusterCommonNameDataSourceForTaxonomyRow loadClusterCommonNameForTaxonomyRowByScientificName(String scientificName) {
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2015-07-22 17:19:38 +02:00
|
|
|
logger.info("loadClusterCommonNameForTaxonomyRowByScientificName for scientific name: "+scientificName);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
HashMap<String, ClusterCommonNameDataSourceForTaxonomyRow> hashCluster = SessionUtil.getCurrentClusterCommonNameForTaxonomyRow(getASLSession());
|
2015-07-22 17:19:38 +02:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
if(hashCluster==null){
|
|
|
|
logger.warn("Error in loadClusterCommonNameForTaxonomyRowByScientificName, hashCluster was not found in session");
|
|
|
|
return null;
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
ClusterCommonNameDataSourceForTaxonomyRow cluster = hashCluster.get(scientificName);
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
if(cluster==null){
|
|
|
|
logger.warn("Error in loadClusterCommonNameForTaxonomyRowByScientificName, cluster was not found in session");
|
|
|
|
return null;
|
|
|
|
}
|
2017-01-11 18:39:11 +01:00
|
|
|
|
2013-10-24 12:52:34 +02:00
|
|
|
return cluster;
|
|
|
|
}
|
2017-02-16 12:44:41 +01:00
|
|
|
|
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#saveGisLayerAsWsLink(org.gcube.portlets.user.speciesdiscovery.shared.JobGisLayerModel, java.lang.String, java.lang.String)
|
|
|
|
*/
|
|
|
|
@Override
|
|
|
|
public boolean saveGisLayerAsWsLink(JobGisLayerModel jobGisLayer, String destinationFolderId, String fileName) throws Exception{
|
|
|
|
|
|
|
|
try {
|
|
|
|
|
|
|
|
Workspace workspace = GetWorkspaceUtil.getWorskspace(getASLSession());
|
|
|
|
logger.info("input stream is not null");
|
|
|
|
WorkspaceFolder folder = (WorkspaceFolder) workspace.getItem(destinationFolderId);
|
|
|
|
fileName = WorkspaceUtil.getUniqueName(fileName, folder);
|
2017-02-16 15:17:45 +01:00
|
|
|
|
|
|
|
if(jobGisLayer.getGisViewerAppLink()==null){
|
|
|
|
SpeciesService speciesService = getSpeciesService();
|
|
|
|
CompleteJobStatus statusResponse = speciesService.getGisLayerByJobId(jobGisLayer.getJobIdentifier());
|
|
|
|
GisLayerJobPersistence gisLayerJobDao = DaoSession.getGisLayersJobDAO(getASLSession());
|
|
|
|
|
|
|
|
try{
|
|
|
|
GisLayerJob gLJ = gisLayerJobDao.getItemByIdField(jobGisLayer.getJobIdentifier());
|
|
|
|
if(gLJ!=null){
|
|
|
|
jobGisLayer = GisLayerJobUtil.convertJob(gLJ, statusResponse, gisLayerJobDao, speciesService, getASLSession());
|
|
|
|
}
|
|
|
|
}catch(Exception e){
|
|
|
|
logger.error("Error on retrieving gis link from DB for job id: "+jobGisLayer.getJobIdentifier(), e);
|
|
|
|
throw new Exception(e.getMessage());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-02-16 12:44:41 +01:00
|
|
|
workspace.createExternalUrl(fileName, jobGisLayer.getLayerDescription() + "- Layer UUID: "+jobGisLayer.getLayerUUID(), jobGisLayer.getGisViewerAppLink(), destinationFolderId);
|
|
|
|
logger.info("Saving External link "+fileName +" completed");
|
|
|
|
return true;
|
|
|
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
logger.error("Sorry, an error occurred saving the file '"+fileName+"' in your Workspace, try again",e);
|
|
|
|
throw new Exception(e.getMessage());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#changeStatusGisLayerJob(java.lang.String, org.gcube.portlets.user.speciesdiscovery.shared.DownloadState)
|
|
|
|
*/
|
|
|
|
@Override
|
|
|
|
public boolean changeStatusGisLayerJob(String jobId, DownloadState state) {
|
|
|
|
|
|
|
|
GisLayerJobPersistence gisLayerDAO;
|
|
|
|
try {
|
|
|
|
|
2017-02-16 15:17:45 +01:00
|
|
|
gisLayerDAO = DaoSession.getGisLayersJobDAO(getASLSession());
|
2017-02-16 12:44:41 +01:00
|
|
|
int count = GisLayerJobUtil.changetStatusGisLayerJob(jobId, state, gisLayerDAO);
|
|
|
|
if(count==1)
|
|
|
|
return true;
|
|
|
|
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
catch (Exception e) {
|
|
|
|
logger.error("An error occured in changeStatusGisLayerJob for jobId: "+jobId);
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#cancelGisLayerJob(java.lang.String)
|
|
|
|
*/
|
|
|
|
@Override
|
|
|
|
public boolean cancelGisLayerJob(String jobIdentifier) throws Exception {
|
|
|
|
|
|
|
|
try{
|
|
|
|
SpeciesService speciesService = getSpeciesService();
|
|
|
|
//REMOVE JOB ON THE SERVICE
|
|
|
|
speciesService.cancelGisLayerByJobId(jobIdentifier);
|
|
|
|
GisLayerJobPersistence gisLayerDao = DaoSession.getGisLayersJobDAO(getASLSession());
|
|
|
|
int count = GisLayerJobUtil.deleteGisLayerJobById(jobIdentifier, gisLayerDao);
|
|
|
|
if(count==1)
|
|
|
|
return true;
|
|
|
|
|
|
|
|
}catch (Exception e) {
|
|
|
|
logger.error("Erroron deleting gis layer job ", e);
|
|
|
|
throw new Exception("Sorry, an error occurred deleting gis layer job", e);
|
|
|
|
}
|
|
|
|
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* (non-Javadoc)
|
|
|
|
* @see org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService#resubmitGisLayerJob(java.lang.String)
|
|
|
|
*/
|
|
|
|
@Override
|
|
|
|
public JobGisLayerModel resubmitGisLayerJob(String jobIdentifier) throws Exception {
|
|
|
|
|
|
|
|
//TODO
|
|
|
|
return null;
|
|
|
|
|
|
|
|
// logger.info("Resubmit gis layer job by id: " + jobIdentifier);
|
|
|
|
// JobGisLayerModel jobGisLayerModel = null;
|
|
|
|
// GisLayerJobPersistence gisLayerJobDAO = DaoSession.getGisLayersJobDAO(getASLSession());
|
|
|
|
//
|
|
|
|
// CriteriaBuilder queryBuilder = gisLayerJobDAO.getCriteriaBuilder();
|
|
|
|
// CriteriaQuery<Object> cq = queryBuilder.createQuery();
|
|
|
|
// Predicate pr1 = queryBuilder.equal(gisLayerJobDAO.rootFrom(cq).get(GisLayerJob.ID_FIELD), jobIdentifier);
|
|
|
|
// cq.where(pr1);
|
|
|
|
//
|
|
|
|
// Iterator<GisLayerJob> iterator = gisLayerJobDAO.executeCriteriaQuery(cq).iterator();
|
|
|
|
//
|
|
|
|
// GisLayerJob gisLayerJob;
|
|
|
|
//
|
|
|
|
// if(iterator.hasNext())
|
|
|
|
// gisLayerJob = iterator.next();
|
|
|
|
// else
|
|
|
|
// return jobGisLayerModel;
|
|
|
|
//
|
|
|
|
// SpeciesService speciesService = getSpeciesService();
|
|
|
|
//
|
|
|
|
// //recover taxomyId
|
|
|
|
// String speciesJobId = speciesService.generateGisLayerFromOccurrenceKeys(occurrenceKeys, layerTitle, layerDescr, author, credits)
|
|
|
|
//
|
|
|
|
// long submitTime = Calendar.getInstance().getTimeInMillis();
|
|
|
|
//
|
|
|
|
// String name = RESUBMIT + ": " +NormalizeString.lowerCaseUpFirstChar(gisLayerJob.getDescriptiveName());
|
|
|
|
//
|
|
|
|
// //STORE INTO DAO
|
|
|
|
// TaxonomyJob speciesJob = new TaxonomyJob(speciesJobId, DownloadState.PENDING.toString(), name, gisLayerJob.getDescriptiveName(), gisLayerJob.getDataSourceName(), gisLayerJob.getRank(), 0, submitTime, 0, gisLayerJob.getTaxonomyId());
|
|
|
|
// gisLayerJobDAO.insert(speciesJob);
|
|
|
|
//
|
|
|
|
// jobGisLayerModel = new JobTaxonomyModel(speciesJob.getId(), speciesJob.getDescriptiveName(), DownloadState.PENDING, null, gisLayerJob.getDescriptiveName(), gisLayerJob.getDataSourceName(), gisLayerJob.getRank());
|
|
|
|
//
|
|
|
|
// Date submit = DateUtil.millisecondsToDate(speciesJob.getSubmitTime());
|
|
|
|
//// jobSpeciesModel.setStartTime(DateUtil.dateToDateFormatString(start));
|
|
|
|
// jobGisLayerModel.setSubmitTime(submit);
|
|
|
|
// jobGisLayerModel.setEndTime(null);
|
|
|
|
//
|
|
|
|
// return jobGisLayerModel;
|
|
|
|
}
|
2017-02-16 17:15:45 +01:00
|
|
|
|
|
|
|
|
2013-03-04 14:38:25 +01:00
|
|
|
}
|