Porting to spd-client-library 4.0

Fixed bug #6156
Updated pom version to 3.9.0

git-svn-id: http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/portlets/user/speciesdiscovery@141521 82a268e6-3cf1-43bd-a215-b396298e98cf
Francesco Mangiacrapa 2017-01-11 17:39:11 +00:00
parent 630ddcb9b2
commit 76f12eebdd
33 changed files with 2145 additions and 1463 deletions

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="lib" path="/home/francesco-mangiacrapa/libraries/asm-5.0.3/lib/asm-5.0.3.jar"/>
<classpathentry kind="src" output="target/species-discovery-3.8.0-SNAPSHOT/WEB-INF/classes" path="src/main/java">
<classpathentry kind="src" output="target/species-discovery-3.8.2-SNAPSHOT/WEB-INF/classes" path="src/main/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
@ -31,5 +31,5 @@
<attribute name="org.eclipse.jst.component.dependency" value="/WEB-INF/lib"/>
</attributes>
</classpathentry>
<classpathentry kind="output" path="target/species-discovery-3.8.0-SNAPSHOT/WEB-INF/classes"/>
<classpathentry kind="output" path="target/species-discovery-3.8.2-SNAPSHOT/WEB-INF/classes"/>
</classpath>

View File

@ -4,4 +4,9 @@
<attribute name="provider-id" value="jpa-no-op-library-provider"/>
</node>
</facet>
<facet id="jst.jaxrs">
<node name="libprov">
<attribute name="provider-id" value="jaxrs-no-op-library-provider"/>
</node>
</facet>
</root>

View File

@ -6,4 +6,5 @@
<installed facet="jpt.jpa" version="2.0"/>
<installed facet="liferay.portlet" version="6.0"/>
<installed facet="jst.web" version="3.0"/>
<installed facet="jst.jaxrs" version="2.0"/>
</faceted-project>

View File

@ -1,5 +1,10 @@
<ReleaseNotes>
<Changeset component="org.gcube.portlets-user.species-discovery.3-8-1" date="15-09-2016">
<Changeset component="org.gcube.portlets-user.species-discovery.3-9-0"
date="11-01-2016">
<Change>[Feature #6313] SPD portlet upgrade: porting to spd-client-library 4.0.0 </Change>
</Changeset>
<Changeset component="org.gcube.portlets-user.species-discovery.3-8-1"
date="15-09-2016">
<Change>Removed Gis-viewer dependency</Change>
</Changeset>
<Changeset component="org.gcube.portlets-user.species-discovery.3-8-0"

pom.xml
View File

@ -19,7 +19,7 @@
<groupId>org.gcube.portlets.user</groupId>
<artifactId>species-discovery</artifactId>
<packaging>war</packaging>
<version>3.8.1-SNAPSHOT</version>
<version>3.9.0-SNAPSHOT</version>
<name>gCube Species Discovery</name>
<description>
gCube Species Discovery Portlet lets users discover species information from the Species Service.
@ -31,7 +31,7 @@
</scm>
<properties>
<!-- Convenience property to set the GWT version -->
<gwtVersion>2.6.1</gwtVersion>
<gwtVersion>2.7.0</gwtVersion>
<distroDirectory>distro</distroDirectory>
<!-- GWT needs at least java 1.6 -->
<maven.compiler.source>1.7</maven.compiler.source>
@ -82,7 +82,7 @@
<dependency>
<groupId>org.gcube.data.spd</groupId>
<artifactId>spd-client-library</artifactId>
<version>[3.0.0-SNAPSHOT, 4.0.0-SNAPSHOT)</version>
<version>[4.0.0-SNAPSHOT, 5.0.0-SNAPSHOT)</version>
<scope>compile</scope>
</dependency>
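Note on the range: [4.0.0-SNAPSHOT, 5.0.0-SNAPSHOT) is standard Maven version-range syntax for "at least 4.0.0-SNAPSHOT and strictly below 5.0.0-SNAPSHOT", so the build resolves the newest available 4.x client without further pom edits.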
@ -93,6 +93,13 @@
<scope>compile</scope>
</dependency>
<!-- USED TO SHOW A LAYER VIA GISVIEWER -->
<!-- <dependency> -->
<!-- <groupId>org.gcube.portlets.user</groupId> -->
<!-- <artifactId>gcube-gis-viewer</artifactId> -->
<!-- <version>[2.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version> -->
<!-- </dependency> -->
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>csv4j</artifactId>
@ -106,8 +113,6 @@
<scope>provided</scope>
</dependency>
<!-- Eclipselink dependencies -->
<dependency>
<groupId>org.eclipse.persistence</groupId>

View File

@ -1,7 +1,7 @@
package org.gcube.portlets.user.speciesdiscovery.client;
import org.gcube.portlets.user.speciesdiscovery.client.rpc.GISInfoServiceAsync;
import org.gcube.portlets.user.speciesdiscovery.client.rpc.GISInfoService;
import org.gcube.portlets.user.speciesdiscovery.client.rpc.GISInfoServiceAsync;
import org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService;
import org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchServiceAsync;

View File

@ -1,5 +1,10 @@
package org.gcube.portlets.user.speciesdiscovery.server;
import it.geosolutions.geonetwork.util.GNSearchRequest;
import it.geosolutions.geonetwork.util.GNSearchRequest.Config;
import it.geosolutions.geonetwork.util.GNSearchRequest.Param;
import it.geosolutions.geonetwork.util.GNSearchResponse;
import java.util.HashMap;
import java.util.Map;
@ -16,11 +21,6 @@ import org.gcube.spatial.data.geonetwork.LoginLevel;
import com.google.gwt.user.server.rpc.RemoteServiceServlet;
import it.geosolutions.geonetwork.util.GNSearchRequest;
import it.geosolutions.geonetwork.util.GNSearchResponse;
import it.geosolutions.geonetwork.util.GNSearchRequest.Config;
import it.geosolutions.geonetwork.util.GNSearchRequest.Param;
public class GisInfoServiceImpl extends RemoteServiceServlet implements GISInfoService{
/**
@ -50,8 +50,6 @@ public class GisInfoServiceImpl extends RemoteServiceServlet implements GISInfoS
private static String getPublicLink(String uuid) throws UriResolverMapException, IllegalArgumentException{
UriResolverManager resolver = new UriResolverManager("GIS");
Map<String, String> params = new HashMap<String, String>();
params.put("gis-UUID", uuid);
params.put("scope", ScopeProvider.instance.get());

View File

@ -27,7 +27,6 @@ import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.TaxonomyR
import org.gcube.portlets.user.speciesdiscovery.server.service.SpeciesService;
import org.gcube.portlets.user.speciesdiscovery.server.service.TaxonomyItemConverter;
import org.gcube.portlets.user.speciesdiscovery.server.stream.CloseableIterator;
import org.gcube.portlets.user.speciesdiscovery.server.stream.StreamExtend;
import org.gcube.portlets.user.speciesdiscovery.shared.CommonName;
import org.gcube.portlets.user.speciesdiscovery.shared.ItemParameter;
import org.gcube.portlets.user.speciesdiscovery.shared.SearchServiceException;
@ -69,7 +68,7 @@ public class TaxonomyRowTable extends HttpServlet {
public static String headWithTitle(String title) {
// return (DOCTYPE + "\n" + HTML+"\n<link type=\"text/css\" rel=\"stylesheet\" href=\"SpeciesDiscovery.css\">" + HEAD+TITLE + title + TITLECLOSE+HEADCLOSE+"\n");
return (DOCTYPE + "\n" + HTML+ "\n"+HEAD + "\n"+TITLE + title + TITLECLOSE+"\n"+HEADCLOSE+"\n");
return DOCTYPE + "\n" + HTML+ "\n"+HEAD + "\n"+TITLE + title + TITLECLOSE+"\n"+HEADCLOSE+"\n";
}
@ -170,13 +169,9 @@ public class TaxonomyRowTable extends HttpServlet {
logger.trace("Tentative recovering taxonomy with id "+taxonomyServiceRowID+" from service");
SpeciesService service = getSpeciesService(req);
StreamExtend<String> streamIds = new StreamExtend<String>(Arrays.asList(taxonomyServiceRowID).iterator());
CloseableIterator<TaxonomyItem> streamIterator = service.retrieveTaxonomyById(streamIds);
//StreamExtend<String> streamIds = new StreamExtend<String>(Arrays.asList(taxonomyServiceRowID).iterator());
CloseableIterator<TaxonomyItem> streamIterator = service.retrieveTaxonomyById(Arrays.asList(taxonomyServiceRowID));
TaxonomyItemConverter taxonomyItemConverter = new TaxonomyItemConverter(aslSession);
// int i = 1;
@ -232,10 +227,6 @@ public class TaxonomyRowTable extends HttpServlet {
}
protected TaxonomyRow findingTaxonomyInCaches(String taxonomyServiceRowID, ASLSession session){
HashMap<String, TaxonomyRow> hashChildrenTaxa = SessionUtil.getHashMapChildrenTaxonomyCache(session);
@ -295,7 +286,7 @@ public class TaxonomyRowTable extends HttpServlet {
public String error(String message){
String errorPage = "";
errorPage +=("<p>Error: "+message+"</p>");
errorPage +="<p>Error: "+message+"</p>";
return errorPage;
}

View File

@ -31,7 +31,7 @@ import org.gcube.common.homelibrary.util.WorkspaceUtil;
import org.gcube.data.spd.model.products.OccurrencePoint;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.gcube.data.spd.stubs.types.Status;
import org.gcube.data.spd.model.service.types.CompleteJobStatus;
import org.gcube.portlets.user.speciesdiscovery.client.ConstantsSpeciesDiscovery;
import org.gcube.portlets.user.speciesdiscovery.client.model.ClassificationModel;
import org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService;
@ -59,7 +59,6 @@ import org.gcube.portlets.user.speciesdiscovery.server.stream.CloseableIterator;
import org.gcube.portlets.user.speciesdiscovery.server.stream.IteratorPointInfo;
import org.gcube.portlets.user.speciesdiscovery.server.stream.OccurenceCSVConverter;
import org.gcube.portlets.user.speciesdiscovery.server.stream.OccurenceCSVConverterOpenModeller;
import org.gcube.portlets.user.speciesdiscovery.server.stream.StreamExtend;
import org.gcube.portlets.user.speciesdiscovery.server.stream.aggregation.FieldAggregator;
import org.gcube.portlets.user.speciesdiscovery.server.stream.aggregation.TaxonomyClassificationAggregator;
import org.gcube.portlets.user.speciesdiscovery.server.util.DateUtil;
@ -310,7 +309,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
logger.info("chunk selected data bounds [start: "+start+" end: " + end+"]");
data = data.subList(start, end);
} else if (activeFiltersObject == null || (!activeFiltersObject.isActiveFilters())) {
} else if (activeFiltersObject == null || !activeFiltersObject.isActiveFilters()) {
if(limit>0){
data = searchSession.getBuffer().getList(start,limit);
}
@ -339,7 +338,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
resultRow.setProperties(null);
}
chunk.add(resultRow);
logger.info("getSearchResultRows return on client result item with id: " +resultRow.getId() + " service id: "+resultRow.getServiceId());
logger.debug("getSearchResultRows returning on client result item with id: " +resultRow.getId() + " service id: "+resultRow.getServiceId());
}
Long endTime = System.currentTimeMillis() - startTime;
@ -387,7 +386,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
data = data.subList(start, end);
} else if (activeFiltersObject == null || (!activeFiltersObject.isActiveFilters())) {
} else if (activeFiltersObject == null || !activeFiltersObject.isActiveFilters()) {
logger.info("getting all available data");
if(limit>0){
@ -486,7 +485,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
public HashMap<String, Integer> getFilterCounterById(GridField field) throws Exception {
logger.info("Filter Counter for: "+ field);
try {
FetchingSession<? extends FetchingElement> searchSession = (FetchingSession<? extends FetchingElement>) getSearchSession();
FetchingSession<? extends FetchingElement> searchSession = getSearchSession();
FieldAggregator<?,?> aggregator = (FieldAggregator<?,?>) searchSession.getAggregator(FieldAggregator.getFieldAggregatorName(field));
if (aggregator!=null) return aggregator.getAggregation();
else return new HashMap<String, Integer>();
@ -505,7 +504,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
MainTaxonomicRankEnum rank = MainTaxonomicRankEnum.valueOfLabel(rankLabel);
if (rank!=null) {
FetchingSession<? extends FetchingElement> searchSession = (FetchingSession<? extends FetchingElement>) getSearchSession();
FetchingSession<? extends FetchingElement> searchSession = getSearchSession();
TaxonomyClassificationAggregator<?> classificationAggregator = (TaxonomyClassificationAggregator<?>) searchSession.getAggregator(TaxonomyClassificationAggregator.NAME);
return classificationAggregator.getAggregation().get(rank);
} else return new HashMap<String, ClassificationModel>();
@ -542,7 +541,6 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
logger.info("getSearchStatus MAX_BUFFERING_ELEMENTS is reached - stop search");
stopSearchWithoutRemove();
//CALCULATE NEW BUFFER SIZE AFTER FETCHING IS CLOSED
try {
// int sleepingTime = 500;
@ -665,7 +663,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
int size = 0;
try {
if (activeFiltersObject == null || (!activeFiltersObject.isActiveFilters())) {
if (activeFiltersObject == null || !activeFiltersObject.isActiveFilters()) {
SelectableFetchingBuffer<? extends FetchingElement> buffer = (SelectableFetchingBuffer<? extends FetchingElement>) searchSession.getBuffer();
buffer.updateAllSelection(selection);
size = buffer.size();
@ -1166,7 +1164,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
logger.info("get taxonomy job "+job.getId()+ " from service");
try{
Status statusResponse = taxonomyService.getTaxonomyJobById(job.getId());
CompleteJobStatus statusResponse = taxonomyService.getTaxonomyJobById(job.getId());
if(statusResponse!=null){
logger.info("statusResponse is not null..." + job.getId());
@ -1433,9 +1431,9 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
try {
SpeciesService taxonomyService = getSpeciesService();
StreamExtend<String> convert = new StreamExtend<String>(ids.iterator());
//StreamExtend<String> convert = new StreamExtend<String>(ids.iterator());
CloseableIterator<TaxonomyItem> streamIterator = taxonomyService.retrieveTaxonomyById(convert);
CloseableIterator<TaxonomyItem> streamIterator = taxonomyService.retrieveTaxonomyById(ids);
ASLSession session = getASLSession();
TaxonomyItemConverter taxonomyItemConverter = new TaxonomyItemConverter(session);
@ -1656,7 +1654,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
while (iterator!=null && iterator.hasNext()) {
OccurrencesJob job = iterator.next();
Status statusResponse = taxonomyService.getOccurrenceJobById(job.getId());
CompleteJobStatus statusResponse = taxonomyService.getOccurrenceJobById(job.getId());
logger.info("get occurrence job "+job.getId()+ " from service");
try{
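Porting note: the 4.0 client returns org.gcube.data.spd.model.service.types.CompleteJobStatus where the old stubs exposed Status, and retrieveTaxonomyById now accepts the id list directly instead of a wrapping StreamExtend. A minimal polling sketch, built only from calls visible in this diff:

// poll a job with the 4.0 status types (JobStatus is an enum, no string compares)
CompleteJobStatus statusResponse = taxonomyService.getTaxonomyJobById(job.getId());
if (statusResponse != null) {
    JobStatus status = statusResponse.getStatus();
    DownloadState state = TaxonomyJobUtil.getDownloadState(status);
}
// retrieve taxonomy items straight from the id list, no StreamExtend wrapper
CloseableIterator<TaxonomyItem> streamIterator = taxonomyService.retrieveTaxonomyById(ids);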

View File

@ -15,7 +15,8 @@ import javax.persistence.criteria.Predicate;
import org.apache.log4j.Logger;
import org.gcube.application.framework.core.session.ASLSession;
import org.gcube.data.spd.stubs.types.Status;
import org.gcube.data.spd.model.service.types.CompleteJobStatus;
import org.gcube.data.spd.model.service.types.JobStatus;
import org.gcube.portlets.user.speciesdiscovery.server.persistence.DaoSession;
import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.OccurrenceJobPersistence;
import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.ResultRowPersistence;
@ -33,6 +34,13 @@ import org.gcube.portlets.user.speciesdiscovery.shared.OccurrencesSaveEnum;
import org.gcube.portlets.user.speciesdiscovery.shared.ResultRow;
import org.gcube.portlets.user.speciesdiscovery.shared.SaveFileFormat;
/**
* The Class OccurrenceJobUtil.
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* Jan 10, 2017
*/
public class OccurrenceJobUtil {
//STATE RETURNED BY STATUS RESPONSE
@ -45,13 +53,21 @@ public class OccurrenceJobUtil {
protected static Logger logger = Logger.getLogger(OccurrenceJobUtil.class);
public static JobOccurrencesModel convertJob(OccurrencesJob job, Status statusResponse, OccurrenceJobPersistence occurrencesJobDao) {
/**
* Convert job.
*
* @param job the job
* @param statusResponse the status response
* @param occurrencesJobDao the occurrences job dao
* @return the job occurrences model
*/
public static JobOccurrencesModel convertJob(OccurrencesJob job, CompleteJobStatus statusResponse, OccurrenceJobPersistence occurrencesJobDao) {
//TODO SET END TIME
JobOccurrencesModel jobOccurrenceModel;
DownloadState downloadState = null;
long endTime = 0;
String status = statusResponse.getStatus();
JobStatus status = statusResponse.getStatus();
downloadState = getDownloadState(status);
logger.trace("jobId: "+job.getId() +" download state: " + downloadState);
@ -161,23 +177,38 @@ public class OccurrenceJobUtil {
return jobOccurrenceModel;
}
public static DownloadState getDownloadState(String status){
/**
* Gets the download state.
*
* @param status the status
* @return the download state
*/
public static DownloadState getDownloadState(JobStatus status){
if(status!=null){
if(status.compareToIgnoreCase(PENDING)==0){
return DownloadState.PENDING;
}else if(status.compareToIgnoreCase(RUNNING)==0){
return DownloadState.ONGOING;
}else if(status.compareToIgnoreCase(FAILED)==0){
return DownloadState.FAILED;
}else if(status.compareToIgnoreCase(COMPLETED)==0){
switch (status) {
case COMPLETED:
return DownloadState.COMPLETED;
}
}
case FAILED:
return DownloadState.FAILED;
case PENDING:
return DownloadState.PENDING;
case RUNNING:
return DownloadState.ONGOING;
default:
return null;
}
}
return null;
}
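A short usage sketch of the enum-based mapping above, with constants as listed in the switch:

DownloadState state = OccurrenceJobUtil.getDownloadState(JobStatus.RUNNING); // yields DownloadState.ONGOING

Switching on JobStatus drops the old case-insensitive string comparisons, and any status not handled explicitly now falls through to the null default.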
/**
* Convert csv type.
*
* @param csvType the csv type
* @return the occurrences save enum
*/
public static OccurrencesSaveEnum convertCsvType(String csvType) {
if(csvType!=null){
@ -191,6 +222,12 @@ public class OccurrenceJobUtil {
}
/**
* Convert file format.
*
* @param fileFormat the file format
* @return the save file format
*/
public static SaveFileFormat converFileFormat(String fileFormat) {
if(fileFormat!=null){
@ -205,6 +242,13 @@ public class OccurrenceJobUtil {
}
/**
* Delete occurrence job by id.
*
* @param jobIdentifier the job identifier
* @param occurrencesJobDao the occurrences job dao
* @return the int
*/
public static int deleteOccurrenceJobById(String jobIdentifier, OccurrenceJobPersistence occurrencesJobDao){
logger.trace("Delete occurrence job id: " + jobIdentifier);
@ -222,6 +266,14 @@ public class OccurrenceJobUtil {
}
/**
* Change status occurrence job by id.
*
* @param jobIdentifier the job identifier
* @param state the state
* @param occurrencesJobDao the occurrences job dao
* @return the int
*/
public static int changeStatusOccurrenceJobById(String jobIdentifier, DownloadState state, OccurrenceJobPersistence occurrencesJobDao){
logger.trace("Change status occurrence job id: " + jobIdentifier);
// System.out.println("Delete job id: " + jobIdentifier);
@ -256,6 +308,13 @@ public class OccurrenceJobUtil {
}
/**
* Gets the list of selected key.
*
* @param searchSession the search session
* @return the list of selected key
* @throws Exception the exception
*/
public static List<String> getListOfSelectedKey(FetchingSession<ResultRow> searchSession) throws Exception{
Collection<ResultRow> selectedRows = ((SelectableFetchingBuffer<ResultRow>) searchSession.getBuffer()).getSelected();
@ -280,6 +339,13 @@ public class OccurrenceJobUtil {
}
/**
* Gets the list of selected key by data source.
*
* @param dataSource the data source
* @param session the session
* @return the list of selected key by data source
*/
public static OccurrenceKeys getListOfSelectedKeyByDataSource(String dataSource, ASLSession session) {
logger.trace("getListOfSelectedKeyByDataSource...");
@ -352,6 +418,19 @@ public class OccurrenceJobUtil {
}
/**
* Creates the occurrence job on service by keys.
*
* @param jobModel the job model
* @param taxonomyService the taxonomy service
* @param occurrencesJobDao the occurrences job dao
* @param keys the keys
* @param dataSources the data sources
* @param saveFileFormat the save file format
* @param csvType the csv type
* @param expectedOccurrence the expected occurrence
* @return the job occurrences model
*/
public static JobOccurrencesModel createOccurrenceJobOnServiceByKeys(JobOccurrencesModel jobModel,SpeciesService taxonomyService, OccurrenceJobPersistence occurrencesJobDao, List<String> keys, List<DataSource> dataSources, SaveFileFormat saveFileFormat, OccurrencesSaveEnum csvType, int expectedOccurrence) {
String serviceJobId = null;
@ -422,6 +501,12 @@ public class OccurrenceJobUtil {
}
/**
* Convert list key into store xml string.
*
* @param keys the keys
* @return the string
*/
public static String convertListKeyIntoStoreXMLString(List<String> keys){
String storeKeys = "";
@ -443,6 +528,12 @@ public class OccurrenceJobUtil {
return storeKeys;
}
/**
* Revert list key from stored xml string.
*
* @param storedKeysAsXml the stored keys as xml
* @return the list
*/
public static List<String> revertListKeyFromStoredXMLString(String storedKeysAsXml){
List<String> listKey = new ArrayList<String>();

View File

@ -12,14 +12,21 @@ import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import org.apache.log4j.Logger;
import org.gcube.data.spd.stubs.types.NodeStatus;
import org.gcube.data.spd.stubs.types.Status;
import org.gcube.data.spd.model.service.types.CompleteJobStatus;
import org.gcube.data.spd.model.service.types.JobStatus;
import org.gcube.data.spd.model.service.types.NodeStatus;
import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.TaxonomyJobPersistence;
import org.gcube.portlets.user.speciesdiscovery.server.util.DateUtil;
import org.gcube.portlets.user.speciesdiscovery.shared.DownloadState;
import org.gcube.portlets.user.speciesdiscovery.shared.JobTaxonomyModel;
import org.gcube.portlets.user.speciesdiscovery.shared.TaxonomyJob;
/**
* The Class TaxonomyJobUtil.
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* Jan 10, 2017
*/
public class TaxonomyJobUtil {
//STATE RETURNED BY STATUS RESPONSE
@ -31,11 +38,21 @@ public class TaxonomyJobUtil {
protected static Logger logger = Logger.getLogger(TaxonomyJobUtil.class);
public static JobTaxonomyModel convertJob(TaxonomyJob job, Status statusResponse, TaxonomyJobPersistence speciesJobDao) throws SQLException{
/**
* Convert job.
*
* @param job the job
* @param statusResponse the status response
* @param speciesJobDao the species job dao
* @return the job taxonomy model
* @throws SQLException the SQL exception
*/
public static JobTaxonomyModel convertJob(TaxonomyJob job, CompleteJobStatus statusResponse, TaxonomyJobPersistence speciesJobDao) throws SQLException{
//TODO SET END TIME
JobTaxonomyModel jobSpeciesModel;
String status = statusResponse.getStatus();
JobStatus status = statusResponse.getStatus();
DownloadState downloadState = null;
long endTime = 0;
@ -74,7 +91,7 @@ public class TaxonomyJobUtil {
boolean changeStatus = false;
//If status of children is completed and job status is not completed (the file is generated) or failed, set download state on saving
if(onSaving &&(!downloadState.equals(DownloadState.COMPLETED) && !downloadState.equals(DownloadState.FAILED))){
if(onSaving &&!downloadState.equals(DownloadState.COMPLETED) && !downloadState.equals(DownloadState.FAILED)){
downloadState = DownloadState.SAVING;
changeStatus = true;
@ -175,23 +192,41 @@ public class TaxonomyJobUtil {
return jobSpeciesModel;
}
public static DownloadState getDownloadState(String status){
/**
* Gets the download state.
*
* @param status the status
* @return the download state
*/
public static DownloadState getDownloadState(JobStatus status){
if(status!=null){
if(status.compareToIgnoreCase(PENDING)==0){
return DownloadState.PENDING;
}else if(status.compareToIgnoreCase(RUNNING)==0){
return DownloadState.ONGOING;
}else if(status.compareToIgnoreCase(FAILED)==0){
return DownloadState.FAILED;
}else if(status.compareToIgnoreCase(COMPLETED)==0){
switch (status) {
case COMPLETED:
return DownloadState.COMPLETED;
case FAILED:
return DownloadState.FAILED;
case PENDING:
return DownloadState.PENDING;
case RUNNING:
return DownloadState.ONGOING;
default:
return null;
}
}
return null;
}
/**
* Delete taxonomy job by id.
*
* @param jobIdentifier the job identifier
* @param taxonomyJobDao the taxonomy job dao
* @return the int
* @throws SQLException the SQL exception
*/
public static int deleteTaxonomyJobById(String jobIdentifier, TaxonomyJobPersistence taxonomyJobDao) throws SQLException{
logger.trace("Delete taxonomy job id: " + jobIdentifier);
try{
@ -208,6 +243,14 @@ public class TaxonomyJobUtil {
}
/**
* Change status taxonomy job by id.
*
* @param jobIdentifier the job identifier
* @param state the state
* @param taxonomyJobDAO the taxonomy job dao
* @return the int
*/
public static int changeStatusTaxonomyJobById(String jobIdentifier,DownloadState state, TaxonomyJobPersistence taxonomyJobDAO) {
logger.trace("Change status taxonomy job id: " + jobIdentifier);
// System.out.println("Delete job id: " + jobIdentifier);

View File

@ -73,14 +73,10 @@ public class ResultRowBuffer extends AbstractSelectableDaoBuffer<ResultRow> impl
String value;
if(activeFiltersObject!=null){
//FILTER BY CLASSIFICATION
if(activeFiltersObject.isByClassification()){
// List<Integer> listId = activeFiltersObject.getListByClassification();
int counter = activeFiltersObject.getNumberOfData();
logger.trace("in classification filter - counter: "+counter);
logger.trace("in classification filter - rank: "+activeFiltersObject.getRankClassification());
logger.trace("in classification filter - classification id: "+activeFiltersObject.getClassificationId());
@ -105,13 +101,9 @@ public class ResultRowBuffer extends AbstractSelectableDaoBuffer<ResultRow> impl
// logger.trace("in classification filter - columName: "+columName);
try {
CriteriaBuilder queryBuilder = dao.getCriteriaBuilder();
Query query = dao.createNewManager().createQuery("select r FROM ResultRow r where r."+columName+ "='"+activeFiltersObject.getClassificationId()+"'");
//
query.setMaxResults(counter);
iterator = query.getResultList().iterator();
// logger.trace("in classification filter - statement: "+queryBuilder.where().eq(columName, activeFiltersObject.getClassificationId()).getStatement());
@ -163,37 +155,28 @@ public class ResultRowBuffer extends AbstractSelectableDaoBuffer<ResultRow> impl
try {
CriteriaBuilder queryBuilder = taxonDao.getCriteriaBuilder();
value = activeFiltersObject.getRankName();
// value = NormalizeString.lowerCaseUpFirstChar(activeFiltersObject.getRankName());
CriteriaQuery<Object> cq = queryBuilder.createQuery();
Predicate pr1 = queryBuilder.equal(taxonDao.rootFrom(cq).get(Taxon.RANK), NormalizeString.validateUndefined(value));
cq.where(pr1);
EntityManager em = dao.createNewManager();
Iterator<Taxon> iteratorTaxon = taxonDao.executeCriteriaQuery(cq).iterator();
String queryString = "select *" +
" FROM "+ResultRow.class.getSimpleName()+" r" +
" INNER JOIN RESULTROW_TAXON rt on r.ID=rt.RESULTROW_ID" +
" INNER JOIN "+Taxon.class.getSimpleName()+" t on t.INTERNALID=rt.MATCHINGTAXON_INTERNALID" +
" where t.RANK = '"+value+"' and t.ID IN" +
" (select MIN(tax.ID) from TAXON tax)";
logger.trace("in rank filter - value: "+value);
Query query = em.createNativeQuery(queryString, ResultRow.class);
List<ResultRow> listResultRow = new ArrayList<ResultRow>();
try {
List<Integer> listTaxonId = new ArrayList<Integer>();
if(iteratorTaxon!=null){
while(iteratorTaxon.hasNext()){
Taxon tax = iteratorTaxon.next();
listTaxonId.add(tax.getId());
}
listResultRow = query.getResultList();
} catch (Exception e) {
logger.error("Error in ResultRow - executeCriteriaQuery: " + e.getMessage(), e);
} finally {
em.close();
}
logger.trace("in rank filter - listTaxonId size: "+listTaxonId.size());
queryBuilder = dao.getCriteriaBuilder();
cq = queryBuilder.createQuery();
pr1 = dao.rootFrom(cq).get(ResultRow.PARENT_FOREIGN_KEY_TAXON).in(listTaxonId);
cq.where(pr1);
iterator = dao.executeCriteriaQuery(cq).iterator();
iterator = listResultRow.iterator();
} catch (Exception e) {
logger.error("Error in activeFiltersObject.isByRank(): "+e, e);
@ -202,20 +185,15 @@ public class ResultRowBuffer extends AbstractSelectableDaoBuffer<ResultRow> impl
}
if(iterator!=null){
while(iterator.hasNext()){
ResultRow row = iterator.next();
list.add(row);
}
filteredListSize = list.size();
}
}
logger.trace("RETURNED List size " + list.size());
return list;
}
@ -228,11 +206,8 @@ public class ResultRowBuffer extends AbstractSelectableDaoBuffer<ResultRow> impl
try {
em.getTransaction().begin();
int updateCount = em.createQuery("UPDATE ResultRow SET " + ResultRow.SELECTED + " = "+ selection).executeUpdate();
logger.trace("Updated " + updateCount + " item");
em.getTransaction().commit();
} finally {
if (em.getTransaction().isActive())
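Design note on the rank filter rewrite above: one native join query, run through em.createNativeQuery(queryString, ResultRow.class), replaces the previous two-step approach of a criteria query collecting matching Taxon ids followed by an IN query over ResultRow, eliminating the intermediate id list and an extra round trip to the embedded database.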

View File

@ -86,11 +86,8 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow>
try {
CriteriaBuilder queryBuilder = dao.getCriteriaBuilder();
Query query = dao.createNewManager().createQuery("select r FROM TaxonomyRow r where r."+columName+ "='"+activeFiltersObject.getClassificationId()+"'");
//
query.setMaxResults(counter);
iterator = query.getResultList().iterator();
// logger.trace("in classification filter - statement: "+queryBuilder.where().eq(columName, activeFiltersObject.getClassificationId()).getStatement());
@ -111,7 +108,6 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow>
//TODO FIXME empty value
logger.trace("FILTER BY DATA PROVIDER: "+ value );
iterator = dao.executeCriteriaQuery(cq).iterator();
} catch (Exception e) {
@ -128,10 +124,8 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow>
CriteriaQuery<Object> cq = queryBuilder.createQuery();
Predicate pr1 = queryBuilder.equal(dao.rootFrom(cq).get(TaxonomyRow.DATAPROVIDER_NAME), value);
cq.where(pr1);
//TODO FIXME empty value
logger.trace("FILTER BY DATA SOURCE: "+ value );
iterator = dao.executeCriteriaQuery(cq).iterator();
} catch (Exception e) {
@ -144,7 +138,6 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow>
}else if(activeFiltersObject.isByRank()){
try {
CriteriaBuilder queryBuilder = dao.getCriteriaBuilder();
value = activeFiltersObject.getRankName();
// value = NormalizeString.lowerCaseUpFirstChar(activeFiltersObject.getRankName());
@ -152,12 +145,9 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow>
CriteriaQuery<Object> cq = queryBuilder.createQuery();
Predicate pr1 = queryBuilder.equal(dao.rootFrom(cq).get(TaxonomyRow.RANK), NormalizeString.validateUndefined(value));
cq.where(pr1);
logger.trace("FILTER BY RANK: "+ value );
iterator = dao.executeCriteriaQuery(cq).iterator();
} catch (Exception e) {
logger.error("Error in activeFiltersObject.isByRank(): "+e, e);
}
@ -165,20 +155,15 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow>
}
if(iterator!=null){
while(iterator.hasNext()){
TaxonomyRow row = iterator.next();
list.add(row);
}
filteredListSize = list.size();
}
}
logger.trace("RETURNED List size " + list.size());
return list;
}

View File

@ -31,7 +31,7 @@ public class OccurrenceJobPersistence extends AbstractPersistence<OccurrencesJob
em.getTransaction().begin();
removed = em.createQuery("DELETE FROM OccurrencesJob").executeUpdate();
em.getTransaction().commit();
logger.trace("DELETE FROM OccurrenceJob " + removed +" items");
logger.trace("DELETED FROM OccurrenceJob " + removed +" items");
} catch (Exception e) {
logger.error("Error in removeAll: " + e.getMessage(), e);

View File

@ -110,7 +110,7 @@ public class OccurrenceRowPersistence extends AbstractPersistence<Occurrence>{
em.getTransaction().begin();
removed = em.createQuery("DELETE FROM Occurrence").executeUpdate();
em.getTransaction().commit();
logger.trace("DELETE FROM Occurrence " + removed +" items");
logger.trace("DELETED FROM Occurrence " + removed +" items");
} catch (Exception e) {
logger.error("Error in removeAll: " + e.getMessage(), e);

View File

@ -120,7 +120,7 @@ public class ResultRowPersistence extends AbstractPersistence<ResultRow>{
em.getTransaction().begin();
removed = em.createQuery("DELETE FROM ResultRow").executeUpdate();
em.getTransaction().commit();
logger.trace("DELETE FROM ResultRow " + removed +" items");
logger.trace("DELETED FROM ResultRow " + removed +" items");
} catch (Exception e) {
logger.error("Error in ResultRow - removeAll: " + e.getMessage(), e);

View File

@ -31,7 +31,7 @@ public class TaxonRowPersistence extends AbstractPersistence<Taxon>{
em.getTransaction().begin();
removed = em.createQuery("DELETE FROM Taxon").executeUpdate();
em.getTransaction().commit();
logger.trace("DELETE FROM Taxon " + removed +" items");
logger.trace("DELETED FROM Taxon " + removed +" items");
} catch (Exception e) {
logger.error("Error in removeAll: " + e.getMessage(), e);

View File

@ -30,7 +30,7 @@ public class TaxonomyJobPersistence extends AbstractPersistence<TaxonomyJob>{
em.getTransaction().begin();
removed = em.createQuery("DELETE FROM TaxonomyJob").executeUpdate();
em.getTransaction().commit();
logger.trace("DELETE FROM TaxonomyJob " + removed +" items");
logger.trace("DELETED FROM TaxonomyJob " + removed +" items");
} catch (Exception e) {
logger.error("Error in removeAll: " + e.getMessage(), e);

View File

@ -31,7 +31,7 @@ public class TaxonomyRowPersistence extends AbstractPersistence<TaxonomyRow>{
em.getTransaction().begin();
removed = em.createQuery("DELETE FROM TaxonomyRow").executeUpdate();
em.getTransaction().commit();
logger.trace("DELETE FROM TaxonomyRow " + removed +" items");
logger.trace("DELETED FROM TaxonomyRow " + removed +" items");
} catch (Exception e) {
logger.error("Error in TaxonomyRow - removeAll: " + e.getMessage(), e);

View File

@ -23,10 +23,12 @@ import org.gcube.portlets.user.speciesdiscovery.shared.ResultRow;
import org.gcube.portlets.user.speciesdiscovery.shared.Taxon;
import org.gcube.portlets.user.speciesdiscovery.shared.util.NormalizeString;
/**
* The Class ResultItemConverter.
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
*
* Jan 11, 2017
*/
public class ResultItemConverter implements Converter<ResultItem, ResultRow> {
@ -34,10 +36,18 @@ public class ResultItemConverter implements Converter<ResultItem, ResultRow> {
protected int id = 0;
protected ASLSession session;
/**
* Instantiates a new result item converter.
*
* @param session the session
*/
public ResultItemConverter(ASLSession session) {
this.session = session;
}
/* (non-Javadoc)
* @see org.gcube.portlets.user.speciesdiscovery.server.stream.Converter#convert(java.lang.Object)
*/
@Override
public ResultRow convert(ResultItem input) throws Exception {
@ -152,35 +162,39 @@ public class ResultItemConverter implements Converter<ResultItem, ResultRow> {
}
}
//DEBUG
// System.out.println("Insert row id: "+row.getId());
row.setMatchingTaxon(convertTaxon(input));
row.setBaseTaxonValue(NormalizeString.lowerCaseUpFirstChar(getBaseTaxonValue(TaxonomySearchServiceImpl.BASETAXONOMY,input)));
// row.setMatchingCredits(StringEscapeUtils.escapeSql(input.getCredits()));
row.setMatchingCredits(input.getCredits());
// logger.trace("convert completed: " +row);
return row;
}
/**
* Gets the base taxon value.
*
* @param rank the rank
* @param taxon the taxon
* @return the base taxon value
*/
private String getBaseTaxonValue(String rank, TaxonomyInterface taxon){
while(taxon!=null){
if(taxon.getRank()!=null && taxon.getRank().equalsIgnoreCase(rank))
return taxon.getRank();
taxon = taxon.getParent();
}
return TaxonomySearchServiceImpl.TAXONOMYUNKNOWN;
}
/**
* Convert taxon.
*
* @param taxon the taxon
* @return the list
*/
protected List<Taxon> convertTaxon(TaxonomyInterface taxon)
{
List<Taxon> listTaxon = new ArrayList<Taxon>();
@ -190,8 +204,6 @@ public class ResultItemConverter implements Converter<ResultItem, ResultRow> {
Taxon tax = new Taxon(count++, taxon.getScientificName(), taxon.getCitation(), NormalizeString.lowerCaseUpFirstChar(taxon.getRank()));
listTaxon.add(tax);
taxon = taxon.getParent();
// System.out.println("Insert tax parent id: "+tax.getId());
}
return listTaxon;
}

View File

@ -79,7 +79,7 @@ public class Fetcher<T extends FetchingElement> implements Runnable, Closeable {
}
else{
countNullItems++;
logger.warn("fetch new row is null!! It is the number: "+countNullItems);
logger.warn("fetch new row is null!! Number of null value/s: "+countNullItems);
if(MAX_CONSECUTIVE_ATTEMPTS_ON_NULL==countNullItems){
logger.warn("Fetched "+MAX_CONSECUTIVE_ATTEMPTS_ON_NULL+ " null rows, MAX ATTEMPTS reached, complete fetch true and closing stream!!");
silentClose();
@ -93,7 +93,7 @@ public class Fetcher<T extends FetchingElement> implements Runnable, Closeable {
}
} catch (Exception e) {
logger.error("Error in add row " + e.getMessage());
logger.error("Error in add row " + e.getMessage(), e);
silentClose();
}

View File

@ -1,25 +1,21 @@
package org.gcube.portlets.user.speciesdiscovery.shared;
import java.io.Serializable;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import com.google.gwt.user.client.rpc.IsSerializable;
/**
* The Class DataSource.
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
*
* Jan 10, 2017
*/
@Entity
public class DataSource implements Serializable{
/**
*
*/
private static final long serialVersionUID = 3373136869904925484L;
public class DataSource implements IsSerializable{
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@ -29,13 +25,17 @@ public class DataSource implements Serializable{
private String name;
private String description;
/**
* Instantiates a new data source.
*/
public DataSource() {}
/**
* Instantiates a new data source.
*
* @param id
* @param name
* @param description
* @param id the id
* @param name the name
* @param description the description
*/
public DataSource(String id, String name, String description) {
setId(id);
@ -44,38 +44,75 @@ public class DataSource implements Serializable{
}
//Used in Data Source advanced option to create the check list
/**
* Instantiates a new data source.
*
* @param id the id
* @param name the name
*/
public DataSource(String id, String name){
setId(id);
setName(name);
}
/**
* Gets the id.
*
* @return the id
*/
public String getId() {
return id;
}
/**
* Sets the id.
*
* @param id the new id
*/
public void setId(String id) {
this.id = id;
}
/**
* Gets the name.
*
* @return the name
*/
public String getName() {
return name;
}
/**
* Sets the name.
*
* @param name the new name
*/
public void setName(String name) {
this.name = name;
}
/**
* Gets the description.
*
* @return the description
*/
public String getDescription() {
return description;
}
/**
* Sets the description.
*
* @param description the new description
*/
public void setDescription(String description) {
this.description = description;
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
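Porting note: replacing java.io.Serializable with GWT's IsSerializable marker makes the serialVersionUID field unnecessary; in exchange, GWT RPC expects a default constructor (the empty DataSource() above) and serializable field types on every bean it transfers.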

View File

@ -1,23 +1,33 @@
package org.gcube.portlets.user.speciesdiscovery.shared;
import java.io.Serializable;
import java.util.ArrayList;
import com.google.gwt.user.client.rpc.IsSerializable;
/**
* The Class DataSourceCapability.
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
*
* Jan 10, 2017
*/
public class DataSourceCapability implements Serializable{
private static final long serialVersionUID = -9083819206898794333L;
public class DataSourceCapability implements IsSerializable{
private ArrayList<SpeciesCapability> listFilters;
private SpeciesCapability capability;
/**
* Instantiates a new data source capability.
*/
public DataSourceCapability() {}
/**
* Instantiates a new data source capability.
*
* @param capability the capability
* @param listFilters the list filters
*/
public DataSourceCapability(SpeciesCapability capability, ArrayList<SpeciesCapability> listFilters) {
super();
this.capability = capability;
@ -25,18 +35,38 @@ public class DataSourceCapability implements Serializable{
}
/**
* Gets the list filters.
*
* @return the list filters
*/
public ArrayList<SpeciesCapability> getListFilters() {
return listFilters;
}
/**
* Sets the list filters.
*
* @param listFilters the new list filters
*/
public void setListFilters(ArrayList<SpeciesCapability> listFilters) {
this.listFilters = listFilters;
}
/**
* Gets the capability.
*
* @return the capability
*/
public SpeciesCapability getCapability() {
return capability;
}
/**
* Sets the capability.
*
* @param capability the new capability
*/
public void setCapability(SpeciesCapability capability) {
this.capability = capability;
}

View File

@ -1,30 +1,32 @@
package org.gcube.portlets.user.speciesdiscovery.shared;
import java.io.Serializable;
import java.util.ArrayList;
/**
* The Class DataSourceModel.
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
*
* Jan 10, 2017
*/
public class DataSourceModel extends DataSource implements Serializable{
private static final long serialVersionUID = 7399231525793036218L;
public class DataSourceModel extends DataSource {
private ArrayList<DataSourceCapability> listCapabilities;
private DataSourceRepositoryInfo dataSourceRepositoryInfo;
public DataSourceModel() {}
/**
* Instantiates a new data source model.
*/
public DataSourceModel() {}
/**
* Instantiates a new data source model.
*
* @param id
* @param name
* @param description
* @param listCapabilities
* @param dsInfo
* @param id the id
* @param name the name
* @param description the description
* @param listCapabilities the list capabilities
* @param dsInfo the ds info
*/
public DataSourceModel(String id, String name, String description, ArrayList<DataSourceCapability> listCapabilities, DataSourceRepositoryInfo dsInfo) {
super(id,name,description);
@ -32,26 +34,52 @@ public class DataSourceModel extends DataSource implements Serializable{
setDataSourceRepositoryInfo(dsInfo);
}
/**
* Sets the data source repository info.
*
* @param dsInfo the new data source repository info
*/
private void setDataSourceRepositoryInfo(DataSourceRepositoryInfo dsInfo) {
this.dataSourceRepositoryInfo = dsInfo;
}
//Used in Data Source advanced option to create the check list
/**
* Instantiates a new data source model.
*
* @param id the id
* @param name the name
*/
public DataSourceModel(String id, String name){
super(id,name);
}
/**
* Gets the list capabilities.
*
* @return the list capabilities
*/
public ArrayList<DataSourceCapability> getListCapabilities() {
return listCapabilities;
}
/**
* Sets the list capabilities.
*
* @param listCapabilities the new list capabilities
*/
public void setListCapabilities(ArrayList<DataSourceCapability> listCapabilities) {
this.listCapabilities = listCapabilities;
}
/**
* Gets the data source repository info.
*
* @return the data source repository info
*/
public DataSourceRepositoryInfo getDataSourceRepositoryInfo() {
return dataSourceRepositoryInfo;
}

View File

@ -1,21 +1,17 @@
package org.gcube.portlets.user.speciesdiscovery.shared;
import java.io.Serializable;
import java.util.Map;
import com.google.gwt.user.client.rpc.IsSerializable;
/**
* The Class DataSourceRepositoryInfo.
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
*
* Jan 10, 2017
*/
public class DataSourceRepositoryInfo implements Serializable{
/**
*
*/
private static final long serialVersionUID = -4557351371954637191L;
public class DataSourceRepositoryInfo implements IsSerializable{
private String logoUrl;
private String pageUrl;
@ -23,8 +19,19 @@ public class DataSourceRepositoryInfo implements Serializable{
private String name;
private String description;
/**
* Instantiates a new data source repository info.
*/
public DataSourceRepositoryInfo() {}
/**
* Instantiates a new data source repository info.
*
* @param logoUrl the logo url
* @param pageUrl the page url
* @param properties the properties
* @param description the description
*/
public DataSourceRepositoryInfo(String logoUrl, String pageUrl, Map<String,String> properties, String description) {
this.logoUrl = logoUrl;
this.pageUrl = pageUrl;
@ -32,33 +39,90 @@ public class DataSourceRepositoryInfo implements Serializable{
this.description = description;
}
/**
* Gets the logo url.
*
* @return the logo url
*/
public String getLogoUrl() {
return logoUrl;
}
/**
* Sets the logo url.
*
* @param logoUrl the new logo url
*/
public void setLogoUrl(String logoUrl) {
this.logoUrl = logoUrl;
}
/**
* Gets the page url.
*
* @return the page url
*/
public String getPageUrl() {
return pageUrl;
}
/**
* Sets the page url.
*
* @param pageUrl the new page url
*/
public void setPageUrl(String pageUrl) {
this.pageUrl = pageUrl;
}
/**
* Gets the properties.
*
* @return the properties
*/
public Map<String, String> getProperties() {
return properties;
}
/**
* Sets the properties.
*
* @param properties the properties
*/
public void setProperties(Map<String, String> properties) {
this.properties = properties;
}
/**
* Gets the name.
*
* @return the name
*/
public String getName() {
return name;
}
/**
* Gets the description.
*
* @return the description
*/
public String getDescription() {
return description;
}
/**
* Sets the description.
*
* @param description the new description
*/
public void setDescription(String description) {
this.description = description;
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder builder = new StringBuilder();

View File

@ -108,13 +108,9 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab
protected String scientificNameAuthorship;
protected String credits;
protected String lsid;
// protected String propertiesAsXml;
// protected ItemParameterList properties;
protected boolean existsProperties = false;
protected ResultRow() {
}

View File

@ -1,7 +1,9 @@
package org.gcube.portlets.user.speciesdiscovery.shared;
import com.google.gwt.user.client.rpc.IsSerializable;
public enum SpeciesCapability {
public enum SpeciesCapability implements IsSerializable{
//Filters
FROMDATE("FROMDATE", "Date From"),
@ -14,7 +16,6 @@ public enum SpeciesCapability {
TAXONOMYITEM("TAXONOMYITEM", "Taxon"),
OCCURRENCESPOINTS("OCCURRENCESPOINTS", "OccurrencesPoints"),
SYNONYMS("SYNONYMS", "Synonyms"),
UNFOLD("UNFOLD", "Unfold"),
NAMESMAPPING("NAMESMAPPING", "Names Mapping"),

View File

@ -7,7 +7,8 @@
<!-- Other module inherits -->
<inherits name="com.extjs.gxt.ui.GXT" />
<inherits name="org.gcube.portlets.user.gcubegisviewer.GCubeGisViewer" />
<!-- Used to show a layer via GisViewer -->
<!-- <inherits name="org.gcube.portlets.user.gcubegisviewer.GCubeGisViewer" /> -->
<inherits name="com.allen_sauer.gwt.log.gwt-log-TRACE" />
<set-property name="log_DivLogger" value="DISABLED" />

View File

@ -36,26 +36,26 @@
</servlet-mapping>
<!-- GisViewer Servlets -->
<servlet>
<servlet-name>GisViewerService</servlet-name>
<servlet-class>org.gcube.portlets.user.gcubegisviewer.server.GCubeGisViewerServletImpl</servlet-class>
</servlet>
<!-- <servlet> -->
<!-- <servlet-name>GisViewerService</servlet-name> -->
<!-- <servlet-class>org.gcube.portlets.user.gcubegisviewer.server.GCubeGisViewerServletImpl</servlet-class> -->
<!-- </servlet> -->
<servlet-mapping>
<servlet-name>GisViewerService</servlet-name>
<url-pattern>/speciesdiscovery/GisViewerService</url-pattern>
</servlet-mapping>
<!-- <servlet-mapping> -->
<!-- <servlet-name>GisViewerService</servlet-name> -->
<!-- <url-pattern>/speciesdiscovery/GisViewerService</url-pattern> -->
<!-- </servlet-mapping> -->
<!-- GISVIEWER MAP GENERATOR -->
<servlet>
<servlet-name>MapGenerator</servlet-name>
<servlet-class>org.gcube.portlets.user.gisviewer.server.MapGenerator</servlet-class>
</servlet>
<!-- <servlet> -->
<!-- <servlet-name>MapGenerator</servlet-name> -->
<!-- <servlet-class>org.gcube.portlets.user.gisviewer.server.MapGenerator</servlet-class> -->
<!-- </servlet> -->
<servlet-mapping>
<servlet-name>MapGenerator</servlet-name>
<url-pattern>/speciesdiscovery/MapGenerator</url-pattern>
</servlet-mapping>
<!-- <servlet-mapping> -->
<!-- <servlet-name>MapGenerator</servlet-name> -->
<!-- <url-pattern>/speciesdiscovery/MapGenerator</url-pattern> -->
<!-- </servlet-mapping> -->
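Note: each GisViewer servlet declaration is commented out together with its servlet-mapping; a mapping left pointing at a removed servlet name would make the webapp fail to deploy.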
<!-- Workspace Light Tree servlet -->
<!-- <servlet> -->

View File

@ -4,21 +4,26 @@
package org.gcube.portlets.user.speciesdiscovery.client;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import javax.persistence.metamodel.EntityType;
import org.apache.log4j.Logger;
import org.gcube.portlets.user.speciesdiscovery.shared.ResultRow;
import org.h2.jdbcx.JdbcDataSource;
/**
@ -35,19 +40,17 @@ public class DBTester {
public static void main(String[] a) throws Exception {
/*
Class.forName("org.h2.Driver");
Connection conn = DriverManager.getConnection("jdbc:h2:/home/francesco-mangiacrapa/Portal-Bundle2.2/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;create=true", "","");
//Class.forName("org.h2.Driver");
//Connection conn = DriverManager.getConnection("jdbc:h2:/home/francesco-mangiacrapa/Portal-Bundle-3.0.0-3.2.0/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;create=true", "","");
// add application code here
Statement stat = conn.createStatement();
ResultSet rs = stat.executeQuery("select * from TaxonomyRow");
/* Statement stat = conn.createStatement();
ResultSet rs = stat.executeQuery("select * from ResultRow");
ResultSetMetaData meta = rs.getMetaData();
int columnCount = meta.getColumnCount();
while (rs.next())
{
System.out.println("New row");
@ -60,13 +63,113 @@ public class DBTester {
}
System.out.println("\n\n");
}
conn.close();
*/
}*/
//testJdbcDataSource();
//String queryString = "SELECT MIN(tax.id) from Taxon tax";
//
// testTypedQuery(queryString, Taxon.class);
getAllEntities();
String queryString = "SELECT *" +
" FROM "+ResultRow.class.getSimpleName()+" r" +
" LEFT OUTER JOIN RESULTROW_TAXON rt";
// " INNER JOIN "+Taxon.class.getSimpleName()+" t";
queryString = "select *" +
" from RESULTROW r JOIN RESULTROW_TAXON rt on r.ID=rt.RESULTROW_ID JOIN TAXON t on t.INTERNALID=rt.MATCHINGTAXON_INTERNALID" +
" where t.RANK = 'Genus' and t.ID IN" +
" (select MIN(tax.ID) from TAXON tax)";
//////
//
// testTypedQuery(queryString, ResultRow.class);
//testQuery(queryString);
testNativeQuery(queryString, ResultRow.class);
testJdbcDataSource();
}
/**
* @param queryString
* @param class1
*/
private static void testNativeQuery(String queryString, Class<?> className) {
EntityManagerFactory emF = createEntityManagerFactory("/home/francesco-mangiacrapa/Portal-Bundle-3.0.0-3.2.0/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;");
EntityManager em = emF.createEntityManager();
Query query = em.createNativeQuery(queryString, className);
List<Object> listResult = new ArrayList<Object>();
try {
listResult = query.getResultList();
for (Object object : listResult) {
System.out.println(object.toString());
}
} catch (Exception e) {
logger.error("Error in TypedQuery: " + e.getMessage(), e);
} finally {
em.close();
}
}
public static void getAllEntities(){
EntityManagerFactory emF = createEntityManagerFactory("/home/francesco-mangiacrapa/Portal-Bundle-3.0.0-3.2.0/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;");
EntityManager em = emF.createEntityManager();
for (EntityType<?> entity : em.getMetamodel().getEntities()) {
final String className = entity.getName();
System.out.println("Trying select * from: " + className);
Query q = em.createQuery("SELECT c from " + className + " c");
q.getResultList().iterator();
System.out.println("ok: " + className);
}
}
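getAllEntities doubles as a quick mapping smoke test: it walks the JPA metamodel and runs a SELECT for every registered entity, so a broken mapping in the H2 store surfaces immediately as an exception naming the offending class.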
public static void testTypedQuery(String queryString, Class classToReturn){
EntityManagerFactory emF = createEntityManagerFactory("/home/francesco-mangiacrapa/Portal-Bundle-3.0.0-3.2.0/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;");
EntityManager em = emF.createEntityManager();
TypedQuery<Class> tQuery = em.createQuery(queryString, classToReturn);
List<Class> listResult = new ArrayList<Class>();
try {
listResult = tQuery.getResultList();
System.out.println(listResult.toString());
} catch (Exception e) {
logger.error("Error in TypedQuery: " + e.getMessage(), e);
} finally {
em.close();
}
}
public static void testQuery(String queryString){
EntityManagerFactory emF = createEntityManagerFactory("/home/francesco-mangiacrapa/Portal-Bundle-3.0.0-3.2.0/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;");
EntityManager em = emF.createEntityManager();
Query query = em.createQuery(queryString);
List<Object> listResult = new ArrayList<Object>();
try {
listResult = query.getResultList();
System.out.println(listResult.toString());
} catch (Exception e) {
logger.error("Error in TypedQuery: " + e.getMessage(), e);
} finally {
em.close();
}
}
public static void testJdbcDataSource() throws NamingException {

View File

@ -9,26 +9,31 @@ import java.util.List;
import java.util.concurrent.TimeUnit;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data.spd.client.proxies.Classification;
import org.gcube.data.spd.client.proxies.Executor;
import org.gcube.data.spd.client.proxies.Manager;
import org.gcube.data.spd.client.proxies.Occurrence;
import org.gcube.data.spd.client.proxies.ClassificationClient;
import org.gcube.data.spd.client.proxies.ExecutorClient;
import org.gcube.data.spd.client.proxies.ManagerClient;
import org.gcube.data.spd.client.proxies.OccurrenceClient;
import org.gcube.data.spd.model.PluginDescription;
/**
* @author "Federico De Faveri defaveri@isti.cnr.it"
* The Class ListPlugins.
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* Jan 10, 2017
*/
public class ListPlugins {
protected static Manager call;
protected static Occurrence occurrencesCall;
protected static Classification classificationCall;
protected static Executor executorCall;
protected static ManagerClient call;
protected static OccurrenceClient occurrencesCall;
protected static ClassificationClient classificationCall;
protected static ExecutorClient executorCall;
/**
* @param args
* The main method.
*
* @param args the arguments
*/
public static void main(String[] args) {
@ -39,7 +44,6 @@ public class ListPlugins {
// this.occurrencesCall = occurrences().at( URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build();
// this.classificationCall = classification().at( URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build();
call = manager().withTimeout(3, TimeUnit.MINUTES).build();
// executorCall = executor().withTimeout(3, TimeUnit.MINUTES).build();
// occurrencesCall = occurrence().withTimeout(3, TimeUnit.MINUTES).build();

View File

@ -10,12 +10,12 @@ import java.util.concurrent.TimeUnit;
import org.gcube.application.framework.core.session.ASLSession;
import org.gcube.application.framework.core.session.SessionManager;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data.spd.client.proxies.Manager;
import org.gcube.data.spd.client.proxies.ManagerClient;
import org.gcube.data.spd.model.exceptions.InvalidQueryException;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.model.products.ResultItem;
import org.gcube.data.spd.stubs.exceptions.UnsupportedCapabilityException;
import org.gcube.data.spd.stubs.exceptions.UnsupportedPluginException;
import org.gcube.data.spd.model.service.exceptions.UnsupportedCapabilityException;
import org.gcube.data.spd.model.service.exceptions.UnsupportedPluginException;
import org.gcube.data.streams.Stream;
import org.gcube.portlets.user.speciesdiscovery.server.service.ResultItemConverter;
import org.gcube.portlets.user.speciesdiscovery.server.service.StreamIterator;
@ -24,9 +24,12 @@ import org.gcube.portlets.user.speciesdiscovery.server.stream.CloseableIterator;
import org.gcube.portlets.user.speciesdiscovery.server.stream.ConversionIterator;
import org.gcube.portlets.user.speciesdiscovery.shared.ResultRow;
/**
* @author "Federico De Faveri defaveri@isti.cnr.it"
* The Class ServiceQuery.
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* Jan 10, 2017
*/
public class ServiceQuery {
@ -39,29 +42,27 @@ public class ServiceQuery {
private static String username = "test.user";
/**
* The main method.
*
* @param args the arguments
* @throws InvalidQueryException the invalid query exception
* @throws UnsupportedPluginException the unsupported plugin exception
* @throws UnsupportedCapabilityException the unsupported capability exception
*/
public static void main(String[] args) throws InvalidQueryException, UnsupportedPluginException, UnsupportedCapabilityException {
String scope = "/gcube/devsec";
// String scope = "/d4science.research-infrastructures.eu/gCubeApps/BiodiversityResearchEnvironment"; //Production
ScopeProvider.instance.set(scope);
ASLSession session = SessionManager.getInstance().getASLSession("123", username);
Manager call = manager().withTimeout(3, TimeUnit.MINUTES).build();
ManagerClient call = manager().withTimeout(3, TimeUnit.MINUTES).build();
// Manager call = manager().at(URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build();
//Stream<ResultElement> results = call.search("SEARCH BY CN 'shark' RESOLVE WITH OBIS EXPAND WITH ITIS RETURN Product");
// Stream<ResultElement> results = call.search("SEARCH BY CN 'shark' RESOLVE WITH OBIS EXPAND WITH ITIS WHERE coordinate <= 15.12, 16.12 RETURN Product");
// Stream<ResultElement> results = call.search("SEARCH BY SN 'sarda sarda' RESOLVE WITH OBIS EXPAND WITH ITIS WHERE coordinate <= 15.12, 16.12 RETURN Product");
System.out.println("start query...");
Stream<ResultElement> results = call.search("SEARCH BY SN 'Latimeria chalumnae' IN GBIF RETURN Product HAVING xpath(\"//product[type='Occurrence' and count>0]\")");
// Stream<ResultElement> results = call.search("SEARCH BY SN 'Palinurus elephas' IN WoRMS RETURN Taxon");
StreamIterator<ResultElement> input = new StreamIterator<ResultElement>(results);
@ -72,7 +73,6 @@ public class ServiceQuery {
System.out.println(++i +") el: "+elem.getId() +" type: "+elem.getType().name());
}
System.out.println("Results from conversion...");
ConversionIterator<ResultElement, ResultItem> caster = buildCaster(input);
@ -82,16 +82,21 @@ public class ServiceQuery {
while (inputConverter.hasNext()) {
ResultRow row = inputConverter.next();
System.out.println(++i +") row: "+row);
}
results.close();
System.out.println("DONE");
}
/**
* Builds the caster.
*
* @param <I> the generic type
* @param <O> the generic type
* @param input the input
* @return the conversion iterator
*/
protected static <I,O> ConversionIterator<I, O> buildCaster(CloseableIterator<I> input)
{
CastConverter<I, O> elementConverter = new CastConverter<I, O>();
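As exercised in main above, callers bind the generic caster to concrete types, e.g. ConversionIterator<ResultElement, ResultItem> caster = buildCaster(input); the unchecked cast inside CastConverter is presumably safe here only because the query constrains the stream to a single result type.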