Porting to spd-client-library 4.0

Fixed bug #6156. Updated pom version to 3.9.0.

git-svn-id: http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/portlets/user/speciesdiscovery@141521 82a268e6-3cf1-43bd-a215-b396298e98cf

parent 630ddcb9b2
commit 76f12eebdd
@@ -1,7 +1,7 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <classpath>
 <classpathentry kind="lib" path="/home/francesco-mangiacrapa/libraries/asm-5.0.3/lib/asm-5.0.3.jar"/>
-<classpathentry kind="src" output="target/species-discovery-3.8.0-SNAPSHOT/WEB-INF/classes" path="src/main/java">
+<classpathentry kind="src" output="target/species-discovery-3.8.2-SNAPSHOT/WEB-INF/classes" path="src/main/java">
 <attributes>
 <attribute name="optional" value="true"/>
 <attribute name="maven.pomderived" value="true"/>
@@ -31,5 +31,5 @@
 <attribute name="org.eclipse.jst.component.dependency" value="/WEB-INF/lib"/>
 </attributes>
 </classpathentry>
-<classpathentry kind="output" path="target/species-discovery-3.8.0-SNAPSHOT/WEB-INF/classes"/>
+<classpathentry kind="output" path="target/species-discovery-3.8.2-SNAPSHOT/WEB-INF/classes"/>
 </classpath>

@@ -4,4 +4,9 @@
 <attribute name="provider-id" value="jpa-no-op-library-provider"/>
 </node>
 </facet>
+<facet id="jst.jaxrs">
+<node name="libprov">
+<attribute name="provider-id" value="jaxrs-no-op-library-provider"/>
+</node>
+</facet>
 </root>

@@ -6,4 +6,5 @@
 <installed facet="jpt.jpa" version="2.0"/>
 <installed facet="liferay.portlet" version="6.0"/>
 <installed facet="jst.web" version="3.0"/>
+<installed facet="jst.jaxrs" version="2.0"/>
 </faceted-project>

@@ -1,5 +1,10 @@
 <ReleaseNotes>
-<Changeset component="org.gcube.portlets-user.species-discovery.3-8-1" date="15-09-2016">
+<Changeset component="org.gcube.portlets-user.species-discovery.3-9-0"
+date="11-01-2016">
+<Change>[Feature #6313] SPD portlet upgrade: porting to spd-client-library 4.0.0 </Change>
+</Changeset>
+<Changeset component="org.gcube.portlets-user.species-discovery.3-8-1"
+date="15-09-2016">
 <Change>Removed Gis -viewer dependency</Change>
 </Changeset>
 <Changeset component="org.gcube.portlets-user.species-discovery.3-8-0"
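
Note (added for orientation, not part of the commit): the substance of this port, visible in the pom.xml and job-utility diffs below, is the switch from the spd-client-library 3.x status type org.gcube.data.spd.stubs.types.Status to the 4.0 types org.gcube.data.spd.model.service.types.CompleteJobStatus and JobStatus. The sketch below condenses how the converted code reads the new type; it uses only calls that appear in the diffs further down and is illustrative only.

    import org.gcube.data.spd.model.service.types.CompleteJobStatus;
    import org.gcube.data.spd.model.service.types.JobStatus;

    // Minimal sketch of the 4.0-style status handling used by OccurrenceJobUtil/TaxonomyJobUtil below.
    // Fragment only: DownloadState and getDownloadState(...) are the portlet's own types/methods.
    void readStatus(CompleteJobStatus statusResponse) {
        JobStatus status = statusResponse.getStatus();               // enum instead of a raw String
        DownloadState downloadState = getDownloadState(status);      // mapped via a switch, see the diffs below
        int completedEntries = 0;
        if (statusResponse.getCompletedEntries() > 0)                // progress counter exposed by the service
            completedEntries = statusResponse.getCompletedEntries();
        long endTime = 0;
        if (statusResponse.getEndDate() != null)                     // start/end dates expose getTimeInMillis()
            endTime = statusResponse.getEndDate().getTimeInMillis();
    }
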
pom.xml

@@ -19,7 +19,7 @@
 <groupId>org.gcube.portlets.user</groupId>
 <artifactId>species-discovery</artifactId>
 <packaging>war</packaging>
-<version>3.8.1-SNAPSHOT</version>
+<version>3.9.0-SNAPSHOT</version>
 <name>gCube Species Discovery</name>
 <description>
 gCube Species Discovery Portlet lets the users discovery species information from the Species Service.
@@ -31,7 +31,7 @@
 </scm>
 <properties>
 <!-- Convenience property to set the GWT version -->
-<gwtVersion>2.6.1</gwtVersion>
+<gwtVersion>2.7.0</gwtVersion>
 <distroDirectory>distro</distroDirectory>
 <!-- GWT needs at least java 1.6 -->
 <maven.compiler.source>1.7</maven.compiler.source>
@@ -82,7 +82,7 @@
 <dependency>
 <groupId>org.gcube.data.spd</groupId>
 <artifactId>spd-client-library</artifactId>
-<version>[3.0.0-SNAPSHOT, 4.0.0-SNAPSHOT)</version>
+<version>[4.0.0-SNAPSHOT, 5.0.0-SNAPSHOT)</version>
 <scope>compile</scope>
 </dependency>
 
@@ -93,6 +93,13 @@
 <scope>compile</scope>
 </dependency>
 
+<!-- USED TO SHOW A LAYER VIA GISVIEWER -->
+<!-- <dependency> -->
+<!-- <groupId>org.gcube.portlets.user</groupId> -->
+<!-- <artifactId>gcube-gis-viewer</artifactId> -->
+<!-- <version>[2.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version> -->
+<!-- </dependency> -->
+
 <dependency>
 <groupId>org.gcube.common</groupId>
 <artifactId>csv4j</artifactId>
@@ -106,8 +113,6 @@
 <scope>provided</scope>
 </dependency>
-
-
 
 <!-- Eclipselink dependencies -->
 <dependency>
 <groupId>org.eclipse.persistence</groupId>

@@ -1,7 +1,7 @@
 package org.gcube.portlets.user.speciesdiscovery.client;
 
-import org.gcube.portlets.user.speciesdiscovery.client.rpc.GISInfoServiceAsync;
 import org.gcube.portlets.user.speciesdiscovery.client.rpc.GISInfoService;
+import org.gcube.portlets.user.speciesdiscovery.client.rpc.GISInfoServiceAsync;
 import org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService;
 import org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchServiceAsync;
 

@@ -1,5 +1,10 @@
 package org.gcube.portlets.user.speciesdiscovery.server;
 
+import it.geosolutions.geonetwork.util.GNSearchRequest;
+import it.geosolutions.geonetwork.util.GNSearchRequest.Config;
+import it.geosolutions.geonetwork.util.GNSearchRequest.Param;
+import it.geosolutions.geonetwork.util.GNSearchResponse;
+
 import java.util.HashMap;
 import java.util.Map;
 
@@ -16,20 +21,15 @@ import org.gcube.spatial.data.geonetwork.LoginLevel;
 
 import com.google.gwt.user.server.rpc.RemoteServiceServlet;
 
-import it.geosolutions.geonetwork.util.GNSearchRequest;
-import it.geosolutions.geonetwork.util.GNSearchResponse;
-import it.geosolutions.geonetwork.util.GNSearchRequest.Config;
-import it.geosolutions.geonetwork.util.GNSearchRequest.Param;
-
 public class GisInfoServiceImpl extends RemoteServiceServlet implements GISInfoService{
 
 /**
 *
 */
 private static final long serialVersionUID = -1137730151475571288L;
 
 protected static Logger logger = Logger.getLogger(GisInfoServiceImpl.class);
 
 @Override
 public String getGisLinkByLayerName(String layername) throws Exception {
 try{
@@ -46,19 +46,17 @@ public class GisInfoServiceImpl extends RemoteServiceServlet implements GISInfoS
 }
 }
 
 private static String getPublicLink(String uuid) throws UriResolverMapException, IllegalArgumentException{
 UriResolverManager resolver = new UriResolverManager("GIS");
 
 Map<String, String> params = new HashMap<String, String>();
 params.put("gis-UUID", uuid);
 params.put("scope", ScopeProvider.instance.get());
 return resolver.getLink(params, true);
 }
 
 private static String getUUIDbyGSId(String gsID) throws Exception{
 GeoNetworkReader reader=GeoNetwork.get();
 reader.login(LoginLevel.ADMIN);
 
@@ -67,8 +65,8 @@ public class GisInfoServiceImpl extends RemoteServiceServlet implements GISInfoS
 GNSearchRequest req=new GNSearchRequest();
 req.addParam(Param.any, gsID);
 req.addConfig(Config.similarity, "1");
 GNSearchResponse resp=reader.query(req);
 return resp.getMetadata(0).getUUID();
 }
 
 }


@@ -27,7 +27,6 @@ import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.TaxonomyR
 import org.gcube.portlets.user.speciesdiscovery.server.service.SpeciesService;
 import org.gcube.portlets.user.speciesdiscovery.server.service.TaxonomyItemConverter;
 import org.gcube.portlets.user.speciesdiscovery.server.stream.CloseableIterator;
 import org.gcube.portlets.user.speciesdiscovery.server.stream.StreamExtend;
 import org.gcube.portlets.user.speciesdiscovery.shared.CommonName;
 import org.gcube.portlets.user.speciesdiscovery.shared.ItemParameter;
 import org.gcube.portlets.user.speciesdiscovery.shared.SearchServiceException;
@@ -35,7 +34,7 @@ import org.gcube.portlets.user.speciesdiscovery.shared.TaxonomyRow;
 
 
 /**
 *
 * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
 * @Sep 6, 2013
 *
@@ -43,37 +42,37 @@ import org.gcube.portlets.user.speciesdiscovery.shared.TaxonomyRow;
 public class TaxonomyRowTable extends HttpServlet {
 
 /**
 *
 */
 protected static final String UTF_8 = "UTF-8";
 
 /**
 *
 */
 private static final long serialVersionUID = 9157876104914505028L;
 
 /**
 *
 */
 public static final String TEXT_HTML = "text/html; charset=UTF-8";
 
 public static final String DOCTYPE = "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0 Transitional//EN\">";
 public static final String HTML = "<HTML>";
 public static final String HTMLCLOSE = "</HTML>";
 public static final String HEAD = "<HEAD>";
 public static final String HEADCLOSE = "</HEAD>";
 public static final String TITLE = "<TITLE>";
 public static final String TITLECLOSE = "</TITLE>";
 public static final String BODY = "<BODY>";
 public static final String BODYCLOSE = "</BODY>";
 
 public static String headWithTitle(String title) {
 // return (DOCTYPE + "\n" + HTML+"\n<link type=\"text/css\" rel=\"stylesheet\" href=\"SpeciesDiscovery.css\">" + HEAD+TITLE + title + TITLECLOSE+HEADCLOSE+"\n");
-return (DOCTYPE + "\n" + HTML+ "\n"+HEAD + "\n"+TITLE + title + TITLECLOSE+"\n"+HEADCLOSE+"\n");
+return DOCTYPE + "\n" + HTML+ "\n"+HEAD + "\n"+TITLE + title + TITLECLOSE+"\n"+HEADCLOSE+"\n";
 }
 
 protected Logger logger = Logger.getLogger(TaxonomyRowTable.class);
 
@ -89,96 +88,92 @@ public class TaxonomyRowTable extends HttpServlet {
|
|||
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
|
||||
retrieveTaxonomyRowAsHtmlTable(req, resp);
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
|
||||
retrieveTaxonomyRowAsHtmlTable(req, resp);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @param req
|
||||
* @param resp
|
||||
* @throws IOException
|
||||
* @throws IOException
|
||||
*/
|
||||
private void retrieveTaxonomyRowAsHtmlTable(HttpServletRequest req, HttpServletResponse resp) {
|
||||
|
||||
|
||||
String taxonomyServiceRowID = "";
|
||||
// resp.setCharacterEncoding(UTF_8);
|
||||
resp.setContentType(TEXT_HTML);
|
||||
resp.setContentType(TEXT_HTML);
|
||||
|
||||
TaxonomyRow row = null;
|
||||
PrintWriter out = null;
|
||||
|
||||
|
||||
try {
|
||||
out = new PrintWriter(new OutputStreamWriter(resp.getOutputStream(), UTF_8), true);
|
||||
out.println(headWithTitle(""));
|
||||
out.println(BODY);
|
||||
// get parameters
|
||||
taxonomyServiceRowID = req.getParameter("oid");
|
||||
|
||||
|
||||
ASLSession aslSession = getASLSession(req);
|
||||
|
||||
|
||||
//IS VALID RR ID?
|
||||
if(taxonomyServiceRowID==null || taxonomyServiceRowID.isEmpty()){
|
||||
out.println(error("Parameter oid not found"));
|
||||
}else{
|
||||
}else{
|
||||
|
||||
logger.trace("doGet found oid "+taxonomyServiceRowID);
|
||||
|
||||
|
||||
try {
|
||||
|
||||
TaxonomyRowPersistence persistence = SessionUtil.getCurrentEJBTaxonomyItem(aslSession);
|
||||
|
||||
|
||||
if(persistence==null){
|
||||
|
||||
|
||||
logger.trace("TaxonomyRowPersistence not found in session, creating it");
|
||||
persistence = new TaxonomyRowPersistence(DaoSession.getEntityManagerFactory(aslSession));
|
||||
}
|
||||
|
||||
|
||||
CriteriaBuilder queryBuilder = persistence.getCriteriaBuilder();
|
||||
CriteriaQuery<Object> cq = queryBuilder.createQuery();
|
||||
Predicate pr1 = queryBuilder.equal(persistence.rootFrom(cq).get(TaxonomyRow.SERVICE_ID_FIELD), taxonomyServiceRowID);
|
||||
cq.where(pr1);
|
||||
|
||||
|
||||
Iterator<TaxonomyRow> iterator = persistence.executeCriteriaQuery(cq).iterator();
|
||||
|
||||
while(iterator.hasNext()){
|
||||
row = iterator.next();
|
||||
break;
|
||||
}
|
||||
|
||||
|
||||
if(row==null){
|
||||
|
||||
|
||||
logger.trace("Taxonomy Row with id "+taxonomyServiceRowID+" not found in database");
|
||||
|
||||
|
||||
row = findingTaxonomyInCaches(taxonomyServiceRowID, getASLSession(req));
|
||||
|
||||
|
||||
if(row!=null){
|
||||
logger.trace("Taxonomy Row with id "+taxonomyServiceRowID+" found into hash map caches");
|
||||
//ROW was found into database or by service
|
||||
logger.trace("converting taxonomy row to html table");
|
||||
String table = getHTMLTableForTaxonomy(row);
|
||||
|
||||
|
||||
logger.trace("table for ResultRowPersistence is empty? "+table.isEmpty());
|
||||
out.println(table);
|
||||
out.close();
|
||||
return;
|
||||
}else
|
||||
logger.trace("Taxonomy Row with id "+taxonomyServiceRowID+" doesn't found into hash map caches");
|
||||
|
||||
|
||||
|
||||
|
||||
logger.trace("Tentative recovering taxonomy with id "+taxonomyServiceRowID+" from service");
|
||||
|
||||
SpeciesService service = getSpeciesService(req);
|
||||
|
||||
StreamExtend<String> streamIds = new StreamExtend<String>(Arrays.asList(taxonomyServiceRowID).iterator());
|
||||
|
||||
CloseableIterator<TaxonomyItem> streamIterator = service.retrieveTaxonomyById(streamIds);
|
||||
|
||||
//StreamExtend<String> streamIds = new StreamExtend<String>(Arrays.asList(taxonomyServiceRowID).iterator());
|
||||
CloseableIterator<TaxonomyItem> streamIterator = service.retrieveTaxonomyById(Arrays.asList(taxonomyServiceRowID));
|
||||
TaxonomyItemConverter taxonomyItemConverter = new TaxonomyItemConverter(aslSession);
|
||||
|
||||
|
||||
// int i = 1;
|
||||
while (streamIterator.hasNext()) {
|
||||
TaxonomyItem tax = streamIterator.next();
|
||||
|
@ -189,7 +184,7 @@ public class TaxonomyRowTable extends HttpServlet {
|
|||
}
|
||||
|
||||
streamIterator.close();
|
||||
|
||||
|
||||
if(row==null){
|
||||
out.println(error("Sorry, taxonomy with "+taxonomyServiceRowID+" doesn't found in service"));
|
||||
out.close();
|
||||
|
@ -203,9 +198,9 @@ public class TaxonomyRowTable extends HttpServlet {
|
|||
|
||||
logger.trace("table for ResultRowPersistence is empty? "+table.isEmpty());
|
||||
out.println(table);
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
|
||||
|
||||
logger.error("Error in TaxonomyRowTable servlet ",e);
|
||||
throw new Exception("Error in ResultRowTable servlet ", e);
|
||||
}
|
||||
|
@ -213,10 +208,10 @@ public class TaxonomyRowTable extends HttpServlet {
|
|||
out.println(BODYCLOSE);
|
||||
out.println(HTMLCLOSE);
|
||||
out.close(); //CLOSE STREAM
|
||||
|
||||
|
||||
}catch (Exception e) {
|
||||
String error = "Sorry an error occurred when creating the table for taxonomy row with id: "+taxonomyServiceRowID;
|
||||
|
||||
|
||||
if(out==null){
|
||||
try{
|
||||
out = resp.getWriter();
|
||||
|
@ -229,56 +224,52 @@ public class TaxonomyRowTable extends HttpServlet {
|
|||
out.println(HTMLCLOSE);
|
||||
out.close(); //CLOSE STREAM
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
protected TaxonomyRow findingTaxonomyInCaches(String taxonomyServiceRowID, ASLSession session){
|
||||
|
||||
|
||||
HashMap<String, TaxonomyRow> hashChildrenTaxa = SessionUtil.getHashMapChildrenTaxonomyCache(session);
|
||||
|
||||
|
||||
|
||||
|
||||
if(hashChildrenTaxa!=null){
|
||||
logger.trace("Finding Taxonomy Row with id "+taxonomyServiceRowID+" into hash map children");
|
||||
TaxonomyRow row = hashChildrenTaxa.get(taxonomyServiceRowID);
|
||||
|
||||
|
||||
if(row!=null){
|
||||
logger.trace("Taxonomy Row with id "+taxonomyServiceRowID+" found into hash map children");
|
||||
return row;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
hashChildrenTaxa = SessionUtil.getHashMapSynonymsTaxonomyCache(session);
|
||||
|
||||
|
||||
if(hashChildrenTaxa!=null){
|
||||
logger.trace("Finding Taxonomy Row with id "+taxonomyServiceRowID+" into hash map synonyms");
|
||||
TaxonomyRow row = hashChildrenTaxa.get(taxonomyServiceRowID);
|
||||
|
||||
|
||||
if(row!=null){
|
||||
logger.trace("Taxonomy Row with id "+taxonomyServiceRowID+" found into hash map synonyms");
|
||||
return row;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
hashChildrenTaxa = SessionUtil.getHashMapTaxonomyByIdsCache(session);
|
||||
|
||||
|
||||
if(hashChildrenTaxa!=null){
|
||||
logger.trace("Finding Taxonomy Row with id "+taxonomyServiceRowID+" into hash map ByIds");
|
||||
TaxonomyRow row = hashChildrenTaxa.get(taxonomyServiceRowID);
|
||||
|
||||
|
||||
if(row!=null){
|
||||
logger.trace("Taxonomy Row with id "+taxonomyServiceRowID+" found into hash map ByIds");
|
||||
return row;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
protected SpeciesService getSpeciesService(HttpServletRequest req) throws SearchServiceException
|
||||
{
|
||||
|
@ -292,83 +283,83 @@ public class TaxonomyRowTable extends HttpServlet {
|
|||
throw new SearchServiceException("contacting the species service.");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public String error(String message){
|
||||
String errorPage = "";
|
||||
errorPage +=("<p>Error: "+message+"</p>");
|
||||
errorPage +="<p>Error: "+message+"</p>";
|
||||
return errorPage;
|
||||
}
|
||||
|
||||
|
||||
|
||||
public String getHTMLTableForTaxonomy(TaxonomyRow row){
|
||||
|
||||
|
||||
//Init values
|
||||
String dataProviderName = "";
|
||||
String dataSetCitation= "";
|
||||
String matchingAccordionTo= "";
|
||||
String rank= "";
|
||||
// String matchingCredits= "";
|
||||
|
||||
|
||||
String statusName = "";
|
||||
String dateModified = "";
|
||||
String statusRemark = "";
|
||||
|
||||
|
||||
String scientificNameAuthorship = "";
|
||||
String lsid = "";
|
||||
String credits = "";
|
||||
|
||||
|
||||
String propertiesHtml = "";
|
||||
|
||||
if(row.getDataProviderName()!=null) dataProviderName = row.getDataProviderName();
|
||||
if(row.getStatusName()!=null) statusName = row.getStatusName();
|
||||
if(row.getDateModified()!=null) dateModified = row.getDateModified();
|
||||
|
||||
|
||||
if(row.getDataSetCitation()!=null) dataSetCitation = row.getDataSetCitation();
|
||||
if(row.getRank()!=null) rank = row.getRank();
|
||||
if(row.getAccordingTo()!=null) matchingAccordionTo = row.getAccordingTo();
|
||||
|
||||
|
||||
if(row.getStatusRemarks()!=null) statusRemark = row.getStatusRemarks();
|
||||
|
||||
|
||||
if(row.getScientificNameAuthorship()!=null) scientificNameAuthorship = row.getScientificNameAuthorship();
|
||||
|
||||
|
||||
if(row.getLsid()!=null) lsid = row.getLsid();
|
||||
|
||||
|
||||
if(row.getCredits()!=null) credits = row.getCredits();
|
||||
|
||||
|
||||
|
||||
|
||||
//BUILD TABLES PROPERTIES
|
||||
if(row.getProperties()!=null){
|
||||
|
||||
|
||||
List<ItemParameter> hashProperties = row.getProperties();
|
||||
Collections.sort(hashProperties, ItemParameter.COMPARATOR);
|
||||
|
||||
|
||||
propertiesHtml+="<table class=\"parameters\">";
|
||||
|
||||
|
||||
for (ItemParameter itemParameter : hashProperties) {
|
||||
|
||||
|
||||
propertiesHtml+=
|
||||
"<tr>" +
|
||||
" <td class=\"title\">"+itemParameter.getKey()+"</td>" +
|
||||
" <td>"+itemParameter.getValue()+"</td>" +
|
||||
"</tr>";
|
||||
}
|
||||
|
||||
|
||||
propertiesHtml+="</table>";
|
||||
}
|
||||
|
||||
|
||||
|
||||
//Create list common name
|
||||
String commonNames = "";
|
||||
|
||||
|
||||
if(row.getCommonNames()!=null){
|
||||
for (CommonName comName : row.getCommonNames()) {
|
||||
commonNames+= "<b>"+comName.getName()+"</b>" +" ("+comName.getLanguage()+") - ";
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
String table = "<table class=\"imagetable\">";
|
||||
|
||||
|
||||
// if(isNewTab)
|
||||
table+=
|
||||
"<tr>" +
|
||||
|
@ -385,8 +376,8 @@ public class TaxonomyRowTable extends HttpServlet {
|
|||
// " <td class=\"title\">"+TaxonomyGridField.STATUS_REMARKS.getName()+"</td>" +
|
||||
// " <td>"+statusRemark+"</td>" +
|
||||
// "</tr>" +
|
||||
|
||||
|
||||
|
||||
|
||||
table +="<tr>" +
|
||||
" <td class=\"title\">"+TaxonomyGridField.DATASOURCE.getName()+"</td>" +
|
||||
" <td>"+dataProviderName+"</td>" +
|
||||
|
@ -415,19 +406,19 @@ public class TaxonomyRowTable extends HttpServlet {
|
|||
" <td class=\"title\">"+TaxonomyGridField.CREDITS.getName()+"</td>" +
|
||||
" <td>"+credits+"</td>" +
|
||||
"</tr>" +
|
||||
|
||||
|
||||
"<tr>" +
|
||||
" <td class=\"title\">"+TaxonomyGridField.PROPERTIES.getName()+"</td>" +
|
||||
" <td>"+propertiesHtml+"</td>" +
|
||||
"</tr>" +
|
||||
|
||||
"</table>";
|
||||
|
||||
|
||||
//DEBUG
|
||||
// System.out.println("Table: "+table);
|
||||
|
||||
|
||||
return table;
|
||||
|
||||
|
||||
}
|
||||
|
||||
}
|
(File diff suppressed because it is too large.)

@@ -15,7 +15,8 @@ import javax.persistence.criteria.Predicate;
 
 import org.apache.log4j.Logger;
 import org.gcube.application.framework.core.session.ASLSession;
-import org.gcube.data.spd.stubs.types.Status;
+import org.gcube.data.spd.model.service.types.CompleteJobStatus;
+import org.gcube.data.spd.model.service.types.JobStatus;
 import org.gcube.portlets.user.speciesdiscovery.server.persistence.DaoSession;
 import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.OccurrenceJobPersistence;
 import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.ResultRowPersistence;
@@ -33,6 +34,13 @@ import org.gcube.portlets.user.speciesdiscovery.shared.OccurrencesSaveEnum;
 import org.gcube.portlets.user.speciesdiscovery.shared.ResultRow;
 import org.gcube.portlets.user.speciesdiscovery.shared.SaveFileFormat;
 
+
+/**
+* The Class OccurrenceJobUtil.
+*
+* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
+* Jan 10, 2017
+*/
 public class OccurrenceJobUtil {
 
 //STATE RETURNED BY STATUS RESPONSE
 
@ -40,24 +48,32 @@ public class OccurrenceJobUtil {
|
|||
public static final String FAILED = "FAILED";
|
||||
public static final String RUNNING = "RUNNING";
|
||||
public static final String PENDING = "PENDING";
|
||||
|
||||
|
||||
private static final String ALIASKEY = "key";
|
||||
|
||||
protected static Logger logger = Logger.getLogger(OccurrenceJobUtil.class);
|
||||
|
||||
public static JobOccurrencesModel convertJob(OccurrencesJob job, Status statusResponse, OccurrenceJobPersistence occurrencesJobDao) {
|
||||
//TODO SET END TIME
|
||||
/**
|
||||
* Convert job.
|
||||
*
|
||||
* @param job the job
|
||||
* @param statusResponse the status response
|
||||
* @param occurrencesJobDao the occurrences job dao
|
||||
* @return the job occurrences model
|
||||
*/
|
||||
public static JobOccurrencesModel convertJob(OccurrencesJob job, CompleteJobStatus statusResponse, OccurrenceJobPersistence occurrencesJobDao) {
|
||||
//TODO SET END TIME
|
||||
JobOccurrencesModel jobOccurrenceModel;
|
||||
DownloadState downloadState = null;
|
||||
long endTime = 0;
|
||||
|
||||
String status = statusResponse.getStatus();
|
||||
JobStatus status = statusResponse.getStatus();
|
||||
downloadState = getDownloadState(status);
|
||||
logger.trace("jobId: "+job.getId() +" download state: " + downloadState);
|
||||
|
||||
|
||||
//FOR DEBUG
|
||||
// System.out.println("jobId: "+job.getId() +" download state: " + downloadState);
|
||||
|
||||
|
||||
if(downloadState==null) //Case with exception
|
||||
return null;
|
||||
|
||||
|
@ -65,13 +81,13 @@ public class OccurrenceJobUtil {
|
|||
long submitTime = job.getSubmitTime();
|
||||
Date submit = DateUtil.millisecondsToDate(submitTime);
|
||||
// jobSpeciesModel.setStartTime(DateUtil.dateToDateFormatString(start));
|
||||
|
||||
|
||||
int completedEntry = 0;
|
||||
if(statusResponse.getCompletedEntries()>0)
|
||||
completedEntry = statusResponse.getCompletedEntries();
|
||||
|
||||
|
||||
boolean changeStatus = false;
|
||||
|
||||
|
||||
//if status is completed and job was saved, update status as saved
|
||||
if(downloadState.equals(DownloadState.COMPLETED)){
|
||||
if(job.getState().compareTo(DownloadState.SAVED.toString())==0){
|
||||
|
@ -79,18 +95,18 @@ public class OccurrenceJobUtil {
|
|||
changeStatus = true;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
jobOccurrenceModel = new JobOccurrencesModel(job.getId(), job.getName(), job.getDescription(), downloadState, job.getScientificName(), job.getDataSources(), submit, null,completedEntry, job.getExpectedOccurrence());
|
||||
|
||||
try{
|
||||
|
||||
boolean changeEndTime = false;
|
||||
|
||||
|
||||
//UPDATE END TIME
|
||||
if(downloadState.equals(DownloadState.FAILED) || downloadState.equals(DownloadState.COMPLETED)){
|
||||
|
||||
|
||||
// if(job.getEndTime()==0){ //UPDATE end time first time only
|
||||
//
|
||||
//
|
||||
// logger.trace("UPDATE end time first time only - " + downloadState);
|
||||
// endTime = Calendar.getInstance().getTimeInMillis();
|
||||
// job.setEndTime(endTime);
|
||||
|
@ -99,22 +115,22 @@ public class OccurrenceJobUtil {
|
|||
// System.out.println("job "+job);
|
||||
// System.out.println("statusResponse.getEndDate() "+statusResponse.getEndDate());
|
||||
// System.out.println("job.getEndTime() "+job.getEndTime());
|
||||
|
||||
|
||||
if(statusResponse.getEndDate()!=null && job.getEndTime()==0){ //UPDATE end time first time only
|
||||
|
||||
|
||||
logger.trace("UPDATE end time first time only - " + downloadState);
|
||||
// endTime = Calendar.getInstance().getTimeInMillis();
|
||||
endTime = statusResponse.getEndDate().getTimeInMillis();
|
||||
job.setEndTime(endTime);
|
||||
changeEndTime = true;
|
||||
// speciesJobDao.update(job);
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
boolean changeStartTime = false;
|
||||
|
||||
|
||||
//SET START TIME
|
||||
long startTime = job.getStartTime();
|
||||
if(statusResponse.getStartDate()!=null && startTime==0){ //UPDATE start time first time only
|
||||
|
@ -124,7 +140,7 @@ public class OccurrenceJobUtil {
|
|||
changeStartTime = true;
|
||||
}
|
||||
|
||||
|
||||
|
||||
//UPDATE DAO
|
||||
if(changeStatus || changeEndTime || changeStartTime){
|
||||
job.setState(downloadState.toString());
|
||||
|
@ -134,11 +150,11 @@ public class OccurrenceJobUtil {
|
|||
}catch (Exception e) {
|
||||
logger.error("An error occurred on update the occurrencesJobDao ", e);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
endTime = job.getEndTime();
|
||||
long elapsedTime = 0;
|
||||
|
||||
|
||||
//SET END TIME, BECAUSE IT IS CHANGED
|
||||
if(endTime!=0){
|
||||
Date end = DateUtil.millisecondsToDate(endTime);
|
||||
|
@ -147,39 +163,54 @@ public class OccurrenceJobUtil {
|
|||
}
|
||||
else
|
||||
elapsedTime = Calendar.getInstance().getTimeInMillis();
|
||||
|
||||
|
||||
|
||||
//SET ELAPSED TIME
|
||||
jobOccurrenceModel.setElapsedTime(DateUtil.getDifference(submitTime, elapsedTime));
|
||||
|
||||
|
||||
|
||||
|
||||
//OTHERS SET
|
||||
jobOccurrenceModel.setFileFormat(converFileFormat(job.getFileFormat()));
|
||||
jobOccurrenceModel.setCsvType(convertCsvType(job.getCsvType()));
|
||||
jobOccurrenceModel.setByDataSource(job.isByDataSource());
|
||||
|
||||
|
||||
return jobOccurrenceModel;
|
||||
}
|
||||
|
||||
public static DownloadState getDownloadState(String status){
|
||||
|
||||
if(status!=null){
|
||||
if(status.compareToIgnoreCase(PENDING)==0){
|
||||
return DownloadState.PENDING;
|
||||
}else if(status.compareToIgnoreCase(RUNNING)==0){
|
||||
return DownloadState.ONGOING;
|
||||
}else if(status.compareToIgnoreCase(FAILED)==0){
|
||||
return DownloadState.FAILED;
|
||||
}else if(status.compareToIgnoreCase(COMPLETED)==0){
|
||||
|
||||
/**
|
||||
* Gets the download state.
|
||||
*
|
||||
* @param status the status
|
||||
* @return the download state
|
||||
*/
|
||||
public static DownloadState getDownloadState(JobStatus status){
|
||||
|
||||
if(status!=null){
|
||||
switch (status) {
|
||||
case COMPLETED:
|
||||
return DownloadState.COMPLETED;
|
||||
case FAILED:
|
||||
return DownloadState.FAILED;
|
||||
case PENDING:
|
||||
return DownloadState.PENDING;
|
||||
case RUNNING:
|
||||
return DownloadState.ONGOING;
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
|
||||
}
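
For illustration only (not part of the commit): the hunk above replaces the old string comparison with an enum switch, so a call site changes roughly as follows (class and value names as shown in the diff):

    // before, spd-client-library 3.x: the status arrived as a String
    DownloadState oldWay = OccurrenceJobUtil.getDownloadState("RUNNING");          // compareToIgnoreCase chain
    // after, spd-client-library 4.0: the status is the JobStatus enum from CompleteJobStatus.getStatus()
    DownloadState newWay = OccurrenceJobUtil.getDownloadState(JobStatus.RUNNING);  // switch -> DownloadState.ONGOING
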
|
||||
|
||||
/**
|
||||
* Convert csv type.
|
||||
*
|
||||
* @param csvType the csv type
|
||||
* @return the occurrences save enum
|
||||
*/
|
||||
public static OccurrencesSaveEnum convertCsvType(String csvType) {
|
||||
|
||||
|
||||
if(csvType!=null){
|
||||
if(csvType.compareToIgnoreCase(OccurrencesSaveEnum.STANDARD.toString())==0){
|
||||
return OccurrencesSaveEnum.STANDARD;
|
||||
|
@ -191,8 +222,14 @@ public class OccurrenceJobUtil {
|
|||
}
|
||||
|
||||
|
||||
/**
|
||||
* Conver file format.
|
||||
*
|
||||
* @param fileFormat the file format
|
||||
* @return the save file format
|
||||
*/
|
||||
public static SaveFileFormat converFileFormat(String fileFormat) {
|
||||
|
||||
|
||||
if(fileFormat!=null){
|
||||
if(fileFormat.compareToIgnoreCase(SaveFileFormat.CSV.toString())==0){
|
||||
return SaveFileFormat.CSV;
|
||||
|
@ -205,6 +242,13 @@ public class OccurrenceJobUtil {
|
|||
}
|
||||
|
||||
|
||||
/**
|
||||
* Delete occurrence job by id.
|
||||
*
|
||||
* @param jobIdentifier the job identifier
|
||||
* @param occurrencesJobDao the occurrences job dao
|
||||
* @return the int
|
||||
*/
|
||||
public static int deleteOccurrenceJobById(String jobIdentifier, OccurrenceJobPersistence occurrencesJobDao){
|
||||
logger.trace("Delete occurrence job id: " + jobIdentifier);
|
||||
|
||||
|
@ -214,55 +258,70 @@ public class OccurrenceJobUtil {
|
|||
|
||||
}catch (Exception e) {
|
||||
logger.error("An error occured deleteOccurrenceJobById jobId: " + jobIdentifier + " exception: "+e, e);
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
logger.trace("job not exists : " +jobIdentifier);
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Change status occurrence job by id.
|
||||
*
|
||||
* @param jobIdentifier the job identifier
|
||||
* @param state the state
|
||||
* @param occurrencesJobDao the occurrences job dao
|
||||
* @return the int
|
||||
*/
|
||||
public static int changeStatusOccurrenceJobById(String jobIdentifier, DownloadState state, OccurrenceJobPersistence occurrencesJobDao){
|
||||
logger.trace("Change status occurrence job id: " + jobIdentifier);
|
||||
// System.out.println("Delete job id: " + jobIdentifier);
|
||||
|
||||
|
||||
int result = 0;
|
||||
|
||||
|
||||
try{
|
||||
|
||||
|
||||
CriteriaBuilder queryBuilder = occurrencesJobDao.getCriteriaBuilder();
|
||||
CriteriaQuery<Object> cq = queryBuilder.createQuery();
|
||||
Predicate pr1 = queryBuilder.equal(occurrencesJobDao.rootFrom(cq).get(OccurrencesJob.ID_FIELD), jobIdentifier);
|
||||
cq.where(pr1);
|
||||
|
||||
|
||||
Iterator<OccurrencesJob> iterator = occurrencesJobDao.executeCriteriaQuery(cq).iterator();
|
||||
|
||||
|
||||
OccurrencesJob job;
|
||||
|
||||
|
||||
if(iterator.hasNext())
|
||||
job = iterator.next();
|
||||
else
|
||||
return 0;
|
||||
|
||||
|
||||
job.setState(state.toString());
|
||||
|
||||
|
||||
occurrencesJobDao.update(job);
|
||||
|
||||
|
||||
}catch (Exception e) {
|
||||
logger.error("An error occured in change status jobId: " + jobIdentifier + " exception: "+e, e );
|
||||
}
|
||||
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Gets the list of selected key.
|
||||
*
|
||||
* @param searchSession the search session
|
||||
* @return the list of selected key
|
||||
* @throws Exception the exception
|
||||
*/
|
||||
public static List<String> getListOfSelectedKey(FetchingSession<ResultRow> searchSession) throws Exception{
|
||||
|
||||
|
||||
Collection<ResultRow> selectedRows = ((SelectableFetchingBuffer<ResultRow>) searchSession.getBuffer()).getSelected();
|
||||
logger.trace("found "+selectedRows.size()+" selected rows");
|
||||
|
||||
|
||||
int count = 0;
|
||||
|
||||
|
||||
List<String> keys = new ArrayList<String>(selectedRows.size());
|
||||
|
||||
for (ResultRow row:selectedRows) {
|
||||
|
@ -272,45 +331,52 @@ public class OccurrenceJobUtil {
|
|||
count += row.getOccurencesCount();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
logger.trace("found "+count+" occurrence points");
|
||||
|
||||
|
||||
return keys;
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Gets the list of selected key by data source.
|
||||
*
|
||||
* @param dataSource the data source
|
||||
* @param session the session
|
||||
* @return the list of selected key by data source
|
||||
*/
|
||||
public static OccurrenceKeys getListOfSelectedKeyByDataSource(String dataSource, ASLSession session) {
|
||||
logger.trace("getListOfSelectedKeyByDataSource...");
|
||||
|
||||
|
||||
OccurrenceKeys occurrenceKeys = new OccurrenceKeys();
|
||||
|
||||
|
||||
List<String> keys = new ArrayList<String>();
|
||||
|
||||
|
||||
Iterator<ResultRow> resulRowIt = null;
|
||||
|
||||
|
||||
int count = 0;
|
||||
|
||||
|
||||
try{
|
||||
|
||||
|
||||
// System.out.println("dasource name: "+dataSource);
|
||||
logger.trace("datasource name: "+dataSource);
|
||||
|
||||
|
||||
ResultRowPersistence resultRowDao = DaoSession.getResultRowDAO(session);
|
||||
|
||||
|
||||
/*CriteriaBuilder cb = resultRowDao.getCriteriaBuilder();
|
||||
|
||||
|
||||
CriteriaQuery<Object> cq = cb.createQuery();
|
||||
|
||||
|
||||
Predicate pr1 = cb.equal(resultRowDao.rootFrom(cq).get(ResultRow.DATASOURCE_NAME), dataSource);
|
||||
|
||||
|
||||
Predicate pr2 = cb.equal(resultRowDao.rootFrom(cq).get(ResultRow.SELECTED), true);
|
||||
|
||||
|
||||
cq.where(cb.and(pr1,pr2));
|
||||
|
||||
|
||||
Iterator<ResultRow> resulRowIt = resultRowDao.executeCriteriaQuery(cq).iterator();
|
||||
*/
|
||||
|
||||
|
||||
EntityManager em = resultRowDao.createNewManager();
|
||||
try {
|
||||
|
||||
|
@ -318,7 +384,7 @@ public class OccurrenceJobUtil {
|
|||
|
||||
resulRowIt = query.getResultList().iterator();
|
||||
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in update: "+e.getMessage(), e);
|
||||
return null;
|
||||
|
@ -328,59 +394,72 @@ public class OccurrenceJobUtil {
|
|||
}
|
||||
|
||||
while(resulRowIt.hasNext()){
|
||||
|
||||
|
||||
ResultRow row = resulRowIt.next();
|
||||
|
||||
|
||||
if(row.getOccurencesKey()!=null && row.getOccurencesKey().length()>0){
|
||||
keys.add(row.getOccurencesKey());
|
||||
count += row.getOccurencesCount();
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
occurrenceKeys.setListKey(keys);
|
||||
occurrenceKeys.setTotalOccurrence(count);
|
||||
|
||||
|
||||
}catch (Exception e) {
|
||||
logger.error("error in getListOfSelectedKeyByDataSource "+ e);
|
||||
}
|
||||
|
||||
|
||||
logger.trace("found "+count+" occurrence points");
|
||||
|
||||
|
||||
return occurrenceKeys;
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Creates the occurrence job on service by keys.
|
||||
*
|
||||
* @param jobModel the job model
|
||||
* @param taxonomyService the taxonomy service
|
||||
* @param occurrencesJobDao the occurrences job dao
|
||||
* @param keys the keys
|
||||
* @param dataSources the data sources
|
||||
* @param saveFileFormat the save file format
|
||||
* @param csvType the csv type
|
||||
* @param expectedOccurrence the expected occurrence
|
||||
* @return the job occurrences model
|
||||
*/
|
||||
public static JobOccurrencesModel createOccurrenceJobOnServiceByKeys(JobOccurrencesModel jobModel,SpeciesService taxonomyService, OccurrenceJobPersistence occurrencesJobDao, List<String> keys, List<DataSource> dataSources, SaveFileFormat saveFileFormat, OccurrencesSaveEnum csvType, int expectedOccurrence) {
|
||||
String serviceJobId = null;
|
||||
|
||||
|
||||
StreamExtend<String> streamKeys = new StreamExtend<String>(keys.iterator()); //convert
|
||||
|
||||
|
||||
String csvTypeString = null;
|
||||
|
||||
|
||||
try {
|
||||
|
||||
|
||||
switch (saveFileFormat) {
|
||||
|
||||
|
||||
case CSV:
|
||||
|
||||
|
||||
if(csvType.equals(OccurrencesSaveEnum.STANDARD))
|
||||
serviceJobId = taxonomyService.createOccurrenceCSVJob(streamKeys);
|
||||
else if(csvType.equals(OccurrencesSaveEnum.OPENMODELLER))
|
||||
serviceJobId = taxonomyService.createOccurrenceCSVOpenModellerJob(streamKeys);
|
||||
|
||||
|
||||
if(jobModel.getCsvType()!=null)
|
||||
csvTypeString = jobModel.getCsvType().toString(); //CASE CSV
|
||||
|
||||
|
||||
break;
|
||||
|
||||
|
||||
case DARWIN_CORE:
|
||||
|
||||
|
||||
serviceJobId = taxonomyService.createOccurrenceDARWINCOREJob(streamKeys);
|
||||
|
||||
|
||||
csvTypeString = "";
|
||||
|
||||
|
||||
break;
|
||||
|
||||
default:
|
||||
|
@ -391,23 +470,23 @@ public class OccurrenceJobUtil {
|
|||
logger.error("An error occured in create new occurrences job on server ",e);
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
|
||||
long submitTimeInMillis = Calendar.getInstance().getTimeInMillis();
|
||||
|
||||
|
||||
try {
|
||||
|
||||
|
||||
//STORE INTO DAO
|
||||
OccurrencesJob occurrenceJob = new OccurrencesJob(serviceJobId, jobModel.getJobName(), jobModel.getDescription(), jobModel.getScientificName(), dataSources, DownloadState.PENDING.toString(), "", submitTimeInMillis, 0, 0, jobModel.getFileFormat().toString(),csvTypeString, jobModel.isByDataSource(), convertListKeyIntoStoreXMLString(keys), expectedOccurrence);
|
||||
|
||||
|
||||
//for debug
|
||||
// System.out.println("INTO createOccurrenceJobOnServiceByKeys " + occurrenceJob);
|
||||
|
||||
|
||||
occurrencesJobDao.insert(occurrenceJob);
|
||||
|
||||
|
||||
Date start = DateUtil.millisecondsToDate(submitTimeInMillis);
|
||||
jobModel.setSubmitTime(start);
|
||||
|
||||
|
||||
//FILL MODEL WITH OTHER DATA
|
||||
jobModel.setId(serviceJobId);
|
||||
jobModel.setState(DownloadState.PENDING);
|
||||
|
@ -416,25 +495,31 @@ public class OccurrenceJobUtil {
|
|||
}catch (Exception e) {
|
||||
logger.error("An error occured in create new occurrences job on dao object " +e,e);
|
||||
}
|
||||
|
||||
|
||||
return jobModel;
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Convert list key into store xml string.
|
||||
*
|
||||
* @param keys the keys
|
||||
* @return the string
|
||||
*/
|
||||
public static String convertListKeyIntoStoreXMLString(List<String> keys){
|
||||
String storeKeys = "";
|
||||
|
||||
|
||||
KeyStringList keyStringList = new KeyStringList();
|
||||
|
||||
|
||||
XStreamUtil<KeyStringList> xstreamUtil = new XStreamUtil<KeyStringList>(ALIASKEY,KeyStringList.class);
|
||||
|
||||
|
||||
for (String key : keys) {
|
||||
// System.out.println("key :"+ key);
|
||||
logger.info("key converted: "+key);
|
||||
keyStringList.addKey(key);
|
||||
}
|
||||
|
||||
|
||||
storeKeys = xstreamUtil.toXML(keyStringList);
|
||||
|
||||
//FOR DEBUG
|
||||
|
@ -442,15 +527,21 @@ public class OccurrenceJobUtil {
|
|||
|
||||
return storeKeys;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Revert list key from stored xml string.
|
||||
*
|
||||
* @param storedKeysAsXml the stored keys as xml
|
||||
* @return the list
|
||||
*/
|
||||
public static List<String> revertListKeyFromStoredXMLString(String storedKeysAsXml){
|
||||
|
||||
|
||||
List<String> listKey = new ArrayList<String>();
|
||||
|
||||
|
||||
XStreamUtil<KeyStringList> xstreamUtil = new XStreamUtil<KeyStringList>(ALIASKEY,KeyStringList.class);
|
||||
|
||||
|
||||
KeyStringList keyStringList = (KeyStringList) xstreamUtil.fromXML(storedKeysAsXml);
|
||||
|
||||
|
||||
for (String key : keyStringList.getListKeys()) {
|
||||
// for debug
|
||||
// System.out.println("key :"+ key);
|
||||
|

@@ -12,14 +12,21 @@ import javax.persistence.criteria.CriteriaQuery;
 import javax.persistence.criteria.Predicate;
 
 import org.apache.log4j.Logger;
-import org.gcube.data.spd.stubs.types.NodeStatus;
-import org.gcube.data.spd.stubs.types.Status;
+import org.gcube.data.spd.model.service.types.CompleteJobStatus;
+import org.gcube.data.spd.model.service.types.JobStatus;
+import org.gcube.data.spd.model.service.types.NodeStatus;
 import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.TaxonomyJobPersistence;
 import org.gcube.portlets.user.speciesdiscovery.server.util.DateUtil;
 import org.gcube.portlets.user.speciesdiscovery.shared.DownloadState;
 import org.gcube.portlets.user.speciesdiscovery.shared.JobTaxonomyModel;
 import org.gcube.portlets.user.speciesdiscovery.shared.TaxonomyJob;
 
+/**
+* The Class TaxonomyJobUtil.
+*
+* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
+* Jan 10, 2017
+*/
 public class TaxonomyJobUtil {
 
 //STATE RETURNED BY STATUS RESPONSE
 
@ -31,40 +38,50 @@ public class TaxonomyJobUtil {
|
|||
protected static Logger logger = Logger.getLogger(TaxonomyJobUtil.class);
|
||||
|
||||
|
||||
public static JobTaxonomyModel convertJob(TaxonomyJob job, Status statusResponse, TaxonomyJobPersistence speciesJobDao) throws SQLException{
|
||||
|
||||
//TODO SET END TIME
|
||||
|
||||
/**
|
||||
* Convert job.
|
||||
*
|
||||
* @param job the job
|
||||
* @param statusResponse the status response
|
||||
* @param speciesJobDao the species job dao
|
||||
* @return the job taxonomy model
|
||||
* @throws SQLException the SQL exception
|
||||
*/
|
||||
public static JobTaxonomyModel convertJob(TaxonomyJob job, CompleteJobStatus statusResponse, TaxonomyJobPersistence speciesJobDao) throws SQLException{
|
||||
|
||||
//TODO SET END TIME
|
||||
JobTaxonomyModel jobSpeciesModel;
|
||||
String status = statusResponse.getStatus();
|
||||
JobStatus status = statusResponse.getStatus();
|
||||
DownloadState downloadState = null;
|
||||
long endTime = 0;
|
||||
|
||||
|
||||
downloadState = getDownloadState(status);
|
||||
logger.trace("download state: " + downloadState);
|
||||
|
||||
|
||||
|
||||
|
||||
if(downloadState==null){ //Case with exception
|
||||
logger.warn("download state is null, returning");
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
boolean onSaving = true;
|
||||
|
||||
|
||||
//GET CHILDREN
|
||||
ArrayList<JobTaxonomyModel> listChildJob = new ArrayList<JobTaxonomyModel>();
|
||||
|
||||
logger.trace("status response subnodes is != null? " + (statusResponse.getSubNodes()!=null));
|
||||
if(statusResponse.getSubNodes()!=null){
|
||||
logger.trace("subNodes size is: " + statusResponse.getSubNodes().size());
|
||||
|
||||
|
||||
for (NodeStatus nodeStatus : statusResponse.getSubNodes()){
|
||||
|
||||
|
||||
logger.trace("node status " + nodeStatus);
|
||||
|
||||
|
||||
DownloadState downloadStateChildren = getDownloadState(nodeStatus.getStatus());
|
||||
if(!downloadStateChildren.equals(DownloadState.COMPLETED))
|
||||
onSaving=false;
|
||||
|
||||
|
||||
listChildJob.add(new JobTaxonomyModel(UUID.randomUUID().toString(), nodeStatus.getScientificName(),downloadStateChildren));
|
||||
}
|
||||
}else{
|
||||
|
@ -72,12 +89,12 @@ public class TaxonomyJobUtil {
|
|||
onSaving=false;
|
||||
}
|
||||
boolean changeStatus = false;
|
||||
|
||||
|
||||
//If status of children is completed and job status is not completed (the file is generated) or failed, set download state on saving
|
||||
if(onSaving &&(!downloadState.equals(DownloadState.COMPLETED) && !downloadState.equals(DownloadState.FAILED))){
|
||||
if(onSaving &&!downloadState.equals(DownloadState.COMPLETED) && !downloadState.equals(DownloadState.FAILED)){
|
||||
downloadState = DownloadState.SAVING;
|
||||
changeStatus = true;
|
||||
|
||||
|
||||
//if status is completed and job was saved, update status as saved
|
||||
}else if(downloadState.equals(DownloadState.COMPLETED)){
|
||||
if(job.getState().compareTo(DownloadState.SAVED.toString())==0){
|
||||
|
@ -85,65 +102,65 @@ public class TaxonomyJobUtil {
|
|||
changeStatus = true;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
jobSpeciesModel = new JobTaxonomyModel(job.getId(), job.getDescriptiveName(), downloadState, null, job.getScientificName(), job.getDataSourceName(), job.getRank());
|
||||
|
||||
|
||||
jobSpeciesModel.setListChildJobs(listChildJob);
|
||||
|
||||
|
||||
boolean changeEndTime = false;
|
||||
|
||||
|
||||
//UPDATE END TIME
|
||||
if(downloadState.equals(DownloadState.FAILED) || downloadState.equals(DownloadState.COMPLETED)){
|
||||
|
||||
// if(job.getEndTime()==0){ //UPDATE end time first time only
|
||||
//
|
||||
//
|
||||
// logger.trace("UPDATE end time first time only - " + downloadState);
|
||||
// endTime = Calendar.getInstance().getTimeInMillis();
|
||||
// job.setEndTime(endTime);
|
||||
// changeEndTime = true;
|
||||
//// speciesJobDao.update(job);
|
||||
//
|
||||
//
|
||||
// }
|
||||
|
||||
|
||||
// System.out.println("job "+job);
|
||||
// System.out.println("statusResponse.getEndDate() "+statusResponse.getEndDate());
|
||||
// System.out.println("job.getEndTime() "+job.getEndTime());
|
||||
|
||||
|
||||
if(statusResponse.getEndDate()!=null && job.getEndTime()==0){ //UPDATE end time first time only
|
||||
|
||||
|
||||
logger.trace("UPDATE end time first time only - " + downloadState);
|
||||
// endTime = Calendar.getInstance().getTimeInMillis();
|
||||
endTime = statusResponse.getEndDate().getTimeInMillis();
|
||||
job.setEndTime(endTime);
|
||||
changeEndTime = true;
|
||||
// speciesJobDao.update(job);
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
boolean changeStartTime = false;
|
||||
|
||||
|
||||
//SET START TIME
|
||||
long startTime = job.getStartTime();
|
||||
|
||||
|
||||
// System.out.println("statusResponse.getStartDate(): "+statusResponse.getStartDate());
|
||||
// System.out.println("startTime: "+startTime);
|
||||
|
||||
|
||||
if(statusResponse.getStartDate()!=null && startTime==0){ //UPDATE start time first time only
|
||||
Date start = DateUtil.millisecondsToDate(statusResponse.getStartDate().getTimeInMillis());
|
||||
// jobSpeciesModel.setStartTime(DateUtil.dateToDateFormatString(start));
|
||||
jobSpeciesModel.setStartTime(start);
|
||||
changeStartTime = true;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
try{
|
||||
//UPDATE DAO
|
||||
if(changeStatus || changeEndTime || changeStartTime){
|
||||
job.setState(downloadState.toString());
|
||||
speciesJobDao.update(job);
|
||||
}
|
||||
}
|
||||
}catch (Exception e) {
|
||||
logger.trace("An error occurred when update dao: ",e);
|
||||
}
|
||||
|
@ -154,10 +171,10 @@ public class TaxonomyJobUtil {
|
|||
// jobSpeciesModel.setStartTime(DateUtil.dateToDateFormatString(start));
|
||||
jobSpeciesModel.setSubmitTime(submit);
|
||||
|
||||
|
||||
|
||||
endTime = job.getEndTime();
|
||||
long elapsedTime = 0;
|
||||
|
||||
|
||||
//SET END TIME, BECAUSE IT IS CHANGED
|
||||
if(endTime!=0){
|
||||
Date end = DateUtil.millisecondsToDate(endTime);
|
||||
|
@ -167,31 +184,49 @@ public class TaxonomyJobUtil {
|
|||
}
|
||||
else
|
||||
elapsedTime = Calendar.getInstance().getTimeInMillis();
|
||||
|
||||
|
||||
|
||||
//SET ELAPSED TIME
|
||||
jobSpeciesModel.setElapsedTime(DateUtil.getDifference(submitTime, elapsedTime));
|
||||
|
||||
|
||||
return jobSpeciesModel;
|
||||
}
|
||||
|
||||
public static DownloadState getDownloadState(String status){
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Gets the download state.
|
||||
*
|
||||
* @param status the status
|
||||
* @return the download state
|
||||
*/
|
||||
public static DownloadState getDownloadState(JobStatus status){
|
||||
|
||||
if(status!=null){
|
||||
if(status.compareToIgnoreCase(PENDING)==0){
|
||||
return DownloadState.PENDING;
|
||||
}else if(status.compareToIgnoreCase(RUNNING)==0){
|
||||
return DownloadState.ONGOING;
|
||||
}else if(status.compareToIgnoreCase(FAILED)==0){
|
||||
return DownloadState.FAILED;
|
||||
}else if(status.compareToIgnoreCase(COMPLETED)==0){
|
||||
switch (status) {
|
||||
case COMPLETED:
|
||||
return DownloadState.COMPLETED;
|
||||
case FAILED:
|
||||
return DownloadState.FAILED;
|
||||
case PENDING:
|
||||
return DownloadState.PENDING;
|
||||
case RUNNING:
|
||||
return DownloadState.ONGOING;
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Delete taxonomy job by id.
|
||||
*
|
||||
* @param jobIdentifier the job identifier
|
||||
* @param taxonomyJobDao the taxonomy job dao
|
||||
* @return the int
|
||||
* @throws SQLException the SQL exception
|
||||
*/
|
||||
public static int deleteTaxonomyJobById(String jobIdentifier, TaxonomyJobPersistence taxonomyJobDao) throws SQLException{
|
||||
logger.trace("Delete taxonomy job id: " + jobIdentifier);
|
||||
try{
|
||||
|
@ -203,41 +238,49 @@ public class TaxonomyJobUtil {
|
|||
logger.error("An error occured deleteTaxonomyJobById " + jobIdentifier + " exception: "+e, e);
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Change status taxonomy job by id.
|
||||
*
|
||||
* @param jobIdentifier the job identifier
|
||||
* @param state the state
|
||||
* @param taxonomyJobDAO the taxonomy job dao
|
||||
* @return the int
|
||||
*/
|
||||
public static int changeStatusTaxonomyJobById(String jobIdentifier,DownloadState state, TaxonomyJobPersistence taxonomyJobDAO) {
|
||||
logger.trace("Change status taxonomy job id: " + jobIdentifier);
|
||||
// System.out.println("Delete job id: " + jobIdentifier);
|
||||
|
||||
|
||||
int result = 0;
|
||||
|
||||
|
||||
try{
|
||||
|
||||
|
||||
CriteriaBuilder queryBuilder = taxonomyJobDAO.getCriteriaBuilder();
|
||||
CriteriaQuery<Object> cq = queryBuilder.createQuery();
|
||||
Predicate pr1 = queryBuilder.equal(taxonomyJobDAO.rootFrom(cq).get(TaxonomyJob.ID_FIELD), jobIdentifier);
|
||||
cq.where(pr1);
|
||||
|
||||
|
||||
Iterator<TaxonomyJob> iterator = taxonomyJobDAO.executeCriteriaQuery(cq).iterator();
|
||||
|
||||
|
||||
TaxonomyJob job;
|
||||
|
||||
|
||||
if(iterator.hasNext())
|
||||
job = iterator.next();
|
||||
else
|
||||
return 0;
|
||||
|
||||
|
||||
job.setState(state.toString());
|
||||
|
||||
|
||||
taxonomyJobDAO.update(job);
|
||||
|
||||
|
||||
}catch (Exception e) {
|
||||
logger.error("An error occured in change status jobId: " + jobIdentifier + " exception: "+e, e );
|
||||
}
|
||||
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
/**
|
||||
*
|
||||
*
|
||||
*/
|
||||
package org.gcube.portlets.user.speciesdiscovery.server.persistence;
|
||||
|
||||
|
@ -27,41 +27,41 @@ import org.gcube.portlets.user.speciesdiscovery.shared.util.NormalizeString;
|
|||
|
||||
|
||||
/**
|
||||
*
|
||||
*
|
||||
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
|
||||
* @May 20, 2013
|
||||
*
|
||||
*/
|
||||
public class ResultRowBuffer extends AbstractSelectableDaoBuffer<ResultRow> implements FilterableFetchingBuffer<ResultRow> {
|
||||
|
||||
|
||||
protected Logger logger = Logger.getLogger(ResultRowBuffer.class);
|
||||
|
||||
|
||||
private AbstractPersistence<Taxon> taxonDao;
|
||||
private int filteredListSize = 0;
|
||||
|
||||
|
||||
public ResultRowBuffer(AbstractPersistence<ResultRow> dao, AbstractPersistence<Taxon> taxonDao)
|
||||
{
|
||||
super(dao, ResultRow.ID_FIELD, ResultRow.SELECTED);
|
||||
this.taxonDao = taxonDao;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
* @throws Exception
|
||||
* @throws Exception
|
||||
*/
|
||||
@Override
|
||||
public void add(ResultRow row) throws Exception{
|
||||
//FOR DEBUG
|
||||
// logger.trace("Add item "+ row.getId() + " service id: " +row.getServiceId());
|
||||
|
||||
|
||||
super.add(row);
|
||||
}
|
||||
|
||||
|
||||
public int getFilteredListSize() throws SQLException
|
||||
{
|
||||
return filteredListSize;
|
||||
}
|
||||
|
||||
|
||||
|
||||
//TODO MODIFIED
|
||||
@Override
|
||||
|
@ -71,22 +71,18 @@ public class ResultRowBuffer extends AbstractSelectableDaoBuffer<ResultRow> impl
|
|||
List<ResultRow> list = new ArrayList<ResultRow>();
|
||||
Iterator<ResultRow> iterator = null;
|
||||
String value;
|
||||
|
||||
|
||||
if(activeFiltersObject!=null){
|
||||
|
||||
//FILTER BY CLASSIFICATION
|
||||
if(activeFiltersObject.isByClassification()){
|
||||
|
||||
// List<Integer> listId = activeFiltersObject.getListByClassification();
|
||||
|
||||
int counter = activeFiltersObject.getNumberOfData();
|
||||
|
||||
logger.trace("in classification filter - counter: "+counter);
|
||||
logger.trace("in classification filter - rank: "+activeFiltersObject.getRankClassification());
|
||||
logger.trace("in classification filter - classification id: "+activeFiltersObject.getClassificationId());
|
||||
|
||||
|
||||
String columName = null;
|
||||
|
||||
|
||||
if(activeFiltersObject.getRankClassification().compareTo(MainTaxonomicRankEnum.KINGDOM.getLabel())==0)
|
||||
columName = ResultRow.KINGDOM_ID;
|
||||
else if(activeFiltersObject.getRankClassification().compareTo(MainTaxonomicRankEnum.FAMILY.getLabel())==0)
|
||||
|
@ -101,121 +97,103 @@ public class ResultRowBuffer extends AbstractSelectableDaoBuffer<ResultRow> impl
|
|||
columName = ResultRow.PHYLUM_ID;
|
||||
else if(activeFiltersObject.getRankClassification().compareTo(MainTaxonomicRankEnum.SPECIES.getLabel())==0)
|
||||
columName = ResultRow.SPECIES_ID;
|
||||
|
||||
|
||||
// logger.trace("in classification filter - columName: "+columName);
|
||||
|
||||
try {
|
||||
|
||||
CriteriaBuilder queryBuilder = dao.getCriteriaBuilder();
|
||||
|
||||
Query query = dao.createNewManager().createQuery("select r FROM ResultRow r where r."+columName+ "='"+activeFiltersObject.getClassificationId()+"'");
|
||||
//
|
||||
query.setMaxResults(counter);
|
||||
|
||||
iterator = query.getResultList().iterator();
|
||||
// logger.trace("in classification filter - statement: "+queryBuilder.where().eq(columName, activeFiltersObject.getClassificationId()).getStatement());
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in activeFiltersObject.isByClassification(): "+e, e);
|
||||
}
|
||||
|
||||
|
||||
//FILTER BY DATA PROVIDER
|
||||
}else if(activeFiltersObject.isByDataProvider()){
|
||||
|
||||
|
||||
try {
|
||||
CriteriaBuilder queryBuilder = dao.getCriteriaBuilder();
|
||||
value = activeFiltersObject.getDataProviderName();
|
||||
CriteriaQuery<Object> cq = queryBuilder.createQuery();
|
||||
Predicate pr1 = queryBuilder.equal(dao.rootFrom(cq).get(ResultRow.DATAPROVIDER_NAME), value);
|
||||
cq.where(pr1);
|
||||
|
||||
|
||||
//TODO FIXME empty value
|
||||
logger.trace("FILTER BY DATA PROVIDER: "+ value );
|
||||
|
||||
|
||||
iterator = dao.executeCriteriaQuery(cq).iterator();
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in activeFiltersObject.isByDataProvider(): "+e, e);
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
|
||||
//FILTER BY DATA SOURCE
|
||||
}else if(activeFiltersObject.isByDataSourceName()){
|
||||
|
||||
|
||||
try {
|
||||
CriteriaBuilder queryBuilder = dao.getCriteriaBuilder();
|
||||
value = activeFiltersObject.getDataSourceName();
|
||||
CriteriaQuery<Object> cq = queryBuilder.createQuery();
|
||||
Predicate pr1 = queryBuilder.equal(dao.rootFrom(cq).get(ResultRow.DATASOURCE_NAME), NormalizeString.validateUndefined(value));
|
||||
cq.where(pr1);
|
||||
|
||||
|
||||
logger.trace("FILTER BY DATA DATA SOURCE NAME: "+ value );
|
||||
|
||||
|
||||
iterator = dao.executeCriteriaQuery(cq).iterator();
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in activeFiltersObject.isByDataSourceName(): "+e, e);
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
|
||||
//FILTER BY RANK
|
||||
}else if(activeFiltersObject.isByRank()){
|
||||
|
||||
try {
|
||||
|
||||
CriteriaBuilder queryBuilder = taxonDao.getCriteriaBuilder();
|
||||
value = activeFiltersObject.getRankName();
|
||||
// value = NormalizeString.lowerCaseUpFirstChar(activeFiltersObject.getRankName());
|
||||
CriteriaQuery<Object> cq = queryBuilder.createQuery();
|
||||
Predicate pr1 = queryBuilder.equal(taxonDao.rootFrom(cq).get(Taxon.RANK), NormalizeString.validateUndefined(value));
|
||||
cq.where(pr1);
|
||||
|
||||
Iterator<Taxon> iteratorTaxon = taxonDao.executeCriteriaQuery(cq).iterator();
|
||||
|
||||
logger.trace("in rank filter - value: "+value);
|
||||
|
||||
List<Integer> listTaxonId = new ArrayList<Integer>();
|
||||
|
||||
if(iteratorTaxon!=null){
|
||||
|
||||
while(iteratorTaxon.hasNext()){
|
||||
|
||||
Taxon tax = iteratorTaxon.next();
|
||||
listTaxonId.add(tax.getId());
|
||||
}
|
||||
try {
|
||||
|
||||
value = activeFiltersObject.getRankName();
|
||||
EntityManager em = dao.createNewManager();
|
||||
|
||||
String queryString = "select *" +
|
||||
" FROM "+ResultRow.class.getSimpleName()+" r" +
|
||||
" INNER JOIN RESULTROW_TAXON rt on r.ID=rt.RESULTROW_ID" +
|
||||
" INNER JOIN "+Taxon.class.getSimpleName()+" t on t.INTERNALID=rt.MATCHINGTAXON_INTERNALID" +
|
||||
" where t.RANK = '"+value+"' and t.ID IN" +
|
||||
" (select MIN(tax.ID) from TAXON tax)";
|
||||
|
||||
Query query = em.createNativeQuery(queryString, ResultRow.class);
|
||||
List<ResultRow> listResultRow = new ArrayList<ResultRow>();
|
||||
try {
|
||||
|
||||
listResultRow = query.getResultList();
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in ResultRow - executeCriteriaQuery: " + e.getMessage(), e);
|
||||
} finally {
|
||||
em.close();
|
||||
}
|
||||
|
||||
logger.trace("in rank filter - listTaxonId size: "+listTaxonId.size());
|
||||
|
||||
|
||||
queryBuilder = dao.getCriteriaBuilder();
|
||||
cq = queryBuilder.createQuery();
|
||||
pr1 = dao.rootFrom(cq).get(ResultRow.PARENT_FOREIGN_KEY_TAXON).in(listTaxonId);
|
||||
cq.where(pr1);
|
||||
|
||||
iterator = dao.executeCriteriaQuery(cq).iterator();
|
||||
iterator = listResultRow.iterator();
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in activeFiltersObject.isByRank(): "+e, e);
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if(iterator!=null){
|
||||
|
||||
while(iterator.hasNext()){
|
||||
|
||||
ResultRow row = iterator.next();
|
||||
list.add(row);
|
||||
}
|
||||
|
||||
filteredListSize = list.size();
|
||||
|
||||
filteredListSize = list.size();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
logger.trace("RETURNED List size " + list.size());
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
|
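The classification branch above assembles its JPQL by concatenating both the column name and the classification id into the query string. A minimal sketch of the same lookup with the id bound as a parameter is shown below; ResultRow and its field constants are taken from the code above, while the helper itself and its signature are hypothetical. The column name still has to come from a fixed whitelist (e.g. ResultRow.SPECIES_ID), since JPQL can bind only values, not identifiers.

import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;

// Hypothetical variant of the classification lookup: the id is a bound parameter,
// not part of the query text. The caller keeps ownership of the EntityManager.
public static List<ResultRow> findByClassification(EntityManager em, String columnName,
        String classificationId, int maxResults) {
    TypedQuery<ResultRow> query = em.createQuery(
            "select r from ResultRow r where r." + columnName + " = :cid", ResultRow.class);
    query.setParameter("cid", classificationId);
    query.setMaxResults(maxResults);
    return query.getResultList();
}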
@ -225,14 +203,11 @@ public class ResultRowBuffer extends AbstractSelectableDaoBuffer<ResultRow> impl
|
|||
EntityManager em = dao.createNewManager();
|
||||
|
||||
//TODO use filterMap
|
||||
|
||||
|
||||
try {
|
||||
em.getTransaction().begin();
|
||||
|
||||
int updateCount = em.createQuery("UPDATE ResultRow SET " + ResultRow.SELECTED + " = "+ selection).executeUpdate();
|
||||
|
||||
logger.trace("Updated " + updateCount + " item");
|
||||
|
||||
em.getTransaction().commit();
|
||||
} finally {
|
||||
if (em.getTransaction().isActive())
|
||||
|
@ -248,22 +223,22 @@ public class ResultRowBuffer extends AbstractSelectableDaoBuffer<ResultRow> impl
|
|||
@Override
|
||||
public void updateAllSelectionByIds(boolean selection, List<String> listIds)
|
||||
throws Exception {
|
||||
|
||||
|
||||
EntityManager em = dao.createNewManager();
|
||||
|
||||
String queryString = "UPDATE ResultRow t SET "
|
||||
String queryString = "UPDATE ResultRow t SET "
|
||||
+ ResultRow.SELECTED + " = "+ selection +" where "
|
||||
+ ResultRow.ID_FIELD+" IN :inclList";
|
||||
|
||||
|
||||
try {
|
||||
em.getTransaction().begin();
|
||||
|
||||
TypedQuery<ResultRow> query = em.createQuery(queryString, ResultRow.class);
|
||||
|
||||
|
||||
query.setParameter("inclList", listIds);
|
||||
|
||||
|
||||
int updateCount = query.executeUpdate();
|
||||
|
||||
|
||||
logger.trace("Updated " + updateCount + " item");
|
||||
|
||||
em.getTransaction().commit();
|
||||
|
|
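updateAllSelectionByIds above updates the selection flag for a list of ids with an "IN :inclList" clause. A compact sketch of that pattern follows; the attribute names (selected, id) stand in for ResultRow.SELECTED and ResultRow.ID_FIELD and are assumptions. Bulk updates have no result type, so the untyped createQuery is used here rather than a TypedQuery.

import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.Query;

// Bulk selection update with a collection-valued parameter, inside a transaction.
public static int updateSelection(EntityManager em, boolean selection, List<String> ids) {
    Query query = em.createQuery(
            "UPDATE ResultRow t SET t.selected = :sel WHERE t.id IN :ids");
    query.setParameter("sel", selection);
    query.setParameter("ids", ids);
    try {
        em.getTransaction().begin();
        int updated = query.executeUpdate();
        em.getTransaction().commit();
        return updated;
    } finally {
        if (em.getTransaction().isActive()) {
            em.getTransaction().rollback(); // commit failed or was never reached
        }
        em.close();
    }
}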
|
@ -1,5 +1,5 @@
|
|||
/**
|
||||
*
|
||||
*
|
||||
*/
|
||||
package org.gcube.portlets.user.speciesdiscovery.server.persistence;
|
||||
|
||||
|
@ -25,7 +25,7 @@ import org.gcube.portlets.user.speciesdiscovery.shared.filter.ResultFilter;
|
|||
import org.gcube.portlets.user.speciesdiscovery.shared.util.NormalizeString;
|
||||
|
||||
/**
|
||||
*
|
||||
*
|
||||
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
|
||||
* @May 20, 2013
|
||||
*
|
||||
|
@ -37,17 +37,17 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow>
|
|||
public TaxonomyRowBuffer(AbstractPersistence<TaxonomyRow> dao) {
|
||||
super(dao, TaxonomyRow.ID_FIELD, TaxonomyRow.SELECTED);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
* @throws Exception
|
||||
* @throws Exception
|
||||
*/
|
||||
@Override
|
||||
public void add(TaxonomyRow row) throws Exception
|
||||
{
|
||||
//DEBUG
|
||||
// logger.trace("Add item "+ row.getId() + " service id: " +row.getServiceId());
|
||||
|
||||
|
||||
super.add(row);
|
||||
}
|
||||
|
||||
|
@ -58,14 +58,14 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow>
|
|||
Iterator<TaxonomyRow> iterator = null;
|
||||
// QueryBuilder<TaxonomyRow, Integer> queryBuilder = dao.queryBuilder();
|
||||
String value;
|
||||
|
||||
if(activeFiltersObject!=null){
|
||||
|
||||
if(activeFiltersObject!=null){
|
||||
//FILTER BY CLASSIFICATION
|
||||
if(activeFiltersObject.isByClassification()){
|
||||
|
||||
int counter = activeFiltersObject.getNumberOfData();
|
||||
String columName = null;
|
||||
|
||||
|
||||
if(activeFiltersObject.getRankClassification().compareTo(MainTaxonomicRankEnum.KINGDOM.getLabel())==0)
|
||||
columName = TaxonomyRow.KINGDOM_ID;
|
||||
else if(activeFiltersObject.getRankClassification().compareTo(MainTaxonomicRankEnum.FAMILY.getLabel())==0)
|
||||
|
@ -80,17 +80,14 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow>
|
|||
columName = TaxonomyRow.PHYLUM_ID;
|
||||
else if(activeFiltersObject.getRankClassification().compareTo(MainTaxonomicRankEnum.SPECIES.getLabel())==0)
|
||||
columName = TaxonomyRow.SPECIES_ID;
|
||||
//
|
||||
//
|
||||
// logger.trace("in classification filter - columName: "+columName);
|
||||
|
||||
try {
|
||||
|
||||
|
||||
CriteriaBuilder queryBuilder = dao.getCriteriaBuilder();
|
||||
|
||||
Query query = dao.createNewManager().createQuery("select r FROM TaxonomyRow r where r."+columName+ "='"+activeFiltersObject.getClassificationId()+"'");
|
||||
//
|
||||
query.setMaxResults(counter);
|
||||
|
||||
iterator = query.getResultList().iterator();
|
||||
// logger.trace("in classification filter - statement: "+queryBuilder.where().eq(columName, activeFiltersObject.getClassificationId()).getStatement());
|
||||
|
||||
|
@ -98,22 +95,21 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow>
|
|||
logger.error("Error in activeFiltersObject.isByClassification(): "+e, e);
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
|
||||
//FILTER BY DATA PROVIDER
|
||||
}else if(activeFiltersObject.isByDataProvider()){
|
||||
|
||||
|
||||
try {
|
||||
CriteriaBuilder queryBuilder = dao.getCriteriaBuilder();
|
||||
value = activeFiltersObject.getDataProviderName();
|
||||
CriteriaQuery<Object> cq = queryBuilder.createQuery();
|
||||
Predicate pr1 = queryBuilder.equal(dao.rootFrom(cq).get(TaxonomyRow.DATAPROVIDER_NAME), value);
|
||||
cq.where(pr1);
|
||||
|
||||
|
||||
//TODO FIXME empty value
|
||||
logger.trace("FILTER BY DATA PROVIDER: "+ value );
|
||||
|
||||
iterator = dao.executeCriteriaQuery(cq).iterator();
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in activeFiltersObject.isByDataProvider(): "+e, e);
|
||||
e.printStackTrace();
|
||||
|
@ -121,30 +117,27 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow>
|
|||
|
||||
//FILTER BY DATA SOURCE
|
||||
}else if(activeFiltersObject.isByDataSourceName()){
|
||||
|
||||
|
||||
try {
|
||||
CriteriaBuilder queryBuilder = dao.getCriteriaBuilder();
|
||||
value = activeFiltersObject.getDataSourceName();
|
||||
CriteriaQuery<Object> cq = queryBuilder.createQuery();
|
||||
Predicate pr1 = queryBuilder.equal(dao.rootFrom(cq).get(TaxonomyRow.DATAPROVIDER_NAME), value);
|
||||
cq.where(pr1);
|
||||
|
||||
//TODO FIXME empty value
|
||||
logger.trace("FILTER BY DATA SOURCE: "+ value );
|
||||
|
||||
iterator = dao.executeCriteriaQuery(cq).iterator();
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in activeFiltersObject.isByDataProvider(): "+e, e);
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
|
||||
|
||||
// //FILTER BY RANK
|
||||
}else if(activeFiltersObject.isByRank()){
|
||||
|
||||
|
||||
try {
|
||||
|
||||
CriteriaBuilder queryBuilder = dao.getCriteriaBuilder();
|
||||
value = activeFiltersObject.getRankName();
|
||||
// value = NormalizeString.lowerCaseUpFirstChar(activeFiltersObject.getRankName());
|
||||
|
@ -152,33 +145,25 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow>
|
|||
CriteriaQuery<Object> cq = queryBuilder.createQuery();
|
||||
Predicate pr1 = queryBuilder.equal(dao.rootFrom(cq).get(TaxonomyRow.RANK), NormalizeString.validateUndefined(value));
|
||||
cq.where(pr1);
|
||||
|
||||
logger.trace("FILTER BY RANK: "+ value );
|
||||
|
||||
iterator = dao.executeCriteriaQuery(cq).iterator();
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in activeFiltersObject.isByRank(): "+e, e);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
if(iterator!=null){
|
||||
|
||||
while(iterator.hasNext()){
|
||||
|
||||
TaxonomyRow row = iterator.next();
|
||||
list.add(row);
|
||||
}
|
||||
|
||||
filteredListSize = list.size();
|
||||
|
||||
filteredListSize = list.size();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
logger.trace("RETURNED List size " + list.size());
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
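The data-provider, data-source and rank branches of TaxonomyRowBuffer all follow the same CriteriaBuilder pattern: create a query, derive a root, add one equality predicate and iterate the result. A generic sketch of that pattern is below; the entity and attribute names are assumptions, and a data-source filter would pass the data-source column rather than the data-provider column that the excerpt above reuses.

import java.util.Iterator;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;

// Generic single-equality filter, e.g. filterEquals(em, TaxonomyRow.class, "dataSourceName", value).
public static <T> Iterator<T> filterEquals(EntityManager em, Class<T> type,
        String attribute, Object value) {
    CriteriaBuilder cb = em.getCriteriaBuilder();
    CriteriaQuery<T> cq = cb.createQuery(type);
    Root<T> root = cq.from(type);
    Predicate byValue = cb.equal(root.get(attribute), value);
    cq.select(root).where(byValue);
    List<T> result = em.createQuery(cq).getResultList();
    return result.iterator();
}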
|
@ -194,8 +179,8 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow>
|
|||
EntityManager em = dao.createNewManager();
|
||||
|
||||
//TODO generalize?
|
||||
String queryString = "UPDATE TaxonomyRow SET "
|
||||
+ ResultRow.SELECTED + " = "+ selection
|
||||
String queryString = "UPDATE TaxonomyRow SET "
|
||||
+ ResultRow.SELECTED + " = "+ selection
|
||||
+" where "+TaxonomyRow.IS_PARENT +"=false";
|
||||
|
||||
try {
|
||||
|
@ -210,7 +195,7 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow>
|
|||
if (em.getTransaction().isActive())
|
||||
em.getTransaction().rollback();
|
||||
em.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
|
@ -218,12 +203,12 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow>
|
|||
*/
|
||||
@Override
|
||||
public void updateAllSelectionByIds(boolean selection, List<String> listIds) throws Exception {
|
||||
|
||||
|
||||
EntityManager em = dao.createNewManager();
|
||||
|
||||
//TODO generalize?
|
||||
String queryString = "UPDATE TaxonomyRow SET "
|
||||
+ ResultRow.SELECTED + " = "+ selection
|
||||
String queryString = "UPDATE TaxonomyRow SET "
|
||||
+ ResultRow.SELECTED + " = "+ selection
|
||||
+" where "+TaxonomyRow.IS_PARENT +"=false AND "
|
||||
+ResultRow.ID_FIELD+" IN :inclList";
|
||||
|
||||
|
@ -231,9 +216,9 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow>
|
|||
em.getTransaction().begin();
|
||||
|
||||
TypedQuery<TaxonomyRow> query = em.createQuery(queryString, TaxonomyRow.class);
|
||||
|
||||
|
||||
query.setParameter("inclList", listIds);
|
||||
|
||||
|
||||
int updateCount = query.executeUpdate();
|
||||
|
||||
logger.trace("Updated " + updateCount + " item");
|
||||
|
@ -243,8 +228,8 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow>
|
|||
if (em.getTransaction().isActive())
|
||||
em.getTransaction().rollback();
|
||||
em.close();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -23,7 +23,7 @@ public class OccurrenceJobPersistence extends AbstractPersistence<OccurrencesJob
|
|||
|
||||
@Override
|
||||
public int removeAll() throws DatabaseServiceException {
|
||||
|
||||
|
||||
EntityManager em = super.createNewManager();
|
||||
int removed = 0;
|
||||
try {
|
||||
|
@ -31,7 +31,7 @@ public class OccurrenceJobPersistence extends AbstractPersistence<OccurrencesJob
|
|||
em.getTransaction().begin();
|
||||
removed = em.createQuery("DELETE FROM OccurrencesJob").executeUpdate();
|
||||
em.getTransaction().commit();
|
||||
logger.trace("DELETE FROM OccurrenceJob " + removed +" items");
|
||||
logger.trace("DELETED FROM OccurrenceJob " + removed +" items");
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in removeAll: " + e.getMessage(), e);
|
||||
|
@ -51,10 +51,10 @@ public class OccurrenceJobPersistence extends AbstractPersistence<OccurrencesJob
|
|||
Query query = em.createQuery("select t from OccurrencesJob t");
|
||||
|
||||
listOccurrencesJob = query.getResultList();
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in OccurrencesJob - getList: " + e.getMessage(), e);
|
||||
|
||||
|
||||
} finally {
|
||||
em.close();
|
||||
}
|
||||
|
@ -73,7 +73,7 @@ public class OccurrenceJobPersistence extends AbstractPersistence<OccurrencesJob
|
|||
OccurrencesJob occurrencesJob = null;
|
||||
try {
|
||||
occurrencesJob = em.getReference(OccurrencesJob.class, id);
|
||||
|
||||
|
||||
} finally {
|
||||
em.close();
|
||||
}
|
||||
|
@ -81,10 +81,10 @@ public class OccurrenceJobPersistence extends AbstractPersistence<OccurrencesJob
|
|||
logger.trace("getItemByKey return row: "+occurrencesJob.getId());
|
||||
else
|
||||
logger.trace("getItemByKey return null");
|
||||
|
||||
|
||||
//FOR DEBUG
|
||||
// System.out.println("getItemByKey return: "+row );
|
||||
|
||||
|
||||
return occurrencesJob;
|
||||
}
|
||||
|
||||
|
@ -123,21 +123,21 @@ public class OccurrenceJobPersistence extends AbstractPersistence<OccurrencesJob
|
|||
query.setFirstResult(startIndex);
|
||||
query.setMaxResults(offset);
|
||||
listOJ = query.getResultList();
|
||||
|
||||
|
||||
} finally {
|
||||
em.close();
|
||||
}
|
||||
return listOJ;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public List<OccurrencesJob> getList(Map<String, String> filterMap, int startIndex, int offset) throws DatabaseServiceException{
|
||||
|
||||
|
||||
EntityManager em = super.createNewManager();
|
||||
List<OccurrencesJob> listOJ = new ArrayList<OccurrencesJob>();
|
||||
try {
|
||||
String queryString = "select t from OccurrencesJob t";
|
||||
|
||||
|
||||
if(filterMap!=null && filterMap.size()>0){
|
||||
queryString+=" where ";
|
||||
for (String param : filterMap.keySet()) {
|
||||
|
@ -145,11 +145,11 @@ public class OccurrenceJobPersistence extends AbstractPersistence<OccurrencesJob
|
|||
queryString+=" t."+param+"="+value;
|
||||
queryString+=AND;
|
||||
}
|
||||
|
||||
|
||||
queryString = queryString.substring(0, queryString.lastIndexOf(AND));
|
||||
}
|
||||
Query query = em.createQuery(queryString);
|
||||
|
||||
|
||||
if(startIndex>-1)
|
||||
query.setFirstResult(startIndex);
|
||||
if(offset>-1)
|
||||
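getList(filterMap, startIndex, offset) above builds its WHERE clause by appending "param=value" pairs and trimming the trailing AND. A rough equivalent with the Criteria API, which avoids hand-building the string, might look like the following; the entity and attribute names are assumptions and values are compared as plain equality, matching the string version.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;

// Paged, map-driven filter roughly equivalent to getList(filterMap, startIndex, offset).
public static List<OccurrencesJob> listJobs(EntityManager em,
        Map<String, String> filterMap, int startIndex, int offset) {
    CriteriaBuilder cb = em.getCriteriaBuilder();
    CriteriaQuery<OccurrencesJob> cq = cb.createQuery(OccurrencesJob.class);
    Root<OccurrencesJob> root = cq.from(OccurrencesJob.class);

    List<Predicate> predicates = new ArrayList<Predicate>();
    if (filterMap != null) {
        for (Map.Entry<String, String> entry : filterMap.entrySet()) {
            predicates.add(cb.equal(root.get(entry.getKey()), entry.getValue()));
        }
    }
    cq.select(root).where(predicates.toArray(new Predicate[predicates.size()]));

    TypedQuery<OccurrencesJob> query = em.createQuery(cq);
    if (startIndex > -1) query.setFirstResult(startIndex);
    if (offset > -1) query.setMaxResults(offset);
    return query.getResultList();
}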
|
@ -164,24 +164,24 @@ public class OccurrenceJobPersistence extends AbstractPersistence<OccurrencesJob
|
|||
|
||||
@Override
|
||||
public List<OccurrencesJob> executeTypedQuery(CriteriaQuery<Object> cq, int startIndex, int offset) throws DatabaseServiceException {
|
||||
|
||||
|
||||
EntityManager em = super.createNewManager();
|
||||
List<OccurrencesJob> listOJ = new ArrayList<OccurrencesJob>();
|
||||
try {
|
||||
|
||||
TypedQuery typedQuery = em.createQuery(cq);
|
||||
|
||||
|
||||
if(startIndex>-1)
|
||||
typedQuery.setFirstResult(startIndex);
|
||||
if(offset>-1)
|
||||
typedQuery.setMaxResults(offset);
|
||||
|
||||
listOJ = typedQuery.getResultList();
|
||||
|
||||
|
||||
} finally {
|
||||
em.close();
|
||||
}
|
||||
|
||||
|
||||
return listOJ;
|
||||
|
||||
}
|
||||
|
@ -190,13 +190,13 @@ public class OccurrenceJobPersistence extends AbstractPersistence<OccurrencesJob
|
|||
public int deleteItemByIdField(String idField) throws DatabaseServiceException{
|
||||
EntityManager em = super.createNewManager();
|
||||
int removed = 0;
|
||||
|
||||
|
||||
try {
|
||||
em.getTransaction().begin();
|
||||
removed = em.createQuery("DELETE FROM OccurrencesJob t WHERE t."+OccurrencesJob.ID_FIELD+"='"+idField+"'").executeUpdate();
|
||||
em.getTransaction().commit();
|
||||
logger.trace("Item "+ idField + " was deleted from OccurrencesJob");
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in OccurrencesJob deleteItemByIdField: " + e.getMessage(), e);
|
||||
e.printStackTrace();
|
||||
|
@ -208,5 +208,5 @@ public class OccurrenceJobPersistence extends AbstractPersistence<OccurrencesJob
|
|||
return removed;
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
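deleteItemByIdField above concatenates the id straight into the DELETE statement. A parameterised variant, sketched below under the assumption that OccurrencesJob.ID_FIELD maps to an attribute named id, keeps quoting problems out of the query text and reuses the same transaction/rollback handling.

import javax.persistence.EntityManager;

// Parameterised single-item delete; the attribute name "id" is an assumption.
public static int deleteJobById(EntityManager em, String idValue) {
    try {
        em.getTransaction().begin();
        int removed = em.createQuery(
                "DELETE FROM OccurrencesJob t WHERE t.id = :idValue")
                .setParameter("idValue", idValue)
                .executeUpdate();
        em.getTransaction().commit();
        return removed;
    } finally {
        if (em.getTransaction().isActive()) {
            em.getTransaction().rollback();
        }
        em.close();
    }
}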
|
@ -22,12 +22,12 @@ public class OccurrenceRowPersistence extends AbstractPersistence<Occurrence>{
|
|||
protected CriteriaQuery<Object> criteriaQuery;
|
||||
protected Root<Occurrence> rootFrom;
|
||||
protected Logger logger = Logger.getLogger(OccurrenceRowPersistence.class);
|
||||
|
||||
|
||||
public OccurrenceRowPersistence(EntityManagerFactory factory) throws DatabaseServiceException{
|
||||
super(factory);
|
||||
criteriaBuilder = super.createNewManager().getCriteriaBuilder();
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public Root<Occurrence> rootFrom(CriteriaQuery<Object> cq){
|
||||
return cq.from(Occurrence.class);
|
||||
|
@ -78,7 +78,7 @@ public class OccurrenceRowPersistence extends AbstractPersistence<Occurrence>{
|
|||
Occurrence row = null;
|
||||
try {
|
||||
row = em.getReference(Occurrence.class, id);
|
||||
|
||||
|
||||
} finally {
|
||||
em.close();
|
||||
}
|
||||
|
@ -86,10 +86,10 @@ public class OccurrenceRowPersistence extends AbstractPersistence<Occurrence>{
|
|||
logger.trace("getItemByKey return row: "+row.getId() + ", service id: " + row.getServiceId());
|
||||
else
|
||||
logger.trace("getItemByKey return null");
|
||||
|
||||
|
||||
//FOR DEBUG
|
||||
// System.out.println("getItemByKey return: "+row );
|
||||
|
||||
|
||||
return row;
|
||||
}
|
||||
|
||||
|
@ -99,7 +99,7 @@ public class OccurrenceRowPersistence extends AbstractPersistence<Occurrence>{
|
|||
}
|
||||
|
||||
/**
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public int removeAll() throws DatabaseServiceException{
|
||||
|
@ -110,7 +110,7 @@ public class OccurrenceRowPersistence extends AbstractPersistence<Occurrence>{
|
|||
em.getTransaction().begin();
|
||||
removed = em.createQuery("DELETE FROM Occurrence").executeUpdate();
|
||||
em.getTransaction().commit();
|
||||
logger.trace("DELETE FROM Occurrence " + removed +" items");
|
||||
logger.trace("DELETED FROM Occurrence " + removed +" items");
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in removeAll: " + e.getMessage(), e);
|
||||
|
@ -124,7 +124,7 @@ public class OccurrenceRowPersistence extends AbstractPersistence<Occurrence>{
|
|||
|
||||
@Override
|
||||
public List<Occurrence> getList(int startIndex, int offset) throws DatabaseServiceException {
|
||||
|
||||
|
||||
EntityManager em = super.createNewManager();
|
||||
List<Occurrence> listOccurrence = new ArrayList<Occurrence>();
|
||||
try {
|
||||
|
@ -132,7 +132,7 @@ public class OccurrenceRowPersistence extends AbstractPersistence<Occurrence>{
|
|||
query.setFirstResult(startIndex);
|
||||
query.setMaxResults(offset);
|
||||
listOccurrence = query.getResultList();
|
||||
|
||||
|
||||
} finally {
|
||||
em.close();
|
||||
}
|
||||
|
@ -141,12 +141,12 @@ public class OccurrenceRowPersistence extends AbstractPersistence<Occurrence>{
|
|||
|
||||
@Override
|
||||
public List<Occurrence> getList(Map<String, String> filterMap, int startIndex, int offset) throws DatabaseServiceException{
|
||||
|
||||
|
||||
EntityManager em = super.createNewManager();
|
||||
List<Occurrence> listOccurrence = new ArrayList<Occurrence>();
|
||||
try {
|
||||
String queryString = "select t from Occurrence t";
|
||||
|
||||
|
||||
if(filterMap!=null && filterMap.size()>0){
|
||||
queryString+=" where ";
|
||||
for (String param : filterMap.keySet()) {
|
||||
|
@ -154,11 +154,11 @@ public class OccurrenceRowPersistence extends AbstractPersistence<Occurrence>{
|
|||
queryString+=" t."+param+"="+value;
|
||||
queryString+=AND;
|
||||
}
|
||||
|
||||
|
||||
queryString = queryString.substring(0, queryString.lastIndexOf(AND));
|
||||
}
|
||||
Query query = em.createQuery(queryString);
|
||||
|
||||
|
||||
if(startIndex>-1)
|
||||
query.setFirstResult(startIndex);
|
||||
if(offset>-1)
|
||||
|
@ -170,28 +170,28 @@ public class OccurrenceRowPersistence extends AbstractPersistence<Occurrence>{
|
|||
}
|
||||
return listOccurrence;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public List<Occurrence> executeTypedQuery(CriteriaQuery<Object> cq, int startIndex, int offset) throws DatabaseServiceException{
|
||||
|
||||
|
||||
EntityManager em = super.createNewManager();
|
||||
List<Occurrence> listOJ = new ArrayList<Occurrence>();
|
||||
try {
|
||||
|
||||
TypedQuery typedQuery = em.createQuery(cq);
|
||||
|
||||
|
||||
if(startIndex>-1)
|
||||
typedQuery.setFirstResult(startIndex);
|
||||
if(offset>-1)
|
||||
typedQuery.setMaxResults(offset);
|
||||
|
||||
listOJ = typedQuery.getResultList();
|
||||
|
||||
|
||||
} finally {
|
||||
em.close();
|
||||
}
|
||||
|
||||
|
||||
return listOJ;
|
||||
|
||||
}
|
||||
|
@ -200,13 +200,13 @@ public class OccurrenceRowPersistence extends AbstractPersistence<Occurrence>{
|
|||
public int deleteItemByIdField(String idField) throws DatabaseServiceException{
|
||||
EntityManager em = super.createNewManager();
|
||||
int removed = 0;
|
||||
|
||||
|
||||
try {
|
||||
em.getTransaction().begin();
|
||||
removed = em.createQuery("DELETE FROM Occurrence t WHERE t."+Occurrence.ID_FIELD+"='"+idField+"'").executeUpdate();
|
||||
em.getTransaction().commit();
|
||||
logger.trace("Item "+ idField + " was deleted from Occurrence");
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in Occurrence deleteJobById: " + e.getMessage(), e);
|
||||
|
||||
|
|
|
@ -23,14 +23,14 @@ public class ResultRowPersistence extends AbstractPersistence<ResultRow>{
|
|||
protected CriteriaQuery<Object> criteriaQuery;
|
||||
protected Root<ResultRow> rootFrom;
|
||||
protected Logger logger = Logger.getLogger(ResultRowPersistence.class);
|
||||
|
||||
|
||||
public ResultRowPersistence(EntityManagerFactory factory) throws DatabaseServiceException{
|
||||
super(factory);
|
||||
criteriaBuilder = super.createNewManager().getCriteriaBuilder();
|
||||
// CriteriaQuery<Object> cq = criteriaBuilder.createQuery();
|
||||
// Root<ResultRow> rootFrom = cq.from(ResultRow.class);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public Root<ResultRow> rootFrom(CriteriaQuery<Object> cq){
|
||||
return cq.from(ResultRow.class);
|
||||
|
@ -46,7 +46,7 @@ public class ResultRowPersistence extends AbstractPersistence<ResultRow>{
|
|||
|
||||
listResultRow = query.getResultList();
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in ResultRow - getList: " + e.getMessage(), e);
|
||||
logger.error("Error in ResultRow - getList: " + e.getMessage(), e);
|
||||
} finally {
|
||||
em.close();
|
||||
}
|
||||
|
@ -70,7 +70,7 @@ public class ResultRowPersistence extends AbstractPersistence<ResultRow>{
|
|||
|
||||
listResultRow = query.getResultList();
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in ResultRow - executeCriteriaQuery: " + e.getMessage(), e);
|
||||
logger.error("Error in ResultRow - executeCriteriaQuery: " + e.getMessage(), e);
|
||||
} finally {
|
||||
em.close();
|
||||
}
|
||||
|
@ -85,9 +85,9 @@ public class ResultRowPersistence extends AbstractPersistence<ResultRow>{
|
|||
ResultRow row = null;
|
||||
try {
|
||||
row = em.getReference(ResultRow.class, id);
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in ResultRow - getItemByKey: " + e.getMessage(), e);
|
||||
logger.error("Error in ResultRow - getItemByKey: " + e.getMessage(), e);
|
||||
} finally {
|
||||
em.close();
|
||||
}
|
||||
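getItemByKey above relies on em.getReference, which may hand back a lazy proxy and only fail when the entity is first touched. When the caller immediately logs row.getId() that is usually fine, but em.find is the safer choice if the key may be missing, since it returns null instead of a proxy. A small sketch, with the ResultRow key type assumed to be Integer:

import javax.persistence.EntityManager;

// find() returns null for a missing key instead of a proxy that fails later.
public static ResultRow getItemByKey(EntityManager em, Integer id) {
    try {
        return em.find(ResultRow.class, id);
    } finally {
        em.close();
    }
}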
|
@ -95,21 +95,21 @@ public class ResultRowPersistence extends AbstractPersistence<ResultRow>{
|
|||
logger.trace("getItemByKey return row: "+row.getId() + ", service id: " + row.getServiceId());
|
||||
else
|
||||
logger.trace("getItemByKey return null");
|
||||
|
||||
|
||||
//FOR DEBUG
|
||||
// System.out.println("getItemByKey return: "+row );
|
||||
|
||||
|
||||
return row;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public CriteriaBuilder getCriteriaBuilder() throws DatabaseServiceException{
|
||||
return createNewManager().getCriteriaBuilder();
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public int removeAll() throws DatabaseServiceException{
|
||||
|
@ -120,10 +120,10 @@ public class ResultRowPersistence extends AbstractPersistence<ResultRow>{
|
|||
em.getTransaction().begin();
|
||||
removed = em.createQuery("DELETE FROM ResultRow").executeUpdate();
|
||||
em.getTransaction().commit();
|
||||
logger.trace("DELETE FROM ResultRow " + removed +" items");
|
||||
logger.trace("DELETED FROM ResultRow " + removed +" items");
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in ResultRow - removeAll: " + e.getMessage(), e);
|
||||
logger.error("Error in ResultRow - removeAll: " + e.getMessage(), e);
|
||||
} finally {
|
||||
em.close();
|
||||
}
|
||||
|
@ -133,7 +133,7 @@ public class ResultRowPersistence extends AbstractPersistence<ResultRow>{
|
|||
|
||||
@Override
|
||||
public List<ResultRow> getList(int startIndex, int offset) throws DatabaseServiceException{
|
||||
|
||||
|
||||
EntityManager em = super.createNewManager();
|
||||
List<ResultRow> listResultRow = new ArrayList<ResultRow>();
|
||||
try {
|
||||
|
@ -141,23 +141,23 @@ public class ResultRowPersistence extends AbstractPersistence<ResultRow>{
|
|||
query.setFirstResult(startIndex);
|
||||
query.setMaxResults(offset);
|
||||
listResultRow = query.getResultList();
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in ResultRow - getList: " + e.getMessage(), e);
|
||||
logger.error("Error in ResultRow - getList: " + e.getMessage(), e);
|
||||
} finally {
|
||||
em.close();
|
||||
}
|
||||
return listResultRow;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public List<ResultRow> getList(Map<String, String> filterMap, int startIndex, int offset) throws DatabaseServiceException{
|
||||
|
||||
|
||||
EntityManager em = super.createNewManager();
|
||||
List<ResultRow> listResultRow = new ArrayList<ResultRow>();
|
||||
try {
|
||||
String queryString = "select t from ResultRow t";
|
||||
|
||||
|
||||
if(filterMap!=null && filterMap.size()>0){
|
||||
queryString+=" where ";
|
||||
for (String param : filterMap.keySet()) {
|
||||
|
@ -165,42 +165,42 @@ public class ResultRowPersistence extends AbstractPersistence<ResultRow>{
|
|||
queryString+=" t."+param+"="+value;
|
||||
queryString+=AND;
|
||||
}
|
||||
|
||||
|
||||
queryString = queryString.substring(0, queryString.lastIndexOf(AND));
|
||||
}
|
||||
Query query = em.createQuery(queryString);
|
||||
|
||||
listResultRow = query.getResultList();
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in ResultRow - getList: " + e.getMessage(), e);
|
||||
logger.error("Error in ResultRow - getList: " + e.getMessage(), e);
|
||||
} finally {
|
||||
em.close();
|
||||
}
|
||||
return listResultRow;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public List<ResultRow> executeTypedQuery(CriteriaQuery<Object> cq, int startIndex, int offset) throws DatabaseServiceException{
|
||||
|
||||
|
||||
EntityManager em = super.createNewManager();
|
||||
List<ResultRow> listOJ = new ArrayList<ResultRow>();
|
||||
try {
|
||||
|
||||
TypedQuery typedQuery = em.createQuery(cq);
|
||||
|
||||
|
||||
if(startIndex>-1)
|
||||
typedQuery.setFirstResult(startIndex);
|
||||
if(offset>-1)
|
||||
typedQuery.setMaxResults(offset);
|
||||
|
||||
listOJ = typedQuery.getResultList();
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in ResultRow - executeTypedQuery: " + e.getMessage(), e);
|
||||
logger.error("Error in ResultRow - executeTypedQuery: " + e.getMessage(), e);
|
||||
} finally {
|
||||
em.close();
|
||||
}
|
||||
|
||||
|
||||
return listOJ;
|
||||
|
||||
}
|
||||
|
@ -209,13 +209,13 @@ public class ResultRowPersistence extends AbstractPersistence<ResultRow>{
|
|||
public int deleteItemByIdField(String idField) throws DatabaseServiceException{
|
||||
EntityManager em = super.createNewManager();
|
||||
int removed = 0;
|
||||
|
||||
|
||||
try {
|
||||
em.getTransaction().begin();
|
||||
removed = em.createQuery("DELETE FROM ResultRow t WHERE t."+ResultRow.ID_FIELD+"='"+idField+"'").executeUpdate();
|
||||
em.getTransaction().commit();
|
||||
logger.trace("Item "+ idField + " was deleted from ResultRow");
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in ResultRow - deleteJobById: " + e.getMessage(), e);
|
||||
|
||||
|
|
|
@ -31,7 +31,7 @@ public class TaxonRowPersistence extends AbstractPersistence<Taxon>{
|
|||
em.getTransaction().begin();
|
||||
removed = em.createQuery("DELETE FROM Taxon").executeUpdate();
|
||||
em.getTransaction().commit();
|
||||
logger.trace("DELETE FROM Taxon " + removed +" items");
|
||||
logger.trace("DELETED FROM Taxon " + removed +" items");
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in removeAll: " + e.getMessage(), e);
|
||||
|
@ -51,7 +51,7 @@ public class TaxonRowPersistence extends AbstractPersistence<Taxon>{
|
|||
Query query = em.createQuery("select t from Taxon t");
|
||||
|
||||
listTaxon = query.getResultList();
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in Taxon - removeAll: " + e.getMessage(), e);
|
||||
|
||||
|
@ -63,7 +63,7 @@ public class TaxonRowPersistence extends AbstractPersistence<Taxon>{
|
|||
|
||||
@Override
|
||||
public List<Taxon> getList(int startIndex, int offset) throws DatabaseServiceException {
|
||||
|
||||
|
||||
EntityManager em = super.createNewManager();
|
||||
List<Taxon> listTaxon = new ArrayList<Taxon>();
|
||||
try {
|
||||
|
@ -71,7 +71,7 @@ public class TaxonRowPersistence extends AbstractPersistence<Taxon>{
|
|||
query.setFirstResult(startIndex);
|
||||
query.setMaxResults(offset);
|
||||
listTaxon = query.getResultList();
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in Taxon - getList: " + e.getMessage(), e);
|
||||
|
||||
|
@ -93,7 +93,7 @@ public class TaxonRowPersistence extends AbstractPersistence<Taxon>{
|
|||
Taxon row = null;
|
||||
try {
|
||||
row = em.getReference(Taxon.class, id);
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in Taxon - getItemByKey: " + e.getMessage(), e);
|
||||
|
||||
|
@ -104,10 +104,10 @@ public class TaxonRowPersistence extends AbstractPersistence<Taxon>{
|
|||
logger.trace("getItemByKey return row id: "+row.getId());
|
||||
else
|
||||
logger.trace("getItemByKey return null");
|
||||
|
||||
|
||||
//FOR DEBUG
|
||||
// System.out.println("getItemByKey return: "+row );
|
||||
|
||||
|
||||
return row;
|
||||
}
|
||||
|
||||
|
@ -146,7 +146,7 @@ public class TaxonRowPersistence extends AbstractPersistence<Taxon>{
|
|||
List<Taxon> listTaxon = new ArrayList<Taxon>();
|
||||
try {
|
||||
String queryString = "select t from Taxon t";
|
||||
|
||||
|
||||
if(filterMap!=null && filterMap.size()>0){
|
||||
queryString+=" where ";
|
||||
for (String param : filterMap.keySet()) {
|
||||
|
@ -154,7 +154,7 @@ public class TaxonRowPersistence extends AbstractPersistence<Taxon>{
|
|||
queryString+=" t."+param+"="+value;
|
||||
queryString+=AND;
|
||||
}
|
||||
|
||||
|
||||
queryString = queryString.substring(0, queryString.lastIndexOf(AND));
|
||||
}
|
||||
Query query = em.createQuery(queryString);
|
||||
|
@ -176,21 +176,21 @@ public class TaxonRowPersistence extends AbstractPersistence<Taxon>{
|
|||
try {
|
||||
|
||||
TypedQuery typedQuery = em.createQuery(cq);
|
||||
|
||||
|
||||
if(startIndex>-1)
|
||||
typedQuery.setFirstResult(startIndex);
|
||||
if(offset>-1)
|
||||
typedQuery.setMaxResults(offset);
|
||||
|
||||
listTaxon = typedQuery.getResultList();
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in Taxon - executeTypedQuery: " + e.getMessage(), e);
|
||||
|
||||
}finally {
|
||||
em.close();
|
||||
}
|
||||
|
||||
|
||||
return listTaxon;
|
||||
}
|
||||
|
||||
|
@ -198,13 +198,13 @@ public class TaxonRowPersistence extends AbstractPersistence<Taxon>{
|
|||
public int deleteItemByIdField(String idField) throws DatabaseServiceException{
|
||||
EntityManager em = super.createNewManager();
|
||||
int removed = 0;
|
||||
|
||||
|
||||
try {
|
||||
em.getTransaction().begin();
|
||||
removed = em.createQuery("DELETE FROM Taxon t WHERE t."+Taxon.ID_FIELD+"='"+idField+"'").executeUpdate();
|
||||
em.getTransaction().commit();
|
||||
logger.trace("Item "+ idField + " was deleted from Taxon");
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in Taxon deleteJobById: " + e.getMessage(), e);
|
||||
|
||||
|
|
|
@ -30,7 +30,7 @@ public class TaxonomyJobPersistence extends AbstractPersistence<TaxonomyJob>{
|
|||
em.getTransaction().begin();
|
||||
removed = em.createQuery("DELETE FROM TaxonomyJob").executeUpdate();
|
||||
em.getTransaction().commit();
|
||||
logger.trace("DELETE FROM TaxonomyJob " + removed +" items");
|
||||
logger.trace("DELETED FROM TaxonomyJob " + removed +" items");
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in removeAll: " + e.getMessage(), e);
|
||||
|
@ -41,19 +41,19 @@ public class TaxonomyJobPersistence extends AbstractPersistence<TaxonomyJob>{
|
|||
|
||||
return removed;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public int deleteItemByIdField(String idField) throws DatabaseServiceException{
|
||||
EntityManager em = super.createNewManager();
|
||||
int removed = 0;
|
||||
|
||||
|
||||
try {
|
||||
em.getTransaction().begin();
|
||||
removed = em.createQuery("DELETE FROM TaxonomyJob t WHERE t."+TaxonomyJob.ID_FIELD+"='"+idField+"'").executeUpdate();
|
||||
em.getTransaction().commit();
|
||||
logger.trace("Item "+ idField + " was deleted from TaxonomyJob. removed "+ removed + " item" );
|
||||
logger.trace("Item "+ idField + " was deleted from TaxonomyJob. removed "+ removed + " item" );
|
||||
// System.out.println("Item "+ idField + " was deleted from TaxonomyJob. removed "+ removed + " item" );
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in TaxonomyJob deleteJobById: " + e.getMessage(), e);
|
||||
e.printStackTrace();
|
||||
|
@ -73,7 +73,7 @@ public class TaxonomyJobPersistence extends AbstractPersistence<TaxonomyJob>{
|
|||
Query query = em.createQuery("select t from TaxonomyJob t");
|
||||
|
||||
listTaxJob = query.getResultList();
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in TaxonomyJob - getList: " + e.getMessage(), e);
|
||||
|
||||
|
@ -95,7 +95,7 @@ public class TaxonomyJobPersistence extends AbstractPersistence<TaxonomyJob>{
|
|||
TaxonomyJob taxJob = null;
|
||||
try {
|
||||
taxJob = em.getReference(TaxonomyJob.class, id);
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in TaxonomyJob - getItemByKey: " + e.getMessage(), e);
|
||||
|
||||
|
@ -106,10 +106,10 @@ public class TaxonomyJobPersistence extends AbstractPersistence<TaxonomyJob>{
|
|||
logger.trace("getItemByKey return row: "+taxJob.getId());
|
||||
else
|
||||
logger.trace("getItemByKey return null");
|
||||
|
||||
|
||||
//FOR DEBUG
|
||||
// System.out.println("getItemByKey return: "+row );
|
||||
|
||||
|
||||
return taxJob;
|
||||
}
|
||||
|
||||
|
@ -152,7 +152,7 @@ public class TaxonomyJobPersistence extends AbstractPersistence<TaxonomyJob>{
|
|||
query.setFirstResult(startIndex);
|
||||
query.setMaxResults(offset);
|
||||
listTaxJob = query.getResultList();
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in TaxonomyJob - getList: " + e.getMessage(), e);
|
||||
|
||||
|
@ -161,15 +161,15 @@ public class TaxonomyJobPersistence extends AbstractPersistence<TaxonomyJob>{
|
|||
}
|
||||
return listTaxJob;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public List<TaxonomyJob> getList(Map<String, String> filterMap, int startIndex, int offset) throws DatabaseServiceException {
|
||||
|
||||
|
||||
EntityManager em = super.createNewManager();
|
||||
List<TaxonomyJob> listTaxJob = new ArrayList<TaxonomyJob>();
|
||||
try {
|
||||
String queryString = "select t from TaxonomyJob t";
|
||||
|
||||
|
||||
if(filterMap!=null && filterMap.size()>0){
|
||||
queryString+=" where ";
|
||||
for (String param : filterMap.keySet()) {
|
||||
|
@ -177,11 +177,11 @@ public class TaxonomyJobPersistence extends AbstractPersistence<TaxonomyJob>{
|
|||
queryString+=" t."+param+"="+value;
|
||||
queryString+=AND;
|
||||
}
|
||||
|
||||
|
||||
queryString = queryString.substring(0, queryString.lastIndexOf(AND));
|
||||
}
|
||||
Query query = em.createQuery(queryString);
|
||||
|
||||
|
||||
if(startIndex>-1)
|
||||
query.setFirstResult(startIndex);
|
||||
if(offset>-1)
|
||||
|
@ -196,33 +196,33 @@ public class TaxonomyJobPersistence extends AbstractPersistence<TaxonomyJob>{
|
|||
}
|
||||
return listTaxJob;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public List<TaxonomyJob> executeTypedQuery(CriteriaQuery<Object> cq, int startIndex, int offset) throws DatabaseServiceException {
|
||||
|
||||
|
||||
EntityManager em = super.createNewManager();
|
||||
List<TaxonomyJob> listOJ = new ArrayList<TaxonomyJob>();
|
||||
try {
|
||||
|
||||
TypedQuery typedQuery = em.createQuery(cq);
|
||||
|
||||
|
||||
if(startIndex>-1)
|
||||
typedQuery.setFirstResult(startIndex);
|
||||
if(offset>-1)
|
||||
typedQuery.setMaxResults(offset);
|
||||
|
||||
listOJ = typedQuery.getResultList();
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in TaxonomyJob - executeTypedQuery: " + e.getMessage(), e);
|
||||
|
||||
} finally {
|
||||
em.close();
|
||||
}
|
||||
|
||||
|
||||
return listOJ;
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
|
@ -31,7 +31,7 @@ public class TaxonomyRowPersistence extends AbstractPersistence<TaxonomyRow>{
|
|||
em.getTransaction().begin();
|
||||
removed = em.createQuery("DELETE FROM TaxonomyRow").executeUpdate();
|
||||
em.getTransaction().commit();
|
||||
logger.trace("DELETE FROM TaxonomyRow " + removed +" items");
|
||||
logger.trace("DELETED FROM TaxonomyRow " + removed +" items");
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in TaxonomyRow - removeAll: " + e.getMessage(), e);
|
||||
|
@ -51,7 +51,7 @@ public class TaxonomyRowPersistence extends AbstractPersistence<TaxonomyRow>{
|
|||
Query query = em.createQuery("select t from TaxonomyRow t");
|
||||
|
||||
listTaxonomy = query.getResultList();
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in TaxonomyRow - getList: " + e.getMessage(), e);
|
||||
|
||||
|
@ -75,10 +75,10 @@ public class TaxonomyRowPersistence extends AbstractPersistence<TaxonomyRow>{
|
|||
TaxonomyRow row = null;
|
||||
try {
|
||||
row = em.getReference(TaxonomyRow.class, id);
|
||||
|
||||
|
||||
}catch (Exception e) {
|
||||
logger.error("An error occurred in TaxonomyRow - getItemByKey ",e);
|
||||
|
||||
|
||||
} finally {
|
||||
em.close();
|
||||
}
|
||||
|
@ -86,10 +86,10 @@ public class TaxonomyRowPersistence extends AbstractPersistence<TaxonomyRow>{
|
|||
logger.trace("getItemByKey return row: "+row.getId() + ", service id: " + row.getServiceId());
|
||||
else
|
||||
logger.trace("getItemByKey return null");
|
||||
|
||||
|
||||
//FOR DEBUG
|
||||
// System.out.println("getItemByKey return: "+row );
|
||||
|
||||
|
||||
return row;
|
||||
}
|
||||
|
||||
|
@ -131,7 +131,7 @@ public class TaxonomyRowPersistence extends AbstractPersistence<TaxonomyRow>{
|
|||
query.setFirstResult(startIndex);
|
||||
query.setMaxResults(offset);
|
||||
listTaxonomyRow = query.getResultList();
|
||||
|
||||
|
||||
}catch (Exception e) {
|
||||
logger.error("An error occurred in TaxonomyRow - get List ",e);
|
||||
}
|
||||
|
@ -140,15 +140,15 @@ public class TaxonomyRowPersistence extends AbstractPersistence<TaxonomyRow>{
|
|||
}
|
||||
return listTaxonomyRow;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public List<TaxonomyRow> getList(Map<String, String> filterMap, int startIndex, int offset) throws DatabaseServiceException{
|
||||
|
||||
|
||||
EntityManager em = super.createNewManager();
|
||||
List<TaxonomyRow> listTaxonomyRow = new ArrayList<TaxonomyRow>();
|
||||
try {
|
||||
String queryString = "select t from TaxonomyRow t";
|
||||
|
||||
|
||||
if(filterMap!=null && filterMap.size()>0){
|
||||
queryString+=" where ";
|
||||
for (String param : filterMap.keySet()) {
|
||||
|
@ -156,11 +156,11 @@ public class TaxonomyRowPersistence extends AbstractPersistence<TaxonomyRow>{
|
|||
queryString+=" t."+param+"="+value;
|
||||
queryString+=AND;
|
||||
}
|
||||
|
||||
|
||||
queryString = queryString.substring(0, queryString.lastIndexOf(AND));
|
||||
}
|
||||
Query query = em.createQuery(queryString);
|
||||
|
||||
|
||||
if(startIndex>-1)
|
||||
query.setFirstResult(startIndex);
|
||||
if(offset>-1)
|
||||
|
@ -174,16 +174,16 @@ public class TaxonomyRowPersistence extends AbstractPersistence<TaxonomyRow>{
|
|||
}
|
||||
return listTaxonomyRow;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public List<TaxonomyRow> executeTypedQuery(CriteriaQuery cq, int startIndex, int offset) throws DatabaseServiceException{
|
||||
|
||||
|
||||
EntityManager em = super.createNewManager();
|
||||
List<TaxonomyRow> listOJ = new ArrayList<TaxonomyRow>();
|
||||
try {
|
||||
|
||||
TypedQuery typedQuery = em.createQuery(cq);
|
||||
|
||||
|
||||
if(startIndex>-1)
|
||||
typedQuery.setFirstResult(startIndex);
|
||||
if(offset>-1)
|
||||
|
@ -195,7 +195,7 @@ public class TaxonomyRowPersistence extends AbstractPersistence<TaxonomyRow>{
|
|||
} finally {
|
||||
em.close();
|
||||
}
|
||||
|
||||
|
||||
return listOJ;
|
||||
|
||||
}
|
||||
|
@ -204,13 +204,13 @@ public class TaxonomyRowPersistence extends AbstractPersistence<TaxonomyRow>{
|
|||
public int deleteItemByIdField(String idField) throws DatabaseServiceException{
|
||||
EntityManager em = super.createNewManager();
|
||||
int removed = 0;
|
||||
|
||||
|
||||
try {
|
||||
em.getTransaction().begin();
|
||||
removed = em.createQuery("DELETE FROM TaxonomyRow t WHERE t."+TaxonomyRow.ID_FIELD+"='"+idField+"'").executeUpdate();
|
||||
em.getTransaction().commit();
|
||||
logger.trace("Item "+ idField + " was deleted from TaxonomyRow");
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in TaxonomyRow deleteJobById: " + e.getMessage(), e);
|
||||
|
||||
|
@ -221,5 +221,5 @@ public class TaxonomyRowPersistence extends AbstractPersistence<TaxonomyRow>{
|
|||
return removed;
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
|
@ -1,5 +1,5 @@
|
|||
/**
|
||||
*
|
||||
*
|
||||
*/
|
||||
package org.gcube.portlets.user.speciesdiscovery.server.service;
|
||||
|
||||
|
@ -23,37 +23,47 @@ import org.gcube.portlets.user.speciesdiscovery.shared.ResultRow;
|
|||
import org.gcube.portlets.user.speciesdiscovery.shared.Taxon;
|
||||
import org.gcube.portlets.user.speciesdiscovery.shared.util.NormalizeString;
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
|
||||
* The Class ResultItemConverter.
|
||||
*
|
||||
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
|
||||
* Jan 11, 2017
|
||||
*/
|
||||
public class ResultItemConverter implements Converter<ResultItem, ResultRow> {
|
||||
|
||||
protected Logger logger = Logger.getLogger(ResultItemConverter.class);
|
||||
protected int id = 0;
|
||||
protected ASLSession session;
|
||||
|
||||
|
||||
/**
|
||||
* Instantiates a new result item converter.
|
||||
*
|
||||
* @param session the session
|
||||
*/
|
||||
public ResultItemConverter(ASLSession session) {
|
||||
this.session = session;
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.gcube.portlets.user.speciesdiscovery.server.stream.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public ResultRow convert(ResultItem input) throws Exception {
|
||||
|
||||
ResultRow row = new ResultRow(id++);
|
||||
|
||||
|
||||
row.setServiceId(input.getId());
|
||||
|
||||
|
||||
//Retrieve Properties
|
||||
List<ElementProperty> listProperties = input.getProperties();
|
||||
List<ElementProperty> listProperties = input.getProperties();
|
||||
|
||||
//Fill properties
|
||||
if(listProperties!=null){
|
||||
for (ElementProperty elementProperty : listProperties)
|
||||
// row.getProperties().add(new ItemParameter(StringEscapeUtils.escapeSql(elementProperty.getName()), StringEscapeUtils.escapeSql(elementProperty.getValue())));
|
||||
row.getProperties().add(new ItemParameter(elementProperty.getName(), elementProperty.getValue()));
|
||||
|
||||
|
||||
row.setExistsProperties(true);
|
||||
}
|
||||
|
||||
|
@ -64,7 +74,7 @@ public class ResultItemConverter implements Converter<ResultItem, ResultRow> {
|
|||
}
|
||||
else
|
||||
row.setScientificNameAuthorship(ConstantsSpeciesDiscovery.NOT_FOUND);
|
||||
|
||||
|
||||
//set credits
|
||||
if(input.getCredits()!=null && !input.getCredits().isEmpty()){
|
||||
// row.setCredits(StringEscapeUtils.escapeSql(input.getCredits()));
|
||||
|
@ -72,7 +82,7 @@ public class ResultItemConverter implements Converter<ResultItem, ResultRow> {
|
|||
}
|
||||
else
|
||||
row.setCredits(ConstantsSpeciesDiscovery.NOT_FOUND);
|
||||
|
||||
|
||||
//set lsid
|
||||
if(input.getLsid()!=null && !input.getLsid().isEmpty()){
|
||||
// row.setLsid(StringEscapeUtils.escapeSql(input.getLsid()));
|
||||
|
@ -80,7 +90,7 @@ public class ResultItemConverter implements Converter<ResultItem, ResultRow> {
|
|||
}
|
||||
else
|
||||
row.setLsid(ConstantsSpeciesDiscovery.NOT_FOUND);
|
||||
|
||||
|
||||
|
||||
if(input.getProvider()!=null && !input.getProvider().isEmpty()){
|
||||
// row.setDataSourceId(StringEscapeUtils.escapeSql(input.getProvider()));
|
||||
|
@ -94,34 +104,34 @@ public class ResultItemConverter implements Converter<ResultItem, ResultRow> {
|
|||
|
||||
if (input.getDataSet()!=null) {
|
||||
DataSet dataSet = input.getDataSet();
|
||||
|
||||
|
||||
if(dataSet.getCitation()==null || dataSet.getCitation().isEmpty())
|
||||
row.setDataSetCitation("Citation Id not found");
|
||||
else
|
||||
// row.setDataSetCitation(StringEscapeUtils.escapeSql(dataSet.getCitation()));
|
||||
row.setDataSetCitation(dataSet.getCitation());
|
||||
|
||||
|
||||
if(dataSet.getId()==null || dataSet.getId().isEmpty())
|
||||
row.setDataSetId("Data Set Id not found");
|
||||
else
|
||||
row.setDataSetId(dataSet.getId());
|
||||
|
||||
|
||||
if(dataSet.getName()==null || dataSet.getName().isEmpty())
|
||||
row.setDataSetName("Data Set Name not found");
|
||||
else
|
||||
// row.setDataSetName(StringEscapeUtils.escapeSql(dataSet.getName()));
|
||||
row.setDataSetName(dataSet.getName());
|
||||
|
||||
|
||||
|
||||
if (input.getDataSet().getDataProvider()!=null) {
|
||||
DataProvider dataProvider = dataSet.getDataProvider();
|
||||
|
||||
|
||||
if(dataProvider.getId() == null || dataProvider.getId().isEmpty())
|
||||
row.setDataProviderId("Data Provider Id not found");
|
||||
else
|
||||
// row.setDataProviderId(StringEscapeUtils.escapeSql(dataProvider.getId()));
|
||||
row.setDataProviderId(dataProvider.getId());
|
||||
|
||||
|
||||
if(dataProvider.getName()==null || dataProvider.getName().isEmpty())
|
||||
row.setDataProviderName("Data Provider not found");
|
||||
else
|
||||
|
@ -129,17 +139,17 @@ public class ResultItemConverter implements Converter<ResultItem, ResultRow> {
|
|||
row.setDataProviderName(dataProvider.getName());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if(input.getCommonNames()!=null){
|
||||
for (org.gcube.data.spd.model.CommonName commonName : input.getCommonNames()){
|
||||
|
||||
|
||||
CommonName com = new CommonName(commonName.getName(), commonName.getLanguage(), row.getId());
|
||||
// DaoSession.createOrUpdateCommonName(com, session);
|
||||
row.getCommonNames().add(com);
|
||||
row.setExistsCommonName(true);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
if (input.getProducts()!=null) {
|
||||
for (Product product:input.getProducts()) {
|
||||
|
@ -152,35 +162,39 @@ public class ResultItemConverter implements Converter<ResultItem, ResultRow> {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
//DEBUG
|
||||
// System.out.println("Insert row id: "+row.getId());
|
||||
|
||||
row.setMatchingTaxon(convertTaxon(input));
|
||||
|
||||
row.setBaseTaxonValue(NormalizeString.lowerCaseUpFirstChar(getBaseTaxonValue(TaxonomySearchServiceImpl.BASETAXONOMY,input)));
|
||||
// row.setMatchingCredits(StringEscapeUtils.escapeSql(input.getCredits()));
|
||||
row.setMatchingCredits(input.getCredits());
|
||||
|
||||
// logger.trace("convert completed: " +row);
|
||||
|
||||
return row;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the base taxon value.
|
||||
*
|
||||
* @param rank the rank
|
||||
* @param taxon the taxon
|
||||
* @return the base taxon value
|
||||
*/
|
||||
private String getBaseTaxonValue(String rank, TaxonomyInterface taxon){
|
||||
|
||||
|
||||
while(taxon!=null){
|
||||
|
||||
if(taxon.getRank()!=null && taxon.getRank().equalsIgnoreCase(rank))
|
||||
return taxon.getRank();
|
||||
|
||||
|
||||
taxon = taxon.getParent();
|
||||
}
|
||||
|
||||
return TaxonomySearchServiceImpl.TAXONOMYUNKNOWN;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Convert taxon.
|
||||
*
|
||||
* @param taxon the taxon
|
||||
* @return the list
|
||||
*/
|
||||
protected List<Taxon> convertTaxon(TaxonomyInterface taxon)
|
||||
{
|
||||
List<Taxon> listTaxon = new ArrayList<Taxon>();
|
||||
|
@ -190,9 +204,7 @@ public class ResultItemConverter implements Converter<ResultItem, ResultRow> {
|
|||
Taxon tax = new Taxon(count++, taxon.getScientificName(), taxon.getCitation(), NormalizeString.lowerCaseUpFirstChar(taxon.getRank()));
|
||||
listTaxon.add(tax);
|
||||
taxon = taxon.getParent();
|
||||
|
||||
// System.out.println("Insert tax parent id: "+tax.getId());
|
||||
}
|
||||
return listTaxon;
|
||||
return listTaxon;
|
||||
}
|
||||
}
|
||||
|
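ResultItemConverter implements the project's Converter interface, whose shape can be read from the signatures above: a single convert(I) method that returns O and may throw Exception. A hedged usage sketch, assuming only that interface shape, shows how such a converter would be applied to a stream of items:

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

// Applies a Converter item by item; Converter.convert(...) throwing Exception is
// assumed from the convert(ResultItem) signature shown in the diff.
public static <I, O> List<O> convertAll(Converter<I, O> converter, Iterator<I> items) throws Exception {
    List<O> out = new ArrayList<O>();
    while (items.hasNext()) {
        out.add(converter.convert(items.next()));
    }
    return out;
}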
|
File diff suppressed because it is too large
|
@ -79,7 +79,7 @@ public class Fetcher<T extends FetchingElement> implements Runnable, Closeable {
|
|||
}
|
||||
else{
|
||||
countNullItems++;
|
||||
logger.warn("fetch new row is null!! It is the number: "+countNullItems);
|
||||
logger.warn("fetch new row is null!! Number of null value/s: "+countNullItems);
|
||||
if(MAX_CONSECUTIVE_ATTEMPTS_ON_NULL==countNullItems){
|
||||
logger.warn("Fetched "+MAX_CONSECUTIVE_ATTEMPTS_ON_NULL+ " null rows, MAX ATTEMPTS reached, complete fetch true and closing stream!!");
|
||||
silentClose();
|
||||
|
@ -93,7 +93,7 @@ public class Fetcher<T extends FetchingElement> implements Runnable, Closeable {
|
|||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error in add row " + e.getMessage());
|
||||
logger.error("Error in add row " + e.getMessage(), e);
|
||||
silentClose();
|
||||
}
|
||||
|
||||
|
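The Fetcher change above counts null rows and closes the stream once MAX_CONSECUTIVE_ATTEMPTS_ON_NULL is reached. Whether the real class resets the counter on a non-null row is not visible in the hunk; the standalone sketch below does, and its threshold value is an assumption.

// Sketch of the consecutive-null guard; threshold and reset-on-success are assumptions.
class NullRowGuard {
    private static final int MAX_CONSECUTIVE_ATTEMPTS_ON_NULL = 3; // assumed threshold
    private int countNullItems = 0;

    /** Returns true when fetching should stop because too many nulls arrived in a row. */
    boolean onRow(Object row) {
        if (row != null) {
            countNullItems = 0; // a real row breaks the run of nulls
            return false;
        }
        countNullItems++;
        return countNullItems >= MAX_CONSECUTIVE_ATTEMPTS_ON_NULL;
    }
}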
|
|
@ -1,41 +1,41 @@
|
|||
package org.gcube.portlets.user.speciesdiscovery.shared;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.GeneratedValue;
|
||||
import javax.persistence.GenerationType;
|
||||
import javax.persistence.Id;
|
||||
|
||||
import com.google.gwt.user.client.rpc.IsSerializable;
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
|
||||
* The Class DataSource.
|
||||
*
|
||||
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
|
||||
* Jan 10, 2017
|
||||
*/
|
||||
|
||||
@Entity
|
||||
public class DataSource implements Serializable{
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
private static final long serialVersionUID = 3373136869904925484L;
|
||||
public class DataSource implements IsSerializable{
|
||||
|
||||
@Id
|
||||
@GeneratedValue(strategy = GenerationType.IDENTITY)
|
||||
protected int internalId;
|
||||
|
||||
|
||||
private String id;
|
||||
private String name;
|
||||
private String description;
|
||||
|
||||
public DataSource() {}
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
* @param id
|
||||
* @param name
|
||||
* @param description
|
||||
* Instantiates a new data source.
|
||||
*/
|
||||
public DataSource() {}
|
||||
|
||||
/**
|
||||
* Instantiates a new data source.
|
||||
*
|
||||
* @param id the id
|
||||
* @param name the name
|
||||
* @param description the description
|
||||
*/
|
||||
public DataSource(String id, String name, String description) {
|
||||
setId(id);
|
||||
|
@ -44,38 +44,75 @@ public class DataSource implements Serializable{
|
|||
}
|
||||
|
||||
//Used in Data Source advanced option to create the check list
|
||||
/**
|
||||
* Instantiates a new data source.
|
||||
*
|
||||
* @param id the id
|
||||
* @param name the name
|
||||
*/
|
||||
public DataSource(String id, String name){
|
||||
setId(id);
|
||||
setName(name);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Gets the id.
|
||||
*
|
||||
* @return the id
|
||||
*/
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the id.
|
||||
*
|
||||
* @param id the new id
|
||||
*/
|
||||
public void setId(String id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the name.
|
||||
*
|
||||
* @return the name
|
||||
*/
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the name.
|
||||
*
|
||||
* @param name the new name
|
||||
*/
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Gets the description.
|
||||
*
|
||||
* @return the description
|
||||
*/
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the description.
|
||||
*
|
||||
* @param description the new description
|
||||
*/
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder builder = new StringBuilder();
|
||||
|
|
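DataSource now implements GWT's IsSerializable instead of java.io.Serializable, which drops the serialVersionUID but requires a public no-argument constructor and RPC-serialisable fields on both ends of the wire. A minimal DTO following the same convention (the class and field names here are illustrative, not from the portlet):

import com.google.gwt.user.client.rpc.IsSerializable;

// GWT-RPC transfer object: no-arg constructor plus plain fields, no serialVersionUID needed.
public class ExampleDto implements IsSerializable {

    private String id;
    private String name;

    public ExampleDto() {} // required by GWT RPC

    public ExampleDto(String id, String name) {
        this.id = id;
        this.name = name;
    }

    public String getId() { return id; }
    public String getName() { return name; }
}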
|
@ -1,23 +1,33 @@
|
|||
package org.gcube.portlets.user.speciesdiscovery.shared;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
|
||||
*
|
||||
*/
|
||||
public class DataSourceCapability implements Serializable{
|
||||
import com.google.gwt.user.client.rpc.IsSerializable;
|
||||
|
||||
private static final long serialVersionUID = -9083819206898794333L;
|
||||
|
||||
|
||||
/**
|
||||
* The Class DataSourceCapability.
|
||||
*
|
||||
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
|
||||
* Jan 10, 2017
|
||||
*/
|
||||
public class DataSourceCapability implements IsSerializable{
|
||||
|
||||
private ArrayList<SpeciesCapability> listFilters;
|
||||
|
||||
private SpeciesCapability capability;
|
||||
|
||||
|
||||
/**
|
||||
* Instantiates a new data source capability.
|
||||
*/
|
||||
public DataSourceCapability() {}
|
||||
|
||||
|
||||
/**
|
||||
* Instantiates a new data source capability.
|
||||
*
|
||||
* @param capability the capability
|
||||
* @param listFilters the list filters
|
||||
*/
|
||||
public DataSourceCapability(SpeciesCapability capability, ArrayList<SpeciesCapability> listFilters) {
|
||||
super();
|
||||
this.capability = capability;
|
||||
|
@ -25,20 +35,40 @@ public class DataSourceCapability implements Serializable{
|
|||
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the list filters.
|
||||
*
|
||||
* @return the list filters
|
||||
*/
|
||||
public ArrayList<SpeciesCapability> getListFilters() {
|
||||
return listFilters;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the list filters.
|
||||
*
|
||||
* @param listFilters the new list filters
|
||||
*/
|
||||
public void setListFilters(ArrayList<SpeciesCapability> listFilters) {
|
||||
this.listFilters = listFilters;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the capability.
|
||||
*
|
||||
* @return the capability
|
||||
*/
|
||||
public SpeciesCapability getCapability() {
|
||||
return capability;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the capability.
|
||||
*
|
||||
* @param capability the new capability
|
||||
*/
|
||||
public void setCapability(SpeciesCapability capability) {
|
||||
this.capability = capability;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -1,57 +1,85 @@
package org.gcube.portlets.user.speciesdiscovery.shared;
import java.io.Serializable;
import java.util.ArrayList;
/**
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* The Class DataSourceModel.
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* Jan 10, 2017
*/
public class DataSourceModel extends DataSource {
public class DataSourceModel extends DataSource implements Serializable{
private static final long serialVersionUID = 7399231525793036218L;
private ArrayList<DataSourceCapability> listCapabilities;
private DataSourceRepositoryInfo dataSourceRepositoryInfo;
public DataSourceModel() {}
/**
*
* @param id
* @param name
* @param description
* @param listCapabilities
* @param dsInfo
* Instantiates a new data source model.
*/
public DataSourceModel() {}
/**
* Instantiates a new data source model.
*
* @param id the id
* @param name the name
* @param description the description
* @param listCapabilities the list capabilities
* @param dsInfo the ds info
*/
public DataSourceModel(String id, String name, String description, ArrayList<DataSourceCapability> listCapabilities, DataSourceRepositoryInfo dsInfo) {
super(id,name,description);
setListCapabilities(listCapabilities);
setDataSourceRepositoryInfo(dsInfo);
}
/**
* Sets the data source repository info.
*
* @param dsInfo the new data source repository info
*/
private void setDataSourceRepositoryInfo(DataSourceRepositoryInfo dsInfo) {
this.dataSourceRepositoryInfo = dsInfo;
}
//Used in Data Source advanced option to create the check list
/**
* Instantiates a new data source model.
*
* @param id the id
* @param name the name
*/
public DataSourceModel(String id, String name){
super(id,name);
}
/**
* Gets the list capabilities.
*
* @return the list capabilities
*/
public ArrayList<DataSourceCapability> getListCapabilities() {
return listCapabilities;
}
/**
* Sets the list capabilities.
*
* @param listCapabilities the new list capabilities
*/
public void setListCapabilities(ArrayList<DataSourceCapability> listCapabilities) {
this.listCapabilities = listCapabilities;
}
/**
* Gets the data source repository info.
*
* @return the data source repository info
*/
public DataSourceRepositoryInfo getDataSourceRepositoryInfo() {
return dataSourceRepositoryInfo;
}
@ -1,30 +1,37 @@
package org.gcube.portlets.user.speciesdiscovery.shared;
import java.io.Serializable;
import java.util.Map;
import com.google.gwt.user.client.rpc.IsSerializable;
/**
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* The Class DataSourceRepositoryInfo.
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* Jan 10, 2017
*/
public class DataSourceRepositoryInfo implements IsSerializable{
public class DataSourceRepositoryInfo implements Serializable{
/**
*
*/
private static final long serialVersionUID = -4557351371954637191L;
private String logoUrl;
private String pageUrl;
private Map<String, String> properties;
private String name;
private String description;
/**
* Instantiates a new data source repository info.
*/
public DataSourceRepositoryInfo() {}
/**
* Instantiates a new data source repository info.
*
* @param logoUrl the logo url
* @param pageUrl the page url
* @param properties the properties
* @param description the description
*/
public DataSourceRepositoryInfo(String logoUrl, String pageUrl, Map<String,String> properties, String description) {
this.logoUrl = logoUrl;
this.pageUrl = pageUrl;
@ -32,33 +39,90 @@ public class DataSourceRepositoryInfo implements Serializable{
this.description = description;
}
/**
* Gets the logo url.
*
* @return the logo url
*/
public String getLogoUrl() {
return logoUrl;
}
/**
* Sets the logo url.
*
* @param logoUrl the new logo url
*/
public void setLogoUrl(String logoUrl) {
this.logoUrl = logoUrl;
}
/**
* Gets the page url.
*
* @return the page url
*/
public String getPageUrl() {
return pageUrl;
}
/**
* Sets the page url.
*
* @param pageUrl the new page url
*/
public void setPageUrl(String pageUrl) {
this.pageUrl = pageUrl;
}
/**
* Gets the properties.
*
* @return the properties
*/
public Map<String, String> getProperties() {
return properties;
}
/**
* Sets the properties.
*
* @param properties the properties
*/
public void setProperties(Map<String, String> properties) {
this.properties = properties;
}
/**
* Gets the name.
*
* @return the name
*/
public String getName() {
return name;
}
/**
* Gets the description.
*
* @return the description
*/
public String getDescription() {
return description;
}
/**
* Sets the description.
*
* @param description the new description
*/
public void setDescription(String description) {
this.description = description;
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
@ -73,5 +137,5 @@ public class DataSourceRepositoryInfo implements Serializable{
builder.append("]");
return builder.toString();
}
}
@ -1,5 +1,5 @@
/**
*
*
*/
package org.gcube.portlets.user.speciesdiscovery.shared;
@ -17,7 +17,7 @@ import javax.persistence.Id;
import javax.persistence.OneToMany;
/**
*
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
*
*/
@ -48,7 +48,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab
public static final String BASE_TAXON_VALUE = "baseTaxonValue";
public static final String DATAPROVIDER_ID = "dataProviderId";
public static final String DATASOURCE_ID = "dataSourceId";
public static final String KINGDOM_ID = "kingdomID";
public static final String PHYLUM_ID = "phylumID";
public static final String CLASS_ID = "classID";
@ -56,7 +56,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab
public static final String FAMILY_ID = "familyID";
public static final String GENUS_ID = "genusID";
public static final String SPECIES_ID = "speciesID";
public final static String SCIENTIFICNAMEAUTHORSHIP = "scientificNameAuthorship"; //USED
public final static String CREDITS = "credits"; //USED
public final static String LSID = "lsid"; //USED
@ -67,7 +67,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
protected int id;
protected boolean selected = false;
protected String dataSourceId;
protected String dataSourceName;
@ -82,10 +82,10 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab
@OneToMany(cascade=CascadeType.PERSIST, fetch=FetchType.EAGER)
protected List<CommonName> commonNames = new ArrayList<CommonName>();
@OneToMany(cascade=CascadeType.PERSIST, fetch=FetchType.EAGER)
protected List<Taxon> matchingTaxon = new ArrayList<Taxon>();
@OneToMany(cascade=CascadeType.PERSIST, fetch=FetchType.EAGER)
protected List<ItemParameter> properties = new ArrayList<ItemParameter>();
@ -108,17 +108,13 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab
protected String scientificNameAuthorship;
protected String credits;
protected String lsid;
// protected String propertiesAsXml;
// protected ItemParameterList properties;
protected boolean existsProperties = false;
protected ResultRow() {
}
/**
* @param id
*/
@ -140,7 +136,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab
return id+"";
}
/**
* @return the selected
*/
@ -162,7 +158,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab
public String getDataSourceId() {
return dataSourceId;
}
/**
* @param dataSourceId
@ -407,7 +403,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab
}
/**
*
*
* @return true if there are common names
*/
public boolean existsCommonName() {
@ -415,7 +411,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab
}
/**
*
*
* @param the
* boolean to set
*/
@ -486,7 +482,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab
public void setSpeciesID(String speciesID) {
this.speciesID = speciesID;
}
public String getServiceId() {
return serviceIdField;
}
@ -513,7 +509,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab
}
public void setExistsProperties(boolean b) {
this.existsProperties = b;
this.existsProperties = b;
}
public boolean existsProperties() {
@ -522,7 +518,7 @@ public class ResultRow implements FetchingElement, SelectableElement, Serializab
public List<Taxon> getParents() {
Collections.sort(matchingTaxon);
// for (Taxon t : matchingTaxon) {
// System.out.println("+++ Parent :" +t.getId() + ", name: "+t.getName() +", rank: "+t.getRank());
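The ResultRow hunks above touch a JPA entity cached in the portlet's embedded H2 database: an identity-generated key plus eagerly fetched, persist-cascaded @OneToMany collections (commonNames, matchingTaxon, properties). A reduced sketch of that mapping follows, using the same javax.persistence annotations; the entity and field names here are illustrative only, not code from this commit.

import java.util.ArrayList;
import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.OneToMany;

@Entity
public class ExampleRow {

    // surrogate key generated by the database, as in ResultRow
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    protected int id;

    // children are saved together with the row and loaded eagerly, mirroring the mapping above
    @OneToMany(cascade = CascadeType.PERSIST, fetch = FetchType.EAGER)
    protected List<ExampleChild> children = new ArrayList<ExampleChild>();

    public int getId() {
        return id;
    }

    public List<ExampleChild> getChildren() {
        return children;
    }
}

@Entity
class ExampleChild {
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    protected int id;
}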
@ -1,30 +1,31 @@
package org.gcube.portlets.user.speciesdiscovery.shared;
import com.google.gwt.user.client.rpc.IsSerializable;
public enum SpeciesCapability implements IsSerializable{
public enum SpeciesCapability {
//Filters
FROMDATE("FROMDATE", "Date From"),
TODATE("DATETO", "Date To"),
LOWERBOUND("LOWERBOUND", "Lower Bound"),
FROMDATE("FROMDATE", "Date From"),
TODATE("DATETO", "Date To"),
LOWERBOUND("LOWERBOUND", "Lower Bound"),
UPPERBOUND("UPPERBOUND", "Upper Bound"),
//Capabilities
RESULTITEM("RESULTITEM", "Occurrence"),
TAXONOMYITEM("TAXONOMYITEM", "Taxon"),
OCCURRENCESPOINTS("OCCURRENCESPOINTS", "OccurrencesPoints"),
SYNONYMS("SYNONYMS", "Synonyms"),
UNFOLD("UNFOLD", "Unfold"),
NAMESMAPPING("NAMESMAPPING", "Names Mapping"),
UNKNOWN("UNKNOWN", "unknown");
private String id;
private String name;
private SpeciesCapability(){
private SpeciesCapability(){
}
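SpeciesCapability above drops the IsSerializable marker: GWT-RPC can transfer enum constants without it, so the interface was redundant. The id/display-name constant pattern used by the enum looks roughly like the sketch below; only a few constants are shown, and the accessor names are assumptions rather than code from this diff.

// Sketch of the id/display-name enum pattern (subset of constants, accessor names assumed).
public enum ExampleCapability {

    FROMDATE("FROMDATE", "Date From"),
    TODATE("DATETO", "Date To"),
    UNKNOWN("UNKNOWN", "unknown");

    private String id;
    private String name;

    // each constant stores a service-side id and a label shown in the UI
    private ExampleCapability(String id, String name) {
        this.id = id;
        this.name = name;
    }

    public String getId() {
        return id;
    }

    public String getName() {
        return name;
    }
}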
@ -7,7 +7,8 @@
<!-- Other module inherits -->
<inherits name="com.extjs.gxt.ui.GXT" />
<inherits name="org.gcube.portlets.user.gcubegisviewer.GCubeGisViewer" />
<!-- Used to show a layer via GisViewer -->
<!-- <inherits name="org.gcube.portlets.user.gcubegisviewer.GCubeGisViewer" /> -->
<inherits name="com.allen_sauer.gwt.log.gwt-log-TRACE" />
<set-property name="log_DivLogger" value="DISABLED" />
@ -36,26 +36,26 @@
</servlet-mapping>
<!-- GisViewer Servlets -->
<servlet>
<servlet-name>GisViewerService</servlet-name>
<servlet-class>org.gcube.portlets.user.gcubegisviewer.server.GCubeGisViewerServletImpl</servlet-class>
</servlet>
<!-- <servlet> -->
<!-- <servlet-name>GisViewerService</servlet-name> -->
<!-- <servlet-class>org.gcube.portlets.user.gcubegisviewer.server.GCubeGisViewerServletImpl</servlet-class> -->
<!-- </servlet> -->
<servlet-mapping>
<servlet-name>GisViewerService</servlet-name>
<url-pattern>/speciesdiscovery/GisViewerService</url-pattern>
</servlet-mapping>
<!-- <servlet-mapping> -->
<!-- <servlet-name>GisViewerService</servlet-name> -->
<!-- <url-pattern>/speciesdiscovery/GisViewerService</url-pattern> -->
<!-- </servlet-mapping> -->
<!-- GISVIEWER MAP GENERATOR -->
<servlet>
<servlet-name>MapGenerator</servlet-name>
<servlet-class>org.gcube.portlets.user.gisviewer.server.MapGenerator</servlet-class>
</servlet>
<!-- <servlet> -->
<!-- <servlet-name>MapGenerator</servlet-name> -->
<!-- <servlet-class>org.gcube.portlets.user.gisviewer.server.MapGenerator</servlet-class> -->
<!-- </servlet> -->
<servlet-mapping>
<servlet-name>MapGenerator</servlet-name>
<url-pattern>/speciesdiscovery/MapGenerator</url-pattern>
</servlet-mapping>
<!-- <servlet-mapping> -->
<!-- <servlet-name>MapGenerator</servlet-name> -->
<!-- <url-pattern>/speciesdiscovery/MapGenerator</url-pattern> -->
<!-- </servlet-mapping> -->
<!-- Workspace Light Tree servlet -->
<!-- <servlet> -->
@ -1,30 +1,35 @@
/**
*
*
*/
package org.gcube.portlets.user.speciesdiscovery.client;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import javax.persistence.metamodel.EntityType;
import org.apache.log4j.Logger;
import org.gcube.portlets.user.speciesdiscovery.shared.ResultRow;
import org.h2.jdbcx.JdbcDataSource;
/**
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* @Nov 11, 2013
*
*
*/
public class DBTester {
@ -33,74 +38,172 @@ public class DBTester {
public static Logger logger = Logger.getLogger(DBTester.class);
public static void main(String[] a) throws Exception {
/*
Class.forName("org.h2.Driver");
Connection conn = DriverManager.getConnection("jdbc:h2:/home/francesco-mangiacrapa/Portal-Bundle2.2/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;create=true", "","");
//Class.forName("org.h2.Driver");
//Connection conn = DriverManager.getConnection("jdbc:h2:/home/francesco-mangiacrapa/Portal-Bundle-3.0.0-3.2.0/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;create=true", "","");
// add application code here
Statement stat = conn.createStatement();
ResultSet rs = stat.executeQuery("select * from TaxonomyRow");
/* Statement stat = conn.createStatement();
ResultSet rs = stat.executeQuery("select * from ResultRow");
ResultSetMetaData meta = rs.getMetaData();
int columnCount = meta.getColumnCount();
while (rs.next())
{
System.out.println("New row");
for (int i = 1; i <= columnCount; i++) {
// System.out.println("ColumName: "+ meta.getColumnName(i));
System.out.println("ColumLabel: "+meta.getColumnLabel(i));
System.out.println(rs.getString(meta.getColumnLabel(i)));
}
System.out.println("\n\n");
}
conn.close();
*/
testJdbcDataSource();
}*/
//testJdbcDataSource();
//String queryString = "SELECT MIN(tax.id) from Taxon tax";
//
// testTypedQuery(queryString, Taxon.class);
getAllEntities();
String queryString = "SELECT *" +
" FROM "+ResultRow.class.getSimpleName()+" r" +
" LEFT OUTER JOIN RESULTROW_TAXON rt";
// " INNER JOIN "+Taxon.class.getSimpleName()+" t";
queryString = "select *" +
" from RESULTROW r JOIN RESULTROW_TAXON rt on r.ID=rt.RESULTROW_ID JOIN TAXON t on t.INTERNALID=rt.MATCHINGTAXON_INTERNALID" +
" where t.RANK = 'Genus' and t.ID IN" +
" (select MIN(tax.ID) from TAXON tax)";
//////
//
// testTypedQuery(queryString, ResultRow.class);
//testQuery(queryString);
testNativeQuery(queryString, ResultRow.class);
}
/**
* @param queryString
* @param class1
*/
private static void testNativeQuery(String queryString, Class<?> className) {
EntityManagerFactory emF = createEntityManagerFactory("/home/francesco-mangiacrapa/Portal-Bundle-3.0.0-3.2.0/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;");
EntityManager em = emF.createEntityManager();
Query query = em.createNativeQuery(queryString, className);
List<Object> listResult = new ArrayList<Object>();
try {
listResult = query.getResultList();
for (Object object : listResult) {
System.out.println(object.toString());
}
} catch (Exception e) {
logger.error("Error in TypedQuery: " + e.getMessage(), e);
} finally {
em.close();
}
}
public static void getAllEntities(){
EntityManagerFactory emF = createEntityManagerFactory("/home/francesco-mangiacrapa/Portal-Bundle-3.0.0-3.2.0/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;");
EntityManager em = emF.createEntityManager();
for (EntityType<?> entity : em.getMetamodel().getEntities()) {
final String className = entity.getName();
System.out.println("Trying select * from: " + className);
Query q = em.createQuery("SELECT c from " + className + " c");
q.getResultList().iterator();
System.out.println("ok: " + className);
}
}
public static void testTypedQuery(String queryString, Class classToReturn){
EntityManagerFactory emF = createEntityManagerFactory("/home/francesco-mangiacrapa/Portal-Bundle-3.0.0-3.2.0/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;");
EntityManager em = emF.createEntityManager();
TypedQuery<Class> tQuery = em.createQuery(queryString, classToReturn);
List<Class> listResult = new ArrayList<Class>();
try {
listResult = tQuery.getResultList();
System.out.println(listResult.toString());
} catch (Exception e) {
logger.error("Error in TypedQuery: " + e.getMessage(), e);
} finally {
em.close();
}
}
public static void testQuery(String queryString){
EntityManagerFactory emF = createEntityManagerFactory("/home/francesco-mangiacrapa/Portal-Bundle-3.0.0-3.2.0/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;");
EntityManager em = emF.createEntityManager();
Query query = em.createQuery(queryString);
List<Object> listResult = new ArrayList<Object>();
try {
listResult = query.getResultList();
System.out.println(listResult.toString());
} catch (Exception e) {
logger.error("Error in TypedQuery: " + e.getMessage(), e);
} finally {
em.close();
}
}
public static void testJdbcDataSource() throws NamingException {
JdbcDataSource ds = new JdbcDataSource();
ds.setURL("jdbc:h2:/home/francesco-mangiacrapa/Portal-Bundle2.2/tomcat-6.0.29/persistence/h2dbspecies/h2testusergcubedevsec;create=true");
Context ctx = new InitialContext();
ctx.bind("java:global/jpa-eclipselink/TaxonomyRow", ds);
// final Context context = EJBContainer.createEJBContainer(p).getContext();
//
//
// Movies movies = (Movies) context.lookup("java:global/jpa-eclipselink/Movies");
try {
Connection conn = ds.getConnection();
ResultSet rs = conn.createStatement().executeQuery("select * from TaxonomyRow");
ResultSetMetaData meta = rs.getMetaData();
int columnCount = meta.getColumnCount();
while (rs.next())
{
System.out.println("New row");
for (int i = 1; i <= columnCount; i++) {
// System.out.println("ColumName: "+ meta.getColumnName(i));
System.out.println("ColumLabel: "+meta.getColumnLabel(i));
System.out.println(rs.getString(meta.getColumnLabel(i)));
}
System.out.println("\n\n");
}
conn.close();
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
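DBTester relies on a createEntityManagerFactory(String) helper that is not included in this diff. A minimal sketch of what such a helper could look like is given below, assuming a JPA persistence unit backed by the embedded H2 database and the standard javax.persistence.jdbc.* overrides; the unit name "speciesPersistenceUnit" and the property values are assumptions, not taken from this commit.

import java.util.HashMap;
import java.util.Map;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;

public class EntityManagerFactoryUtil {

    /**
     * Builds an EntityManagerFactory whose JDBC URL points at the given H2 database path.
     * "speciesPersistenceUnit" is a placeholder: it must match a unit declared in persistence.xml.
     */
    public static EntityManagerFactory createEntityManagerFactory(String h2DatabasePath) {
        Map<String, String> properties = new HashMap<String, String>();
        properties.put("javax.persistence.jdbc.driver", "org.h2.Driver");
        properties.put("javax.persistence.jdbc.url", "jdbc:h2:" + h2DatabasePath);
        return Persistence.createEntityManagerFactory("speciesPersistenceUnit", properties);
    }
}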
@ -1,5 +1,5 @@
/**
*
*
*/
package org.gcube.portlets.user.speciesdiscovery.client;
@ -9,44 +9,48 @@ import java.util.List;
import java.util.concurrent.TimeUnit;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data.spd.client.proxies.Classification;
import org.gcube.data.spd.client.proxies.Executor;
import org.gcube.data.spd.client.proxies.Manager;
import org.gcube.data.spd.client.proxies.Occurrence;
import org.gcube.data.spd.client.proxies.ClassificationClient;
import org.gcube.data.spd.client.proxies.ExecutorClient;
import org.gcube.data.spd.client.proxies.ManagerClient;
import org.gcube.data.spd.client.proxies.OccurrenceClient;
import org.gcube.data.spd.model.PluginDescription;
/**
* @author "Federico De Faveri defaveri@isti.cnr.it"
* The Class ListPlugins.
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* Jan 10, 2017
*/
public class ListPlugins {
protected static Manager call;
protected static Occurrence occurrencesCall;
protected static Classification classificationCall;
protected static Executor executorCall;
protected static ManagerClient call;
protected static OccurrenceClient occurrencesCall;
protected static ClassificationClient classificationCall;
protected static ExecutorClient executorCall;
/**
* @param args
* The main method.
*
* @param args the arguments
*/
public static void main(String[] args) {
String scope = "/gcube/devsec";
ScopeProvider.instance.set(scope);
// this.call = manager().at( URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build();
// this.occurrencesCall = occurrences().at( URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build();
// this.classificationCall = classification().at( URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build();
call = manager().withTimeout(3, TimeUnit.MINUTES).build();
// executorCall = executor().withTimeout(3, TimeUnit.MINUTES).build();
// occurrencesCall = occurrence().withTimeout(3, TimeUnit.MINUTES).build();
// classificationCall = classification().withTimeout(3, TimeUnit.MINUTES).build();
// call = manager().at(URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build();
//Manager call = manager().withTimeout(3, TimeUnit.MINUTES).build();
List<PluginDescription> plugins = call.getPluginsDescription();
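With spd-client-library 4.0 the proxy interfaces gain the *Client suffix (Manager becomes ManagerClient, and so on), while the fluent builder shown above is unchanged. A compact usage sketch follows, reusing only calls that appear in this diff; the scope value and the printed message are illustrative, and the client is assumed to be built as in ListPlugins, i.e. manager().withTimeout(3, TimeUnit.MINUTES).build().

import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data.spd.client.proxies.ManagerClient;
import org.gcube.data.spd.model.PluginDescription;

public class PluginLister {

    /**
     * Lists the plugins exposed by the SPD service in the given scope.
     * The ManagerClient is expected to be built beforehand, as in ListPlugins above.
     */
    public static void listPlugins(String scope, ManagerClient client) {
        ScopeProvider.instance.set(scope);
        List<PluginDescription> plugins = client.getPluginsDescription();
        System.out.println("Found " + plugins.size() + " plugin(s): " + plugins);
    }
}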
@ -1,5 +1,5 @@
/**
*
*
*/
package org.gcube.portlets.user.speciesdiscovery.client;
@ -10,12 +10,12 @@ import java.util.concurrent.TimeUnit;
import org.gcube.application.framework.core.session.ASLSession;
import org.gcube.application.framework.core.session.SessionManager;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data.spd.client.proxies.Manager;
import org.gcube.data.spd.client.proxies.ManagerClient;
import org.gcube.data.spd.model.exceptions.InvalidQueryException;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.model.products.ResultItem;
import org.gcube.data.spd.stubs.exceptions.UnsupportedCapabilityException;
import org.gcube.data.spd.stubs.exceptions.UnsupportedPluginException;
import org.gcube.data.spd.model.service.exceptions.UnsupportedCapabilityException;
import org.gcube.data.spd.model.service.exceptions.UnsupportedPluginException;
import org.gcube.data.streams.Stream;
import org.gcube.portlets.user.speciesdiscovery.server.service.ResultItemConverter;
import org.gcube.portlets.user.speciesdiscovery.server.service.StreamIterator;
@ -24,76 +24,81 @@ import org.gcube.portlets.user.speciesdiscovery.server.stream.CloseableIterator;
import org.gcube.portlets.user.speciesdiscovery.server.stream.ConversionIterator;
import org.gcube.portlets.user.speciesdiscovery.shared.ResultRow;
/**
* @author "Federico De Faveri defaveri@isti.cnr.it"
* The Class ServiceQuery.
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* Jan 10, 2017
*/
public class ServiceQuery {
/**
* @param args
* @throws UnsupportedPluginException
* @throws InvalidQueryException
* @throws UnsupportedCapabilityException
* @throws UnsupportedPluginException
* @throws InvalidQueryException
* @throws UnsupportedCapabilityException
*/
private static String username = "test.user";
/**
* The main method.
*
* @param args the arguments
* @throws InvalidQueryException the invalid query exception
* @throws UnsupportedPluginException the unsupported plugin exception
* @throws UnsupportedCapabilityException the unsupported capability exception
*/
public static void main(String[] args) throws InvalidQueryException, UnsupportedPluginException, UnsupportedCapabilityException {
String scope = "/gcube/devsec";
// String scope = "/d4science.research-infrastructures.eu/gCubeApps/BiodiversityResearchEnvironment"; //Production
ScopeProvider.instance.set(scope);
ASLSession session = SessionManager.getInstance().getASLSession("123", username);
Manager call = manager().withTimeout(3, TimeUnit.MINUTES).build();
ManagerClient call = manager().withTimeout(3, TimeUnit.MINUTES).build();
// Manager call = manager().at(URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build();
//Stream<ResultElement> results = call.search("SEARCH BY CN 'shark' RESOLVE WITH OBIS EXPAND WITH ITIS RETURN Product");
// Stream<ResultElement> results = call.search("SEARCH BY CN 'shark' RESOLVE WITH OBIS EXPAND WITH ITIS WHERE coordinate <= 15.12, 16.12 RETURN Product");
// Stream<ResultElement> results = call.search("SEARCH BY SN 'sarda sarda' RESOLVE WITH OBIS EXPAND WITH ITIS WHERE coordinate <= 15.12, 16.12 RETURN Product");
System.out.println("start query...");
Stream<ResultElement> results = call.search("SEARCH BY SN 'Latimeria chalumnae' IN GBIF RETURN Product HAVING xpath(\"//product[type='Occurrence' and count>0]\")");
// Stream<ResultElement> results = call.search("SEARCH BY SN 'Palinurus elephas' IN WoRMS RETURN Taxon");
StreamIterator<ResultElement> input = new StreamIterator<ResultElement>(results);
System.out.println("Results from service...");
int i=0;
while(results.hasNext()) {
ResultElement elem = results.next();
System.out.println(++i +") el: "+elem.getId() +" type: "+elem.getType().name());
}
System.out.println("Results from conversion...");
ConversionIterator<ResultElement, ResultItem> caster = buildCaster(input);
//from ResultItem to ResultRow
ResultItemConverter converter = new ResultItemConverter(session);
ConversionIterator<ResultItem, ResultRow> inputConverter = new ConversionIterator<ResultItem, ResultRow>(caster, converter);
while (inputConverter.hasNext()) {
ResultRow row = inputConverter.next();
System.out.println(++i +") row: "+row);
}
results.close();
System.out.println("DONE");
}
/**
* Builds the caster.
*
* @param <I> the generic type
* @param <O> the generic type
* @param input the input
* @return the conversion iterator
*/
protected static <I,O> ConversionIterator<I, O> buildCaster(CloseableIterator<I> input)
{
{
CastConverter<I, O> elementConverter = new CastConverter<I, O>();
ConversionIterator<I, O> caster = new ConversionIterator<I, O>(input, elementConverter);
return caster;
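ServiceQuery above chains the portlet's iterator helpers to turn the raw SPD result stream into ResultRow beans. A condensed sketch of that pipeline follows, reusing the types that appear in the diff; the package of CastConverter is assumed to be the same server.stream package as the other iterator helpers, and the class and method names introduced here are illustrative.

import org.gcube.application.framework.core.session.ASLSession;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.model.products.ResultItem;
import org.gcube.data.streams.Stream;
import org.gcube.portlets.user.speciesdiscovery.server.service.ResultItemConverter;
import org.gcube.portlets.user.speciesdiscovery.server.service.StreamIterator;
// assumption: CastConverter lives next to the other stream helpers
import org.gcube.portlets.user.speciesdiscovery.server.stream.CastConverter;
import org.gcube.portlets.user.speciesdiscovery.server.stream.ConversionIterator;
import org.gcube.portlets.user.speciesdiscovery.shared.ResultRow;

public class SearchResultPipeline {

    /**
     * Wraps a raw SPD result stream and converts each element to the portlet's ResultRow,
     * mirroring the chain used in ServiceQuery; the session is only needed by ResultItemConverter.
     */
    public static void printRows(Stream<ResultElement> results, ASLSession session) {
        StreamIterator<ResultElement> input = new StreamIterator<ResultElement>(results);
        CastConverter<ResultElement, ResultItem> cast = new CastConverter<ResultElement, ResultItem>();
        ConversionIterator<ResultElement, ResultItem> items =
                new ConversionIterator<ResultElement, ResultItem>(input, cast);
        ResultItemConverter converter = new ResultItemConverter(session);
        ConversionIterator<ResultItem, ResultRow> rows =
                new ConversionIterator<ResultItem, ResultRow>(items, converter);
        while (rows.hasNext()) {
            System.out.println(rows.next());
        }
        results.close();
    }
}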