fixed: pom, counter on grid, check/uncheck all rows, taxonomy job

git-svn-id: http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/portlets/user/speciesdiscovery@75163 82a268e6-3cf1-43bd-a215-b396298e98cf
Francesco Mangiacrapa 2013-05-20 16:50:24 +00:00
parent 068539a9e0
commit e523587e42
15 changed files with 529 additions and 29 deletions

View File

@@ -1,5 +1,5 @@
eclipse.preferences.version=1
jarsExcludedFromWebInfLib=
lastWarOutDir=/home/fedy2/workspace/org.gcube.portlets.user.species-discovery/target/species-discovery-3.1.0-SNAPSHOT
lastWarOutDir=/home/francesco-mangiacrapa/wseclipse/speciesdiscovery(trunk)/target/species-discovery-3.1.0-SNAPSHOT
warSrcDir=src/main/webapp
warSrcDirIsOutput=false

View File

@@ -3,9 +3,6 @@
<wb-resource deploy-path="/" source-path="/target/m2e-wtp/web-resources"/>
<wb-resource deploy-path="/" source-path="/src/main/webapp" tag="defaultRootSource"/>
<wb-resource deploy-path="/WEB-INF/classes" source-path="/src/main/java"/>
<dependent-module archiveName="gis-viewer-3.0.1-SNAPSHOT.jar" deploy-path="/WEB-INF/lib" handle="module:/resource/gisViewer(private)/gisViewer(private)">
<dependency-type>uses</dependency-type>
</dependent-module>
<property name="java-output-path" value="/${module}/target/www/WEB-INF/classes"/>
<property name="context-root" value="species-discovery"/>
</wb-module>

View File

@@ -78,7 +78,7 @@
<artifactId>spd-model</artifactId>
<version>1.0.0-SNAPSHOT</version>
<!-- <version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version> -->
<scope>provided</scope>
<!-- <scope>provided</scope> -->
</dependency>
<dependency>

View File

@@ -87,6 +87,7 @@ import org.gcube.portlets.user.speciesdiscovery.shared.SearchFilters;
import org.gcube.portlets.user.speciesdiscovery.shared.SearchResultType;
import org.gcube.portlets.user.speciesdiscovery.shared.SearchType;
import org.gcube.portlets.user.speciesdiscovery.shared.SpeciesCapability;
import org.gcube.portlets.user.speciesdiscovery.shared.TaxonomyRow;
import org.gcube.portlets.user.speciesdiscovery.shared.filter.ResultFilter;
import org.gcube.portlets.user.workspace.lighttree.client.ItemType;
import org.gcube.portlets.user.workspace.lighttree.client.event.DataLoadEvent;
@@ -384,8 +385,30 @@ public class SearchController {
case BYCHILDREN:
SpeciesDiscovery.taxonomySearchService.createTaxonomyJobByChildren(createSpeciesJobEvent.getTaxonomy(), createSpeciesJobEvent.getDataSourceName(), new AsyncCallback<JobTaxonomyModel>() {
// createSpeciesJobEvent.getTaxonomy().setParent(null); //FIX FOR PARENTS SERIALIZATIONS
// SpeciesDiscovery.taxonomySearchService.createTaxonomyJobByChildren(createSpeciesJobEvent.getTaxonomy(), createSpeciesJobEvent.getDataSourceName(), new AsyncCallback<JobTaxonomyModel>() {
//
// @Override
// public void onFailure(Throwable caught) {
// Info.display("Error", "Sorry, An error occurred on create job. Please try again later");
// Log.error("Error on loading", "An error occurred on create job by children, retry." +caught.getMessage());
// }
//
// @Override
// public void onSuccess(JobTaxonomyModel result) {
//
// if(result!=null){
// Info.display("Species Taxonomy Job","A new taxonomy job was submitted");
// excecuteGetJobs(SearchResultType.TAXONOMY_ITEM, false);
// searchBorderLayoutPanel.getSpeciesSouthPanel().setIconTaxonomyByCounter(1);
// }
// }
// });
TaxonomyRow taxonomy = createSpeciesJobEvent.getTaxonomy();
SpeciesDiscovery.taxonomySearchService.createTaxonomyJobByChildren(taxonomy.getServiceId(), taxonomy.getName(), taxonomy.getRank(), createSpeciesJobEvent.getDataSourceName(), new AsyncCallback<JobTaxonomyModel>() {
@Override
public void onFailure(Throwable caught) {
Info.display("Error", "Sorry, An error occurred on create job. Please try again later");
@@ -403,6 +426,7 @@ public class SearchController {
}
});
break;
case BYIDS:

View File

@@ -265,10 +265,10 @@ public class TabItemForTaxonomyRow {
@Override
public void onClick(ClickEvent event) {
System.out.println("parent index: "+parentIndex + " size "+currentTaxonomy.getParents().size());
// System.out.println("parent index: "+parentIndex + " size "+currentTaxonomy.getParents().size());
if(parentIndex >=0 && parentIndex<=currentTaxonomy.getParents().size()){
System.out.println("set parent true");
// System.out.println("set parent true");
taxon.setParent(currentTaxonomy.getParents().subList(parentIndex, currentTaxonomy.getParents().size()));
}
else if(parentIndex == -1){ //items loaded from get children - the current Taxonomy item is the parent

View File

@@ -105,8 +105,8 @@ public interface TaxonomySearchService extends RemoteService {
public List<JobTaxonomyModel> getListTaxonomyJobs() throws Exception;
public JobTaxonomyModel createTaxonomyJobByChildren(TaxonomyRow taxonomy,
String dataSourceName) throws Exception;
// public JobTaxonomyModel createTaxonomyJobByChildren(TaxonomyRow taxonomy,
// String dataSourceName) throws Exception;
public boolean cancelTaxonomyJob(String jobIdentifier) throws Exception;
@@ -154,4 +154,9 @@ public interface TaxonomySearchService extends RemoteService {
public JobTaxonomyModel createTaxonomyJobByIds(String search,
List<DataSourceModel> dataSources) throws Exception;
public JobTaxonomyModel createTaxonomyJobByChildren(String taxonomyServiceId,
String taxonomyName, String taxonomyRank, String dataSourceName)
throws Exception;
}

View File

@@ -95,8 +95,8 @@ public interface TaxonomySearchServiceAsync {
void getListTaxonomyJobs(AsyncCallback<List<JobTaxonomyModel>> callback);
void createTaxonomyJobByChildren(TaxonomyRow taxonomy, String dataSourceName,
AsyncCallback<JobTaxonomyModel> callback);
// void createTaxonomyJobByChildren(TaxonomyRow taxonomy, String dataSourceName,
// AsyncCallback<JobTaxonomyModel> callback);
void cancelTaxonomyJob(String jobIdentifier, AsyncCallback<Boolean> callback);
@@ -137,5 +137,9 @@ public interface TaxonomySearchServiceAsync {
AsyncCallback<List<DataSource>> callback);
void createTaxonomyJobByIds(String search, List<DataSourceModel> dataSources, AsyncCallback<JobTaxonomyModel> callback);
void createTaxonomyJobByChildren(String taxonomyServiceId,
String taxonomyName, String taxonomyRank, String dataSourceName,
AsyncCallback<JobTaxonomyModel> callback);
}
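
For context, the signature change above (a TaxonomyRow argument replaced by its service id, name and rank) avoids sending the row's parent chain over GWT-RPC, as hinted by the "FIX FOR PARENTS SERIALIZATIONS" comment in SearchController. A minimal client-side sketch against the new async signature follows; the proxy creation, helper class and logging are illustrative only and are not part of this commit.

import com.google.gwt.core.client.GWT;
import com.google.gwt.user.client.rpc.AsyncCallback;
// Package assumed from the other shared imports shown in this commit.
import org.gcube.portlets.user.speciesdiscovery.shared.JobTaxonomyModel;

public class TaxonomyJobClientSketch {

	// Standard GWT-RPC proxy creation for the service interface above.
	private final TaxonomySearchServiceAsync service = GWT.create(TaxonomySearchService.class);

	// Hypothetical helper: submits a "by children" taxonomy job and logs the outcome.
	public void submitJobByChildren(String serviceId, String name, String rank, String dataSource) {
		service.createTaxonomyJobByChildren(serviceId, name, rank, dataSource,
			new AsyncCallback<JobTaxonomyModel>() {
				@Override
				public void onFailure(Throwable caught) {
					GWT.log("createTaxonomyJobByChildren failed", caught);
				}

				@Override
				public void onSuccess(JobTaxonomyModel job) {
					GWT.log("taxonomy job submitted");
				}
			});
	}
}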

View File

@@ -233,8 +233,16 @@ public class StreamPagingLoader {
protected void streamComplete()
{
streamSizePoller.cancel();
fireStreamLoadingComplete();
Timer t = new Timer() {
@Override
public void run() {
streamSizePoller.cancel();
fireStreamLoadingComplete();
}
};
t.schedule(500);
}
public void setPage(int page)

View File

@@ -4,6 +4,7 @@ import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
@@ -584,6 +585,24 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
if(bufferSize>=MAX_BUFFERING_ELEMENTS){
logger.trace("getSearchStatus MAX_BUFFERING_ELEMENTS is reached - stop search");
stopSearchWithoutRemove();
//CALCULATE NEW BUFFER SIZE AFTER FETCHING IS CLOSED
try {
// int sleepingTime = 500;
// logger.trace("sleeping "+sleepingTime+" ms for translate last objets arrived into buffer");
// Thread.sleep(sleepingTime); //SLEEPING 0,5 sec, for translating objects that are inserting in buffer and calculate new size of buffer
// logger.trace("sleep termined - search status alive");
bufferSize = isActiveFilterOnResult?((FilterableFetchingBuffer<? extends FetchingElement>) searchSession.getBuffer()).getFilteredListSize():searchSession.getBufferSize();
} catch (SQLException e) {
logger.trace("getSearchStatus bufferSize error : "+e.getMessage(), e);
throw new SearchServiceException("An error occurred on the server while checking the search status, please retry");
} catch (Exception e) {
logger.trace("getSearchStatus bufferSize error : "+e.getMessage(), e);
throw new SearchServiceException("An error occurred on the server while checking the search status, please retry");
}
status.setResultEOF(true);
status.setSize(bufferSize);
status.setIsMaxSize(true);
@@ -702,10 +721,21 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
data = buffer.getFilteredList(activeFiltersObject);
if(data!=null){
for (FetchingElement fetchingElement : data)
updateRowSelection(fetchingElement.getId(), selection);
List<String> ids = new ArrayList<String>();
for (FetchingElement fetchingElement : data){
ids.add(fetchingElement.getId()+"");
}
SelectableFetchingBuffer<? extends FetchingElement> bufferCompleted = (SelectableFetchingBuffer<? extends FetchingElement>) searchSession.getBuffer();
bufferCompleted.updateAllSelectionByIds(selection, ids);
size = data.size();
//OLD CODE
// for (FetchingElement fetchingElement : data)
// updateRowSelection(fetchingElement.getId(), selection);
//
// size = data.size();
}
}
@@ -1361,10 +1391,20 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
}
/**
*
* @param taxonomyServiceId
* @param taxonomyName
* @param taxonomyRank
* @param dataSourceName
* @return
* @throws Exception
*/
@Override
public JobTaxonomyModel createTaxonomyJobByChildren(TaxonomyRow taxonomy, String dataSourceName) throws Exception {
logger.trace("Create job for taxonomy id: " + taxonomy.getServiceId());
public JobTaxonomyModel createTaxonomyJobByChildren(String taxonomyServiceId, String taxonomyName, String taxonomyRank, String dataSourceName) throws Exception {
//FIXED 20/05/2013
logger.trace("Create job for taxonomy id: " + taxonomyServiceId);
// System.out.println("Create job for taxonomy id: " + taxonomy.getServiceId());
JobTaxonomyModel jobSpeciesModel = null;
@@ -1373,15 +1413,15 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
SpeciesService taxonomyService = getSpeciesService();
String speciesJobId = taxonomyService.createTaxonomyJobForDWCAByChildren(taxonomy.getServiceId());
String speciesJobId = taxonomyService.createTaxonomyJobForDWCAByChildren(taxonomyServiceId);
long startTime = Calendar.getInstance().getTimeInMillis();
//STORE INTO DAO
TaxonomyJob speciesJob = new TaxonomyJob(speciesJobId, DownloadState.PENDING.toString(), SAVE_CHILDREN_OF+taxonomy.getName(), taxonomy.getName(), dataSourceName, taxonomy.getRank(), startTime, 0, taxonomy.getServiceId());
TaxonomyJob speciesJob = new TaxonomyJob(speciesJobId, DownloadState.PENDING.toString(), SAVE_CHILDREN_OF+taxonomyName, taxonomyName, dataSourceName, taxonomyRank, startTime, 0, taxonomyServiceId);
taxonomyJobDao.insert(speciesJob);
jobSpeciesModel = new JobTaxonomyModel(speciesJob.getId(), speciesJob.getDescriptiveName(), DownloadState.PENDING, null, taxonomy.getName(), dataSourceName, taxonomy.getRank());
jobSpeciesModel = new JobTaxonomyModel(speciesJob.getId(), speciesJob.getDescriptiveName(), DownloadState.PENDING, null, taxonomyName, dataSourceName, taxonomyRank);
Date start = DateUtil.millisecondsToDate(speciesJob.getStartTime());
// jobSpeciesModel.setStartTime(DateUtil.dateToDateFormatString(start));
@@ -1391,6 +1431,36 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
return jobSpeciesModel;
}
// @Override
// public JobTaxonomyModel createTaxonomyJobByChildren(TaxonomyRow taxonomy, String dataSourceName) throws Exception {
//
// logger.trace("Create job for taxonomy id: " + taxonomy.getServiceId());
//// System.out.println("Create job for taxonomy id: " + taxonomy.getServiceId());
//
// JobTaxonomyModel jobSpeciesModel = null;
//
// TaxonomyJobPersistence taxonomyJobDao = DaoSession.getTaxonomyJobDAO(getASLSession());
//
// SpeciesService taxonomyService = getSpeciesService();
//
// String speciesJobId = taxonomyService.createTaxonomyJobForDWCAByChildren(taxonomy.getServiceId());
//
// long startTime = Calendar.getInstance().getTimeInMillis();
//
// //STORE INTO DAO
// TaxonomyJob speciesJob = new TaxonomyJob(speciesJobId, DownloadState.PENDING.toString(), SAVE_CHILDREN_OF+taxonomy.getName(), taxonomy.getName(), dataSourceName, taxonomy.getRank(), startTime, 0, taxonomy.getServiceId());
// taxonomyJobDao.insert(speciesJob);
//
// jobSpeciesModel = new JobTaxonomyModel(speciesJob.getId(), speciesJob.getDescriptiveName(), DownloadState.PENDING, null, taxonomy.getName(), dataSourceName, taxonomy.getRank());
//
// Date start = DateUtil.millisecondsToDate(speciesJob.getStartTime());
//// jobSpeciesModel.setStartTime(DateUtil.dateToDateFormatString(start));
// jobSpeciesModel.setStartTime(start);
// jobSpeciesModel.setEndTime(null);
//
// return jobSpeciesModel;
// }
@Override
public JobTaxonomyModel createTaxonomyJobByIds(String search, List<DataSourceModel> dataSources) throws Exception {

View File

@@ -10,6 +10,7 @@ import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
@@ -20,12 +21,16 @@ import org.gcube.portlets.user.speciesdiscovery.server.session.FilterableFetchin
import org.gcube.portlets.user.speciesdiscovery.shared.MainTaxonomicRankEnum;
import org.gcube.portlets.user.speciesdiscovery.shared.ResultRow;
import org.gcube.portlets.user.speciesdiscovery.shared.Taxon;
import org.gcube.portlets.user.speciesdiscovery.shared.TaxonomyRow;
import org.gcube.portlets.user.speciesdiscovery.shared.filter.FilterCriteria;
import org.gcube.portlets.user.speciesdiscovery.shared.filter.ResultFilter;
import org.gcube.portlets.user.speciesdiscovery.shared.util.NormalizeString;
/**
* @author "Federico De Faveri defaveri@isti.cnr.it"
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* @May 20, 2013
*
*/
public class ResultRowBuffer extends AbstractSelectableDaoBuffer<ResultRow> implements FilterableFetchingBuffer<ResultRow> {
@@ -239,4 +244,36 @@ public class ResultRowBuffer extends AbstractSelectableDaoBuffer<ResultRow> impl
}
/* (non-Javadoc)
* @see org.gcube.portlets.user.speciesdiscovery.server.session.SelectableFetchingBuffer#updateAllSelectionByIds(boolean, java.util.List)
*/
@Override
public void updateAllSelectionByIds(boolean selection, List<String> listIds)
throws Exception {
EntityManager em = dao.createNewManager();
String queryString = "UPDATE ResultRow t SET "
+ ResultRow.SELECTED + " = "+ selection +" where "
+ ResultRow.ID_FIELD+" IN :inclList";
try {
em.getTransaction().begin();
TypedQuery<ResultRow> query = em.createQuery(queryString, ResultRow.class);
query.setParameter("inclList", listIds);
int updateCount = query.executeUpdate();
logger.trace("Updated " + updateCount + " item");
em.getTransaction().commit();
} finally {
if (em.getTransaction().isActive())
em.getTransaction().rollback();
em.close();
}
}
}
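
The bulk check/uncheck update above can be exercised in isolation with plain JPA. The following is a minimal sketch, not the committed code: the persistence-unit name and the field names r.selected / r.id are assumptions standing in for ResultRow.SELECTED and ResultRow.ID_FIELD, and the untyped createQuery(String) form is used because a JPQL bulk UPDATE returns an update count rather than entities.

import java.util.Arrays;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import javax.persistence.Query;

public class BulkSelectionUpdateSketch {

	public static void main(String[] args) {
		// "speciesdiscovery" is a hypothetical persistence-unit name.
		EntityManagerFactory factory = Persistence.createEntityManagerFactory("speciesdiscovery");
		EntityManager em = factory.createEntityManager();
		List<String> ids = Arrays.asList("1", "2", "3");
		try {
			em.getTransaction().begin();
			// Collection-valued parameter for the IN clause, as in updateAllSelectionByIds above.
			Query query = em.createQuery(
				"UPDATE ResultRow r SET r.selected = :selected WHERE r.id IN :inclList");
			query.setParameter("selected", Boolean.TRUE);
			query.setParameter("inclList", ids);
			int updated = query.executeUpdate();
			System.out.println("Updated " + updated + " rows");
			em.getTransaction().commit();
		} finally {
			if (em.getTransaction().isActive()) {
				em.getTransaction().rollback();
			}
			em.close();
			factory.close();
		}
	}
}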

View File

@@ -10,6 +10,7 @@ import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
@@ -26,6 +27,7 @@ import org.gcube.portlets.user.speciesdiscovery.shared.util.NormalizeString;
/**
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* @May 20, 2013
*
*/
public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow> implements FilterableFetchingBuffer<TaxonomyRow> {
@@ -212,5 +214,39 @@ public class TaxonomyRowBuffer extends AbstractSelectableDaoBuffer<TaxonomyRow>
}
}
/* (non-Javadoc)
* @see org.gcube.portlets.user.speciesdiscovery.server.session.SelectableFetchingBuffer#updateAllSelectionByIds(boolean, java.util.List)
*/
@Override
public void updateAllSelectionByIds(boolean selection, List<String> listIds) throws Exception {
EntityManager em = dao.createNewManager();
//TODO generalize?
String queryString = "UPDATE TaxonomyRow SET "
+ ResultRow.SELECTED + " = "+ selection
+" where "+TaxonomyRow.IS_PARENT +"=false AND "
+ResultRow.ID_FIELD+" IN :inclList";
try {
em.getTransaction().begin();
TypedQuery<TaxonomyRow> query = em.createQuery(queryString, TaxonomyRow.class);
query.setParameter("inclList", listIds);
int updateCount = query.executeUpdate();
logger.trace("Updated " + updateCount + " item");
em.getTransaction().commit();
} finally {
if (em.getTransaction().isActive())
em.getTransaction().rollback();
em.close();
}
}
}

View File

@@ -19,6 +19,8 @@ public interface SelectableFetchingBuffer<T extends SelectableElement> extends F
public void updateAllSelection(boolean selection) throws Exception;
public void updateAllSelectionByIds(boolean selection, List<String> listIds) throws Exception;
public int sizeSelected() throws Exception;
}

View File

@@ -15,10 +15,8 @@ import javax.persistence.Id;
@Entity
public class TaxonomyJob implements Serializable {
/**
*
*/
private static final long serialVersionUID = -8525420257121366179L;
private static final long serialVersionUID = -38475548097321689L;
public final static String ID_FIELD = "jobId";
public final static String NAME = "descriptiveName";

View File

@@ -0,0 +1,296 @@
/**
*
*/
package org.gcube.portlets.user.speciesdiscovery.client;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import org.gcube.application.framework.core.session.ASLSession;
import org.gcube.application.framework.core.session.SessionManager;
import org.gcube.common.core.scope.GCUBEScope;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.gcube.data.streams.Stream;
import org.gcube.portlets.user.speciesdiscovery.server.persistence.DaoSession;
import org.gcube.portlets.user.speciesdiscovery.server.service.SpeciesService;
import org.gcube.portlets.user.speciesdiscovery.server.service.TaxonomyItemConverter;
import org.gcube.portlets.user.speciesdiscovery.shared.ResultRow;
import org.gcube.portlets.user.speciesdiscovery.shared.TaxonomyRow;
/**
* @author "Federico De Faveri defaveri@isti.cnr.it"
*
*/
public class H2TestResultItemRetrievingAndConverting {
private static final int MAX_ELEMENTS = 20;
static String sessionID = "1";
static String user = "test.user";
static GCUBEScope scope;
static ASLSession session;
public static void main(String[] args) throws Exception
{
session = SessionManager.getInstance().getASLSession(sessionID, user);
scope = GCUBEScope.getScope("/gcube/devsec");
session.setScope(scope.toString());
EntityManagerFactory factory = DaoSession.getEntityManagerFactory(session);
EntityManager em = factory.createEntityManager();
// ScopeProvider.instance.set("/gcube/devsec");
int removed;
String tableName;
List<ResultRow> list;
// String tableName = "ParentTaxonomyRow";
//
// List<ResultRow> list = getList(em, tableName);
//
// System.out.println("list "+tableName+ "size is "+list.size());
// em = factory.createEntityManager();
// removed = removeAll(em, tableName);
//
// System.out.println("removed " +removed);
tableName = "ResultRow";
em = factory.createEntityManager();
list = getList(em, tableName);
System.out.println("list "+tableName+ "size is "+list.size());
// removeAll(em, "ParentTaxonomyRow");
// em = factory.createEntityManager();
// removed = removeAll(em, tableName);
// System.out.println("removed " +removed);
// SpeciesService taxonomyService = new SpeciesService(scope, session);
long start = System.currentTimeMillis();
long last = System.currentTimeMillis();
int counter = 0;
// CloseableIterator<ResultElement> input = taxonomyService.searchByQuery("'sarda sarda' as ScientificName return Taxon");// searchByFilters(searchTerm, SearchTypeEnum.BY_SCIENTIFIC_NAME, searchFilters);
// SearchFilters searchFilters = new SearchFilters();
// searchFilters.setResultType(SpeciesCapability.TAXONOMYITEM);
// SearchResultType resultType = QueryUtil.getResultType(searchFilters);
// CloseableIterator<FetchingElement> output = IteratorChainBuilder.buildChain(input, resultType, session);
//
// FetchingSession<TaxonomyRow> fetchingSession = (FetchingSession<TaxonomyRow>) FetchingSessionUtil.createFetchingSession(output, resultType, session);
//
//
// while(!fetchingSession.isComplete()) {
//
// System.out.println((System.currentTimeMillis()-start)+" buffer size "+fetchingSession.getBufferSize());
// last = System.currentTimeMillis();
//
// Thread.sleep(1000);
//
// System.out.println("COMPLETE: "+(System.currentTimeMillis()-last)+"ms from last item to close the stream; buffer size "+fetchingSession.getBufferSize());
//
// System.out.println(fetchingSession.getBufferSize() + " results in "+(System.currentTimeMillis()-start)+"ms total; "+(last-start)+"ms from first to last result");
// }
// System.out.println("Start time: "+last);
//
// while(output.hasNext()) {
//
// System.out.println((System.currentTimeMillis()-last)+"ms "+output.next());
// last = System.currentTimeMillis();
// counter++;
// }
// Stream<ResultElement> input = taxonomyService.searchByQuery2("'sarda sarda' as ScientificName return Taxon");
// //from ResultItem to ResultRow
//// List<ResultItem> listElements = new ArrayList<ResultItem>();
//// List<ResultRow> listRR = new ArrayList<ResultRow>();
//// ResultItemConverter converter = new ResultItemConverter(session);
//
// TaxonomyItemConverter converter = new TaxonomyItemConverter(session);
// List<TaxonomyRow> listRR = new ArrayList<TaxonomyRow>();
// List<TaxonomyItem> listElements = new ArrayList<TaxonomyItem>();
//
// while(input.hasNext()) {
//
//// ResultItem result = (ResultItem) input.next();
//
// TaxonomyItem result = (TaxonomyItem) input.next();
//
// System.out.println((System.currentTimeMillis()-last)+" ms to recover "+result);
// last = System.currentTimeMillis();
// counter++;
//
// listElements.add(result);
//
// listRR.add(converter.convert(result));
//
// System.out.println((System.currentTimeMillis()-last)+" ms to convert "+result);
//
// if(MAX_ELEMENTS == counter)
// break;
// }
//
// input.close();
//
// counter = 0;
// start = System.currentTimeMillis();
// last = System.currentTimeMillis();
//
//// for (ResultRow resultRow : listRR) {
// for (TaxonomyRow resultRow : listRR) {
//
// System.out.println(counter + ")" + (System.currentTimeMillis()-last)+" ms "+resultRow);
// last = System.currentTimeMillis();
//
//
// try{
//
//// storeTaxonParents(resultRow);
//// storeCommonName(listElements.get(counter), resultRow);
// storeRR(resultRow);
//
// }catch (Exception e) {
// e.printStackTrace();
// }
//
// if(counter==50)
// break;
//
// counter++;
// }
// System.out.println("BUFFER SIZE: " + fetchingSession.getBuffer().getList().size());
// System.out.println("COMPLETE: "+(System.currentTimeMillis()-last)+" ms from last item to close the stream");
//
// System.out.println(counter + " results in "+(System.currentTimeMillis()-start)+" ms total; "+(last-start)+" ms from first to last result");
List<String> listServiceId = new ArrayList<String>();
int i = 0;
for (ResultRow rr : list) {
listServiceId.add(rr.getIdToString());
System.out.println(++i +")listserviceId "+listServiceId);
if(i==30)
break;
}
em = factory.createEntityManager();
String queryStr = "select t from ResultRow t where t."+ResultRow.ID_FIELD+" IN :inclList";
TypedQuery<ResultRow> query2 = em.createQuery(queryStr, ResultRow.class);
query2.setParameter("inclList", listServiceId);
// query.setFirstResult(2);
//
// query.setMaxResults(5);
start = System.currentTimeMillis();
List<ResultRow> results = query2.getResultList();
//
i = 0;
for (ResultRow r: results) {
System.out.println("ResultRow query "+ ++i +") " + r);
}
// ExpressionBuilder expression = new ExpressionBuilder(Taxon.class);
// expression.get(Taxon.RANK).equalsIgnoreCase("class");
//
//
// CriteriaBuilder cb = em.getCriteriaBuilder();
//
// // Query for a List of objects.
//// CriteriaQuery<Object> cq = cb.createQuery();
//
// CriteriaQuery<Taxon> cq = cb.createQuery(Taxon.class);
//
// Root<Taxon> e = cq.from(Taxon.class);
//
// cq.where(cb.equal(e.get(Taxon.RANK), "class"));
//
// query = em.createQuery(cq);
//
// List<Taxon> result = query.getResultList();
//
// for (Taxon taxon : result) {
// System.out.println("taxon: " + taxon);
// }
}
protected static void storeRR(TaxonomyRow row){
EntityManagerFactory factory = DaoSession.getEntityManagerFactory(session);
EntityManager em = factory.createEntityManager();
em.getTransaction().begin();
em.persist(row);
em.getTransaction().commit();
em.close();
}
public static int removeAll(EntityManager em, String tableName) {
int removed = 0;
try {
em.getTransaction().begin();
removed = em.createQuery("DELETE FROM "+tableName).executeUpdate();
em.getTransaction().commit();
System.out.println("DELETE FROM "+tableName + " " + removed +" items");
} catch (Exception e) {
e.printStackTrace();
} finally {
em.close();
}
return removed;
}
public static List<ResultRow> getList(EntityManager em, String tableName) {
List<ResultRow> listResultRow = new ArrayList<ResultRow>();
try {
Query query = em.createQuery("select t from "+tableName+" t");
listResultRow = query.getResultList();
} finally {
em.close();
}
return listResultRow;
}
}

View File

@@ -3,14 +3,20 @@
*/
package org.gcube.portlets.user.speciesdiscovery.client;
import static org.gcube.data.spd.client.plugins.AbstractPlugin.classification;
import static org.gcube.data.spd.client.plugins.AbstractPlugin.executor;
import static org.gcube.data.spd.client.plugins.AbstractPlugin.manager;
import static org.gcube.data.spd.client.plugins.AbstractPlugin.occurrence;
import java.net.URI;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data.spd.client.proxies.Classification;
import org.gcube.data.spd.client.proxies.Executor;
import org.gcube.data.spd.client.proxies.Manager;
import org.gcube.data.spd.client.proxies.Occurrence;
import org.gcube.data.spd.model.PluginDescription;
/**
@@ -19,6 +25,12 @@ import org.gcube.data.spd.model.PluginDescription;
*/
public class ListPlugins {
protected static Manager call;
protected static Occurrence occurrencesCall;
protected static Classification classificationCall;
protected static Executor executorCall;
/**
* @param args
*/
@@ -26,7 +38,18 @@ public class ListPlugins {
String scope = "/gcube/devsec";
ScopeProvider.instance.set(scope);
Manager call = manager().at(URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build();
// this.call = manager().at( URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build();
// this.occurrencesCall = occurrences().at( URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build();
// this.classificationCall = classification().at( URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build();
call = manager().withTimeout(3, TimeUnit.MINUTES).build();
// executorCall = executor().withTimeout(3, TimeUnit.MINUTES).build();
// occurrencesCall = occurrence().withTimeout(3, TimeUnit.MINUTES).build();
// classificationCall = classification().withTimeout(3, TimeUnit.MINUTES).build();
// call = manager().at(URI.create("http://node24.d.d4science.research-infrastructures.eu:9000")).withTimeout(3, TimeUnit.MINUTES).build();
//Manager call = manager().withTimeout(3, TimeUnit.MINUTES).build();
List<PluginDescription> plugins = call.getPluginsDescription();