Working on Task #7001

git-svn-id: http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/portlets/user/speciesdiscovery@142466 82a268e6-3cf1-43bd-a215-b396298e98cf
Francesco Mangiacrapa 2017-02-10 17:04:01 +00:00
parent 6f0417e86e
commit 4e5b889342
14 changed files with 224 additions and 171 deletions

View File

@@ -17,30 +17,30 @@ import com.google.gwt.event.shared.SimpleEventBus;
import com.google.gwt.user.client.Timer;
/**
*
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
*
*/
public class SearchBorderLayoutPanel extends ContentPanel {
/**
* This is a singleton
*/
private static SearchBorderLayoutPanel instance;
private BorderLayoutData northData;
private BorderLayoutData centerData;
private BorderLayoutData westData;
private BorderLayoutData southData;
private final BorderLayout layout = new BorderLayout();
private static SearchController searchController;
private SpeciesSearchFormPanel speciesNorthPanel;
private SpeciesResultsPanelCardLayout speciesCenterPanel;
private SpeciesResultFilterAccordionPanel speciesWestPanel;
private SpeciesJobPanel speciesSouthPanel;
private final EventBus eventBus;
@@ -53,78 +53,79 @@ public class SearchBorderLayoutPanel extends ContentPanel {
return instance;
}
private SearchBorderLayoutPanel() {
setLayout(layout);
setHeaderVisible(false);
eventBus = new SimpleEventBus();
searchController = new SearchController(eventBus, this);
speciesNorthPanel = new SpeciesSearchFormPanel(eventBus);
// speciesNorthPanel.setScrollMode(Scroll.AUTO);
speciesSouthPanel = new SpeciesJobPanel(eventBus);
//TODO REMOVE searchController.getStreamPagingLoader()
speciesCenterPanel = new SpeciesResultsPanelCardLayout(eventBus, searchController.getStreamPagingLoader(), searchController);
northData = new BorderLayoutData(LayoutRegion.NORTH, DEFAULTNORTHHEIGHT);
northData.setCollapsible(false);
northData.setFloatable(false);
northData.setHideCollapseTool(true);
northData.setSplit(false);
westData = new BorderLayoutData(LayoutRegion.WEST, 250, 300, 350);
westData.setSplit(true);
westData.setCollapsible(true);
westData.setMargins(new Margins(0,0,0,0));
centerData = new BorderLayoutData(LayoutRegion.CENTER);
centerData.setMargins(new Margins(0));
southData = new BorderLayoutData(LayoutRegion.SOUTH, 34, 34, 34);
// southData = new BorderLayoutData(LayoutRegion.SOUTH, 150, 250, 250);
// southData.setSplit(true);
// southData.setCollapsible(true);
southData.setMargins(new Margins(0,0,0,0));
speciesWestPanel = new SpeciesResultFilterAccordionPanel(eventBus, searchController.getStreamPagingLoader());
// eventBus.fireEvent(new LoadDataSourceEvent()); //load Data source event is fired
add(speciesNorthPanel, northData);
add(speciesCenterPanel, centerData);
add(speciesWestPanel, westData);
add(speciesSouthPanel, southData);
initApplication();
}
private void initApplication(){
Scheduler.get().scheduleDeferred(new ScheduledCommand() {
@Override
public void execute() {
eventBus.fireEvent(new LoadDataSourceEvent()); //load Data source event is fired
SearchController.excecuteGetJobs(SearchResultType.TAXONOMY_ITEM, false);
SearchController.excecuteGetJobs(SearchResultType.OCCURRENCE_POINT, false);
SearchController.excecuteGetJobs(SearchResultType.GIS_LAYER_POINT, false);
pollSpeciesJobs(ConstantsSpeciesDiscovery.JOBPOLLINGMILLISECONDS);
}
});
}
public static void pollSpeciesJobs(int milliseconds){
Timer timer = new Timer() {
@Override
public void run()
@@ -132,16 +133,17 @@ public class SearchBorderLayoutPanel extends ContentPanel {
SearchController.excecuteGetJobs(SearchResultType.TAXONOMY_ITEM, false);
SearchController.excecuteGetJobs(SearchResultType.OCCURRENCE_POINT, false);
SearchController.excecuteGetJobs(SearchResultType.GIS_LAYER_POINT, false);
}
};
timer.scheduleRepeating(milliseconds);
}
public void updateNorthSize(int height){
northData.setSize(height);
layout(true);
}
public SpeciesSearchFormPanel getSpeciesNorthPanel() {

View File

@@ -286,9 +286,9 @@ public class SearchController {
}
@Override
public void onSuccess(JobGisLayerModel jobId) {
Info.display("Gis Layer Occurrence Job", "Generating Gis layer: "+jobId.getJobName()+" submitted");
public void onSuccess(JobGisLayerModel gisJob) {
GWT.log("Returned gis job: "+gisJob);
Info.display("Gis Layer Occurrence Job", "Generating Gis layer: "+gisJob.getJobName()+" submitted");
excecuteGetJobs(SearchResultType.GIS_LAYER_POINT, false);
searchBorderLayoutPanel.getSpeciesSouthPanel().setIconGisLayerByCounter(1);
@@ -297,35 +297,6 @@ public class SearchController {
// showMap(layerName);
}
});
// SpeciesDiscovery.taxonomySearchService.createOccurrencesJob(listJobOccurrenceModel, createOccurrenceJobEvent.getFileFormat(), createOccurrenceJobEvent.getSaveEnum(), createOccurrenceJobEvent.isByDataSource(), createOccurrenceJobEvent.getExpectedOccurrences(), new AsyncCallback<List<JobOccurrencesModel>>() {
//
// @Override
// public void onFailure(Throwable caught) {
// Info.display("Error", "Sorry, An error occurred on create job. Please try again later");
// Log.error("Error on loading", "An error occurred on create job, retry." +caught.getMessage());
//
// }
//
// @Override
// public void onSuccess(List<JobOccurrencesModel> result) {
//
// if(result!=null){
// int jobs = result.size();
// if(jobs>0){
// String msg = jobs==1? "was":"were";
// Info.display("Species Occurrence Job", result.size() + " occurrence job "+msg+" submitted");
// excecuteGetJobs(SearchResultType.OCCURRENCE_POINT, false);
// searchBorderLayoutPanel.getSpeciesSouthPanel().setIconOccurrenceByCounter(result.size());
// }
// else{
// Info.display("Species Occurrence Job","An error occurred on submit job, retry");
// }
//
// }
// }
// });
}
});
@@ -1719,6 +1690,7 @@ public class SearchController {
if(resetStructures)
GisLayerJobSpeciesPanel.getInstance(eventBus).getGridJob().mask("Loading", ConstantsSpeciesDiscovery.LOADINGSTYLE);
//TODO
SpeciesDiscovery.taxonomySearchService.getListGisLayerJob(new AsyncCallback<List<JobGisLayerModel>>() {

View File

@@ -518,7 +518,7 @@ public class GisLayerGridJob extends ContentPanel{
public void handleEvent(BaseEvent be) {
// resetStore();
eventBus.fireEvent(new ReLoadListJobEvent(SearchResultType.OCCURRENCE_POINT));
eventBus.fireEvent(new ReLoadListJobEvent(SearchResultType.GIS_LAYER_POINT));
}
});

View File

@@ -36,7 +36,7 @@ public class GisLayerJobSpeciesPanel extends ContentPanel {
private static String LASTOPERATION = "Last Operation: ";
private Html lastOperation = new Html(LASTOPERATION);
private LayoutContainer lc;
private static String WINDOWTITLE = "Species Occurrence Jobs";
private static String WINDOWTITLE = "Gis Layer Jobs";
private static final String FAILED = "failed";
private static final String COMPLETED = "completed";
private static final String LOADING = "loading";

View File

@@ -334,7 +334,7 @@ public class ViewDetailsWindow extends Window {
Button btnShowInGisViewer = new Button(ConstantsSpeciesDiscovery.CREATE_GIS_LAYER);
btnShowInGisViewer.setIcon(AbstractImagePrototype.create(Resources.INSTANCE.getGisProducts()));
btnShowInGisViewer.setToolTip(new ToolTipConfig(ConstantsSpeciesDiscovery.CREATE_GIS_LAYER, "Create a Gis Layer from selected occurences points in a Gis Viewer Map"));
btnShowInGisViewer.setToolTip(new ToolTipConfig(ConstantsSpeciesDiscovery.CREATE_GIS_LAYER, "Create a Gis Layer from selected occurences points."));
btnShowInGisViewer.setScale(ButtonScale.SMALL);
btnShowInGisViewer.setIconAlign(IconAlign.TOP);
btnShowInGisViewer.setArrowAlign(ButtonArrowAlign.BOTTOM);
@@ -342,7 +342,7 @@ public class ViewDetailsWindow extends Window {
@Override
public void componentSelected(ButtonEvent ce) {
SearchController.eventBus.fireEvent(new CreateGisLayerJobEvent(lastSearchEvent.getSearchTerm() +" Occurrences layer", "Gis Layer generated from SPD Portlet by gCube Framework", count));
SearchController.eventBus.fireEvent(new CreateGisLayerJobEvent("Gis Layer by: "+lastSearchEvent.getSearchTerm(), "Gis Layer generated from SPD Portlet by gCube Framework", count));
}
});

View File

@@ -858,6 +858,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
String author = getASLSession().getUsername();
String credits = "";
GisLayerJobPersistence gisLayerJob = DaoSession.getGisLayersJobDAO(getASLSession());
logger.trace("GisLayerJobPersistence found");
return GisLayerJobUtil.createGisLayerJobByOccurrenceKeys(occurrenceKeys, taxonomyService, layerTitle, layerDescr, author, credits, totalPoints, gisLayerJob);
} catch (Exception e) {
logger.error("An error occurred creating the map", e);
@@ -1712,35 +1713,41 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
try {
GisLayerJobPersistence gisLayerJobDao = DaoSession.getGisLayersJobDAO(getASLSession());
Iterator<GisLayerJob> iterator = gisLayerJobDao.getList().iterator();
SpeciesService taxonomyService = getSpeciesService();
while (iterator!=null && iterator.hasNext()) {
GisLayerJob job = iterator.next();
CompleteJobStatus statusResponse = taxonomyService.getOccurrenceJobById(job.getId());
logger.info("get occurrence job "+job.getId()+ " from service");
try{
if(job.getId()==null || job.getId().isEmpty()){
logger.warn("Gis job has an id null or empty, skipping");
}else{
CompleteJobStatus statusResponse = taxonomyService.getGisLayerByJobId(job.getId());
logger.info("get occurrence job "+job.getId()+ " from service");
if(statusResponse!=null){
logger.info("statusResponse of gis layer job is not null..." + job.getId());
JobGisLayerModel convertJob = GisLayerJobUtil.convertJob(job, statusResponse, gisLayerJobDao);
try{
if(convertJob!=null){
logger.info("added list jobOccurrenceId: "+convertJob.getJobIdentifier() + " status "+convertJob.getDownloadState());
listJobs.add(convertJob);
if(statusResponse!=null){
logger.info("statusResponse of gis layer job is not null..." + job.getId());
JobGisLayerModel convertJob = GisLayerJobUtil.convertJob(job, statusResponse, gisLayerJobDao);
if(convertJob!=null){
logger.info("added list jobOccurrenceId: "+convertJob.getJobIdentifier() + " status "+convertJob.getDownloadState());
listJobs.add(convertJob);
}
}
else{
logger.info("statusResponse of occurrence job is null..." + job.getId());
logger.info("deleting job ..." + job.getId());
GisLayerJobUtil.deleteGisLayerJobById(job.getId(),gisLayerJobDao);
}
}
else{
logger.info("statusResponse of occurrence job is null..." + job.getId());
logger.info("deleting job ..." + job.getId());
GisLayerJobUtil.deleteGisLayerJobById(job.getId(),gisLayerJobDao);
}
}catch (Exception e) {
e.printStackTrace();
logger.error("Error on getListGisLayerJob ", e);
throw new Exception("Error on getListGisLayerJob", e);
}catch (Exception e) {
e.printStackTrace();
logger.error("Error on getListGisLayerJob ", e);
throw new Exception("Error on getListGisLayerJob", e);
}
}
}

View File

@@ -51,7 +51,9 @@ public class GisLayerJobUtil {
Date submitTime = DateUtil.getDateFormat(Calendar.getInstance());
GisLayerJob gisLJ = new GisLayerJob(jobId, layerTitle, 0, submitTime.getTime(), 0, 0, layerDescr, DownloadState.PENDING.toString(), totalPoints);
gisLayerJob.insert(gisLJ);
return new JobGisLayerModel(jobId, layerTitle, DownloadState.PENDING,null, submitTime, null, null, layerDescr, 0, totalPoints);
JobGisLayerModel jobModel = new JobGisLayerModel(jobId, layerTitle, DownloadState.PENDING,null, submitTime, null, null, layerDescr, 0, totalPoints);
logger.info("Returning job: "+jobModel);
return jobModel;
} catch (Exception e) {
logger.error("An error occurred creating the map", e);
throw new Exception(e.getMessage());

View File

@@ -489,10 +489,8 @@ public class SpeciesService {
return creator.createLayer(keyStream, details);
}
catch (Exception e) {
logger.error(
"Error calling the Species Service: " + e.getMessage(), e);
throw new SearchServiceException(
"Error calling the Species Service: " + e.getMessage());
logger.error("Error calling the Species Service: " + e.getMessage(), e);
throw new SearchServiceException("Error calling the Species Service: " + e.getMessage());
}
}

View File

@@ -5,19 +5,22 @@ package org.gcube.portlets.user.speciesdiscovery.shared;
import java.io.Serializable;
import javax.persistence.MappedSuperclass;
/**
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* Feb 9, 2017
*/
@MappedSuperclass
public class DefaultJob implements Serializable{
/**
*
*/
private static final long serialVersionUID = -4448201743475516557L;
private static final long serialVersionUID = 5677419614560436596L;
public final static String ID_FIELD = "id";
public final static String NAME = "name";
public final static String DESCRIPTION = "description";
@@ -44,8 +47,6 @@ public class DefaultJob implements Serializable{
}
/**
* @param id
* @param name
@@ -56,11 +57,7 @@ public class DefaultJob implements Serializable{
* @param state
* @param elapsedTime
*/
public DefaultJob(
String id, String name, long startTime, long submitTime, long endTime,
String description, String state, long elapsedTime) {
super();
public DefaultJob(String id, String name, long startTime, long submitTime, long endTime, String description, String state, long elapsedTime) {
this.id = id;
this.name = name;
this.startTime = startTime;
@@ -73,177 +70,177 @@ public class DefaultJob implements Serializable{
/**
* @return the id
*/
public String getId() {
return id;
}
/**
* @return the name
*/
public String getName() {
return name;
}
/**
* @return the startTime
*/
public long getStartTime() {
return startTime;
}
/**
* @return the submitTime
*/
public long getSubmitTime() {
return submitTime;
}
/**
* @return the endTime
*/
public long getEndTime() {
return endTime;
}
/**
* @return the description
*/
public String getDescription() {
return description;
}
/**
* @return the state
*/
public String getState() {
return state;
}
/**
* @return the elapsedTime
*/
public long getElapsedTime() {
return elapsedTime;
}
/**
* @param id the id to set
*/
public void setId(String id) {
this.id = id;
}
/**
* @param name the name to set
*/
public void setName(String name) {
this.name = name;
}
/**
* @param startTime the startTime to set
*/
public void setStartTime(long startTime) {
this.startTime = startTime;
}
/**
* @param submitTime the submitTime to set
*/
public void setSubmitTime(long submitTime) {
this.submitTime = submitTime;
}
/**
* @param endTime the endTime to set
*/
public void setEndTime(long endTime) {
this.endTime = endTime;
}
/**
* @param description the description to set
*/
public void setDescription(String description) {
this.description = description;
}
/**
* @param state the state to set
*/
public void setState(String state) {
this.state = state;
}
/**
* @param elapsedTime the elapsedTime to set
*/
public void setElapsedTime(long elapsedTime) {
this.elapsedTime = elapsedTime;
}

View File

@@ -4,6 +4,9 @@
package org.gcube.portlets.user.speciesdiscovery.shared;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
/**
@@ -21,6 +24,9 @@ public class GisLayerJob extends DefaultJob{
private static final long serialVersionUID = 2604265579184366453L;
private long totalPoints;
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
protected int internalId;
/**
* Instantiates a new gis layer job.
@@ -28,8 +34,6 @@ public class GisLayerJob extends DefaultJob{
public GisLayerJob() {
}
/**
* Instantiates a new gis layer job.
*
@@ -41,16 +45,11 @@ public class GisLayerJob extends DefaultJob{
* @param description the description
* @param state the state
*/
public GisLayerJob(
String id, String name, long startTime, long submitTime, long endTime,
long elapsedTime, String description, String state, long totalPoints) {
public GisLayerJob(String id, String name, long startTime, long submitTime, long endTime,long elapsedTime, String description, String state, long totalPoints) {
super(id, name, startTime, submitTime, endTime, description, state, elapsedTime);
this.totalPoints = totalPoints;
}
/**
* @return the totalPoints
*/
@@ -60,8 +59,6 @@ public class GisLayerJob extends DefaultJob{
}
/**
* @param totalPoints the totalPoints to set
*/
@@ -72,6 +69,16 @@ public class GisLayerJob extends DefaultJob{
/**
* @return the internalId
*/
public int getInternalId() {
return internalId;
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@@ -81,6 +88,8 @@ public class GisLayerJob extends DefaultJob{
StringBuilder builder = new StringBuilder();
builder.append("GisLayerJob [totalPoints=");
builder.append(totalPoints);
builder.append(", internalId=");
builder.append(internalId);
builder.append(", id=");
builder.append(id);
builder.append(", name=");
@@ -101,6 +110,4 @@ public class GisLayerJob extends DefaultJob{
return builder.toString();
}
}

View File

@@ -6,6 +6,8 @@ package org.gcube.portlets.user.speciesdiscovery.shared;
import java.io.Serializable;
import java.util.Date;
import com.google.gwt.user.client.rpc.IsSerializable;
@@ -15,7 +17,7 @@ import java.util.Date;
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* Feb 9, 2017
*/
public class JobGisLayerModel implements Serializable{
public class JobGisLayerModel implements Serializable, IsSerializable{
/**
*
@@ -92,7 +94,6 @@ public class JobGisLayerModel implements Serializable{
String layerDescription, long completedPoints,
long totalPoints) {
super();
this.jobIdentifier = jobIdentifier;
this.jobName = jobName;
this.downloadState = downloadState;

View File

@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<entity-mappings version="2.0" xmlns="http://java.sun.com/xml/ns/persistence/orm" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://java.sun.com/xml/ns/persistence/orm http://java.sun.com/xml/ns/persistence/orm_2_0.xsd">
<mapped-superclass class="DefaultJob">
</mapped-superclass>
</entity-mappings>

View File

@@ -2,8 +2,7 @@
<persistence xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://java.sun.com/xml/ns/persistence http://java.sun.com/xml/ns/persistence/persistence_2_0.xsd"
version="2.0" xmlns="http://java.sun.com/xml/ns/persistence">
<persistence-unit name="SPD_PERSISTENCE_FACTORY"
transaction-type="RESOURCE_LOCAL">
<persistence-unit name="SPD_PERSISTENCE_FACTORY" transaction-type="RESOURCE_LOCAL">
<class>org.gcube.portlets.user.speciesdiscovery.shared.CommonName</class>
<class>org.gcube.portlets.user.speciesdiscovery.shared.Taxon</class>
<class>org.gcube.portlets.user.speciesdiscovery.shared.ResultRow</class>
@@ -12,11 +11,12 @@
<class>org.gcube.portlets.user.speciesdiscovery.shared.TaxonomyRow</class>
<class>org.gcube.portlets.user.speciesdiscovery.shared.OccurrencesJob</class>
<class>org.gcube.portlets.user.speciesdiscovery.shared.TaxonomyJob</class>
<class>org.gcube.portlets.user.speciesdiscovery.shared.GisLayerJob</class>
<properties>
<property name="javax.persistence.jdbc.driver" value="org.h2.Driver" />
<property name="eclipselink.ddl-generation" value="create-tables" />
<property name="eclipselink.ddl-generation.output-mode"
value="database" />
</properties>
</persistence-unit>
</persistence>
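
The persistence unit above now registers GisLayerJob alongside the other job entities. As a minimal illustration (not part of this commit), and assuming standard JPA RESOURCE_LOCAL bootstrapping with the unit fully configured (the JDBC URL is not shown in this diff), the unit could be opened and a GisLayerJob row persisted directly, bypassing the DaoSession/GisLayerJobPersistence helpers used elsewhere in the commit; the sample job values are hypothetical:

import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;

import org.gcube.portlets.user.speciesdiscovery.shared.GisLayerJob;

public class GisLayerJobPersistenceSketch {

	public static void main(String[] args) {
		// "SPD_PERSISTENCE_FACTORY" is the persistence-unit name declared in persistence.xml above
		EntityManagerFactory emf = Persistence.createEntityManagerFactory("SPD_PERSISTENCE_FACTORY");
		EntityManager em = emf.createEntityManager();
		try {
			em.getTransaction().begin();
			// Constructor signature taken from GisLayerJob in this diff:
			// (id, name, startTime, submitTime, endTime, elapsedTime, description, state, totalPoints).
			// internalId is generated by JPA (IDENTITY strategy); the values below are hypothetical.
			GisLayerJob job = new GisLayerJob("job-123", "Sample layer", 0, System.currentTimeMillis(),
					0, 0, "Sample description", "pending", 1000);
			em.persist(job);
			em.getTransaction().commit();
		} finally {
			em.close();
			emf.close();
		}
	}
}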

View File

@@ -0,0 +1,62 @@
/**
*
*/
package org.gcube.portlets.user.speciesdiscovery.client;
import java.util.ArrayList;
import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data.spd.client.plugins.AbstractPlugin;
import org.gcube.data.spd.client.proxies.ExecutorClient;
import org.gcube.data.spd.model.service.types.CompleteJobStatus;
import org.gcube.data.spd.model.service.types.JobStatus;
import org.gcube.data.spd.model.service.types.MetadataDetails;
import org.gcube.data.streams.Stream;
import org.gcube.data.streams.dsl.Streams;
/**
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* Feb 10, 2017
*/
public class GisJob {
static List<String> keys = new ArrayList<String>();
public static void main(String[] args) {
try {
ScopeProvider.instance.set("/gcube/devsec");
keys.add("OBIS:161||666236");
ExecutorClient creator = AbstractPlugin.executor().build();
Stream<String> keyStream = Streams.convert(keys);
System.out.println("keys are: "+keys.toString());
MetadataDetails details= new MetadataDetails("title", "descr", "tile", "author", "credits");
System.out.println("submittings job...");
String jobId = creator.createLayer(keyStream, details);
System.out.println("The job id is: "+jobId);
CompleteJobStatus status = creator.getStatus(jobId);
JobStatus sta = status.getStatus();
while(sta!=JobStatus.COMPLETED && sta!=JobStatus.FAILED){
Thread.sleep(1000);
sta = creator.getStatus(jobId).getStatus();
System.out.println("checking status.."+sta);
}
System.out.println("job terminated");
}catch (Exception e) {
e.printStackTrace();
}
}
}