Bug Fixes

This commit is contained in:
Fabio Sinibaldi 2020-12-03 12:51:38 +01:00
parent d78a565229
commit 40fb63b48e
12 changed files with 335 additions and 179 deletions

View File

@ -2,6 +2,10 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm
# Changelog for org.gcube.application.geoportal-logic # Changelog for org.gcube.application.geoportal-logic
## [v1.0.5-SNAPSHOT] - 2020-12-1
Deletion feature
## [v1.0.4] - 2020-11-9 ## [v1.0.4] - 2020-11-9
Profiles adoption Profiles adoption
Fixed collection management Fixed collection management

View File

@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<groupId>org.gcube.application</groupId> <groupId>org.gcube.application</groupId>
<artifactId>geoportal-logic</artifactId> <artifactId>geoportal-logic</artifactId>
<version>1.0.4</version> <version>1.0.5-SNAPSHOT</version>
<name>Geoportal Logic</name> <name>Geoportal Logic</name>

View File

@ -11,6 +11,7 @@ import javax.persistence.EntityManagerFactory;
import javax.persistence.EntityTransaction; import javax.persistence.EntityTransaction;
import org.gcube.application.geoportal.model.Record; import org.gcube.application.geoportal.model.Record;
import org.gcube.application.geoportal.model.content.AssociatedContent;
import org.gcube.application.geoportal.model.db.DBConstants; import org.gcube.application.geoportal.model.db.DBConstants;
import org.gcube.application.geoportal.model.db.PostgisTable; import org.gcube.application.geoportal.model.db.PostgisTable;
import org.gcube.application.geoportal.model.db.PostgisTable.Field; import org.gcube.application.geoportal.model.db.PostgisTable.Field;
@ -131,9 +132,8 @@ public abstract class AbstractRecordManager<T extends Record> {
entityManager.persist(theRecord); entityManager.persist(theRecord);
}else { }else {
log.debug("Passed record ID = "+theRecord.getId()+". Mergeing.."); log.debug("Passed record ID = "+theRecord.getId()+". Mergeing..");
entityManager.merge(theRecord); this.theRecord=(T) entityManager.find(theRecord.getClass(),theRecord.getId());
} }
this.contentHandler=new ContentHandler<T>(theRecord); this.contentHandler=new ContentHandler<T>(theRecord);
} }
@ -163,16 +163,22 @@ public abstract class AbstractRecordManager<T extends Record> {
try { try {
ValidationReport report=theRecord.validate(); ValidationReport report=theRecord.validate();
log.debug("Validated Report is "+report); log.debug("Validated Report is "+report);
if(publish && report.getStatus().equals(ValidationStatus.ERROR))
throw new ValidationException(report,"Cannot publish project. See validation report");
// storeInfo();
log.debug("Record is valid, storing changed content"); log.debug("Record is valid, storing changed content");
contentHandler.storeChanges(publish); contentHandler.storeChanges();
// storeInfo(); commit();
transaction.begin();
if(publish) { if(publish) {
if(report.getStatus().equals(ValidationStatus.ERROR))
throw new ValidationException(report,"Cannot publish project. See validation report");
log.debug("Publishing record "+theRecord);
contentHandler.publish();
commit();
log.debug("Registering centroid of "+theRecord); log.debug("Registering centroid of "+theRecord);
registerCentroid(); registerCentroid();
// storeInfo(); // storeInfo();
@ -180,18 +186,23 @@ public abstract class AbstractRecordManager<T extends Record> {
log.debug("Successufully committing "+theRecord); log.debug("Successufully committing "+theRecord);
transaction.commit();
log.debug("Calling postCommit");
postcommit();
return theRecord; return theRecord;
}catch(ValidationException | PublishException | PersistenceException e) { }catch(ValidationException | PublishException | PersistenceException e) {
log.warn("Committing session for "+theRecord); log.warn("Committing session for "+theRecord);
transaction.rollback(); if(transaction.isActive()) transaction.rollback();
throw e; throw e;
} }
} }
private void commit() {
entityManager.flush();
entityManager.clear();
transaction.commit();
log.debug("Calling postCommit");
postcommit();
}
protected abstract void postcommit(); protected abstract void postcommit();
public PublicationReport commitSafely(boolean publish) { public PublicationReport commitSafely(boolean publish) {
@ -201,39 +212,43 @@ public abstract class AbstractRecordManager<T extends Record> {
PublicationReport toReturn=new PublicationReport("Publication Report"); PublicationReport toReturn=new PublicationReport("Publication Report");
toReturn.setTheRecord(getRecord()); toReturn.setTheRecord(getRecord());
ValidationReport validation=theRecord.validate(); ValidationReport validation=theRecord.validate();
commit();
transaction.begin();
validation.setObjectName("Validation report for "+validation.getObjectName()); validation.setObjectName("Validation report for "+validation.getObjectName());
if(validation.getStatus().equals(ValidationStatus.ERROR)) { if(validation.getStatus().equals(ValidationStatus.ERROR)) {
toReturn.addMessage(publish?ValidationStatus.ERROR:ValidationStatus.WARNING, "Record not valid."); toReturn.addMessage(publish?ValidationStatus.ERROR:ValidationStatus.WARNING, "Record not valid.");
} }
toReturn.addChild(validation); toReturn.addChild(validation);
log.debug("Record is valid, storing changed content [Publish is :"+publish+"]");
try { try {
toReturn.addChild(contentHandler.storeChanges(publish)); log.debug("Storing changed content [Publish is :"+publish+"]");
toReturn.addChild(contentHandler.storeChanges());
commit();
transaction.begin();
if(publish) { if(publish) {
toReturn.addChild(contentHandler.publish());
commit();
log.debug("Registering centroid of "+theRecord); log.debug("Registering centroid of "+theRecord);
registerCentroid(); registerCentroid();
toReturn.addMessage(ValidationStatus.PASSED, "Inserito centroide per record "+theRecord.getId()); toReturn.addMessage(ValidationStatus.PASSED, "Inserito centroide per record "+theRecord.getId());
} }
log.debug("Committing session for "+theRecord); log.debug("Committing session for "+theRecord);
transaction.commit(); commit();
log.debug("Calling postCommit");
postcommit();
} catch (PersistenceException e) { } catch (PersistenceException e) {
toReturn.addChild(e.getReport()); toReturn.addChild(e.getReport());
log.warn("Unexpected internal exception ",e); log.warn("Unexpected internal exception ",e);
log.debug("Rollback Session for "+theRecord); log.debug("Rollback Session for "+theRecord);
transaction.rollback(); if(transaction.isActive()) transaction.rollback();
} catch (PublishException e) { } catch (PublishException e) {
toReturn.addMessage(ValidationStatus.WARNING, "Centroide non registrato"); toReturn.addMessage(ValidationStatus.WARNING, "Centroide non registrato");
log.warn("Unexpected internal exception ",e); log.warn("Unexpected internal exception ",e);
log.debug("Committing session for "+theRecord); log.debug("Committing session for "+theRecord);
transaction.commit(); log.debug("Calling postCommit"); if(transaction.isActive()) commit();
postcommit();
} }
@ -247,12 +262,45 @@ public abstract class AbstractRecordManager<T extends Record> {
} }
public void publish() {
PublicationReport toReturn=new PublicationReport("Publication Report");
toReturn.setTheRecord(getRecord());
transaction.begin();
try {
toReturn.addChild(contentHandler.publish());
commit();
transaction.begin();
log.debug("Registering centroid of "+theRecord);
registerCentroid();
toReturn.addMessage(ValidationStatus.PASSED, "Inserito centroide per record "+theRecord.getId());
log.debug("Committing session for "+theRecord);
commit();
}catch (PublishException e) {
toReturn.addMessage(ValidationStatus.WARNING, "Centroide non registrato");
log.warn("Unexpected internal exception ",e);
log.debug("Committing session for "+theRecord);
if(transaction.isActive())commit();
}
}
public void delete() {
onDelete();
removeCentroid();
transaction.commit();
}
protected abstract void onDelete();
public void close() { public void close() {
try { try {
if(transaction.isActive()) { if(transaction.isActive()) {
transaction.rollback(); transaction.rollback();
} }
entityManager.close();
}catch(Throwable t) { }catch(Throwable t) {
log.warn("Exception while closing ",t); log.warn("Exception while closing ",t);
} }
@ -263,6 +311,17 @@ public abstract class AbstractRecordManager<T extends Record> {
close(); close();
} }
private void removeCentroid() {
try {
PostgisDBManagerI db=PostgisDBManager.get();
PostgisTable centroidsTable=getCentroidsTable();
log.debug("Deleting centroid if present. ID is "+theRecord.getId());
db.deleteByFieldValue(centroidsTable, new Field(DBConstants.Concessioni.PRODUCT_ID,FieldType.TEXT), theRecord.getId()+"");
}catch(Exception e) {
log.warn("Unable to remove centroid ",e);
}
}
private void registerCentroid() throws PublishException{ private void registerCentroid() throws PublishException{
try { try {

View File

@ -10,11 +10,13 @@ import org.gcube.application.geoportal.model.concessioni.Concessione;
import org.gcube.application.geoportal.model.concessioni.LayerConcessione; import org.gcube.application.geoportal.model.concessioni.LayerConcessione;
import org.gcube.application.geoportal.model.concessioni.RelazioneScavo; import org.gcube.application.geoportal.model.concessioni.RelazioneScavo;
import org.gcube.application.geoportal.model.content.UploadedImage; import org.gcube.application.geoportal.model.content.UploadedImage;
import org.gcube.application.geoportal.model.content.WorkspaceContent;
import org.gcube.application.geoportal.model.db.DBConstants; import org.gcube.application.geoportal.model.db.DBConstants;
import org.gcube.application.geoportal.model.db.PostgisTable; import org.gcube.application.geoportal.model.db.PostgisTable;
import org.gcube.application.geoportal.model.fault.SDIInteractionException; import org.gcube.application.geoportal.model.fault.SDIInteractionException;
import org.gcube.application.geoportal.storage.ContentHandler; import org.gcube.application.geoportal.storage.ContentHandler;
import org.gcube.application.geoportal.storage.SDIManager; import org.gcube.application.geoportal.storage.SDIManager;
import org.gcube.application.geoportal.storage.WorkspaceManager;
import org.gcube.application.geoportal.utils.Serialization; import org.gcube.application.geoportal.utils.Serialization;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
@ -57,7 +59,7 @@ public class ConcessioneManager extends AbstractRecordManager<Concessione> {
//Check if already stored content //Check if already stored content
Concessione record=getRecord(); Concessione record=getRecord();
if(record.getRelazioneScavo()!=null && !record.getRelazioneScavo().getActualContent().isEmpty()) { if(record.getRelazioneScavo()!=null && !record.getRelazioneScavo().getActualContent().isEmpty()) {
handler.dispose(record.getRelazioneScavo()); handler.disposeQuietly(record.getRelazioneScavo());
} }
rel.setRecord(record); rel.setRecord(record);
@ -92,7 +94,7 @@ public class ConcessioneManager extends AbstractRecordManager<Concessione> {
//Check if already stored content //Check if already stored content
Concessione record=getRecord(); Concessione record=getRecord();
if(record.getPosizionamentoScavo()!=null && !record.getPosizionamentoScavo().getActualContent().isEmpty()) if(record.getPosizionamentoScavo()!=null && !record.getPosizionamentoScavo().getActualContent().isEmpty())
handler.dispose(record.getPosizionamentoScavo()); handler.disposeQuietly(record.getPosizionamentoScavo());
layer.setRecord(record); layer.setRecord(record);
@ -119,50 +121,29 @@ public class ConcessioneManager extends AbstractRecordManager<Concessione> {
@Override
protected void onDelete() {
log.debug("Deleting content for record "+getRecord());
// @Override ContentHandler<Concessione> handler=getContentHandler();
// protected Concessione storeInfo(Concessione toInsert) { Concessione c=getRecord();
// log.debug("Storing Record "+toInsert); handler.disposeQuietly(c.getRelazioneScavo());
// EntityManager em=getEMF().createEntityManager(); handler.disposeQuietly(c.getPosizionamentoScavo());
// try { if(c.getImmaginiRappresentative()!=null)
// em.getTransaction().begin(); for(UploadedImage img:c.getImmaginiRappresentative())
// RelazioneScavo relScavo=toInsert.getRelazioneScavo(); handler.disposeQuietly(img);
// toInsert.setRelazioneScavo(null);
// List<UploadedImage> uploadedImages=toInsert.getImmaginiRappresentative(); if(c.getPianteFineScavo()!=null)
// toInsert.setImmaginiRappresentative(null); for(LayerConcessione l:c.getPianteFineScavo())
// LayerConcessione posizionamentoScavo=toInsert.getPosizionamentoScavo(); handler.disposeQuietly(l);
// toInsert.setPosizionamentoScavo(null);
// List<LayerConcessione> pianteFineScavo= toInsert.getPianteFineScavo(); log.debug("Clearing folder.. ");
// toInsert.setPianteFineScavo(null); try {
// List<OtherContent> genericContent=toInsert.getGenericContent(); new WorkspaceManager(getRecord()).getWSBase().forceDelete();
// toInsert.setGenericContent(null); }catch(Exception e) {
// log.warn("Unable to delete base folder ",e);
// log.debug("Storing empty concessione.. "+toInsert); }
// em.persist(toInsert); }
//
// if(relScavo!=null) {
// log.debug("Atttaching relazione .. "+relScavo);
// relScavo.setRecord(toInsert);
// toInsert.setRelazioneScavo(relScavo);
// }
//
// if(uploadedImages!=null) {
// log.debug("Attaching images.. "+uploadedImages);
// for(UploadedImage image: uploadedImages)
// image.setRecord(toInsert);
// toInsert.setImmaginiRappresentative(uploadedImages);
// }
//
// if(posizionamentoScavo!=null) {
// toInsert.get
// }
//
//
// em.getTransaction().commit();
// }finally {
// em.close();
// }
// }

View File

@ -1,6 +1,7 @@
package org.gcube.application.geoportal.storage; package org.gcube.application.geoportal.storage;
import java.io.IOException; import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
@ -8,6 +9,7 @@ import java.util.Map.Entry;
import org.gcube.application.geoportal.model.InputStreamDescriptor; import org.gcube.application.geoportal.model.InputStreamDescriptor;
import org.gcube.application.geoportal.model.Record; import org.gcube.application.geoportal.model.Record;
import org.gcube.application.geoportal.model.concessioni.Concessione;
import org.gcube.application.geoportal.model.concessioni.LayerConcessione; import org.gcube.application.geoportal.model.concessioni.LayerConcessione;
import org.gcube.application.geoportal.model.concessioni.RelazioneScavo; import org.gcube.application.geoportal.model.concessioni.RelazioneScavo;
import org.gcube.application.geoportal.model.content.AssociatedContent; import org.gcube.application.geoportal.model.content.AssociatedContent;
@ -25,6 +27,7 @@ import org.gcube.application.geoportal.model.report.ValidationReport.ValidationS
import org.gcube.application.geoportal.utils.Files; import org.gcube.application.geoportal.utils.Files;
import org.gcube.common.storagehub.client.dsl.FolderContainer; import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.model.exceptions.StorageHubException; import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.data.transfer.library.faults.RemoteServiceException;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
@ -69,30 +72,86 @@ public class ContentHandler<T extends Record> {
} }
public void dispose(AssociatedContent content) { public void dispose(AssociatedContent content) throws StorageHubException, SDIInteractionException, IllegalArgumentException, MalformedURLException, RemoteServiceException{
log.debug("Deleting Content "+content);
WorkspaceManager wsManager=null;
SDIManager sdiManager=null;
for(PersistedContent p:content.getActualContent()) {
if(p instanceof WorkspaceContent) {
if(wsManager==null) wsManager=new WorkspaceManager(record);
wsManager.deleteFromWS((WorkspaceContent) p);
}else if (p instanceof GeoServerContent) {
if(sdiManager==null) sdiManager=new SDIManager();
sdiManager.deleteContent((GeoServerContent) p);
}
}
} }
public PublicationReport storeChanges(Boolean publish) throws PersistenceException { public void disposeQuietly(AssociatedContent content) {
try {
dispose(content);
} catch (IllegalArgumentException | MalformedURLException | RemoteServiceException | StorageHubException
| SDIInteractionException e) {
log.warn("Unable to delete content "+content,e);
}
}
public PublicationReport publish() {
log.debug("Publishing "+record.getNome());
PublicationReport toReturn=new PublicationReport("Publish report");
try {
SDIManager sdiManager=new SDIManager();
ArrayList<AssociatedContent> list=new ArrayList<AssociatedContent>();
//Concessione
Concessione conc=(Concessione) record;
String workspace= sdiManager.createWorkspace("gna_conc_"+record.getId());
list.add(conc.getPosizionamentoScavo());
list.addAll(conc.getPianteFineScavo());
for(AssociatedContent c:list) {
if(c instanceof LayerConcessione) {
try {
GeoServerContent geoserverPersisted=sdiManager.pushShapeLayerFileSet((SDILayerDescriptor)c, workspace);
geoserverPersisted.setAssociated(c);
c.getActualContent().add(geoserverPersisted);
}catch(SDIInteractionException e) {
log.warn("Unable to publish layers.",e);
toReturn.addMessage(ValidationStatus.WARNING, "Layer "+c.getTitolo()+" non pubblicato.");
}
toReturn.addMessage(ValidationStatus.PASSED, "Pubblicato layer "+c.getTitolo());
}
}
} catch (SDIInteractionException e1) {
toReturn.addMessage(ValidationStatus.WARNING, "Unable to publish layers "+e1.getMessage());
}
return toReturn;
}
public PublicationReport storeChanges() throws PersistenceException {
// //
log.debug("Starting to persist "+uploadedResources.size()+" resources "+record.getNome()); log.debug("Starting to persist "+uploadedResources.size()+" resources "+record.getNome());
PublicationReport toReturn=new PublicationReport("Storage report"); PublicationReport toReturn=new PublicationReport("Storage report");
try { try {
WorkspaceManager wsManager=new WorkspaceManager(record); WorkspaceManager wsManager=new WorkspaceManager(record);
SDIManager sdiManager=null;
if(publish)
sdiManager=new SDIManager();
for(Entry<AssociatedContent,ArrayList<TempFile>> entry:uploadedResources.entrySet()) { for(Entry<AssociatedContent,ArrayList<TempFile>> entry:uploadedResources.entrySet()) {
AssociatedContent content=entry.getKey(); AssociatedContent content=entry.getKey();
ArrayList<PersistedContent> persisted=new ArrayList<PersistedContent>(); ArrayList<PersistedContent> persisted=new ArrayList<PersistedContent>();
FolderContainer destination=null; FolderContainer destination=null;
String description=null; String description=null;
String workspace=null;
log.debug("Storing "+content); log.debug("Storing "+content);
if(content instanceof RelazioneScavo) { if(content instanceof RelazioneScavo) {
@ -108,22 +167,6 @@ public class ContentHandler<T extends Record> {
destination= wsManager.getSubFolder("layers/"+content.getTitolo()); destination= wsManager.getSubFolder("layers/"+content.getTitolo());
description="Layer concessione : "+content.getTitolo(); description="Layer concessione : "+content.getTitolo();
if(publish) {
try {
//if not present create workspace for current project
if(workspace==null)
workspace=sdiManager.createWorkspace("gna_conc_"+record.getId());
GeoServerContent geoserverPersisted=sdiManager.pushShapeLayerFileSet((SDILayerDescriptor)content, entry.getValue(),workspace);
geoserverPersisted.setAssociated(content);
persisted.add(geoserverPersisted);
}catch(SDIInteractionException e) {
log.warn("Unable to publish layers.",e);
toReturn.addMessage(ValidationStatus.WARNING, "Layer "+content.getTitolo()+" non pubblicato.");
}
toReturn.addMessage(ValidationStatus.PASSED, "Pubblicato layer "+content.getTitolo());
}
}else throw new Exception("Invalid SDI Content "+content); }else throw new Exception("Invalid SDI Content "+content);

View File

@ -3,20 +3,17 @@ package org.gcube.application.geoportal.storage;
import java.io.File; import java.io.File;
import java.net.MalformedURLException; import java.net.MalformedURLException;
import java.net.URL; import java.net.URL;
import java.util.List;
import org.gcube.application.geoportal.model.content.GeoServerContent; import org.gcube.application.geoportal.model.content.GeoServerContent;
import org.gcube.application.geoportal.model.content.PersistedContent;
import org.gcube.application.geoportal.model.content.WorkspaceContent;
import org.gcube.application.geoportal.model.fault.SDIInteractionException; import org.gcube.application.geoportal.model.fault.SDIInteractionException;
import org.gcube.application.geoportal.model.gis.SDILayerDescriptor; import org.gcube.application.geoportal.model.gis.SDILayerDescriptor;
import org.gcube.application.geoportal.utils.Files; import org.gcube.application.geoportal.utils.Files;
import org.gcube.common.storagehub.client.dsl.FileContainer;
import org.gcube.data.transfer.library.DataTransferClient; import org.gcube.data.transfer.library.DataTransferClient;
import org.gcube.data.transfer.library.TransferResult; import org.gcube.data.transfer.library.TransferResult;
import org.gcube.data.transfer.library.faults.DestinationNotSetException; import org.gcube.data.transfer.library.faults.RemoteServiceException;
import org.gcube.data.transfer.library.faults.FailedTransferException;
import org.gcube.data.transfer.library.faults.InitializationException;
import org.gcube.data.transfer.library.faults.InvalidDestinationException;
import org.gcube.data.transfer.library.faults.InvalidSourceException;
import org.gcube.data.transfer.library.faults.SourceNotSetException;
import org.gcube.data.transfer.model.Destination; import org.gcube.data.transfer.model.Destination;
import org.gcube.data.transfer.model.DestinationClashPolicy; import org.gcube.data.transfer.model.DestinationClashPolicy;
import org.gcube.spatial.data.gis.GISInterface; import org.gcube.spatial.data.gis.GISInterface;
@ -24,6 +21,7 @@ import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher; import it.geosolutions.geoserver.rest.GeoServerRESTPublisher;
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher.UploadMethod; import it.geosolutions.geoserver.rest.GeoServerRESTPublisher.UploadMethod;
import it.geosolutions.geoserver.rest.GeoServerRESTReader;
import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder; import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder;
import it.geosolutions.geoserver.rest.encoder.datastore.GSPostGISDatastoreEncoder; import it.geosolutions.geoserver.rest.encoder.datastore.GSPostGISDatastoreEncoder;
import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder; import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder;
@ -54,7 +52,8 @@ public class SDIManager {
log.debug("Contacting Data Transfer from geoserver {} ",geoserverHostName); log.debug("Contacting Data Transfer from geoserver {} ",geoserverHostName);
dtGeoServer=DataTransferClient.getInstanceByEndpoint("http://"+geoserverHostName); dtGeoServer=DataTransferClient.getInstanceByEndpoint("http://"+geoserverHostName);
if(!gis.getCurrentGeoServer().getReader().existGeoserver())
throw new Exception("Geoserver not reachable");
}catch(Exception e) { }catch(Exception e) {
throw new SDIInteractionException("Unable to initialize SDI Manager",e); throw new SDIInteractionException("Unable to initialize SDI Manager",e);
} }
@ -62,8 +61,11 @@ public class SDIManager {
public String createWorkspace(String toCreate) throws SDIInteractionException { public String createWorkspace(String toCreate) throws SDIInteractionException {
try { try {
if(!gis.getCurrentGeoServer().getPublisher().createWorkspace(toCreate)) if(!gis.getCurrentGeoServer().getReader().getWorkspaceNames().contains(toCreate)) {
log.warn("Unable to create workspace "+toCreate+". Assuming already exisintg.."); log.debug("Creating workspace : "+toCreate);
if(!gis.getCurrentGeoServer().getPublisher().createWorkspace(toCreate))
throw new SDIInteractionException("Unable to create workspace "+toCreate);
}else log.debug("Workspace "+toCreate+" exists.");
return toCreate; return toCreate;
} catch (IllegalArgumentException | MalformedURLException e) { } catch (IllegalArgumentException | MalformedURLException e) {
throw new SDIInteractionException("Unable to create workspace "+toCreate,e); throw new SDIInteractionException("Unable to create workspace "+toCreate,e);
@ -71,56 +73,81 @@ public class SDIManager {
} }
// GEOSERVER-PERSISTENCE-ID / GNA / PROJECT-ID /LAYER-ID/ FILENAME // GEOSERVER-PERSISTENCE-ID / GNA / PROJECT-ID/ FILENAME
public GeoServerContent pushShapeLayerFileSet(SDILayerDescriptor currentElement,List<TempFile> files,String workspace) throws SDIInteractionException{ public GeoServerContent pushShapeLayerFileSet(SDILayerDescriptor currentElement,String workspace) throws SDIInteractionException{
try { try {
String remoteFolder=null; // String remoteFolder=null;
String fileName=null; // String fileName=null;
log.debug("Transferring "+files.size()+" files to geoserver @ "+geoserverHostName); log.debug("Publishing "+currentElement+" files to geoserver @ "+geoserverHostName);
GeoServerContent content=new GeoServerContent(); GeoServerContent content=new GeoServerContent();
content.setGeoserverHostName(geoserverHostName); content.setGeoserverHostName(geoserverHostName);
content.setWorkspace(workspace); content.setWorkspace(workspace);
WorkspaceManager wsManager=new WorkspaceManager(currentElement.getRecord());
for(TempFile f:files) { currentElement.getActualContent().forEach((PersistedContent c)->{
Destination destination=new Destination(f.getOriginalFileName()); try {
destination.setCreateSubfolders(true); if(c instanceof WorkspaceContent) {
destination.setOnExistingFileName(DestinationClashPolicy.REWRITE); WorkspaceContent wc=(WorkspaceContent) c;
destination.setOnExistingSubFolder(DestinationClashPolicy.ADD_SUFFIX); FileContainer fc=wsManager.getFileById(wc.getStorageID());
destination.setPersistenceId("geoserver");
destination.setSubFolder("GNA/"+currentElement.getRecord().getId()+"/"+currentElement.getLayerName());
TransferResult result=dtGeoServer.localFile(f.getTheFile(), destination);
content.getFileNames().add(f.getOriginalFileName());
remoteFolder=result.getRemotePath().substring(0,result.getRemotePath().lastIndexOf("/")); String filename=fc.get().getName();
fileName=f.getOriginalFileName();
content.setGeoserverPath(remoteFolder); Destination destination=new Destination(filename);
} destination.setCreateSubfolders(true);
destination.setOnExistingFileName(DestinationClashPolicy.REWRITE);
destination.setOnExistingSubFolder(DestinationClashPolicy.ADD_SUFFIX);
destination.setPersistenceId("geoserver");
destination.setSubFolder("GNA/"+currentElement.getRecord().getId()+"/"+filename);
log.debug("Sending "+wc+" to "+destination);
TransferResult result=dtGeoServer.httpSource(fc.getPublicLink(), destination);
log.debug("Transferred "+result);
content.getFileNames().add(filename);
content.setGeoserverPath(result.getRemotePath().substring(0,result.getRemotePath().lastIndexOf("/")));
}
}catch(Throwable t) {
log.warn("Unable to transfer Persisted content"+c,t);
}
});
if(content.getFileNames().isEmpty())
throw new SDIInteractionException("No Persisted content found in "+currentElement);
String fileName=content.getFileNames().get(0);
String remoteFolder=content.getGeoserverPath().substring(0,content.getGeoserverPath().lastIndexOf("/"));
String toSetLayerName=fileName.substring(0,fileName.lastIndexOf(".")); String toSetLayerName=fileName.substring(0,fileName.lastIndexOf("."));
int count=0;
while(gis.getCurrentGeoServer().getReader().getLayer(workspace,toSetLayerName)!=null){
count++;
toSetLayerName=fileName.substring(0,fileName.lastIndexOf("."))+"_"+count;
log.debug("layer for "+fileName+" already existing, trying "+toSetLayerName);
};
content.setStore(toSetLayerName+"_store");
String storeName=toSetLayerName+"_store";
content.setStore(storeName);
content.setFeatureType(toSetLayerName); content.setFeatureType(toSetLayerName);
GeoServerRESTPublisher publisher=gis.getCurrentGeoServer().getPublisher(); GeoServerRESTPublisher publisher=gis.getCurrentGeoServer().getPublisher();
log.debug("Trying to create remote workspace : "+workspace); log.debug("Trying to create remote workspace : "+workspace);
gis.getCurrentGeoServer().getPublisher().createWorkspace(workspace); createWorkspace(workspace);
log.debug("Publishing remote folder "+remoteFolder); log.debug("Publishing remote folder "+remoteFolder);
URL directoryPath=new URL("file:"+remoteFolder); URL directoryPath=new URL("file:"+remoteFolder+"/"+toSetLayerName+".shp");
//TODO Evaluate SRS //TODO Evaluate SRS
boolean published=publisher.publishShp( boolean published=publisher.publishShp(
workspace, workspace,
toSetLayerName+"_store", storeName,
null, null,
toSetLayerName, toSetLayerName,
// UploadMethod.FILE, // neeeds zip // UploadMethod.FILE, // neeeds zip
@ -138,9 +165,9 @@ public class SDIManager {
// TODO Metadata // TODO Metadata
return content; return content;
} catch (InvalidSourceException | SourceNotSetException | FailedTransferException | InitializationException // } catch (InvalidSourceException | SourceNotSetException | FailedTransferException | InitializationException
| InvalidDestinationException | DestinationNotSetException e) { // | InvalidDestinationException | DestinationNotSetException e) {
throw new SDIInteractionException("Unable to transfer fileSet for content "+currentElement,e); // throw new SDIInteractionException("Unable to transfer fileSet for content "+currentElement,e);
} catch (SDIInteractionException e) { } catch (SDIInteractionException e) {
throw e; throw e;
} catch (Throwable t) { } catch (Throwable t) {
@ -158,8 +185,13 @@ public class SDIManager {
encoder.setFetchSize(1000); encoder.setFetchSize(1000);
encoder.setValidateConnections(true); encoder.setValidateConnections(true);
try { try {
if(!gis.getCurrentGeoServer().getDataStoreManager().create(workspace, encoder)) log.debug("Looking for datastore "+storeName+" under "+workspace);
log.warn("Unable to create workspace "+storeName+". Assuming already exisintg..");
if(gis.getCurrentGeoServer().getReader().getDatastore(workspace,storeName)==null)
if(!gis.getCurrentGeoServer().getDataStoreManager().create(workspace, encoder))
throw new SDIInteractionException("Unable to create store "+storeName+" in "+workspace);
log.debug("Store "+storeName+" exists under "+workspace);
return storeName; return storeName;
} catch (IllegalArgumentException | MalformedURLException e) { } catch (IllegalArgumentException | MalformedURLException e) {
throw new SDIInteractionException("Unable to create store "+storeName,e); throw new SDIInteractionException("Unable to create store "+storeName,e);
@ -167,10 +199,13 @@ public class SDIManager {
} }
private String publishLayer(File sldFile,String name) throws SDIInteractionException { private String publishStyle(File sldFile,String name) throws SDIInteractionException {
try { try {
if(!gis.getCurrentGeoServer().getPublisher().publishStyle(sldFile, name)) if(!gis.getCurrentGeoServer().getReader().existsStyle(name)) {
log.warn("Unable to create style "+name+". Assuming already exisintg.."); log.debug("Registering style "+name);
if(!gis.getCurrentGeoServer().getPublisher().publishStyle(sldFile, name))
throw new SDIInteractionException("Unable to register style "+name);
}else log.debug("Style "+name+" already existing");
return name; return name;
} catch (IllegalArgumentException | MalformedURLException e) { } catch (IllegalArgumentException | MalformedURLException e) {
throw new SDIInteractionException("Unable to create style "+name,e); throw new SDIInteractionException("Unable to create style "+name,e);
@ -185,6 +220,8 @@ public class SDIManager {
fte.setEnabled(true); fte.setEnabled(true);
fte.setNativeCRS(DEFAULT_CRS); fte.setNativeCRS(DEFAULT_CRS);
fte.setTitle(name); fte.setTitle(name);
fte.setName(name);
String style="clustered_centroids"; String style="clustered_centroids";
@ -198,10 +235,11 @@ public class SDIManager {
//Checking store //Checking store
createStoreFromPostgisDB(workspace, storeName); createStoreFromPostgisDB(workspace, storeName);
//Checking layer //Checking layer
publishLayer(Files.getFileFromResources("styles/clustered_points.sld"),style); publishStyle(Files.getFileFromResources("styles/clustered_points.sld"),style);
if(gis.getCurrentGeoServer().getReader().getLayer(workspace, name)==null)
if(!gis.getCurrentGeoServer().getPublisher().publishDBLayer(workspace, storeName, fte, layerEncoder)) if(!gis.getCurrentGeoServer().getPublisher().publishDBLayer(workspace, storeName, fte, layerEncoder))
log.warn("Unable to create layer "+name+". Assuming already exisintg.."); throw new SDIInteractionException("Unable to create layer "+name);
log.debug("layer "+name+" already exists");
return name; return name;
} catch (IllegalArgumentException | MalformedURLException e) { } catch (IllegalArgumentException | MalformedURLException e) {
throw new SDIInteractionException("Unable to create layer "+name,e); throw new SDIInteractionException("Unable to create layer "+name,e);
@ -209,4 +247,23 @@ public class SDIManager {
} }
/**
 * Removes all SDI (GeoServer) artifacts associated with the given content descriptor.
 *
 * Steps, in order:
 *  1. removes the backing datastore (recursively, which also drops any layers
 *     published from it — NOTE(review): there is no explicit layer-removal call,
 *     the leading log message mentions "layer" but deletion relies on the
 *     recursive datastore removal; confirm against geoserver-manager semantics);
 *  2. removes the workspace if it is left empty;
 *  3. deletes the transferred file(s) from the geoserver host via the
 *     data-transfer web client.
 *
 * @param toDelete descriptor holding workspace, store and remote path to remove
 * @throws IllegalArgumentException on invalid REST arguments
 * @throws MalformedURLException if the configured GeoServer URL is invalid
 * @throws RemoteServiceException if the remote file deletion fails
 */
public void deleteContent(GeoServerContent toDelete) throws IllegalArgumentException, MalformedURLException, RemoteServiceException {
log.debug("Deleting geoserver layer "+toDelete);
//delete layer (see NOTE above: handled implicitly by the recursive store removal below)
GeoServerRESTPublisher publisher=gis.getCurrentGeoServer().getPublisher();
//delete store (recurse=true also removes feature types / layers backed by it)
publisher.removeDatastore(toDelete.getWorkspace(), toDelete.getStore(), true);
//delete WS if empty — avoids leaving orphan workspaces behind
GeoServerRESTReader reader=gis.getCurrentGeoServer().getReader();
if(reader.getDatastores(toDelete.getWorkspace()).isEmpty()) {
log.debug("Deleting emtpy workspace "+toDelete.getWorkspace());
publisher.removeWorkspace(toDelete.getWorkspace(), true);
}
//delete file previously transferred to the geoserver host
dtGeoServer.getWebClient().delete(toDelete.getGeoserverPath());
}
} }

View File

@ -54,7 +54,11 @@ public class WorkspaceManager {
} }
private FolderContainer getWSBase() throws StorageHubException { public FileContainer getFileById(String id) throws StorageHubException {
return getSGClient().open(id).asFile();
}
public FolderContainer getWSBase() throws StorageHubException {
if(wsBase==null) { if(wsBase==null) {
log.debug("WSBASE not set"); log.debug("WSBASE not set");
if(record.getFolderId()==null) { if(record.getFolderId()==null) {
@ -107,4 +111,8 @@ public class WorkspaceManager {
return content; return content;
} }
/**
 * Permanently deletes the workspace item referenced by the given content
 * descriptor from the storage hub (no trash: forceDelete).
 *
 * @param toDelete descriptor whose storage ID identifies the item to remove
 * @throws StorageHubException if the storage hub rejects the operation
 */
public void deleteFromWS(WorkspaceContent toDelete) throws StorageHubException {
FileContainer target = getSGClient().open(toDelete.getStorageID()).asFile();
target.forceDelete();
}
} }

View File

@ -17,10 +17,10 @@ import org.gcube.data.transfer.library.utils.ScopeUtils;
public class CheckContextConfiguration { public class CheckContextConfiguration {
public static void main(String[] args) throws ConfigurationException, StorageHubException { public static void main(String[] args) throws ConfigurationException, StorageHubException {
TokenSetter.set("/gcube/devNext/NextNext"); // TokenSetter.set("/gcube/devNext/NextNext");
// TokenSetter.set("/pred4s/preprod/preVRE"); // TokenSetter.set("/pred4s/preprod/preVRE");
// TokenSetter.set("/d4science.research-infrastructures.eu/D4OS/GeoNA-Prototype"); TokenSetter.set("/d4science.research-infrastructures.eu/D4OS/GeoNA-Prototype");
// System.out.println("Checking for internal .. "); // System.out.println("Checking for internal .. ");
// ConcessioneManager manager=ManagerFactory.getByRecord(new Concessione()); // ConcessioneManager manager=ManagerFactory.getByRecord(new Concessione());
@ -58,8 +58,8 @@ public class CheckContextConfiguration {
System.out.println("Base folder path : "+new WorkspaceManager(null).getApplicationBaseFolder().get().getPath()); System.out.println("Base folder path : "+new WorkspaceManager(null).getApplicationBaseFolder().get().getPath());
System.out.println("Inspecting internal DB "); // System.out.println("Inspecting internal DB ");
System.out.println("Found "+ManagerFactory.getList().size()+" records"); // System.out.println("Found "+ManagerFactory.getList().size()+" records");

View File

@ -49,8 +49,8 @@ public class TestModel {
concessione.setParoleChiaveICCD(Arrays.asList(new String[] {"vattelapesca","somthing something"})); concessione.setParoleChiaveICCD(Arrays.asList(new String[] {"vattelapesca","somthing something"}));
concessione.setCentroidLat(43.0); //N-S concessione.setCentroidLat(Double.parseDouble("33."+System.currentTimeMillis())); //N-S
concessione.setCentroidLong(9.0); //E-W concessione.setCentroidLong(Double.parseDouble("11."+System.currentTimeMillis())); //E-W
return concessione; return concessione;
} }
@ -92,7 +92,7 @@ public class TestModel {
// Piante fine scavo // Piante fine scavo
ArrayList<LayerConcessione> piante=new ArrayList<LayerConcessione>(); ArrayList<LayerConcessione> piante=new ArrayList<LayerConcessione>();
for(int i=0;i<4;i++) { for(int i=0;i<1;i++) {
LayerConcessione pianta=new LayerConcessione(); LayerConcessione pianta=new LayerConcessione();
pianta.setValutazioneQualita("Secondo me si"); pianta.setValutazioneQualita("Secondo me si");
pianta.setMetodoRaccoltaDati("Fattobbene"); pianta.setMetodoRaccoltaDati("Fattobbene");

View File

@ -8,6 +8,7 @@ import java.util.UUID;
import org.gcube.application.geoportal.model.content.GeoServerContent; import org.gcube.application.geoportal.model.content.GeoServerContent;
import org.gcube.application.geoportal.model.fault.SDIInteractionException; import org.gcube.application.geoportal.model.fault.SDIInteractionException;
import org.gcube.application.geoportal.model.gis.SDILayerDescriptor;
import org.gcube.application.geoportal.storage.SDIManager; import org.gcube.application.geoportal.storage.SDIManager;
import org.gcube.application.geoportal.utils.Files; import org.gcube.application.geoportal.utils.Files;
import org.gcube.application.geoportal.utils.Workspace; import org.gcube.application.geoportal.utils.Workspace;
@ -28,15 +29,14 @@ public class TestSDI {
SDIManager sdiManager=new SDIManager(); SDIManager sdiManager=new SDIManager();
// sdiManager.createWorkspace(workspaceName); // sdiManager.createWorkspace(workspaceName);
sdiManager.configureCentroidLayer("centroids_concessioni", "gna:test", "gna_test_postgis"); // sdiManager.configureCentroidLayer("centroids_concessioni", "gna:test", "gna_test_postgis");
// sdiManager.publishShapeFile(workspaceName, file.getPublicLink()); // sdiManager.publishShapeFile(workspaceName, file.getPublicLink());
//
// GeoServerContent content=sdiManager.pushLayerFile(Files.getFileFromResources("concessioni/pos.shp"), "pos.shp", 123, 1); // GeoServerContent content=sdiManager.pushLayerFile(Files.getFileFromResources("concessioni/pos.shp"), "pos.shp", 123, 1);
// System.out.println("content is "+content); // System.out.println("content is "+content);
// sdiManager.publishShapeFileDirectory(workspaceName, content.getGeoserverPath()); // sdiManager.publishShapeFileDirectory(workspaceName, content.getGeoserverPath());
//
} }

View File

@ -35,18 +35,22 @@ public class UseCases {
//CREATE NEW //CREATE NEW
int numConcessioni=1; int numConcessioni=2;
Concessione registered=null; Concessione registered=null;
System.out.println("Try to create.. "+numConcessioni); System.out.println("Try to create.. "+numConcessioni);
for(int i=0;i<numConcessioni;i++) for(int i=0;i<numConcessioni;i++)
registered=registerNewConcessione(); registered=registerNewConcessione();
long id=registered.getId();
Concessione loaded=readConcessione(registered.getId());
System.out.println("Registered is "+registered.getImmaginiRappresentative());
System.out.println("Reloaded"+loaded.getImmaginiRappresentative());
// long id=registered.getId();
// long id=22;
// Concessione loaded=readConcessione(id);
// //
// System.out.println("Publishing last "+loaded);
// ManagerFactory.getByRecord(loaded).publish();
// System.out.println("DONE");
//READ BY ID //READ BY ID
@ -124,7 +128,7 @@ public class UseCases {
//If true -> data are published into the SDI //If true -> data are published into the SDI
Boolean publish=false; Boolean publish=true;
//NB la validazione viene CMQ triggerata a commit time. //NB la validazione viene CMQ triggerata a commit time.
//In caso di errore viene lanciata una ValidationException //In caso di errore viene lanciata una ValidationException

View File

@ -1,6 +1,6 @@
log4j.rootLogger=WARN, stdout log4j.rootLogger=WARN, stdout
log4j.org.gcube.application=DEBUG, stdout log4j.logger.org.gcube.application=DEBUG, stdout
#CONSOLE #CONSOLE
log4j.appender.stdout=org.apache.log4j.ConsoleAppender log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout log4j.appender.stdout.layout=org.apache.log4j.PatternLayout