Bug Fixes

This commit is contained in:
Fabio Sinibaldi 2020-12-03 12:51:38 +01:00
parent d78a565229
commit 40fb63b48e
12 changed files with 335 additions and 179 deletions

View File

@ -2,6 +2,10 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm
# Changelog for org.gcube.application.geoportal-logic
## [v1.0.5-SNAPSHOT] - 2020-12-01
Deletion feature
## [v1.0.4] - 2020-11-09
Profiles adoption
Fixed collection management

View File

@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.gcube.application</groupId>
<artifactId>geoportal-logic</artifactId>
<version>1.0.4</version>
<version>1.0.5-SNAPSHOT</version>
<name>Geoportal Logic</name>

View File

@ -11,6 +11,7 @@ import javax.persistence.EntityManagerFactory;
import javax.persistence.EntityTransaction;
import org.gcube.application.geoportal.model.Record;
import org.gcube.application.geoportal.model.content.AssociatedContent;
import org.gcube.application.geoportal.model.db.DBConstants;
import org.gcube.application.geoportal.model.db.PostgisTable;
import org.gcube.application.geoportal.model.db.PostgisTable.Field;
@ -131,9 +132,8 @@ public abstract class AbstractRecordManager<T extends Record> {
entityManager.persist(theRecord);
}else {
log.debug("Passed record ID = "+theRecord.getId()+". Mergeing..");
entityManager.merge(theRecord);
this.theRecord=(T) entityManager.find(theRecord.getClass(),theRecord.getId());
}
this.contentHandler=new ContentHandler<T>(theRecord);
}
@ -163,16 +163,22 @@ public abstract class AbstractRecordManager<T extends Record> {
try {
ValidationReport report=theRecord.validate();
log.debug("Validated Report is "+report);
if(publish && report.getStatus().equals(ValidationStatus.ERROR))
throw new ValidationException(report,"Cannot publish project. See validation report");
// storeInfo();
log.debug("Record is valid, storing changed content");
contentHandler.storeChanges(publish);
// storeInfo();
contentHandler.storeChanges();
commit();
transaction.begin();
if(publish) {
if(report.getStatus().equals(ValidationStatus.ERROR))
throw new ValidationException(report,"Cannot publish project. See validation report");
log.debug("Publishing record "+theRecord);
contentHandler.publish();
commit();
log.debug("Registering centroid of "+theRecord);
registerCentroid();
// storeInfo();
@ -180,20 +186,25 @@ public abstract class AbstractRecordManager<T extends Record> {
log.debug("Successufully committing "+theRecord);
transaction.commit();
log.debug("Calling postCommit");
postcommit();
return theRecord;
}catch(ValidationException | PublishException | PersistenceException e) {
log.warn("Committing session for "+theRecord);
transaction.rollback();
if(transaction.isActive()) transaction.rollback();
throw e;
}
}
protected abstract void postcommit();
private void commit() {
entityManager.flush();
entityManager.clear();
transaction.commit();
log.debug("Calling postCommit");
postcommit();
}
protected abstract void postcommit();
public PublicationReport commitSafely(boolean publish) {
log.debug("Safely publishing "+theRecord);
@ -201,40 +212,44 @@ public abstract class AbstractRecordManager<T extends Record> {
PublicationReport toReturn=new PublicationReport("Publication Report");
toReturn.setTheRecord(getRecord());
ValidationReport validation=theRecord.validate();
commit();
transaction.begin();
validation.setObjectName("Validation report for "+validation.getObjectName());
if(validation.getStatus().equals(ValidationStatus.ERROR)) {
toReturn.addMessage(publish?ValidationStatus.ERROR:ValidationStatus.WARNING, "Record not valid.");
}
toReturn.addChild(validation);
log.debug("Record is valid, storing changed content [Publish is :"+publish+"]");
try {
toReturn.addChild(contentHandler.storeChanges(publish));
log.debug("Storing changed content [Publish is :"+publish+"]");
toReturn.addChild(contentHandler.storeChanges());
commit();
transaction.begin();
if(publish) {
toReturn.addChild(contentHandler.publish());
commit();
log.debug("Registering centroid of "+theRecord);
registerCentroid();
registerCentroid();
toReturn.addMessage(ValidationStatus.PASSED, "Inserito centroide per record "+theRecord.getId());
}
log.debug("Committing session for "+theRecord);
transaction.commit();
log.debug("Calling postCommit");
postcommit();
commit();
} catch (PersistenceException e) {
toReturn.addChild(e.getReport());
log.warn("Unexpected internal exception ",e);
log.debug("Rollback Session for "+theRecord);
transaction.rollback();
if(transaction.isActive()) transaction.rollback();
} catch (PublishException e) {
toReturn.addMessage(ValidationStatus.WARNING, "Centroide non registrato");
log.warn("Unexpected internal exception ",e);
log.debug("Committing session for "+theRecord);
transaction.commit(); log.debug("Calling postCommit");
postcommit();
}
if(transaction.isActive()) commit();
}
try {
@ -246,13 +261,46 @@ public abstract class AbstractRecordManager<T extends Record> {
}
public void publish() {
PublicationReport toReturn=new PublicationReport("Publication Report");
toReturn.setTheRecord(getRecord());
transaction.begin();
try {
toReturn.addChild(contentHandler.publish());
commit();
transaction.begin();
log.debug("Registering centroid of "+theRecord);
registerCentroid();
toReturn.addMessage(ValidationStatus.PASSED, "Inserito centroide per record "+theRecord.getId());
log.debug("Committing session for "+theRecord);
commit();
}catch (PublishException e) {
toReturn.addMessage(ValidationStatus.WARNING, "Centroide non registrato");
log.warn("Unexpected internal exception ",e);
log.debug("Committing session for "+theRecord);
if(transaction.isActive())commit();
}
}
public void delete() {
onDelete();
removeCentroid();
transaction.commit();
}
protected abstract void onDelete();
public void close() {
try {
if(transaction.isActive()) {
transaction.rollback();
}
entityManager.close();
}catch(Throwable t) {
log.warn("Exception while closing ",t);
}
@ -263,6 +311,17 @@ public abstract class AbstractRecordManager<T extends Record> {
close();
}
private void removeCentroid() {
try {
PostgisDBManagerI db=PostgisDBManager.get();
PostgisTable centroidsTable=getCentroidsTable();
log.debug("Deleting centroid if present. ID is "+theRecord.getId());
db.deleteByFieldValue(centroidsTable, new Field(DBConstants.Concessioni.PRODUCT_ID,FieldType.TEXT), theRecord.getId()+"");
}catch(Exception e) {
log.warn("Unable to remove centroid ",e);
}
}
private void registerCentroid() throws PublishException{
try {

View File

@ -10,11 +10,13 @@ import org.gcube.application.geoportal.model.concessioni.Concessione;
import org.gcube.application.geoportal.model.concessioni.LayerConcessione;
import org.gcube.application.geoportal.model.concessioni.RelazioneScavo;
import org.gcube.application.geoportal.model.content.UploadedImage;
import org.gcube.application.geoportal.model.content.WorkspaceContent;
import org.gcube.application.geoportal.model.db.DBConstants;
import org.gcube.application.geoportal.model.db.PostgisTable;
import org.gcube.application.geoportal.model.fault.SDIInteractionException;
import org.gcube.application.geoportal.storage.ContentHandler;
import org.gcube.application.geoportal.storage.SDIManager;
import org.gcube.application.geoportal.storage.WorkspaceManager;
import org.gcube.application.geoportal.utils.Serialization;
import lombok.extern.slf4j.Slf4j;
@ -57,7 +59,7 @@ public class ConcessioneManager extends AbstractRecordManager<Concessione> {
//Check if already stored content
Concessione record=getRecord();
if(record.getRelazioneScavo()!=null && !record.getRelazioneScavo().getActualContent().isEmpty()) {
handler.dispose(record.getRelazioneScavo());
handler.disposeQuietly(record.getRelazioneScavo());
}
rel.setRecord(record);
@ -92,7 +94,7 @@ public class ConcessioneManager extends AbstractRecordManager<Concessione> {
//Check if already stored content
Concessione record=getRecord();
if(record.getPosizionamentoScavo()!=null && !record.getPosizionamentoScavo().getActualContent().isEmpty())
handler.dispose(record.getPosizionamentoScavo());
handler.disposeQuietly(record.getPosizionamentoScavo());
layer.setRecord(record);
@ -119,50 +121,29 @@ public class ConcessioneManager extends AbstractRecordManager<Concessione> {
@Override
protected void onDelete() {
log.debug("Deleting content for record "+getRecord());
// @Override
// protected Concessione storeInfo(Concessione toInsert) {
// log.debug("Storing Record "+toInsert);
// EntityManager em=getEMF().createEntityManager();
// try {
// em.getTransaction().begin();
// RelazioneScavo relScavo=toInsert.getRelazioneScavo();
// toInsert.setRelazioneScavo(null);
// List<UploadedImage> uploadedImages=toInsert.getImmaginiRappresentative();
// toInsert.setImmaginiRappresentative(null);
// LayerConcessione posizionamentoScavo=toInsert.getPosizionamentoScavo();
// toInsert.setPosizionamentoScavo(null);
// List<LayerConcessione> pianteFineScavo= toInsert.getPianteFineScavo();
// toInsert.setPianteFineScavo(null);
// List<OtherContent> genericContent=toInsert.getGenericContent();
// toInsert.setGenericContent(null);
//
// log.debug("Storing empty concessione.. "+toInsert);
// em.persist(toInsert);
//
// if(relScavo!=null) {
// log.debug("Atttaching relazione .. "+relScavo);
// relScavo.setRecord(toInsert);
// toInsert.setRelazioneScavo(relScavo);
// }
//
// if(uploadedImages!=null) {
// log.debug("Attaching images.. "+uploadedImages);
// for(UploadedImage image: uploadedImages)
// image.setRecord(toInsert);
// toInsert.setImmaginiRappresentative(uploadedImages);
// }
//
// if(posizionamentoScavo!=null) {
// toInsert.get
// }
//
//
// em.getTransaction().commit();
// }finally {
// em.close();
// }
// }
ContentHandler<Concessione> handler=getContentHandler();
Concessione c=getRecord();
handler.disposeQuietly(c.getRelazioneScavo());
handler.disposeQuietly(c.getPosizionamentoScavo());
if(c.getImmaginiRappresentative()!=null)
for(UploadedImage img:c.getImmaginiRappresentative())
handler.disposeQuietly(img);
if(c.getPianteFineScavo()!=null)
for(LayerConcessione l:c.getPianteFineScavo())
handler.disposeQuietly(l);
log.debug("Clearing folder.. ");
try {
new WorkspaceManager(getRecord()).getWSBase().forceDelete();
}catch(Exception e) {
log.warn("Unable to delete base folder ",e);
}
}

View File

@ -1,6 +1,7 @@
package org.gcube.application.geoportal.storage;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
@ -8,6 +9,7 @@ import java.util.Map.Entry;
import org.gcube.application.geoportal.model.InputStreamDescriptor;
import org.gcube.application.geoportal.model.Record;
import org.gcube.application.geoportal.model.concessioni.Concessione;
import org.gcube.application.geoportal.model.concessioni.LayerConcessione;
import org.gcube.application.geoportal.model.concessioni.RelazioneScavo;
import org.gcube.application.geoportal.model.content.AssociatedContent;
@ -25,6 +27,7 @@ import org.gcube.application.geoportal.model.report.ValidationReport.ValidationS
import org.gcube.application.geoportal.utils.Files;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.data.transfer.library.faults.RemoteServiceException;
import lombok.extern.slf4j.Slf4j;
@ -69,30 +72,86 @@ public class ContentHandler<T extends Record> {
}
public void dispose(AssociatedContent content) {
public void dispose(AssociatedContent content) throws StorageHubException, SDIInteractionException, IllegalArgumentException, MalformedURLException, RemoteServiceException{
log.debug("Deleting Content "+content);
WorkspaceManager wsManager=null;
SDIManager sdiManager=null;
for(PersistedContent p:content.getActualContent()) {
if(p instanceof WorkspaceContent) {
if(wsManager==null) wsManager=new WorkspaceManager(record);
wsManager.deleteFromWS((WorkspaceContent) p);
}else if (p instanceof GeoServerContent) {
if(sdiManager==null) sdiManager=new SDIManager();
sdiManager.deleteContent((GeoServerContent) p);
}
}
}
public PublicationReport storeChanges(Boolean publish) throws PersistenceException {
public void disposeQuietly(AssociatedContent content) {
try {
dispose(content);
} catch (IllegalArgumentException | MalformedURLException | RemoteServiceException | StorageHubException
| SDIInteractionException e) {
log.warn("Unable to delete content "+content,e);
}
}
public PublicationReport publish() {
log.debug("Publishing "+record.getNome());
PublicationReport toReturn=new PublicationReport("Publish report");
try {
SDIManager sdiManager=new SDIManager();
ArrayList<AssociatedContent> list=new ArrayList<AssociatedContent>();
//Concessione
Concessione conc=(Concessione) record;
String workspace= sdiManager.createWorkspace("gna_conc_"+record.getId());
list.add(conc.getPosizionamentoScavo());
list.addAll(conc.getPianteFineScavo());
for(AssociatedContent c:list) {
if(c instanceof LayerConcessione) {
try {
GeoServerContent geoserverPersisted=sdiManager.pushShapeLayerFileSet((SDILayerDescriptor)c, workspace);
geoserverPersisted.setAssociated(c);
c.getActualContent().add(geoserverPersisted);
}catch(SDIInteractionException e) {
log.warn("Unable to publish layers.",e);
toReturn.addMessage(ValidationStatus.WARNING, "Layer "+c.getTitolo()+" non pubblicato.");
}
toReturn.addMessage(ValidationStatus.PASSED, "Pubblicato layer "+c.getTitolo());
}
}
} catch (SDIInteractionException e1) {
toReturn.addMessage(ValidationStatus.WARNING, "Unable to publish layers "+e1.getMessage());
}
return toReturn;
}
public PublicationReport storeChanges() throws PersistenceException {
//
log.debug("Starting to persist "+uploadedResources.size()+" resources "+record.getNome());
PublicationReport toReturn=new PublicationReport("Storage report");
try {
WorkspaceManager wsManager=new WorkspaceManager(record);
SDIManager sdiManager=null;
if(publish)
sdiManager=new SDIManager();
for(Entry<AssociatedContent,ArrayList<TempFile>> entry:uploadedResources.entrySet()) {
AssociatedContent content=entry.getKey();
ArrayList<PersistedContent> persisted=new ArrayList<PersistedContent>();
FolderContainer destination=null;
String description=null;
String workspace=null;
log.debug("Storing "+content);
if(content instanceof RelazioneScavo) {
@ -108,22 +167,6 @@ public class ContentHandler<T extends Record> {
destination= wsManager.getSubFolder("layers/"+content.getTitolo());
description="Layer concessione : "+content.getTitolo();
if(publish) {
try {
//if not present create workspace for current project
if(workspace==null)
workspace=sdiManager.createWorkspace("gna_conc_"+record.getId());
GeoServerContent geoserverPersisted=sdiManager.pushShapeLayerFileSet((SDILayerDescriptor)content, entry.getValue(),workspace);
geoserverPersisted.setAssociated(content);
persisted.add(geoserverPersisted);
}catch(SDIInteractionException e) {
log.warn("Unable to publish layers.",e);
toReturn.addMessage(ValidationStatus.WARNING, "Layer "+content.getTitolo()+" non pubblicato.");
}
toReturn.addMessage(ValidationStatus.PASSED, "Pubblicato layer "+content.getTitolo());
}
}else throw new Exception("Invalid SDI Content "+content);
@ -134,15 +177,15 @@ public class ContentHandler<T extends Record> {
log.debug("Actually Storing files to WS folder "+destination.getId());
for(TempFile theFile : entry.getValue()) {
WorkspaceContent wsContent=wsManager.storeToWS(theFile.getTheFile(), destination, theFile.getOriginalFileName(), description);
wsContent.setAssociated(content);
persisted.add(wsContent);
}
toReturn.addMessage(ValidationStatus.PASSED, "Registrati "+entry.getValue().size()+" elementi in archivio per : "+content.getTitolo());
content.setActualContent(persisted);
}
return toReturn;

View File

@ -3,20 +3,17 @@ package org.gcube.application.geoportal.storage;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;
import org.gcube.application.geoportal.model.content.GeoServerContent;
import org.gcube.application.geoportal.model.content.PersistedContent;
import org.gcube.application.geoportal.model.content.WorkspaceContent;
import org.gcube.application.geoportal.model.fault.SDIInteractionException;
import org.gcube.application.geoportal.model.gis.SDILayerDescriptor;
import org.gcube.application.geoportal.utils.Files;
import org.gcube.common.storagehub.client.dsl.FileContainer;
import org.gcube.data.transfer.library.DataTransferClient;
import org.gcube.data.transfer.library.TransferResult;
import org.gcube.data.transfer.library.faults.DestinationNotSetException;
import org.gcube.data.transfer.library.faults.FailedTransferException;
import org.gcube.data.transfer.library.faults.InitializationException;
import org.gcube.data.transfer.library.faults.InvalidDestinationException;
import org.gcube.data.transfer.library.faults.InvalidSourceException;
import org.gcube.data.transfer.library.faults.SourceNotSetException;
import org.gcube.data.transfer.library.faults.RemoteServiceException;
import org.gcube.data.transfer.model.Destination;
import org.gcube.data.transfer.model.DestinationClashPolicy;
import org.gcube.spatial.data.gis.GISInterface;
@ -24,6 +21,7 @@ import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher;
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher.UploadMethod;
import it.geosolutions.geoserver.rest.GeoServerRESTReader;
import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder;
import it.geosolutions.geoserver.rest.encoder.datastore.GSPostGISDatastoreEncoder;
import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder;
@ -34,8 +32,8 @@ import lombok.extern.slf4j.Slf4j;
public class SDIManager {
static private String DEFAULT_CRS="EPSG:4326";
private GISInterface gis;
private DataTransferClient dtGeoServer;
private String geoserverHostName;
@ -54,7 +52,8 @@ public class SDIManager {
log.debug("Contacting Data Transfer from geoserver {} ",geoserverHostName);
dtGeoServer=DataTransferClient.getInstanceByEndpoint("http://"+geoserverHostName);
if(!gis.getCurrentGeoServer().getReader().existGeoserver())
throw new Exception("Geoserver not reachable");
}catch(Exception e) {
throw new SDIInteractionException("Unable to initialize SDI Manager",e);
}
@ -62,8 +61,11 @@ public class SDIManager {
public String createWorkspace(String toCreate) throws SDIInteractionException {
try {
if(!gis.getCurrentGeoServer().getPublisher().createWorkspace(toCreate))
log.warn("Unable to create workspace "+toCreate+". Assuming already exisintg..");
if(!gis.getCurrentGeoServer().getReader().getWorkspaceNames().contains(toCreate)) {
log.debug("Creating workspace : "+toCreate);
if(!gis.getCurrentGeoServer().getPublisher().createWorkspace(toCreate))
throw new SDIInteractionException("Unable to create workspace "+toCreate);
}else log.debug("Workspace "+toCreate+" exists.");
return toCreate;
} catch (IllegalArgumentException | MalformedURLException e) {
throw new SDIInteractionException("Unable to create workspace "+toCreate,e);
@ -71,56 +73,81 @@ public class SDIManager {
}
// GEOSERVER-PERSISTENCE-ID / GNA / PROJECT-ID /LAYER-ID/ FILENAME
// GEOSERVER-PERSISTENCE-ID / GNA / PROJECT-ID/ FILENAME
public GeoServerContent pushShapeLayerFileSet(SDILayerDescriptor currentElement,List<TempFile> files,String workspace) throws SDIInteractionException{
public GeoServerContent pushShapeLayerFileSet(SDILayerDescriptor currentElement,String workspace) throws SDIInteractionException{
try {
String remoteFolder=null;
String fileName=null;
// String remoteFolder=null;
// String fileName=null;
log.debug("Transferring "+files.size()+" files to geoserver @ "+geoserverHostName);
log.debug("Publishing "+currentElement+" files to geoserver @ "+geoserverHostName);
GeoServerContent content=new GeoServerContent();
content.setGeoserverHostName(geoserverHostName);
content.setWorkspace(workspace);
for(TempFile f:files) {
Destination destination=new Destination(f.getOriginalFileName());
destination.setCreateSubfolders(true);
destination.setOnExistingFileName(DestinationClashPolicy.REWRITE);
destination.setOnExistingSubFolder(DestinationClashPolicy.ADD_SUFFIX);
destination.setPersistenceId("geoserver");
destination.setSubFolder("GNA/"+currentElement.getRecord().getId()+"/"+currentElement.getLayerName());
TransferResult result=dtGeoServer.localFile(f.getTheFile(), destination);
content.getFileNames().add(f.getOriginalFileName());
WorkspaceManager wsManager=new WorkspaceManager(currentElement.getRecord());
currentElement.getActualContent().forEach((PersistedContent c)->{
try {
if(c instanceof WorkspaceContent) {
WorkspaceContent wc=(WorkspaceContent) c;
FileContainer fc=wsManager.getFileById(wc.getStorageID());
String filename=fc.get().getName();
Destination destination=new Destination(filename);
destination.setCreateSubfolders(true);
destination.setOnExistingFileName(DestinationClashPolicy.REWRITE);
destination.setOnExistingSubFolder(DestinationClashPolicy.ADD_SUFFIX);
destination.setPersistenceId("geoserver");
destination.setSubFolder("GNA/"+currentElement.getRecord().getId()+"/"+filename);
log.debug("Sending "+wc+" to "+destination);
TransferResult result=dtGeoServer.httpSource(fc.getPublicLink(), destination);
log.debug("Transferred "+result);
content.getFileNames().add(filename);
content.setGeoserverPath(result.getRemotePath().substring(0,result.getRemotePath().lastIndexOf("/")));
}
}catch(Throwable t) {
log.warn("Unable to transfer Persisted content"+c,t);
}
remoteFolder=result.getRemotePath().substring(0,result.getRemotePath().lastIndexOf("/"));
fileName=f.getOriginalFileName();
content.setGeoserverPath(remoteFolder);
}
});
if(content.getFileNames().isEmpty())
throw new SDIInteractionException("No Persisted content found in "+currentElement);
String fileName=content.getFileNames().get(0);
String remoteFolder=content.getGeoserverPath().substring(0,content.getGeoserverPath().lastIndexOf("/"));
String toSetLayerName=fileName.substring(0,fileName.lastIndexOf("."));
content.setStore(toSetLayerName+"_store");
int count=0;
while(gis.getCurrentGeoServer().getReader().getLayer(workspace,toSetLayerName)!=null){
count++;
toSetLayerName=fileName.substring(0,fileName.lastIndexOf("."))+"_"+count;
log.debug("layer for "+fileName+" already existing, trying "+toSetLayerName);
};
String storeName=toSetLayerName+"_store";
content.setStore(storeName);
content.setFeatureType(toSetLayerName);
GeoServerRESTPublisher publisher=gis.getCurrentGeoServer().getPublisher();
log.debug("Trying to create remote workspace : "+workspace);
gis.getCurrentGeoServer().getPublisher().createWorkspace(workspace);
createWorkspace(workspace);
log.debug("Publishing remote folder "+remoteFolder);
URL directoryPath=new URL("file:"+remoteFolder);
URL directoryPath=new URL("file:"+remoteFolder+"/"+toSetLayerName+".shp");
//TODO Evaluate SRS
boolean published=publisher.publishShp(
workspace,
toSetLayerName+"_store",
storeName,
null,
toSetLayerName,
// UploadMethod.FILE, // neeeds zip
@ -134,13 +161,13 @@ public class SDIManager {
}
currentElement.setLayerName(toSetLayerName);
// TODO Metadata
return content;
} catch (InvalidSourceException | SourceNotSetException | FailedTransferException | InitializationException
| InvalidDestinationException | DestinationNotSetException e) {
throw new SDIInteractionException("Unable to transfer fileSet for content "+currentElement,e);
// } catch (InvalidSourceException | SourceNotSetException | FailedTransferException | InitializationException
// | InvalidDestinationException | DestinationNotSetException e) {
// throw new SDIInteractionException("Unable to transfer fileSet for content "+currentElement,e);
} catch (SDIInteractionException e) {
throw e;
} catch (Throwable t) {
@ -158,36 +185,46 @@ public class SDIManager {
encoder.setFetchSize(1000);
encoder.setValidateConnections(true);
try {
if(!gis.getCurrentGeoServer().getDataStoreManager().create(workspace, encoder))
log.warn("Unable to create workspace "+storeName+". Assuming already exisintg..");
log.debug("Looking for datastore "+storeName+" under "+workspace);
if(gis.getCurrentGeoServer().getReader().getDatastore(workspace,storeName)==null)
if(!gis.getCurrentGeoServer().getDataStoreManager().create(workspace, encoder))
throw new SDIInteractionException("Unable to create store "+storeName+" in "+workspace);
log.debug("Store "+storeName+" exists under "+workspace);
return storeName;
} catch (IllegalArgumentException | MalformedURLException e) {
throw new SDIInteractionException("Unable to create store "+storeName,e);
}
}
private String publishLayer(File sldFile,String name) throws SDIInteractionException {
private String publishStyle(File sldFile,String name) throws SDIInteractionException {
try {
if(!gis.getCurrentGeoServer().getPublisher().publishStyle(sldFile, name))
log.warn("Unable to create style "+name+". Assuming already exisintg..");
if(!gis.getCurrentGeoServer().getReader().existsStyle(name)) {
log.debug("Registering style "+name);
if(!gis.getCurrentGeoServer().getPublisher().publishStyle(sldFile, name))
throw new SDIInteractionException("Unable to register style "+name);
}else log.debug("Style "+name+" already existing");
return name;
} catch (IllegalArgumentException | MalformedURLException e) {
throw new SDIInteractionException("Unable to create style "+name,e);
}
}
public String configureCentroidLayer(String name,String workspace,String storeName) throws SDIInteractionException {
GSFeatureTypeEncoder fte=new GSFeatureTypeEncoder();
fte.setAbstract("Centroid layer for "+name);
fte.setEnabled(true);
fte.setNativeCRS(DEFAULT_CRS);
fte.setTitle(name);
fte.setName(name);
String style="clustered_centroids";
GSLayerEncoder layerEncoder=new GSLayerEncoder();
layerEncoder.setDefaultStyle(style);
layerEncoder.setEnabled(true);
@ -198,15 +235,35 @@ public class SDIManager {
//Checking store
createStoreFromPostgisDB(workspace, storeName);
//Checkig layer
publishLayer(Files.getFileFromResources("styles/clustered_points.sld"),style);
if(!gis.getCurrentGeoServer().getPublisher().publishDBLayer(workspace, storeName, fte, layerEncoder))
log.warn("Unable to create layer "+name+". Assuming already exisintg..");
publishStyle(Files.getFileFromResources("styles/clustered_points.sld"),style);
if(gis.getCurrentGeoServer().getReader().getLayer(workspace, name)==null)
if(!gis.getCurrentGeoServer().getPublisher().publishDBLayer(workspace, storeName, fte, layerEncoder))
throw new SDIInteractionException("Unable to create layer "+name);
log.debug("layer "+name+" already exists");
return name;
} catch (IllegalArgumentException | MalformedURLException e) {
throw new SDIInteractionException("Unable to create layer "+name,e);
}
}
public void deleteContent(GeoServerContent toDelete) throws IllegalArgumentException, MalformedURLException, RemoteServiceException {
log.debug("Deleting geoserver layer "+toDelete);
//delete layer
GeoServerRESTPublisher publisher=gis.getCurrentGeoServer().getPublisher();
//delete store
publisher.removeDatastore(toDelete.getWorkspace(), toDelete.getStore(), true);
//delete WS if empty
GeoServerRESTReader reader=gis.getCurrentGeoServer().getReader();
if(reader.getDatastores(toDelete.getWorkspace()).isEmpty()) {
log.debug("Deleting emtpy workspace "+toDelete.getWorkspace());
publisher.removeWorkspace(toDelete.getWorkspace(), true);
}
//delete file
dtGeoServer.getWebClient().delete(toDelete.getGeoserverPath());
}
}

View File

@ -54,7 +54,11 @@ public class WorkspaceManager {
}
private FolderContainer getWSBase() throws StorageHubException {
public FileContainer getFileById(String id) throws StorageHubException {
return getSGClient().open(id).asFile();
}
public FolderContainer getWSBase() throws StorageHubException {
if(wsBase==null) {
log.debug("WSBASE not set");
if(record.getFolderId()==null) {
@ -107,4 +111,8 @@ public class WorkspaceManager {
return content;
}
public void deleteFromWS(WorkspaceContent toDelete) throws StorageHubException {
getSGClient().open(toDelete.getStorageID()).asFile().forceDelete();
}
}

View File

@ -17,10 +17,10 @@ import org.gcube.data.transfer.library.utils.ScopeUtils;
public class CheckContextConfiguration {
public static void main(String[] args) throws ConfigurationException, StorageHubException {
TokenSetter.set("/gcube/devNext/NextNext");
// TokenSetter.set("/gcube/devNext/NextNext");
// TokenSetter.set("/pred4s/preprod/preVRE");
// TokenSetter.set("/d4science.research-infrastructures.eu/D4OS/GeoNA-Prototype");
TokenSetter.set("/d4science.research-infrastructures.eu/D4OS/GeoNA-Prototype");
// System.out.println("Checking for internal .. ");
// ConcessioneManager manager=ManagerFactory.getByRecord(new Concessione());
@ -58,8 +58,8 @@ public class CheckContextConfiguration {
System.out.println("Base folder path : "+new WorkspaceManager(null).getApplicationBaseFolder().get().getPath());
System.out.println("Inspecting internal DB ");
System.out.println("Found "+ManagerFactory.getList().size()+" records");
// System.out.println("Inspecting internal DB ");
// System.out.println("Found "+ManagerFactory.getList().size()+" records");

View File

@ -49,8 +49,8 @@ public class TestModel {
concessione.setParoleChiaveICCD(Arrays.asList(new String[] {"vattelapesca","somthing something"}));
concessione.setCentroidLat(43.0); //N-S
concessione.setCentroidLong(9.0); //E-W
concessione.setCentroidLat(Double.parseDouble("33."+System.currentTimeMillis())); //N-S
concessione.setCentroidLong(Double.parseDouble("11."+System.currentTimeMillis())); //E-W
return concessione;
}
@ -92,7 +92,7 @@ public class TestModel {
// Piante fine scavo
ArrayList<LayerConcessione> piante=new ArrayList<LayerConcessione>();
for(int i=0;i<4;i++) {
for(int i=0;i<1;i++) {
LayerConcessione pianta=new LayerConcessione();
pianta.setValutazioneQualita("Secondo me si");
pianta.setMetodoRaccoltaDati("Fattobbene");

View File

@ -8,6 +8,7 @@ import java.util.UUID;
import org.gcube.application.geoportal.model.content.GeoServerContent;
import org.gcube.application.geoportal.model.fault.SDIInteractionException;
import org.gcube.application.geoportal.model.gis.SDILayerDescriptor;
import org.gcube.application.geoportal.storage.SDIManager;
import org.gcube.application.geoportal.utils.Files;
import org.gcube.application.geoportal.utils.Workspace;
@ -28,15 +29,14 @@ public class TestSDI {
SDIManager sdiManager=new SDIManager();
// sdiManager.createWorkspace(workspaceName);
sdiManager.configureCentroidLayer("centroids_concessioni", "gna:test", "gna_test_postgis");
// sdiManager.configureCentroidLayer("centroids_concessioni", "gna:test", "gna_test_postgis");
// sdiManager.publishShapeFile(workspaceName, file.getPublicLink());
//
// GeoServerContent content=sdiManager.pushLayerFile(Files.getFileFromResources("concessioni/pos.shp"), "pos.shp", 123, 1);
// System.out.println("content is "+content);
// sdiManager.publishShapeFileDirectory(workspaceName, content.getGeoserverPath());
//
}

View File

@ -35,18 +35,22 @@ public class UseCases {
//CREATE NEW
int numConcessioni=1;
int numConcessioni=2;
Concessione registered=null;
System.out.println("Try to create.. "+numConcessioni);
for(int i=0;i<numConcessioni;i++)
registered=registerNewConcessione();
long id=registered.getId();
Concessione loaded=readConcessione(registered.getId());
System.out.println("Registered is "+registered.getImmaginiRappresentative());
System.out.println("Reloaded"+loaded.getImmaginiRappresentative());
// long id=registered.getId();
// long id=22;
// Concessione loaded=readConcessione(id);
//
// System.out.println("Publishing last "+loaded);
// ManagerFactory.getByRecord(loaded).publish();
// System.out.println("DONE");
//READ BY ID
@ -124,7 +128,7 @@ public class UseCases {
//If true -> data are published into the SDI
Boolean publish=false;
Boolean publish=true;
//NB la validazione viene CMQ triggerata a commit time.
//In caso di errore viene lanciata una ValidationException

View File

@ -1,6 +1,6 @@
log4j.rootLogger=WARN, stdout
log4j.org.gcube.application=DEBUG, stdout
log4j.logger.org.gcube.application=DEBUG, stdout
#CONSOLE
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout