Publishing mongo Centroids

This commit is contained in:
Fabio Sinibaldi 2020-12-22 18:22:56 +01:00
parent 9a500d5ae1
commit 931bd443c2
10 changed files with 835 additions and 131 deletions

View File

@ -0,0 +1,319 @@
package org.gcube.application.geoportal.service.engine;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import org.gcube.application.geoportal.common.model.legacy.BBOX;
import org.gcube.application.geoportal.common.model.legacy.GeoServerContent;
import org.gcube.application.geoportal.common.model.legacy.PersistedContent;
import org.gcube.application.geoportal.common.model.legacy.SDILayerDescriptor;
import org.gcube.application.geoportal.common.model.legacy.WorkspaceContent;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.service.model.internal.faults.SDIInteractionException;
import org.gcube.common.storagehub.client.dsl.FileContainer;
import org.gcube.data.transfer.library.DataTransferClient;
import org.gcube.data.transfer.library.TransferResult;
import org.gcube.data.transfer.library.faults.RemoteServiceException;
import org.gcube.data.transfer.model.Destination;
import org.gcube.data.transfer.model.DestinationClashPolicy;
import org.gcube.data.transfer.model.RemoteFileDescriptor;
import org.gcube.spatial.data.gis.GISInterface;
import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher;
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher.UploadMethod;
import it.geosolutions.geoserver.rest.GeoServerRESTReader;
import it.geosolutions.geoserver.rest.decoder.RESTFeatureType;
import it.geosolutions.geoserver.rest.decoder.RESTLayer;
import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder;
import it.geosolutions.geoserver.rest.encoder.datastore.GSPostGISDatastoreEncoder;
import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class SDIManager {

	/** Default CRS used when publishing layers and when composing WMS links. */
	static private String DEFAULT_CRS="EPSG:4326";

	private GISInterface gis;
	/** Data Transfer client pointed at the current GeoServer host. */
	@Getter
	private DataTransferClient dtGeoServer;
	private String geoserverHostName;

	/**
	 * Resolves the current GeoServer through the GIS interface, derives its host
	 * name and initializes a Data Transfer client towards it.
	 *
	 * @throws SDIInteractionException if the GeoServer descriptor cannot be resolved,
	 *         the Data Transfer endpoint cannot be contacted, or the GeoServer
	 *         itself is not reachable
	 */
	public SDIManager() throws SDIInteractionException {
		try {
			log.debug("Initializing GIS Interface..");
			gis = GISInterface.get();
			AbstractGeoServerDescriptor geoserver = gis.getCurrentGeoServer();
			if (geoserver == null)
				throw new Exception("Unable to contact data transfer for geoserver ");
			log.debug("Found geoserver descriptor {}", geoserver);
			geoserverHostName = new URL(gis.getCurrentGeoServer().getUrl()).getHost();
			log.debug("Contacting Data Transfer from geoserver {} ", geoserverHostName);
			dtGeoServer = DataTransferClient.getInstanceByEndpoint("http://" + geoserverHostName);
			if (!gis.getCurrentGeoServer().getReader().existGeoserver())
				throw new Exception("Geoserver not reachable");
		} catch (Exception e) {
			throw new SDIInteractionException("Unable to initialize SDI Manager", e);
		}
	}

	/**
	 * Returns the descriptor of the remote "geoserver/GNA" folder exposed by the
	 * Data Transfer service on the GeoServer host.
	 *
	 * @throws RemoteServiceException if the Data Transfer service call fails
	 */
	public RemoteFileDescriptor getGeoServerRemoteFolder() throws RemoteServiceException {
		return dtGeoServer.getWebClient().getInfo("geoserver/GNA");
	}

	/**
	 * Creates the given workspace on the current GeoServer if it is missing.
	 *
	 * @param toCreate workspace name
	 * @return the workspace name, unchanged
	 * @throws SDIInteractionException if the workspace cannot be created
	 */
	public String createWorkspace(String toCreate) throws SDIInteractionException {
		try {
			if (!gis.getCurrentGeoServer().getReader().getWorkspaceNames().contains(toCreate)) {
				log.debug("Creating workspace : {}", toCreate);
				if (!gis.getCurrentGeoServer().getPublisher().createWorkspace(toCreate))
					throw new SDIInteractionException("Unable to create workspace " + toCreate);
			} else
				log.debug("Workspace {} exists.", toCreate);
			return toCreate;
		} catch (IllegalArgumentException | MalformedURLException e) {
			throw new SDIInteractionException("Unable to create workspace " + toCreate, e);
		}
	}

	// Remote layout : GEOSERVER-PERSISTENCE-ID / GNA / PROJECT-ID / LAYER-ID / FILENAME(no extension)/...
	/**
	 * Transfers the shapefile set of {@code currentElement} to the GeoServer host
	 * (via Data Transfer) and publishes it as a new layer in {@code workspace}.
	 * The layer name is derived from the first transferred file name, de-duplicated
	 * with a numeric suffix when a layer with that name already exists.
	 * Side effects on {@code currentElement}: layer name, WMS link, workspace and
	 * bounding box are set from the published feature type.
	 *
	 * @param currentElement descriptor holding the persisted files to publish
	 * @param workspace target GeoServer workspace (created if missing)
	 * @param projectId project identifier used to build the remote folder path
	 * @return the GeoServerContent describing what was transferred/published
	 * @throws SDIInteractionException if no file could be transferred or publication fails
	 */
	public GeoServerContent pushShapeLayerFileSet(SDILayerDescriptor currentElement, String workspace, String projectId)
			throws SDIInteractionException {
		try {
			log.debug("Publishing {} files to geoserver @ {}", currentElement, geoserverHostName);
			GeoServerContent content = new GeoServerContent();
			content.setGeoserverHostName(geoserverHostName);
			content.setWorkspace(workspace);
			WorkspaceManager wsManager = new WorkspaceManager();
			// Transfer each workspace-hosted file to the remote GeoServer folder.
			currentElement.getActualContent().forEach((PersistedContent c) -> {
				try {
					if (c instanceof WorkspaceContent) {
						WorkspaceContent wc = (WorkspaceContent) c;
						FileContainer fc = wsManager.getFileById(wc.getStorageID());
						String completeFilename = Files.fixFilename(fc.get().getName());
						String filename = completeFilename.contains(".")
								? completeFilename.substring(0, completeFilename.lastIndexOf("."))
								: completeFilename;
						Destination destination = new Destination(completeFilename);
						destination.setCreateSubfolders(true);
						destination.setOnExistingFileName(DestinationClashPolicy.REWRITE);
						destination.setOnExistingSubFolder(DestinationClashPolicy.APPEND);
						destination.setPersistenceId("geoserver");
						destination.setSubFolder("GNA/" + projectId + "/"
								+ currentElement.getMongo_id() + "/" + filename);
						log.debug("Sending {} to {}", wc, destination);
						TransferResult result = dtGeoServer.httpSource(fc.getPublicLink(), destination);
						log.debug("Transferred {}", result);
						content.getFileNames().add(completeFilename);
						content.setGeoserverPath(result.getRemotePath().substring(0,
								result.getRemotePath().lastIndexOf("/")));
					}
				} catch (Throwable t) {
					// Best-effort per file : a failed transfer is logged and the remaining files are still attempted.
					log.warn("Unable to transfer Persisted content" + c, t);
				}
			});
			if (content.getFileNames().isEmpty())
				throw new SDIInteractionException("No Persisted content found in " + currentElement);

			String completeFileName = content.getFileNames().get(0);
			String filename = completeFileName.contains(".")
					? completeFileName.substring(0, completeFileName.lastIndexOf("."))
					: completeFileName;
			String remoteFolder = content.getGeoserverPath();

			// Choose a layer name that is not already taken in the workspace.
			String toSetLayerName = filename;
			int count = 0;
			while (gis.getCurrentGeoServer().getReader().getLayer(workspace, toSetLayerName) != null) {
				count++;
				toSetLayerName = filename + "_" + count;
				log.debug("layer for {} already existing, trying {}", filename, toSetLayerName);
			}

			String storeName = toSetLayerName + "_store";
			content.setStore(storeName);
			content.setFeatureType(toSetLayerName);

			GeoServerRESTPublisher publisher = gis.getCurrentGeoServer().getPublisher();
			log.debug("Trying to create remote workspace : {}", workspace);
			createWorkspace(workspace);

			log.debug("Publishing remote folder {}", remoteFolder);
			URL directoryPath = new URL("file:" + remoteFolder + "/" + completeFileName);
			// TODO Evaluate SRS
			boolean published = publisher.publishShp(
					workspace,
					storeName,
					null,
					toSetLayerName,
					UploadMethod.EXTERNAL, // expects the shp already transferred on the server (FILE would need a zip)
					directoryPath.toURI(),
					DEFAULT_CRS, // SRS
					""); // default style
			if (!published) {
				throw new SDIInteractionException("Unable to publish layer " + toSetLayerName
						+ " under " + workspace + ". Unknown Geoserver fault.");
			}
			currentElement.setLayerName(toSetLayerName);

			GeoServerRESTReader reader = gis.getCurrentGeoServer().getReader();
			RESTLayer l = reader.getLayer(workspace, toSetLayerName);
			RESTFeatureType f = reader.getFeatureType(l);
			/* Example of the produced link :
			 * http://geoserver1.dev.d4science.org/geoserver/gna_conc_18/wms?
			 * service=WMS&version=1.1.0&request=GetMap&layers=gna_conc_18:pos&
			 * styles=&bbox=...&width=392&height=768&srs=EPSG:4326&format=application/openlayers */
			currentElement.setWmsLink(
					String.format("https://%1$s/geoserver/%2$s/wms?"
							+ "service=WMS&version=1.1.0&request=GetMap&layers=%2$s:%3$s&"
							+ "styles=&bbox=%4$f,%5$f,%6$f,%7$f&srs=%8$s&format=application/openlayers&width=%9$d&height=%10$d",
							geoserverHostName,
							workspace,
							toSetLayerName,
							f.getMinX(),
							f.getMinY(),
							f.getMaxX(),
							f.getMaxY(),
							DEFAULT_CRS,
							400,
							400));
			currentElement.setWorkspace(workspace);
			currentElement.setBbox(new BBOX(f.getMaxY(), f.getMaxX(), f.getMinY(), f.getMinX()));
			// TODO Metadata
			return content;
		} catch (SDIInteractionException e) {
			throw e;
		} catch (Throwable t) {
			throw new SDIInteractionException("Unexpected internal fault while interacting with SDI.", t);
		}
	}

	/**
	 * Ensures a PostGIS datastore with the given name exists under the workspace,
	 * creating it (JNDI-backed, settings fixed by provisioning) when missing.
	 *
	 * BUGFIX: the original logged "Store ... exists" unconditionally, even right
	 * after creating it; the log now reflects which path was taken.
	 *
	 * @return the store name, unchanged
	 * @throws SDIInteractionException if the store cannot be created
	 */
	private String createStoreFromPostgisDB(String workspace, String storeName) throws SDIInteractionException {
		// SET BY PROVISIONING
		GSPostGISDatastoreEncoder encoder = new GSPostGISDatastoreEncoder(storeName);
		encoder.setJndiReferenceName("java:comp/env/jdbc/postgres");
		encoder.setLooseBBox(true);
		encoder.setDatabaseType("postgis");
		encoder.setEnabled(true);
		encoder.setFetchSize(1000);
		encoder.setValidateConnections(true);
		try {
			log.debug("Looking for datastore {} under {}", storeName, workspace);
			if (gis.getCurrentGeoServer().getReader().getDatastore(workspace, storeName) == null) {
				if (!gis.getCurrentGeoServer().getDataStoreManager().create(workspace, encoder))
					throw new SDIInteractionException("Unable to create store " + storeName + " in " + workspace);
				log.debug("Created store {} under {}", storeName, workspace);
			} else
				log.debug("Store {} exists under {}", storeName, workspace);
			return storeName;
		} catch (IllegalArgumentException | MalformedURLException e) {
			throw new SDIInteractionException("Unable to create store " + storeName, e);
		}
	}

	/**
	 * Registers the given SLD style on GeoServer if not already present.
	 *
	 * @return the style name, unchanged
	 * @throws SDIInteractionException if registration fails
	 */
	private String publishStyle(File sldFile, String name) throws SDIInteractionException {
		try {
			if (!gis.getCurrentGeoServer().getReader().existsStyle(name)) {
				log.debug("Registering style {}", name);
				if (!gis.getCurrentGeoServer().getPublisher().publishStyle(sldFile, name))
					throw new SDIInteractionException("Unable to register style " + name);
			} else
				log.debug("Style {} already existing", name);
			return name;
		} catch (IllegalArgumentException | MalformedURLException e) {
			throw new SDIInteractionException("Unable to create style " + name, e);
		}
	}

	/**
	 * Ensures the centroids DB layer is published : creates workspace and PostGIS
	 * store if missing, registers the clustered-centroids style, then publishes
	 * the DB layer if it does not already exist.
	 *
	 * BUGFIX: the original logged "layer ... already exists" unconditionally,
	 * even right after publishing it; the log now reflects which path was taken.
	 *
	 * @return the layer name, unchanged
	 * @throws SDIInteractionException on any configuration/publication failure
	 */
	public String configureCentroidLayer(String name, String workspace, String storeName) throws SDIInteractionException {
		GSFeatureTypeEncoder fte = new GSFeatureTypeEncoder();
		fte.setAbstract("Centroid layer for " + name);
		fte.setEnabled(true);
		fte.setNativeCRS(DEFAULT_CRS);
		fte.setTitle(name);
		fte.setName(name);

		String style = "clustered_centroids";

		GSLayerEncoder layerEncoder = new GSLayerEncoder();
		layerEncoder.setDefaultStyle(style);
		layerEncoder.setEnabled(true);
		layerEncoder.setQueryable(true);
		try {
			// Checking workspace
			createWorkspace(workspace);
			// Checking store
			createStoreFromPostgisDB(workspace, storeName);
			// Checking style
			publishStyle(Files.getFileFromResources("styles/clustered_points.sld"), style);
			// Checking layer
			if (gis.getCurrentGeoServer().getReader().getLayer(workspace, name) == null) {
				if (!gis.getCurrentGeoServer().getPublisher().publishDBLayer(workspace, storeName, fte, layerEncoder))
					throw new SDIInteractionException("Unable to create layer " + name);
				log.debug("Created layer {}", name);
			} else
				log.debug("layer {} already exists", name);
			return name;
		} catch (IllegalArgumentException | MalformedURLException e) {
			throw new SDIInteractionException("Unable to create layer " + name, e);
		}
	}

	/**
	 * Removes the remote artifacts of the given content : the datastore (recursively,
	 * which also drops its layers), the workspace when left empty, and the transferred
	 * files on the GeoServer host.
	 */
	public void deleteContent(GeoServerContent toDelete) throws IllegalArgumentException, MalformedURLException, RemoteServiceException {
		log.debug("Deleting geoserver layer {}", toDelete);
		GeoServerRESTPublisher publisher = gis.getCurrentGeoServer().getPublisher();
		// delete store (recurse=true also removes the layers it backs)
		publisher.removeDatastore(toDelete.getWorkspace(), toDelete.getStore(), true);
		// delete WS if empty
		GeoServerRESTReader reader = gis.getCurrentGeoServer().getReader();
		if (reader.getDatastores(toDelete.getWorkspace()).isEmpty()) {
			log.debug("Deleting empty workspace {}", toDelete.getWorkspace());
			publisher.removeWorkspace(toDelete.getWorkspace(), true);
		}
		// delete transferred files
		dtGeoServer.getWebClient().delete(toDelete.getGeoserverPath());
	}
}

View File

@ -6,6 +6,7 @@ import java.io.InputStream;
import javax.validation.constraints.NotNull;
import org.gcube.application.geoportal.common.model.legacy.WorkspaceContent;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
import org.gcube.common.storagehub.client.dsl.FileContainer;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
@ -127,11 +128,13 @@ public class WorkspaceManager {
@Synchronized
private static FolderContainer createFolder(FolderOptions opts, StorageHubClient sgClient) throws StorageHubException {
	// Normalize the requested folder name before asking the workspace backend to create it.
	String safeName = Files.fixFilename(opts.getFolderName());
	opts.setFolderName(safeName);
	return opts.getParent().newFolder(safeName, opts.getFolderDescription());
}
@Synchronized
private static FileContainer createFile(FileOptions opts, StorageHubClient sgClient) throws StorageHubException {
	// Normalize the file name before uploading the stream to the parent folder.
	String safeName = Files.fixFilename(opts.getFileName());
	opts.setFileName(safeName);
	return opts.getParent().uploadFile(opts.getIs(), safeName, opts.getFileDescription());
}
}

View File

@ -9,6 +9,7 @@ import org.bson.Document;
import org.bson.types.ObjectId;
import org.gcube.application.geoportal.common.model.legacy.AssociatedContent;
import org.gcube.application.geoportal.common.model.legacy.Concessione;
import org.gcube.application.geoportal.common.model.legacy.GeoServerContent;
import org.gcube.application.geoportal.common.model.legacy.LayerConcessione;
import org.gcube.application.geoportal.common.model.legacy.OtherContent;
import org.gcube.application.geoportal.common.model.legacy.PersistedContent;
@ -16,14 +17,20 @@ import org.gcube.application.geoportal.common.model.legacy.RelazioneScavo;
import org.gcube.application.geoportal.common.model.legacy.SDILayerDescriptor;
import org.gcube.application.geoportal.common.model.legacy.UploadedImage;
import org.gcube.application.geoportal.common.model.legacy.WorkspaceContent;
import org.gcube.application.geoportal.common.model.legacy.report.PublicationReport;
import org.gcube.application.geoportal.common.model.legacy.report.ValidationReport;
import org.gcube.application.geoportal.common.model.legacy.report.ValidationReport.ValidationStatus;
import org.gcube.application.geoportal.common.rest.TempFile;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
import org.gcube.application.geoportal.model.fault.PublishException;
import org.gcube.application.geoportal.service.engine.ImplementationProvider;
import org.gcube.application.geoportal.service.engine.SDIManager;
import org.gcube.application.geoportal.service.engine.StorageClientProvider;
import org.gcube.application.geoportal.service.engine.WorkspaceManager;
import org.gcube.application.geoportal.service.engine.WorkspaceManager.FileOptions;
import org.gcube.application.geoportal.service.engine.WorkspaceManager.FolderOptions;
import org.gcube.application.geoportal.service.model.internal.faults.InvalidStateException;
import org.gcube.application.geoportal.service.model.internal.faults.SDIInteractionException;
import org.gcube.application.geoportal.service.utils.Serialization;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
@ -37,18 +44,18 @@ import lombok.extern.slf4j.Slf4j;
@Slf4j
public class ConcessioniMongoManager extends MongoManager{
/**
 * Creates a manager bound to the configured MongoDB backend.
 *
 * @throws ConfigurationException if the underlying Mongo client cannot be obtained
 */
public ConcessioniMongoManager() throws ConfigurationException {
	super();
}
private static final String collectionName="legacyConcessioni";
private static final String DB_NAME="gna_dev";
private MongoDatabase db=null;
@Override
@Synchronized
protected MongoDatabase getDatabase() {
@ -57,31 +64,47 @@ public class ConcessioniMongoManager extends MongoManager{
}
return db;
}
protected static Document asDocument (Concessione c) throws JsonProcessingException {
Document toReturn=Document.parse(Serialization.write(c));
if(c.getMongo_id()!=null&&!c.getMongo_id().isEmpty())
toReturn.append(ID, new ObjectId(c.getMongo_id()));
toReturn.append(ID, asId(c.getMongo_id()));
return toReturn;
}
// Deserializes a Mongo Document (via its JSON form) back into a Concessione bean.
protected static Concessione asConcessione (Document d) throws JsonProcessingException, IOException {
return Serialization.read(d.toJson(), Concessione.class);
}
// *** PUBLIC METHODS
public Concessione registerNew(Concessione toRegister) throws IOException {
log.trace("Registering {} ",toRegister);
toRegister.setDefaults();
ObjectId id=insert(asDocument(toRegister), collectionName);
Concessione toReturn=asConcessione(getById(id,collectionName));
toReturn.setMongo_id(id.toHexString());
toReturn.setMongo_id(asString(id));
return asConcessione(replace(asDocument(toReturn),collectionName));
}
public Concessione update(Concessione toRegister) throws IOException {
public Concessione replace(Concessione toRegister) throws IOException {
log.trace("Replacing {} ",toRegister);
toRegister.setDefaults();
return asConcessione(replace(asDocument(toRegister),collectionName));
}
public Concessione update(String id,String json) throws IOException {
log.trace("Updating id {} with {} ",id,json);
Concessione toReturn=asConcessione(update(asId(id),asDoc(json),collectionName));
log.debug("Refreshing defaults..");
toReturn.setDefaults();
return asConcessione(replace(asDocument(toReturn),collectionName));
}
public List<Concessione> list(){
ArrayList<Concessione> toReturn=new ArrayList<>();
iterate(null, collectionName).forEach((Document d)->{
@ -94,62 +117,135 @@ public class ConcessioniMongoManager extends MongoManager{
});
return toReturn;
}
public Concessione getById(String id) throws JsonProcessingException, IOException {
log.debug("Loading by ID "+id);
return asConcessione(getById(new ObjectId(id),collectionName));
return asConcessione(getById(asId(id),collectionName));
}
public void deleteById(String id) {
delete(new ObjectId(id), collectionName);
delete(asId(id), collectionName);
}
public Concessione publish(String id) throws JsonProcessingException, IOException{
Concessione toReturn=asConcessione(getById(new ObjectId(id),collectionName));
public Concessione publish(String id) throws JsonProcessingException, IOException, InvalidStateException{
Concessione toReturn=asConcessione(getById(asId(id),collectionName));
toReturn.setDefaults();
toReturn.validate();
publish(toReturn);
// MATERIALIZE LAYERS
toReturn=publish(toReturn);
// replace(asDocument(toReturn),collectionName);
// CREATE INDEXES
toReturn=index(toReturn);
// replace(asDocument(toReturn),collectionName);
return asConcessione(replace(asDocument(toReturn),collectionName));
}
public Concessione persistContent(String id, String destinationPath, List<TempFile> files) throws Exception {
Concessione c = getById(id);
WorkspaceManager ws=new WorkspaceManager();
c.setDefaults();
//Check Init Base folder
FolderContainer baseFolder=null;
if(c.getFolderId()==null) {
String folderName="mConcessione"+"_"+c.getNome()+"_"+Serialization.FULL_FORMATTER.format(LocalDateTime.now());
log.info("Creating folder {} for Concessione ID {} ",folderName,id);
FolderContainer folder=ws.createFolder(new FolderOptions(folderName, "Base Folder for "+c.getNome(),null));
c.setFolderId(folder.getId());
public Concessione persistContent(String id, String destinationPath, List<TempFile> files) throws Exception{
log.info("Persisting {} files for path {} in concessione ",files.size(),destinationPath,id);
try{
Concessione c = getById(id);
WorkspaceManager ws=new WorkspaceManager();
//Check Init Base folder
FolderContainer baseFolder=null;
if(c.getFolderId()==null) {
String folderName=Files.fixFilename("mConcessione"+"_"+c.getNome()+"_"+Serialization.FULL_FORMATTER.format(LocalDateTime.now()));
log.info("Creating folder {} for Concessione ID {} ",folderName,id);
FolderContainer folder=ws.createFolder(new FolderOptions(folderName, "Base Folder for "+c.getNome(),null));
c.setFolderId(folder.getId());
}
log.debug("Folder id is : "+c.getFolderId());
baseFolder=ws.getFolderById(c.getFolderId());
AssociatedContent section=c.getContentByPath(destinationPath);
log.debug("Found section {} for path {}",section,destinationPath);
store(section,files,ws,baseFolder);
log.debug("Updating dafults for {} ",c);
c.setDefaults();
return asConcessione(replace(asDocument(c),collectionName));
}catch(Exception e) {
throw new Exception("Unable to save file.",e);
}
baseFolder=ws.getFolderById(c.getFolderId());
AssociatedContent section=c.getContentByPath(destinationPath);
store(section,files,ws,baseFolder);
return asConcessione(replace(asDocument(c),collectionName));
}
private static PublicationReport publish(Concessione c) {
//TODO implement
return null;
// Registers the record's centroid in the PostGIS index. Indexing failures are
// logged and recorded as warnings; the record itself is always returned.
// NOTE(review): 'report' is populated but never attached to the record here —
// confirm whether it should be set on it (publish() does conc.setReport(...)).
private static Concessione index(Concessione record) {
log.info("Indexing {} ",record.getId());
ValidationReport report= new ValidationReport("Index Report ");
PostgisIndex index;
try {
index = new PostgisIndex(record);
index.registerCentroid();
report.addMessage(ValidationStatus.PASSED, "Registered centroid");
} catch (SDIInteractionException | PublishException e) {
log.error("Unable to index {} ",record,e);
report.addMessage(ValidationStatus.WARNING, "Internal error while indexing.");
}
return record;
}
/**
 * Publishes the record's layers (posizionamento scavo + piante fine scavo) to the SDI.
 * Each layer is pushed best-effort: a failure on one layer is recorded as a WARNING
 * in the report and publication continues with the next. The report is attached to
 * the record before returning it.
 *
 * BUGFIX: the PASSED message was previously added even when pushShapeLayerFileSet
 * had thrown (the catch block does not abort the loop iteration's reporting);
 * it is now added only on the success path.
 *
 * @param conc the record to publish; mutated in place (layer contents, report)
 * @return the same record, with its publication report set
 */
private static Concessione publish(Concessione conc) {
	// CHECK CONDITION BY PROFILE
	log.debug("Publishing "+conc.getNome());
	ValidationReport report=new ValidationReport("Publish report");
	try {
		SDIManager sdiManager=new SDIManager();
		ArrayList<AssociatedContent> list=new ArrayList<AssociatedContent>();
		//Concessione
		String workspace= sdiManager.createWorkspace("gna_conc_"+conc.getMongo_id());
		list.add(conc.getPosizionamentoScavo());
		list.addAll(conc.getPianteFineScavo());

		for(AssociatedContent c:list) {
			if(c instanceof LayerConcessione) {
				try {
					List<PersistedContent> p=c.getActualContent();
					GeoServerContent geoserverPersisted=sdiManager.pushShapeLayerFileSet((SDILayerDescriptor)c, workspace, conc.getMongo_id());
					p.add(geoserverPersisted);
					c.setActualContent(p);
					// Success is reported only when the push actually completed.
					report.addMessage(ValidationStatus.PASSED, "Pubblicato layer "+c.getTitolo());
				}catch(SDIInteractionException e) {
					log.warn("Unable to publish layers.",e);
					report.addMessage(ValidationStatus.WARNING, "Layer "+c.getTitolo()+" non pubblicato.");
				}
			}
		}
	} catch (SDIInteractionException e1) {
		report.addMessage(ValidationStatus.WARNING, "Unable to publish layers "+e1.getMessage());
	}
	conc.setReport(report);
	return conc;
}
private static final void store(AssociatedContent content,List<TempFile> files, WorkspaceManager ws, FolderContainer base) throws Exception {
FolderContainer sectionParent=null;
if(content instanceof RelazioneScavo)
sectionParent = ws .createFolder(new FolderOptions(
"relazione","Relazione di scavo : "+content.getTitolo(),base));
else if (content instanceof UploadedImage)
sectionParent = ws .createFolder(new FolderOptions(
"imgs","Immagini rappresentative : "+content.getTitolo(),base));
else if (content instanceof SDILayerDescriptor)
//SDI Section
if(content instanceof LayerConcessione)
@ -160,7 +256,7 @@ public class ConcessioniMongoManager extends MongoManager{
sectionParent = ws .createFolder(new FolderOptions(
content.getTitolo(),"Relazione di scavo : "+content.getTitolo(),ws.getSubFolder(base,"other")));
else throw new Exception("Invalid Content "+content);
content.setActualContent(new ArrayList<PersistedContent>());
StorageClientProvider storage=ImplementationProvider.get().getStorageProvider();
for(TempFile f : files) {
@ -169,5 +265,5 @@ public class ConcessioniMongoManager extends MongoManager{
content.getActualContent().add(wsContent);
}
}
}

View File

@ -13,6 +13,7 @@ import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.FindOneAndReplaceOptions;
import com.mongodb.client.model.FindOneAndUpdateOptions;
import com.mongodb.client.model.ReturnDocument;
import lombok.extern.slf4j.Slf4j;
@ -24,6 +25,11 @@ public abstract class MongoManager {
protected static final String ID="_id";
// Conversion helpers between Mongo ObjectId / Document and their String / JSON forms.
protected static final ObjectId asId(String id) {return new ObjectId(id);}
protected static final String asString(ObjectId id) {return id.toHexString();}
protected static final String asString(Document d) {return d.toJson();}
protected static final Document asDoc(String json) {return Document.parse(json);}
public MongoManager() throws ConfigurationException {
client=ImplementationProvider.get().getMongoClientProvider().getObject();
@ -100,7 +106,14 @@ public abstract class MongoManager {
}
/**
 * Applies the given update set to the document with the given id in the named
 * collection, returning the document as it appears AFTER the update.
 */
public Document update(ObjectId id, Document updateSet, String collectionName) {
	MongoCollection<Document> collection = getDatabase().getCollection(collectionName);
	FindOneAndUpdateOptions returnUpdated = new FindOneAndUpdateOptions().returnDocument(ReturnDocument.AFTER);
	return collection.findOneAndUpdate(eq(ID, id), updateSet, returnUpdated);
}
//********** PROFILES

View File

@ -0,0 +1,169 @@
package org.gcube.application.geoportal.service.engine.mongo;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.gcube.application.geoportal.common.model.legacy.Concessione;
import org.gcube.application.geoportal.model.db.DBConstants;
import org.gcube.application.geoportal.model.db.PostgisTable;
import org.gcube.application.geoportal.model.db.PostgisTable.Field;
import org.gcube.application.geoportal.model.db.PostgisTable.FieldType;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
import org.gcube.application.geoportal.model.fault.PublishException;
import org.gcube.application.geoportal.service.engine.SDIManager;
import org.gcube.application.geoportal.service.model.internal.faults.SDIInteractionException;
import org.gcube.application.geoportal.service.utils.Serialization;
import org.gcube.application.geoportal.storage.PostgisDBManager;
import org.gcube.application.geoportal.storage.PostgisDBManagerI;
import lombok.Getter;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class PostgisIndex {

	/** The record whose centroid is indexed. */
	@NonNull
	@Getter
	private Concessione record;

	private SDIManager sdiManager;

	/**
	 * Binds the index to a record and initializes the SDI layer manager.
	 *
	 * @throws SDIInteractionException if the SDI manager cannot be initialized
	 */
	public PostgisIndex(Concessione record) throws SDIInteractionException {
		super();
		this.record = record;
		this.sdiManager = new SDIManager();
	}

	/** Returns the PostGIS table holding the centroids for this record type. */
	protected PostgisTable getCentroidsTable() {
		return DBConstants.Concessioni.CENTROIDS;
	}

	/**
	 * Upserts the record's centroid row into the PostGIS centroids table (delete
	 * by product id, then insert) and ensures the centroid layer exists on GeoServer.
	 *
	 * @throws PublishException wrapping SQL, SDI or configuration failures
	 */
	public void registerCentroid() throws PublishException {
		try {
			log.debug("Evaluating Centroid");
			Map<String, String> centroidRow = evaluateCentroid();
			log.debug("Contacting postgis DB .. ");
			PostgisDBManagerI db = PostgisDBManager.get();
			PostgisTable centroidsTable = getCentroidsTable();
			log.debug("Inserting / updated centroid Row {} ", centroidRow);
			PreparedStatement ps = db.prepareInsertStatement(centroidsTable, true, true);
			// Remove any previous centroid of this record before inserting the fresh one.
			log.debug("Deleting centroid if present. ID is {}", record.getId());
			db.deleteByFieldValue(centroidsTable,
					new Field(DBConstants.Concessioni.PRODUCT_ID, FieldType.TEXT), record.getId() + "");
			centroidsTable.fillCSVPreparedStatament(centroidRow, ps, false);
			ps.executeUpdate();
			db.commit();
			initCentroidLayer();
		} catch (SQLException e) {
			log.warn("Unable to publish Centroid for record " + record, e);
			throw new PublishException("Unable to publish centroid.", e, null);
		} catch (SDIInteractionException e) {
			log.warn("Unable to publish Centroid Layer for record type " + getRecord().getRecordType(), e);
			throw new PublishException("Unable to publish centroid.", e, null);
		} catch (ConfigurationException e) {
			log.warn("Unable to contact centroids db " + getRecord().getRecordType(), e);
			throw new PublishException("Unable to publish centroid.", e, null);
		}
	}

	/** Ensures the GeoServer centroid layer over the PostGIS store is configured. */
	protected void initCentroidLayer() throws SDIInteractionException {
		log.debug("Checking for centroid layer configuration.. ");
		sdiManager.configureCentroidLayer("centroids_concessioni", "gna", "gna_postgis");
	}

	/** Best-effort removal of this record's centroid row; failures are only logged. */
	public void removeCentroid() {
		try {
			PostgisDBManagerI db = PostgisDBManager.get();
			PostgisTable centroidsTable = getCentroidsTable();
			log.debug("Deleting centroid if present. ID is {}", record.getId());
			db.deleteByFieldValue(centroidsTable,
					new Field(DBConstants.Concessioni.PRODUCT_ID, FieldType.TEXT), record.getId() + "");
		} catch (Exception e) {
			log.warn("Unable to remove centroid ", e);
		}
	}

	/**
	 * Builds the centroid row (column name -> value) for the record. Missing
	 * centroid coordinates are derived as the midpoint of the posizionamento
	 * scavo bounding box, best-effort.
	 */
	protected Map<String, String> evaluateCentroid() {
		// CENTROID
		Map<String, String> centroidsRow = new HashMap<String, String>();
		centroidsRow.put(DBConstants.Concessioni.PRODUCT_ID, record.getId() + "");
		centroidsRow.put(DBConstants.Concessioni.ANNO, record.getDataInizioProgetto().getYear() + "");
		centroidsRow.put(DBConstants.Concessioni.NOME, record.getNome());
		centroidsRow.put(DBConstants.Concessioni.REGIONE, ""); // TODO

		if (record.getCentroidLat() == null || record.getCentroidLat() == 0)
			try {
				log.debug("Evaluating Centroid latitude for record {}", record);
				record.setCentroidLat((record.getPosizionamentoScavo().getBbox().getMaxLat()
						+ record.getPosizionamentoScavo().getBbox().getMinLat()) / 2);
			} catch (Throwable t) {
				log.warn("Unable to evaluate centroid latitude " + t);
			}

		if (record.getCentroidLong() == null || record.getCentroidLong() == 0)
			try {
				log.debug("Evaluating Centroid Longitude for record {}", record);
				record.setCentroidLong((record.getPosizionamentoScavo().getBbox().getMaxLong()
						+ record.getPosizionamentoScavo().getBbox().getMinLong()) / 2);
			} catch (Throwable t) {
				// BUGFIX: this catch previously logged "latitude" (copy-paste error).
				log.warn("Unable to evaluate centroid longitude " + t);
			}

		centroidsRow.put(DBConstants.Defaults.XCOORD_FIELD, record.getCentroidLong() + "");
		centroidsRow.put(DBConstants.Defaults.YCOORD_FIELD, record.getCentroidLat() + "");

		// Updated Schema
		centroidsRow.put(DBConstants.Concessioni.DESCRIZIONE, record.getIntroduzione());
		centroidsRow.put(DBConstants.Concessioni.CONTENUTO, record.getDescrizioneContenuto());
		centroidsRow.put(DBConstants.Concessioni.AUTORE, asString(record.getAuthors()));
		centroidsRow.put(DBConstants.Concessioni.CONTRIBUTORE, record.getContributore());
		centroidsRow.put(DBConstants.Concessioni.TITOLARE, asString(record.getTitolari()));
		centroidsRow.put(DBConstants.Concessioni.RESPONSABILE, record.getResponsabile());
		centroidsRow.put(DBConstants.Concessioni.EDITORE, record.getEditore());
		centroidsRow.put(DBConstants.Concessioni.FINANZIAMENTO, asString(record.getFontiFinanziamento()));
		centroidsRow.put(DBConstants.Concessioni.SOGGETTO, asString(record.getSoggetto()));
		centroidsRow.put(DBConstants.Concessioni.RISORSE, asString(record.getRisorseCorrelate()));
		centroidsRow.put(DBConstants.Concessioni.DATE_SCAVO, Serialization.FULL_FORMATTER.format(record.getDataFineProgetto()));
		centroidsRow.put(DBConstants.Concessioni.DATA_ARCHIVIAZIONE, Serialization.FULL_FORMATTER.format(record.getLastUpdateTime()));
		centroidsRow.put(DBConstants.Concessioni.VERSIONE, record.getVersion());
		centroidsRow.put(DBConstants.Concessioni.LICENZA, record.getLicenzaID());
		centroidsRow.put(DBConstants.Concessioni.TITOLARE_LICENZA, asString(record.getTitolareLicenza()));
		centroidsRow.put(DBConstants.Concessioni.ACCESSO, record.getPolicy().toString());
		centroidsRow.put(DBConstants.Concessioni.PAROLE_CHIAVE, asString(record.getParoleChiaveLibere()));

		return centroidsRow;
	}

	/** Joins the collection's elements with commas; empty string for null/empty input. */
	private static String asString(Collection<?> coll) {
		if (coll == null || coll.isEmpty()) return "";
		StringBuilder builder = new StringBuilder();
		for (Object t : coll) {
			builder.append(t.toString() + ",");
		}
		// Drop the trailing separator appended after the last element.
		return builder.substring(0, builder.lastIndexOf(","));
	}
}

View File

@ -0,0 +1,38 @@
package org.gcube.application.geoportal.service.model.internal.faults;
/**
 * Signals that an operation was requested on a record whose current state does
 * not allow it (e.g. publishing a record that does not pass validation).
 */
public class InvalidStateException extends Exception {

	private static final long serialVersionUID = 8926481061304048080L;

	public InvalidStateException() {
		super();
	}

	public InvalidStateException(String message, Throwable cause, boolean enableSuppression,
			boolean writableStackTrace) {
		super(message, cause, enableSuppression, writableStackTrace);
	}

	public InvalidStateException(String message, Throwable cause) {
		super(message, cause);
	}

	public InvalidStateException(String message) {
		super(message);
	}

	public InvalidStateException(Throwable cause) {
		super(cause);
	}
}

View File

@ -0,0 +1,38 @@
package org.gcube.application.geoportal.service.model.internal.faults;
/**
 * Wraps failures raised while interacting with the SDI
 * (GeoServer publishing, Data Transfer, workspace/store/style management).
 */
public class SDIInteractionException extends Exception {

	private static final long serialVersionUID = 1L;

	public SDIInteractionException() {
		super();
	}

	public SDIInteractionException(String message, Throwable cause, boolean enableSuppression,
			boolean writableStackTrace) {
		super(message, cause, enableSuppression, writableStackTrace);
	}

	public SDIInteractionException(String message, Throwable cause) {
		super(message, cause);
	}

	public SDIInteractionException(String message) {
		super(message);
	}

	public SDIInteractionException(Throwable cause) {
		super(cause);
	}
}

View File

@ -41,7 +41,7 @@ public class ConcessioniOverMongo {
protected String run() throws Exception, WebApplicationException {
Concessione c=Serialization.read(jsonString, Concessione.class);
ConcessioniMongoManager manager=new ConcessioniMongoManager();
manager.update(c);
manager.replace(c);
return Serialization.write(manager.getById(c.getMongo_id()));
}
@ -120,7 +120,7 @@ public class ConcessioniOverMongo {
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Path("{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public String replace(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id,String jsonString) {
public String update(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id,String jsonString) {
return new GuardedMethod<String> () {
@Override
protected String run() throws Exception, WebApplicationException {

View File

@ -1,10 +1,12 @@
package org.gcube.application.geoportal.service;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.FileInputStream;
import java.util.Collections;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.client.Entity;
@ -14,99 +16,112 @@ import javax.ws.rs.core.Response;
import org.gcube.application.geoportal.common.model.legacy.Concessione;
import org.gcube.application.geoportal.common.model.legacy.Concessione.Paths;
import org.gcube.application.geoportal.common.model.legacy.report.ValidationReport;
import org.gcube.application.geoportal.common.model.legacy.report.ValidationReport.ValidationStatus;
import org.gcube.application.geoportal.common.rest.AddSectionToConcessioneRequest;
import org.gcube.application.geoportal.common.rest.TempFile;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.common.utils.StorageUtils;
import org.gcube.application.geoportal.service.legacy.TokenSetter;
import org.gcube.application.geoportal.service.utils.Serialization;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.fasterxml.jackson.core.JsonProcessingException;
public class ConcessioniOverMongoTest extends BasicServiceTestUnit{
private static final String PATH="mongo-concessioni";
private static final String PUBLISH_PATH="publish";
private static final String FILES_PATH="registerFiles";
/** Runs before each test: binds the execution context to the dev VRE scope token. */
@Before
public void setContext() {
TokenSetter.set("/gcube/devsec/devVRE");
}
/**
 * Uploads the given classpath resources (under concessioni/) to storage and
 * registers them as content of the section at {@code path} for project {@code id}.
 *
 * @return the updated Concessione returned by the service
 */
private static Concessione upload(WebTarget target,String id, String path, String ...files) throws Exception {
	ArrayList<TempFile> uploaded = new ArrayList<TempFile>();
	for (String resourceName : files) {
		FileInputStream stream = new FileInputStream(Files.getFileFromResources("concessioni/" + resourceName));
		uploaded.add(new StorageUtils().putOntoStorage(stream, resourceName));
	}
	AddSectionToConcessioneRequest request = new AddSectionToConcessioneRequest();
	request.setDestinationPath(path);
	request.setStreams(uploaded);
	Response response = target.path(FILES_PATH).path(id)
			.request(MediaType.APPLICATION_JSON)
			.post(Entity.entity(Serialization.write(request), MediaType.APPLICATION_JSON));
	return check(response, Concessione.class);
}
/** PUTs the serialized project to the publish endpoint and returns the service's result. */
private static Concessione publish(WebTarget target, Concessione conc) throws Exception {
	Response response = target.path(PUBLISH_PATH)
			.path(conc.getMongo_id())
			.request(MediaType.APPLICATION_JSON)
			.put(Entity.entity(Serialization.write(conc), MediaType.APPLICATION_JSON));
	return check(response, Concessione.class);
}
/** POSTs a new project to the collection root and returns the stored copy (with mongo id set). */
private static Concessione register(WebTarget target, Concessione c) throws Exception {
	return check(
			target.request(MediaType.APPLICATION_JSON)
					.post(Entity.entity(Serialization.write(c), MediaType.APPLICATION_JSON)),
			Concessione.class);
}
/** Convenience: registers a freshly prepared test project and returns it. */
private static Concessione get(WebTarget target) throws Exception {
	Concessione prepared = TestModel.prepareConcessione();
	return register(target, prepared);
}
// ********** TESTS
/** Smoke test: GETs the whole collection and dumps it to stdout. */
@Test
public void list() {
	List<?> all = target(PATH).request(MediaType.APPLICATION_JSON).get(List.class);
	System.out.println(all);
}
@Test
public void createNew() throws Exception {
WebTarget target=target(PATH);
Concessione conc=TestModel.prepareConcessione();
Response resp=target.request(MediaType.APPLICATION_JSON).post(Entity.entity(Serialization.write(conc), MediaType.APPLICATION_JSON));
Concessione c=check(resp,Concessione.class);
Concessione c=register(target,TestModel.prepareConcessione());
Assert.assertTrue(c.getMongo_id()!=null&&!c.getMongo_id().isEmpty());
System.out.println("ID IS "+c.getMongo_id());
}
@Test
public void delete() throws Exception {
WebTarget target=target(PATH);
Concessione conc=TestModel.prepareConcessione();
Response resp=target.request(MediaType.APPLICATION_JSON).post(Entity.entity(Serialization.write(conc), MediaType.APPLICATION_JSON));
Concessione c=check(resp,Concessione.class);
System.out.println("ID IS "+c.getMongo_id());
Assert.assertTrue(c.getMongo_id()!=null&&!c.getMongo_id().isEmpty());
resp=target.path(c.getMongo_id()).request(MediaType.APPLICATION_JSON).delete();
check(resp,null);
Concessione c = get(target);
check(target.path(c.getMongo_id()).request(MediaType.APPLICATION_JSON).delete(),null);
}
@Test
public void getById() throws Exception {
WebTarget target=target(PATH);
Concessione conc=TestModel.prepareConcessione();
Response resp=target.request(MediaType.APPLICATION_JSON).post(Entity.entity(Serialization.write(conc), MediaType.APPLICATION_JSON));
Concessione c=check(resp,Concessione.class);
Assert.assertTrue(c.getMongo_id()!=null&&!c.getMongo_id().isEmpty());
resp=target.path(c.getMongo_id()).request(MediaType.APPLICATION_JSON).get();
Concessione c = get(target);
Response resp=target.path(c.getMongo_id()).request(MediaType.APPLICATION_JSON).get();
Concessione loaded=check(resp,Concessione.class);
System.out.println("Got by ID "+loaded);
Assert.assertTrue(loaded.getMongo_id()!=null&&!loaded.getMongo_id().isEmpty());
System.out.println("Got by ID "+loaded);
}
@Test
public void update() throws Exception {
WebTarget target=target(PATH);
Concessione conc=TestModel.prepareConcessione();
Response resp=target.request(MediaType.APPLICATION_JSON).post(Entity.entity(Serialization.write(conc), MediaType.APPLICATION_JSON));
Concessione c=check(resp,Concessione.class);
resp=target.path(c.getMongo_id()).request(MediaType.APPLICATION_JSON).get();
Concessione loaded=check(resp,Concessione.class);
Assert.assertTrue(c.getMongo_id()!=null&&!c.getMongo_id().isEmpty());
System.out.println("Modifying "+loaded);
String newTitle="Questo titolo l'ho modificato mo'";
loaded.setNome(newTitle);
resp=target.request(MediaType.APPLICATION_JSON).put(Entity.entity(Serialization.write(loaded), MediaType.APPLICATION_JSON));
WebTarget target=target(PATH);
Concessione c = get(target);
String newTitle="Questo titolo l'ho modificato mo nel test quello proprio apposta pewr questa cosa'";
c.setNome(newTitle);
Response resp=target.request(MediaType.APPLICATION_JSON).put(Entity.entity(Serialization.write(c), MediaType.APPLICATION_JSON));
Assert.assertTrue(check(resp,Concessione.class).getNome().equals(newTitle));
}
@Test
public void uploadFile() throws Exception {
WebTarget target=target(PATH);
@ -114,45 +129,57 @@ public class ConcessioniOverMongoTest extends BasicServiceTestUnit{
Concessione c=check(resp,Concessione.class);
Assert.assertTrue(c.getMongo_id()!=null&&!c.getMongo_id().isEmpty());
System.out.println("ID IS "+c.getMongo_id());
// Insert section
c.setRelazioneScavo(TestModel.prepareConcessione().getRelazioneScavo());
// c.getRelazioneScavo().setMongo_id(TestModel.rnd());
// c.getRelazioneScavo().setMongo_id(TestModel.rnd());
resp=target.request(MediaType.APPLICATION_JSON).put(Entity.entity(Serialization.write(c), MediaType.APPLICATION_JSON));
c=check(resp,Concessione.class);
// Add file
TempFile f=new StorageUtils().putOntoStorage(new FileInputStream(Files.getFileFromResources("concessioni/relazione.pdf")), "relazione.pdf");
AddSectionToConcessioneRequest request=new AddSectionToConcessioneRequest();
request.setDestinationPath(Paths.RELAZIONE);
request.setStreams(Collections.singletonList(f));
resp=target.path(FILES_PATH).path(c.getMongo_id()).request(MediaType.APPLICATION_JSON).post(Entity.entity(Serialization.write(request), MediaType.APPLICATION_JSON));
c=check(resp,Concessione.class);
c=upload(target,c.getMongo_id(),Paths.RELAZIONE,"relazione.pdf");
assertNotNull(c.getRelazioneScavo().getActualContent());
assertTrue(c.getRelazioneScavo().getActualContent().size()>0);
System.out.println("ADDED FILE TO "+c);
System.out.println("File is "+c.getRelazioneScavo().getActualContent().get(0));
}
@Test
public void publish() throws Exception {
WebTarget target=target(PATH);
Concessione conc=TestModel.prepareConcessione();
Response resp=target.request(MediaType.APPLICATION_JSON).post(Entity.entity(Serialization.write(conc), MediaType.APPLICATION_JSON));
Concessione c=check(resp,Concessione.class);
// System.out.println("ID IS "+c.getMongo_id());
resp=target.path(c.getMongo_id()).request(MediaType.APPLICATION_JSON).get();
Concessione loaded=check(resp,Concessione.class);
System.out.println("Modifying "+loaded);
String newTitle="Questo titolo l'ho modificato mo'";
loaded.setNome(newTitle);
Concessione c=TestModel.prepareConcessione();
resp=target.path(PUBLISH_PATH).path(c.getMongo_id()).request(MediaType.APPLICATION_JSON).
put(Entity.entity(Serialization.write(conc), MediaType.APPLICATION_JSON));
c.setNome("Concessione : publish test");
// Register new
c=register(target,c);
//Upload files
upload(target,c.getMongo_id(),Paths.RELAZIONE,"relazione.pdf");
upload(target,c.getMongo_id(),Paths.POSIZIONAMENTO,"pos.shp","pos.shx");
upload(target,c.getMongo_id(),Paths.piantaByIndex(0),"pos.shp","pos.shx");
upload(target,c.getMongo_id(),Paths.imgByIndex(0),"immagine.png");
upload(target,c.getMongo_id(),Paths.imgByIndex(1),"immagine2.png");
// Immagini
Concessione published=publish(target, c);
System.out.println("Published : "+published);
assertNotNull(published.getReport());
assertNotEquals(published.getReport().getStatus(),ValidationStatus.ERROR);
// TODO Validation
// Assert.assertTrue(check(resp,Concessione.class).getNome().equals(newTitle));
}
}

View File

@ -1,5 +1,6 @@
log4j.rootLogger=DEBUG, stdout
log4j.rootLogger=WARN, stdout
log4j.org.gcube.Threshold=DEBUG
#CONSOLE
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Threshold=DEBUG