diff --git a/CHANGELOG.md b/CHANGELOG.md
index e67f6bb..f39b456 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,12 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm
# Changelog for org.gcube.application.geoportal-service
+## [v1.0.4] 2020-11-11
+Mongo integration with Concessione
+Project interface
+TempFile management
+WorkspaceContent and publication for Concessioni-over-mongo
+
## [v1.0.3] 2020-11-11
Fixed HTTP method
diff --git a/pom.xml b/pom.xml
index 619ed18..29c5d3f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
4.0.0
org.gcube.application
geoportal-service
- 1.0.3
+ 1.0.4
Geoportal Service
war
diff --git a/src/main/java/org/gcube/application/geoportal/service/GeoPortalService.java b/src/main/java/org/gcube/application/geoportal/service/GeoPortalService.java
index 471012b..5d89e34 100644
--- a/src/main/java/org/gcube/application/geoportal/service/GeoPortalService.java
+++ b/src/main/java/org/gcube/application/geoportal/service/GeoPortalService.java
@@ -4,6 +4,7 @@ import javax.ws.rs.ApplicationPath;
import org.gcube.application.geoportal.common.rest.InterfaceConstants;
import org.gcube.application.geoportal.service.rest.Concessioni;
+import org.gcube.application.geoportal.service.rest.ConcessioniOverMongo;
import org.gcube.application.geoportal.service.rest.Profiles;
import org.gcube.application.geoportal.service.rest.Projects;
import org.gcube.application.geoportal.service.rest.Sections;
@@ -18,6 +19,7 @@ public class GeoPortalService extends ResourceConfig{
super();
//Register interrfaces
registerClasses(Concessioni.class);
+ registerClasses(ConcessioniOverMongo.class);
registerClasses(Projects.class);
registerClasses(Sections.class);
registerClasses(Profiles.class);
diff --git a/src/main/java/org/gcube/application/geoportal/service/engine/ImplementationProvider.java b/src/main/java/org/gcube/application/geoportal/service/engine/ImplementationProvider.java
index eb6f22a..b6059b5 100644
--- a/src/main/java/org/gcube/application/geoportal/service/engine/ImplementationProvider.java
+++ b/src/main/java/org/gcube/application/geoportal/service/engine/ImplementationProvider.java
@@ -41,6 +41,10 @@ public class ImplementationProvider {
private EMFProvider emfProvider=new ScopedEMFProvider();
+ @Getter
+ @Setter
+ private StorageHubProvider sHubProvider=new StorageHubProvider();
+
public void shutdown() {
// Stop JPA
AbstractRecordManager.shutdown();
diff --git a/src/main/java/org/gcube/application/geoportal/service/engine/SDIManager.java b/src/main/java/org/gcube/application/geoportal/service/engine/SDIManager.java
new file mode 100644
index 0000000..54e3526
--- /dev/null
+++ b/src/main/java/org/gcube/application/geoportal/service/engine/SDIManager.java
@@ -0,0 +1,322 @@
+package org.gcube.application.geoportal.service.engine;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.net.URL;
+
+import org.gcube.application.geoportal.common.model.legacy.BBOX;
+import org.gcube.application.geoportal.common.model.legacy.GeoServerContent;
+import org.gcube.application.geoportal.common.model.legacy.PersistedContent;
+import org.gcube.application.geoportal.common.model.legacy.SDILayerDescriptor;
+import org.gcube.application.geoportal.common.model.legacy.WorkspaceContent;
+import org.gcube.application.geoportal.common.utils.Files;
+import org.gcube.application.geoportal.service.model.internal.faults.SDIInteractionException;
+import org.gcube.common.storagehub.client.dsl.FileContainer;
+import org.gcube.data.transfer.library.DataTransferClient;
+import org.gcube.data.transfer.library.TransferResult;
+import org.gcube.data.transfer.library.faults.RemoteServiceException;
+import org.gcube.data.transfer.model.Destination;
+import org.gcube.data.transfer.model.DestinationClashPolicy;
+import org.gcube.data.transfer.model.RemoteFileDescriptor;
+import org.gcube.spatial.data.gis.GISInterface;
+import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;
+
+import it.geosolutions.geoserver.rest.GeoServerRESTPublisher;
+import it.geosolutions.geoserver.rest.GeoServerRESTPublisher.UploadMethod;
+import it.geosolutions.geoserver.rest.GeoServerRESTReader;
+import it.geosolutions.geoserver.rest.decoder.RESTFeatureType;
+import it.geosolutions.geoserver.rest.decoder.RESTLayer;
+import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder;
+import it.geosolutions.geoserver.rest.encoder.datastore.GSPostGISDatastoreEncoder;
+import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder;
+import lombok.Getter;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class SDIManager {
+
+ static private String DEFAULT_CRS="EPSG:4326";
+
+
+ private GISInterface gis;
+ @Getter
+ private DataTransferClient dtGeoServer;
+ private String geoserverHostName;
+
+
+ public SDIManager() throws SDIInteractionException {
+ try{
+ log.debug("Initializing GIS Interface..");
+ gis=GISInterface.get();
+ AbstractGeoServerDescriptor geoserver=gis.getCurrentGeoServer();
+ if(geoserver==null)
+ throw new Exception("Unable to contact data transfer for geoserver ");
+
+ log.debug("Found geoserver descriptor "+geoserver);
+ geoserverHostName=new URL(gis.getCurrentGeoServer().getUrl()).getHost();
+
+ log.debug("Contacting Data Transfer from geoserver {} ",geoserverHostName);
+ dtGeoServer=DataTransferClient.getInstanceByEndpoint("http://"+geoserverHostName);
+ if(!gis.getCurrentGeoServer().getReader().existGeoserver())
+ throw new Exception("Geoserver not reachable");
+ }catch(Exception e) {
+ throw new SDIInteractionException("Unable to initialize SDI Manager",e);
+ }
+ }
+
+
+ public RemoteFileDescriptor getGeoServerRemoteFolder() throws RemoteServiceException {
+ return dtGeoServer.getWebClient().getInfo("geoserver/GNA");
+ }
+
+ public String createWorkspace(String toCreate) throws SDIInteractionException {
+ try {
+ if(!gis.getCurrentGeoServer().getReader().getWorkspaceNames().contains(toCreate)) {
+ log.debug("Creating workspace : "+toCreate);
+ if(!gis.getCurrentGeoServer().getPublisher().createWorkspace(toCreate))
+ throw new SDIInteractionException("Unable to create workspace "+toCreate);
+ }else log.debug("Workspace "+toCreate+" exists.");
+ return toCreate;
+ } catch (IllegalArgumentException | MalformedURLException e) {
+ throw new SDIInteractionException("Unable to create workspace "+toCreate,e);
+ }
+ }
+
+
+ // GEOSERVER-PERSISTENCE-ID / GNA / PROJECT-ID/ LAYER-ID /FILENAME(no extension)/...
+
+ public GeoServerContent pushShapeLayerFileSet(SDILayerDescriptor currentElement,String workspace, String projectId) throws SDIInteractionException{
+ try {
+// String remoteFolder=null;
+// String fileName=null;
+
+ log.debug("Publishing "+currentElement+" files to geoserver @ "+geoserverHostName);
+
+ GeoServerContent content=new GeoServerContent();
+ content.setGeoserverHostName(geoserverHostName);
+ content.setWorkspace(workspace);
+ WorkspaceManager wsManager=new WorkspaceManager();
+
+
+
+
+ currentElement.getActualContent().forEach((PersistedContent c)->{
+ try {
+ if(c instanceof WorkspaceContent) {
+ WorkspaceContent wc=(WorkspaceContent) c;
+ FileContainer fc=wsManager.getFileById(wc.getStorageID());
+
+ String completeFilename=Files.fixFilename(fc.get().getName());
+ String filename=completeFilename.contains(".")?completeFilename.substring(0, completeFilename.lastIndexOf(".")):completeFilename;
+
+
+ Destination destination=new Destination(completeFilename);
+ destination.setCreateSubfolders(true);
+ destination.setOnExistingFileName(DestinationClashPolicy.REWRITE);
+ destination.setOnExistingSubFolder(DestinationClashPolicy.APPEND);
+
+ destination.setPersistenceId("geoserver");
+ destination.setSubFolder("GNA/"+projectId+"/"+
+ currentElement.getMongo_id()+"/"+filename);
+
+ log.debug("Sending "+wc+" to "+destination);
+ TransferResult result=dtGeoServer.httpSource(fc.getPublicLink(), destination);
+ log.debug("Transferred "+result);
+
+
+ content.getFileNames().add(completeFilename);
+
+ content.setGeoserverPath(result.getRemotePath().substring(0,result.getRemotePath().lastIndexOf("/")));
+ }
+ }catch(Throwable t) {
+ log.warn("Unable to transfer Persisted content "+c,t);
+ }
+
+ });
+
+
+
+
+
+
+ if(content.getFileNames().isEmpty())
+ throw new SDIInteractionException("No Persisted content found in "+currentElement);
+
+ String completeFileName=content.getFileNames().get(0);
+ String filename=completeFileName.contains(".")?completeFileName.substring(0, completeFileName.lastIndexOf(".")):completeFileName;
+
+ String remoteFolder=content.getGeoserverPath();
+
+ String toSetLayerName=filename;
+ int count=0;
+ while(gis.getCurrentGeoServer().getReader().getLayer(workspace,toSetLayerName)!=null){
+ count++;
+ toSetLayerName=filename+"_"+count;
+ log.debug("layer for "+filename+" already existing, trying "+toSetLayerName);
+ };
+
+
+ String storeName=toSetLayerName+"_store";
+ content.setStore(storeName);
+ content.setFeatureType(toSetLayerName);
+
+ GeoServerRESTPublisher publisher=gis.getCurrentGeoServer().getPublisher();
+ log.debug("Trying to create remote workspace : "+workspace);
+ createWorkspace(workspace);
+
+ log.debug("Publishing remote folder "+remoteFolder);
+
+ URL directoryPath=new URL("file:"+remoteFolder+"/"+filename+".shp");
+
+
+ //TODO Evaluate SRS
+
+ boolean published=publisher.publishShp(
+ workspace,
+ storeName,
+ null,
+ toSetLayerName,
+ // UploadMethod.FILE, // neeeds zip
+ UploadMethod.EXTERNAL, // needs shp
+ directoryPath.toURI(),
+ DEFAULT_CRS, //SRS
+ ""); // default style
+
+ if(!published) {
+ throw new SDIInteractionException("Unable to publish layer "+toSetLayerName+" under "+workspace+". Unknown Geoserver fault.");
+ }
+
+ currentElement.setLayerName(toSetLayerName);
+ GeoServerRESTReader reader=gis.getCurrentGeoServer().getReader();
+ RESTLayer l=reader.getLayer(workspace, toSetLayerName);
+ RESTFeatureType f= reader.getFeatureType(l);
+ /*http://geoserver1.dev.d4science.org/geoserver/gna_conc_18/wms?
+ service=WMS&version=1.1.0&request=GetMap&layers=gna_conc_18:pos&
+ styles=&bbox=8.62091913167495,40.62975046683799,8.621178639172953,40.630257904721645&
+ width=392&height=768&srs=EPSG:4326&format=application/openlayers */
+
+
+ currentElement.setWmsLink(
+ String.format("https://%1$s/geoserver/%2$s/wms?"
+ +"service=WMS&version=1.1.0&request=GetMap&layers=%2$s:%3$s&"
+ + "styles=&bbox=%4$f,%5$f,%6$f,%7$f&srs=%8$s&format=application/openlayers&width=%9$d&height=%10$d",
+ geoserverHostName,
+ workspace,
+ toSetLayerName,
+ f.getMinX(),
+ f.getMinY(),
+ f.getMaxX(),
+ f.getMaxY(),
+ DEFAULT_CRS,
+ 400,
+ 400));
+
+ currentElement.setWorkspace(workspace);
+ currentElement.setBbox(new BBOX(f.getMaxY(), f.getMaxX(), f.getMinY(), f.getMinX()));
+
+
+ // TODO Metadata
+ return content;
+// } catch (InvalidSourceException | SourceNotSetException | FailedTransferException | InitializationException
+// | InvalidDestinationException | DestinationNotSetException e) {
+// throw new SDIInteractionException("Unable to transfer fileSet for content "+currentElement,e);
+ } catch (SDIInteractionException e) {
+ throw e;
+ } catch (Throwable t) {
+ throw new SDIInteractionException("Unexpected internal fault while interacting with SDI.",t);
+ }
+ }
+
+ private String createStoreFromPostgisDB(String workspace,String storeName) throws SDIInteractionException {
+ //SET BY PROVISIONING
+ GSPostGISDatastoreEncoder encoder=new GSPostGISDatastoreEncoder(storeName);
+ encoder.setJndiReferenceName("java:comp/env/jdbc/postgres");
+ encoder.setLooseBBox(true);
+ encoder.setDatabaseType("postgis");
+ encoder.setEnabled(true);
+ encoder.setFetchSize(1000);
+ encoder.setValidateConnections(true);
+ try {
+ log.debug("Looking for datastore "+storeName+" under "+workspace);
+
+ if(gis.getCurrentGeoServer().getReader().getDatastore(workspace,storeName)==null)
+
+ if(!gis.getCurrentGeoServer().getDataStoreManager().create(workspace, encoder))
+ throw new SDIInteractionException("Unable to create store "+storeName+" in "+workspace);
+ log.debug("Store "+storeName+" is available under "+workspace);
+ return storeName;
+ } catch (IllegalArgumentException | MalformedURLException e) {
+ throw new SDIInteractionException("Unable to create store "+storeName,e);
+ }
+
+ }
+
+ private String publishStyle(File sldFile,String name) throws SDIInteractionException {
+ try {
+ if(!gis.getCurrentGeoServer().getReader().existsStyle(name)) {
+ log.debug("Registering style "+name);
+ if(!gis.getCurrentGeoServer().getPublisher().publishStyle(sldFile, name))
+ throw new SDIInteractionException("Unable to register style "+name);
+ }else log.debug("Style "+name+" already existing");
+ return name;
+ } catch (IllegalArgumentException | MalformedURLException e) {
+ throw new SDIInteractionException("Unable to create style "+name,e);
+ }
+
+ }
+
+ public String configureCentroidLayer(String name,String workspace,String storeName) throws SDIInteractionException {
+
+ GSFeatureTypeEncoder fte=new GSFeatureTypeEncoder();
+ fte.setAbstract("Centroid layer for "+name);
+ fte.setEnabled(true);
+ fte.setNativeCRS(DEFAULT_CRS);
+ fte.setTitle(name);
+ fte.setName(name);
+
+
+ String style="clustered_centroids";
+
+ GSLayerEncoder layerEncoder=new GSLayerEncoder();
+ layerEncoder.setDefaultStyle(style);
+ layerEncoder.setEnabled(true);
+ layerEncoder.setQueryable(true);
+ try {
+ //Checking workspace
+ createWorkspace(workspace);
+ //Checking store
+ createStoreFromPostgisDB(workspace, storeName);
+ //Checkig layer
+ publishStyle(Files.getFileFromResources("styles/clustered_points.sld"),style);
+ if(gis.getCurrentGeoServer().getReader().getLayer(workspace, name)==null)
+ if(!gis.getCurrentGeoServer().getPublisher().publishDBLayer(workspace, storeName, fte, layerEncoder))
+ throw new SDIInteractionException("Unable to create layer "+name);
+ log.debug("layer "+name+" is available");
+ return name;
+ } catch (IllegalArgumentException | MalformedURLException e) {
+ throw new SDIInteractionException("Unable to create layer "+name,e);
+ }
+
+
+ }
+
+
+ public void deleteContent(GeoServerContent toDelete) throws IllegalArgumentException, MalformedURLException, RemoteServiceException {
+ log.debug("Deleting geoserver layer "+toDelete);
+ //delete layer
+ GeoServerRESTPublisher publisher=gis.getCurrentGeoServer().getPublisher();
+ //delete store
+ publisher.removeDatastore(toDelete.getWorkspace(), toDelete.getStore(), true);
+ //delete WS if empty
+ GeoServerRESTReader reader=gis.getCurrentGeoServer().getReader();
+ if(reader.getDatastores(toDelete.getWorkspace()).isEmpty()) {
+ log.debug("Deleting empty workspace "+toDelete.getWorkspace());
+ publisher.removeWorkspace(toDelete.getWorkspace(), true);
+ }
+ //delete file
+
+ dtGeoServer.getWebClient().delete(toDelete.getGeoserverPath());
+ }
+
+
+}
diff --git a/src/main/java/org/gcube/application/geoportal/service/engine/StorageClientProvider.java b/src/main/java/org/gcube/application/geoportal/service/engine/StorageClientProvider.java
index 6ad3db3..a963a8d 100644
--- a/src/main/java/org/gcube/application/geoportal/service/engine/StorageClientProvider.java
+++ b/src/main/java/org/gcube/application/geoportal/service/engine/StorageClientProvider.java
@@ -39,6 +39,8 @@ public class StorageClientProvider extends AbstractScopedMap{
protected void dispose(IClient toDispose) {
try {
toDispose.close();
+ }catch (NullPointerException e) {
+ // expected if closed without uploading
}catch(Throwable t) {
log.warn(" unable to dispose "+toDispose,t);
}
diff --git a/src/main/java/org/gcube/application/geoportal/service/engine/StorageHubProvider.java b/src/main/java/org/gcube/application/geoportal/service/engine/StorageHubProvider.java
new file mode 100644
index 0000000..7aa2c8c
--- /dev/null
+++ b/src/main/java/org/gcube/application/geoportal/service/engine/StorageHubProvider.java
@@ -0,0 +1,25 @@
+package org.gcube.application.geoportal.service.engine;
+
+import org.gcube.application.geoportal.model.fault.ConfigurationException;
+import org.gcube.common.storagehub.client.dsl.StorageHubClient;
+
+public class StorageHubProvider implements Engine{
+
+
+ @Override
+ public StorageHubClient getObject() throws ConfigurationException {
+ return new StorageHubClient();
+ }
+
+ @Override
+ public void init() {
+ // TODO Auto-generated method stub
+
+ }
+ @Override
+ public void shustdown() {
+ // TODO Auto-generated method stub
+
+ }
+
+}
diff --git a/src/main/java/org/gcube/application/geoportal/service/engine/WorkspaceManager.java b/src/main/java/org/gcube/application/geoportal/service/engine/WorkspaceManager.java
new file mode 100644
index 0000000..c265e4e
--- /dev/null
+++ b/src/main/java/org/gcube/application/geoportal/service/engine/WorkspaceManager.java
@@ -0,0 +1,140 @@
+package org.gcube.application.geoportal.service.engine;
+
+import java.io.FileNotFoundException;
+import java.io.InputStream;
+
+import javax.validation.constraints.NotNull;
+
+import org.gcube.application.geoportal.common.model.legacy.WorkspaceContent;
+import org.gcube.application.geoportal.common.utils.Files;
+import org.gcube.application.geoportal.model.fault.ConfigurationException;
+import org.gcube.common.storagehub.client.dsl.FileContainer;
+import org.gcube.common.storagehub.client.dsl.FolderContainer;
+import org.gcube.common.storagehub.client.dsl.StorageHubClient;
+import org.gcube.common.storagehub.model.exceptions.StorageHubException;
+
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.NonNull;
+import lombok.RequiredArgsConstructor;
+import lombok.Setter;
+import lombok.Synchronized;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class WorkspaceManager {
+
+ private static final String APP_FOLDER=".GNA_RECORDS";
+
+
+ private StorageHubClient sgClient=null;
+ private FolderContainer appBase=null;
+
+ @Getter
+ @Setter
+ @AllArgsConstructor
+ @RequiredArgsConstructor
+ public static class FolderOptions{
+ @NotNull
+ private String folderName;
+ private String folderDescription;
+ private FolderContainer parent;
+ }
+
+ @Getter
+ @Setter
+ @AllArgsConstructor
+ @RequiredArgsConstructor
+ public static class FileOptions{
+ @NotNull
+ private String fileName;
+ @NonNull
+ private InputStream is;
+
+ private String fileDescription;
+ private FolderContainer parent;
+
+ }
+
+
+ public WorkspaceManager() throws ConfigurationException, StorageHubException {
+ sgClient= ImplementationProvider.get().getSHubProvider().getObject();
+ appBase=getApplicationBaseFolder(sgClient);
+ }
+
+ public FolderContainer createFolder(FolderOptions opts) throws StorageHubException {
+ if(opts.getParent()==null)
+ opts.setParent(appBase);
+ return createFolder(opts,sgClient);
+ }
+
+ public FileContainer getFileById(String id) throws StorageHubException {
+ return sgClient.open(id).asFile();
+ }
+
+ public FolderContainer getFolderById(String id) throws StorageHubException {
+ return sgClient.open(id).asFolder();
+ }
+
+ public FolderContainer getSubFolder(FolderContainer parentFolder,String path) throws StorageHubException {
+ try{
+ return parentFolder.openByRelativePath(path).asFolder();
+ }catch(StorageHubException e) {
+ log.debug("Missing subPath "+path);
+ FolderContainer targetParent=parentFolder;
+ String targetName=path;
+ if(path.contains("/")) {
+ String parent=path.substring(0, path.lastIndexOf("/"));
+ log.debug("Checking intermediate "+parent);
+ targetParent=getSubFolder(parentFolder,parent);
+ targetName=path.substring(path.lastIndexOf("/")+1);
+ }
+ log.debug("Creating "+targetName);
+ return createFolder(new FolderOptions(targetName,"",targetParent),sgClient);
+ }
+ }
+
+
+ public WorkspaceContent storeToWS(FileOptions opts) throws FileNotFoundException, StorageHubException {
+ FileContainer item=createFile(opts,sgClient);
+ item=sgClient.open(item.getId()).asFile();
+
+ WorkspaceContent content=new WorkspaceContent();
+ content.setLink(item.getPublicLink().toString());
+ content.setMimetype(item.get().getContent().getMimeType());
+ content.setStorageID(item.getId());
+ return content;
+
+ }
+
+ public void deleteFromWS(WorkspaceContent toDelete) throws StorageHubException {
+ sgClient.open(toDelete.getStorageID()).asFile().forceDelete();
+ }
+
+ // STATIC SYNCH METHODS
+
+ @Synchronized
+ private static FolderContainer getApplicationBaseFolder(StorageHubClient sgClient) throws StorageHubException {
+ FolderContainer vre=sgClient.openVREFolder();
+ try {
+ return vre.openByRelativePath(APP_FOLDER).asFolder();
+ }catch(StorageHubException e) {
+ log.debug("APP Folder missing. Initializing..");
+ FolderContainer toReturn= vre.newFolder(APP_FOLDER, "Base folder for GNA records");
+ toReturn.setHidden();
+ return toReturn;
+ }
+ }
+
+ @Synchronized
+ private static FolderContainer createFolder(FolderOptions opts, StorageHubClient sgClient) throws StorageHubException {
+ opts.setFolderName(Files.fixFilename(opts.getFolderName()));
+ return opts.getParent().newFolder(opts.getFolderName(),opts.getFolderDescription());
+ }
+
+ @Synchronized
+ private static FileContainer createFile(FileOptions opts, StorageHubClient sgClient) throws StorageHubException {
+ opts.setFileName(Files.fixFilename(opts.getFileName()));
+ return opts.getParent().uploadFile(opts.getIs(), opts.getFileName(), opts.getFileDescription());
+ }
+}
diff --git a/src/main/java/org/gcube/application/geoportal/service/engine/cache/MongoClientProvider.java b/src/main/java/org/gcube/application/geoportal/service/engine/cache/MongoClientProvider.java
index 9cb751a..ae30f18 100644
--- a/src/main/java/org/gcube/application/geoportal/service/engine/cache/MongoClientProvider.java
+++ b/src/main/java/org/gcube/application/geoportal/service/engine/cache/MongoClientProvider.java
@@ -10,6 +10,8 @@ import com.mongodb.MongoClientOptions;
import com.mongodb.MongoCredential;
import com.mongodb.ServerAddress;
+import lombok.extern.slf4j.Slf4j;
+@Slf4j
public class MongoClientProvider extends AbstractScopedMap{
public MongoClientProvider() {
@@ -21,9 +23,13 @@ public class MongoClientProvider extends AbstractScopedMap{
@Override
protected MongoClient retrieveObject() throws ConfigurationException {
MongoConnection conn=ImplementationProvider.get().getMongoConnectionProvider().getObject();
+ log.debug("Connecting to "+conn);
+
MongoCredential credential = MongoCredential.createCredential(conn.getUser(), conn.getDatabase(),
conn.getPassword().toCharArray());
+
+
MongoClientOptions options = MongoClientOptions.builder().sslEnabled(true).build();
return new MongoClient(new ServerAddress(conn.getHosts().get(0),conn.getPort()),
diff --git a/src/main/java/org/gcube/application/geoportal/service/engine/mongo/ConcessioniMongoManager.java b/src/main/java/org/gcube/application/geoportal/service/engine/mongo/ConcessioniMongoManager.java
new file mode 100644
index 0000000..7f0eeb0
--- /dev/null
+++ b/src/main/java/org/gcube/application/geoportal/service/engine/mongo/ConcessioniMongoManager.java
@@ -0,0 +1,277 @@
+package org.gcube.application.geoportal.service.engine.mongo;
+
+import java.io.IOException;
+import java.time.LocalDateTime;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.Consumer;
+
+import org.bson.Document;
+import org.bson.types.ObjectId;
+import org.gcube.application.geoportal.common.model.legacy.AssociatedContent;
+import org.gcube.application.geoportal.common.model.legacy.Concessione;
+import org.gcube.application.geoportal.common.model.legacy.GeoServerContent;
+import org.gcube.application.geoportal.common.model.legacy.LayerConcessione;
+import org.gcube.application.geoportal.common.model.legacy.OtherContent;
+import org.gcube.application.geoportal.common.model.legacy.PersistedContent;
+import org.gcube.application.geoportal.common.model.legacy.RelazioneScavo;
+import org.gcube.application.geoportal.common.model.legacy.SDILayerDescriptor;
+import org.gcube.application.geoportal.common.model.legacy.UploadedImage;
+import org.gcube.application.geoportal.common.model.legacy.WorkspaceContent;
+import org.gcube.application.geoportal.common.model.legacy.report.ValidationReport;
+import org.gcube.application.geoportal.common.model.legacy.report.ValidationReport.ValidationStatus;
+import org.gcube.application.geoportal.common.rest.TempFile;
+import org.gcube.application.geoportal.common.utils.Files;
+import org.gcube.application.geoportal.model.fault.ConfigurationException;
+import org.gcube.application.geoportal.model.fault.PublishException;
+import org.gcube.application.geoportal.service.engine.ImplementationProvider;
+import org.gcube.application.geoportal.service.engine.SDIManager;
+import org.gcube.application.geoportal.service.engine.StorageClientProvider;
+import org.gcube.application.geoportal.service.engine.WorkspaceManager;
+import org.gcube.application.geoportal.service.engine.WorkspaceManager.FileOptions;
+import org.gcube.application.geoportal.service.engine.WorkspaceManager.FolderOptions;
+import org.gcube.application.geoportal.service.model.internal.faults.InvalidStateException;
+import org.gcube.application.geoportal.service.model.internal.faults.SDIInteractionException;
+import org.gcube.application.geoportal.service.utils.Serialization;
+import org.gcube.common.storagehub.client.dsl.FolderContainer;
+import org.gcube.common.storagehub.model.exceptions.StorageHubException;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.mongodb.client.MongoDatabase;
+
+import lombok.Synchronized;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class ConcessioniMongoManager extends MongoManager{
+
+
+
+ public ConcessioniMongoManager() throws ConfigurationException {
+ super();
+ // TODO Auto-generated constructor stub
+ }
+ private static final String collectionName="legacyConcessioni";
+ private static final String DB_NAME="gna_dev";
+
+
+ private MongoDatabase db=null;
+
+ @Override
+ @Synchronized
+ protected MongoDatabase getDatabase() {
+ if(db==null) {
+ db=client.getDatabase(DB_NAME);
+ }
+ return db;
+ }
+
+ protected static Document asDocument (Concessione c) throws JsonProcessingException {
+ Document toReturn=Document.parse(Serialization.write(c));
+ if(c.getMongo_id()!=null&&!c.getMongo_id().isEmpty())
+ toReturn.append(ID, asId(c.getMongo_id()));
+ return toReturn;
+ }
+
+ protected static Concessione asConcessione (Document d) throws JsonProcessingException, IOException {
+ return Serialization.read(d.toJson(), Concessione.class);
+ }
+
+
+ // *** PUBLIC METHODS
+
+
+ public Concessione registerNew(Concessione toRegister) throws IOException {
+ log.trace("Registering {} ",toRegister);
+ toRegister.setDefaults();
+ ObjectId id=insert(asDocument(toRegister), collectionName);
+
+ Concessione toReturn=asConcessione(getById(id,collectionName));
+ toReturn.setMongo_id(asString(id));
+
+ return asConcessione(replace(asDocument(toReturn),collectionName));
+ }
+
+ public Concessione replace(Concessione toRegister) throws IOException {
+ log.trace("Replacing {} ",toRegister);
+ toRegister.setDefaults();
+ return asConcessione(replace(asDocument(toRegister),collectionName));
+ }
+
+ public Concessione update(String id,String json) throws IOException {
+ log.trace("Updating id {} with {} ",id,json);
+ Concessione toReturn=asConcessione(update(asId(id),asDoc(json),collectionName));
+ log.debug("Refreshing defaults..");
+ toReturn.setDefaults();
+ return asConcessione(replace(asDocument(toReturn),collectionName));
+ }
+
+
+
+ public List list(){
+ ArrayList toReturn=new ArrayList<>();
+ iterate(null, collectionName).forEach(
+ new Consumer() {
+ @Override
+ public void accept(Document d) {
+ try {
+ toReturn.add(asConcessione(d));
+ }catch(Throwable t) {
+ log.error("Unable to read Document as concessione ",t);
+ log.debug("Document was "+d.toJson());
+ }
+ }
+ });
+ return toReturn;
+ }
+
+ public Concessione getById(String id) throws JsonProcessingException, IOException {
+ log.debug("Loading by ID "+id);
+ return asConcessione(getById(asId(id),collectionName));
+ }
+ public void deleteById(String id) {
+ delete(asId(id), collectionName);
+ }
+
+ public Concessione publish(String id) throws JsonProcessingException, IOException, InvalidStateException{
+ Concessione toReturn=asConcessione(getById(asId(id),collectionName));
+ toReturn.setDefaults();
+ toReturn.validate();
+
+ // MATERIALIZE LAYERS
+ toReturn=publish(toReturn);
+ // replace(asDocument(toReturn),collectionName);
+
+ // CREATE INDEXES
+ toReturn=index(toReturn);
+ // replace(asDocument(toReturn),collectionName);
+
+ return asConcessione(replace(asDocument(toReturn),collectionName));
+ }
+
+
+ public Concessione persistContent(String id, String destinationPath, List files) throws Exception{
+ log.info("Persisting {} files for path {} in concessione {} ",files.size(),destinationPath,id);
+ try{
+ Concessione c = getById(id);
+ WorkspaceManager ws=new WorkspaceManager();
+ //Check Init Base folder
+ FolderContainer baseFolder=null;
+ if(c.getFolderId()==null) {
+ String folderName=Files.fixFilename("mConcessione"+"_"+c.getNome()+"_"+Serialization.FULL_FORMATTER.format(LocalDateTime.now()));
+ log.info("Creating folder {} for Concessione ID {} ",folderName,id);
+ FolderContainer folder=ws.createFolder(new FolderOptions(folderName, "Base Folder for "+c.getNome(),null));
+ c.setFolderId(folder.getId());
+ }
+
+ log.debug("Folder id is : "+c.getFolderId());
+ baseFolder=ws.getFolderById(c.getFolderId());
+
+ AssociatedContent section=c.getContentByPath(destinationPath);
+ log.debug("Found section {} for path {}",section,destinationPath);
+ store(section,files,ws,baseFolder);
+ log.debug("Updating defaults for {} ",c);
+ c.setDefaults();
+ return asConcessione(replace(asDocument(c),collectionName));
+ }catch(Exception e) {
+ throw new Exception("Unable to save file.",e);
+ }
+ }
+
+ private static Concessione index(Concessione record) {
+ log.info("Indexing {} ",record.getId());
+ ValidationReport report= new ValidationReport("Index Report ");
+ PostgisIndex index;
+ try {
+ index = new PostgisIndex(record);
+ index.registerCentroid();
+ report.addMessage(ValidationStatus.PASSED, "Registered centroid");
+ } catch (SDIInteractionException | PublishException e) {
+ log.error("Unable to index {} ",record,e);
+ report.addMessage(ValidationStatus.WARNING, "Internal error while indexing.");
+ }
+ return record;
+ }
+
+
+
+
+
+ private static Concessione publish(Concessione conc) {
+
+ // CHECK CONDITION BY PROFILE
+
+
+ log.debug("Publishing "+conc.getNome());
+
+ ValidationReport report=new ValidationReport("Publish report");
+ try {
+ SDIManager sdiManager=new SDIManager();
+ ArrayList list=new ArrayList();
+
+ //Concessione
+ String workspace= sdiManager.createWorkspace("gna_conc_"+conc.getMongo_id());
+ list.add(conc.getPosizionamentoScavo());
+ list.addAll(conc.getPianteFineScavo());
+
+ for(AssociatedContent c:list) {
+ if(c instanceof LayerConcessione) {
+ try {
+ List p=c.getActualContent();
+
+ GeoServerContent geoserverPersisted=sdiManager.pushShapeLayerFileSet((SDILayerDescriptor)c, workspace, conc.getMongo_id());
+ // geoserverPersisted.setAssociated(c);
+
+
+ p.add(geoserverPersisted);
+ c.setActualContent(p);
+ }catch(SDIInteractionException e) {
+ log.warn("Unable to publish layers.",e);
+ report.addMessage(ValidationStatus.WARNING, "Layer "+c.getTitolo()+" non pubblicato.");
+ }
+ report.addMessage(ValidationStatus.PASSED, "Pubblicato layer "+c.getTitolo());
+ }
+ }
+
+
+ } catch (SDIInteractionException e1) {
+ report.addMessage(ValidationStatus.WARNING, "Unable to publish layers "+e1.getMessage());
+ }
+
+ conc.setReport(report);
+ return conc;
+ }
+
+ private static final void store(AssociatedContent content,List files, WorkspaceManager ws, FolderContainer base) throws Exception {
+ FolderContainer sectionParent=null;
+
+ if(content instanceof RelazioneScavo)
+ sectionParent = ws .createFolder(new FolderOptions(
+ "relazione","Relazione di scavo : "+content.getTitolo(),base));
+
+ else if (content instanceof UploadedImage)
+ sectionParent = ws .createFolder(new FolderOptions(
+ "imgs","Immagini rappresentative : "+content.getTitolo(),base));
+
+ else if (content instanceof SDILayerDescriptor)
+ //SDI Section
+ if(content instanceof LayerConcessione)
+ sectionParent = ws .createFolder(new FolderOptions(
+ content.getTitolo(),"Layer Concessione : "+content.getTitolo(),ws.getSubFolder(base,"layers")));
+ else throw new Exception("Invalid SDI Content "+content);
+ else if (content instanceof OtherContent )
+ sectionParent = ws .createFolder(new FolderOptions(
+ content.getTitolo(),"Relazione di scavo : "+content.getTitolo(),ws.getSubFolder(base,"other")));
+ else throw new Exception("Invalid Content "+content);
+
+ content.setActualContent(new ArrayList());
+ StorageClientProvider storage=ImplementationProvider.get().getStorageProvider();
+ for(TempFile f : files) {
+ WorkspaceContent wsContent=ws.storeToWS(new FileOptions(f.getFilename(), storage.open(f.getId()), "Imported via GeoPortal", sectionParent));
+ log.debug("Registered "+wsContent+" for "+content);
+ content.getActualContent().add(wsContent);
+ }
+ content.setMongo_id(asString(new ObjectId()));
+ }
+
+}
diff --git a/src/main/java/org/gcube/application/geoportal/service/engine/mongo/MongoManager.java b/src/main/java/org/gcube/application/geoportal/service/engine/mongo/MongoManager.java
index 68e7968..f57cafb 100644
--- a/src/main/java/org/gcube/application/geoportal/service/engine/mongo/MongoManager.java
+++ b/src/main/java/org/gcube/application/geoportal/service/engine/mongo/MongoManager.java
@@ -1,71 +1,119 @@
package org.gcube.application.geoportal.service.engine.mongo;
+import static com.mongodb.client.model.Filters.eq;
+
import org.bson.Document;
-import org.gcube.application.geoportal.common.model.profile.Profile;
-import org.gcube.application.geoportal.common.model.project.Project;
+import org.bson.types.ObjectId;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
+import org.gcube.application.geoportal.service.engine.ImplementationProvider;
import com.mongodb.MongoClient;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
+import com.mongodb.client.model.FindOneAndReplaceOptions;
+import com.mongodb.client.model.FindOneAndUpdateOptions;
+import com.mongodb.client.model.ReturnDocument;
+
+import lombok.extern.slf4j.Slf4j;
+@Slf4j
public abstract class MongoManager {
- private MongoClient client=null;
+ protected MongoClient client=null;
+ protected static final String ID="_id";
+ protected static final ObjectId asId(String id) {return new ObjectId(id);}
+ protected static final String asString(ObjectId id) {return id.toHexString();}
+ protected static final String asString(Document d) {return d.toJson();}
+ protected static final Document asDoc(String json) {return Document.parse(json);}
public MongoManager() throws ConfigurationException {
-// client=ImplementationProvider.get().getMongoClientProvider().getObject();
+ client=ImplementationProvider.get().getMongoClientProvider().getObject();
- // init profile
+ log.info("Got Mongo Client at "+client.getConnectPoint());
+ // NOT AUTHORIZED
+// log.debug("Existing databases "+client.getDatabaseNames());
}
// private abstract MongoDatabase getDatabase() {
// return client.getDatabase("gna-db");
// }
+ // TODO check if existing DB
protected abstract MongoDatabase getDatabase();
//*********** PROJECTS
- public void insert(Project proj, Profile profile) {
- MongoDatabase database=getDatabase();
- // TODO check if existing DB
-
- String collectionName=profile.getName();
-
+ // NB BsonId
+ protected ObjectId insert(Document proj, String collectionName) {
+ MongoDatabase database=getDatabase();
 		MongoCollection<Document> collection = database.getCollection(collectionName);
- // TODO check if existing collection
-
- collection.insertOne(Document.parse(proj.toString()));
+ // Check if _id is present
+ ObjectId id=proj.getObjectId(ID);
+ if(id==null) {
+ proj.append(ID, new ObjectId());
+ id=proj.getObjectId(ID);
+ }
+ collection.insertOne(Document.parse(proj.toJson()));
+ return id;
}
-// public Project update(Project proj) {
-//
-// }
- public void delete(String id) {
-
+ public void delete(ObjectId id, String collectionName) {
+ MongoDatabase database=getDatabase();
+		MongoCollection<Document> collection = database.getCollection(collectionName);
+ collection.deleteOne(eq(ID,id));
+ }
+
+
+
+ public Document getById(ObjectId id,String collectionName) {
+ MongoDatabase database=getDatabase();
+		MongoCollection<Document> coll=database.getCollection(collectionName);
+ return coll.find(new Document(ID,id)).first();
}
- public Document getById(String id,Profile p) {
+
+
+	public FindIterable<Document> iterate(Document filter,String collectionName) {
MongoDatabase database=getDatabase();
- MongoCollection coll=database.getCollection(p.getName());
-
- return coll.find(new Document("id",id)).first();
+		MongoCollection<Document> coll=database.getCollection(collectionName);
+ if(filter==null)
+ return coll.find();
+ else
+ return coll.find(filter);
}
- public FindIterable iterate(Document filter,Profile p) {
+
+	public <T> FindIterable<T> iterateForClass(Document filter,String collectionName,Class<T> clazz) {
MongoDatabase database=getDatabase();
- MongoCollection coll=database.getCollection(p.getName());
+		MongoCollection<Document> coll=database.getCollection(collectionName);
+ if(filter==null)
+ return coll.find(clazz);
+ else
+ return coll.find(filter,clazz);
+ }
+
+ public Document replace(Document toUpdate,String collectionName) {
+ MongoDatabase database=getDatabase();
+		MongoCollection<Document> coll=database.getCollection(collectionName);
+ return coll.findOneAndReplace(
+ eq(ID,toUpdate.getObjectId(ID)), toUpdate,new FindOneAndReplaceOptions().returnDocument(ReturnDocument.AFTER));
- return coll.find(filter);
}
+ public Document update(ObjectId id, Document updateSet, String collectionName) {
+ MongoDatabase database=getDatabase();
+		MongoCollection<Document> coll=database.getCollection(collectionName);
+ return coll.findOneAndUpdate(
+ eq(ID,id),
+ updateSet,
+ new FindOneAndUpdateOptions().returnDocument(ReturnDocument.AFTER));
+ }
//********** PROFILES
diff --git a/src/main/java/org/gcube/application/geoportal/service/engine/mongo/PostgisIndex.java b/src/main/java/org/gcube/application/geoportal/service/engine/mongo/PostgisIndex.java
new file mode 100644
index 0000000..a51c5b3
--- /dev/null
+++ b/src/main/java/org/gcube/application/geoportal/service/engine/mongo/PostgisIndex.java
@@ -0,0 +1,169 @@
+package org.gcube.application.geoportal.service.engine.mongo;
+
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.gcube.application.geoportal.common.model.legacy.Concessione;
+import org.gcube.application.geoportal.model.db.DBConstants;
+import org.gcube.application.geoportal.model.db.PostgisTable;
+import org.gcube.application.geoportal.model.db.PostgisTable.Field;
+import org.gcube.application.geoportal.model.db.PostgisTable.FieldType;
+import org.gcube.application.geoportal.model.fault.ConfigurationException;
+import org.gcube.application.geoportal.model.fault.PublishException;
+import org.gcube.application.geoportal.service.engine.SDIManager;
+import org.gcube.application.geoportal.service.model.internal.faults.SDIInteractionException;
+import org.gcube.application.geoportal.service.utils.Serialization;
+import org.gcube.application.geoportal.storage.PostgisDBManager;
+import org.gcube.application.geoportal.storage.PostgisDBManagerI;
+
+import lombok.Getter;
+import lombok.NonNull;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class PostgisIndex {
+
+ @NonNull
+ @Getter
+ private Concessione record;
+ private SDIManager sdiManager;
+
+
+ public PostgisIndex(Concessione record) throws SDIInteractionException {
+ super();
+ this.record = record;
+ this.sdiManager=new SDIManager();
+ }
+
+
+ protected PostgisTable getCentroidsTable() {
+ return DBConstants.Concessioni.CENTROIDS;
+ }
+
+
+ public void registerCentroid() throws PublishException{
+
+ try {
+ log.debug("Evaluating Centroid");
+			Map<String,String> centroidRow=evaluateCentroid();
+
+ log.debug("Contacting postgis DB .. ");
+ PostgisDBManagerI db=PostgisDBManager.get();
+
+ PostgisTable centroidsTable=getCentroidsTable();
+ log.debug("Inserting / updated centroid Row {} ",centroidRow);
+
+ PreparedStatement ps = db.prepareInsertStatement(centroidsTable, true, true);
+
+ log.debug("Deleting centroid if present. ID is "+record.getId());
+ db.deleteByFieldValue(centroidsTable, new Field(DBConstants.Concessioni.PRODUCT_ID,FieldType.TEXT), record.getId()+"");
+
+ centroidsTable.fillCSVPreparedStatament(centroidRow, ps, false);
+ ps.executeUpdate();
+ db.commit();
+
+ initCentroidLayer();
+
+
+ }catch(SQLException e) {
+ log.warn("Unable to publish Centroid for record "+record,e);
+ throw new PublishException("Unable to publish centroid.",e, null);
+ }catch(SDIInteractionException e) {
+ log.warn("Unable to publish Centroid Layer for record type "+getRecord().getRecordType(),e);
+ throw new PublishException("Unable to publish centroid.",e, null);
+ } catch (ConfigurationException e) {
+ log.warn("Unable to contact centroids db "+getRecord().getRecordType(),e);
+ throw new PublishException("Unable to publish centroid.",e, null);
+ }
+
+ }
+
+ protected void initCentroidLayer() throws SDIInteractionException {
+ log.debug("Checking for centroid layer configuration.. ");
+
+
+ sdiManager.configureCentroidLayer("centroids_concessioni", "gna", "gna_postgis");
+ }
+
+
+ public void removeCentroid() {
+ try {
+ PostgisDBManagerI db=PostgisDBManager.get();
+ PostgisTable centroidsTable=getCentroidsTable();
+ log.debug("Deleting centroid if present. ID is "+record.getId());
+ db.deleteByFieldValue(centroidsTable, new Field(DBConstants.Concessioni.PRODUCT_ID,FieldType.TEXT), record.getId()+"");
+ }catch(Exception e) {
+ log.warn("Unable to remove centroid ",e);
+ }
+ }
+
+
+	protected Map<String,String> evaluateCentroid(){
+
+
+ // CENTROID
+		Map<String,String> centroidsRow=new HashMap<>();
+ centroidsRow.put(DBConstants.Concessioni.PRODUCT_ID, record.getId()+"");
+ centroidsRow.put(DBConstants.Concessioni.ANNO, record.getDataInizioProgetto().getYear()+"");
+ centroidsRow.put(DBConstants.Concessioni.NOME, record.getNome());
+ centroidsRow.put(DBConstants.Concessioni.REGIONE, ""); //TODO
+
+
+
+ if(record.getCentroidLat()==null||record.getCentroidLat()==0)
+ try {
+ log.debug("Evaluating Centroid latitude for record "+record);
+ record.setCentroidLat((record.getPosizionamentoScavo().getBbox().getMaxLat()+
+ record.getPosizionamentoScavo().getBbox().getMinLat())/2);
+ }catch (Throwable t) {
+ log.warn("Unable to evaluate centroid latitude "+t);
+ }
+
+ if(record.getCentroidLong()==null||record.getCentroidLong()==0)
+ try {
+				log.debug("Evaluating Centroid Longitude for record "+record);
+ record.setCentroidLong((record.getPosizionamentoScavo().getBbox().getMaxLong()+
+ record.getPosizionamentoScavo().getBbox().getMinLong())/2);
+ }catch (Throwable t) {
+				log.warn("Unable to evaluate centroid longitude "+t);
+ }
+
+
+ centroidsRow.put(DBConstants.Defaults.XCOORD_FIELD, record.getCentroidLong()+"");
+ centroidsRow.put(DBConstants.Defaults.YCOORD_FIELD, record.getCentroidLat()+"");
+
+ //Updated Schema
+ centroidsRow.put(DBConstants.Concessioni.DESCRIZIONE,record.getIntroduzione());
+ centroidsRow.put(DBConstants.Concessioni.CONTENUTO,record.getDescrizioneContenuto());
+ centroidsRow.put(DBConstants.Concessioni.AUTORE,asString(record.getAuthors()));
+ centroidsRow.put(DBConstants.Concessioni.CONTRIBUTORE,record.getContributore());
+ centroidsRow.put(DBConstants.Concessioni.TITOLARE,asString(record.getTitolari()));
+ centroidsRow.put(DBConstants.Concessioni.RESPONSABILE,record.getResponsabile());
+ centroidsRow.put(DBConstants.Concessioni.EDITORE,record.getEditore());
+ centroidsRow.put(DBConstants.Concessioni.FINANZIAMENTO,asString(record.getFontiFinanziamento()));
+ centroidsRow.put(DBConstants.Concessioni.SOGGETTO,asString(record.getSoggetto()));
+ centroidsRow.put(DBConstants.Concessioni.RISORSE,asString(record.getRisorseCorrelate()));
+ centroidsRow.put(DBConstants.Concessioni.DATE_SCAVO,Serialization.FULL_FORMATTER.format(record.getDataFineProgetto()));
+ centroidsRow.put(DBConstants.Concessioni.DATA_ARCHIVIAZIONE,Serialization.FULL_FORMATTER.format(record.getLastUpdateTime()));
+ centroidsRow.put(DBConstants.Concessioni.VERSIONE,record.getVersion());
+ centroidsRow.put(DBConstants.Concessioni.LICENZA,record.getLicenzaID());
+ centroidsRow.put(DBConstants.Concessioni.TITOLARE_LICENZA,asString(record.getTitolareLicenza()));
+ centroidsRow.put(DBConstants.Concessioni.ACCESSO,record.getPolicy().toString());
+ centroidsRow.put(DBConstants.Concessioni.PAROLE_CHIAVE,asString(record.getParoleChiaveLibere()));
+
+ return centroidsRow;
+ }
+
+
+	private static String asString(Collection<?> coll) {
+ if(coll==null||coll.isEmpty()) return "";
+ StringBuilder builder=new StringBuilder();
+ for(Object t : coll) {
+ builder.append(t.toString() +",");
+ }
+ return builder.substring(0, builder.lastIndexOf(","));
+ }
+}
diff --git a/src/main/java/org/gcube/application/geoportal/service/model/internal/faults/InvalidStateException.java b/src/main/java/org/gcube/application/geoportal/service/model/internal/faults/InvalidStateException.java
new file mode 100644
index 0000000..f2f39fa
--- /dev/null
+++ b/src/main/java/org/gcube/application/geoportal/service/model/internal/faults/InvalidStateException.java
@@ -0,0 +1,38 @@
+package org.gcube.application.geoportal.service.model.internal.faults;
+
+public class InvalidStateException extends Exception {
+
+ /**
+ *
+ */
+ private static final long serialVersionUID = 8926481061304048080L;
+
+ public InvalidStateException() {
+ super();
+ // TODO Auto-generated constructor stub
+ }
+
+ public InvalidStateException(String message, Throwable cause, boolean enableSuppression,
+ boolean writableStackTrace) {
+ super(message, cause, enableSuppression, writableStackTrace);
+ // TODO Auto-generated constructor stub
+ }
+
+ public InvalidStateException(String message, Throwable cause) {
+ super(message, cause);
+ // TODO Auto-generated constructor stub
+ }
+
+ public InvalidStateException(String message) {
+ super(message);
+ // TODO Auto-generated constructor stub
+ }
+
+ public InvalidStateException(Throwable cause) {
+ super(cause);
+ // TODO Auto-generated constructor stub
+ }
+
+
+
+}
diff --git a/src/main/java/org/gcube/application/geoportal/service/model/internal/faults/SDIInteractionException.java b/src/main/java/org/gcube/application/geoportal/service/model/internal/faults/SDIInteractionException.java
new file mode 100644
index 0000000..635b714
--- /dev/null
+++ b/src/main/java/org/gcube/application/geoportal/service/model/internal/faults/SDIInteractionException.java
@@ -0,0 +1,38 @@
+package org.gcube.application.geoportal.service.model.internal.faults;
+
+public class SDIInteractionException extends Exception {
+
+ /**
+ *
+ */
+ private static final long serialVersionUID = 1L;
+
+ public SDIInteractionException() {
+ super();
+ // TODO Auto-generated constructor stub
+ }
+
+ public SDIInteractionException(String message, Throwable cause, boolean enableSuppression,
+ boolean writableStackTrace) {
+ super(message, cause, enableSuppression, writableStackTrace);
+ // TODO Auto-generated constructor stub
+ }
+
+ public SDIInteractionException(String message, Throwable cause) {
+ super(message, cause);
+ // TODO Auto-generated constructor stub
+ }
+
+ public SDIInteractionException(String message) {
+ super(message);
+ // TODO Auto-generated constructor stub
+ }
+
+ public SDIInteractionException(Throwable cause) {
+ super(cause);
+ // TODO Auto-generated constructor stub
+ }
+
+
+
+}
diff --git a/src/main/java/org/gcube/application/geoportal/service/rest/ConcessioniOverMongo.java b/src/main/java/org/gcube/application/geoportal/service/rest/ConcessioniOverMongo.java
new file mode 100644
index 0000000..4e502d5
--- /dev/null
+++ b/src/main/java/org/gcube/application/geoportal/service/rest/ConcessioniOverMongo.java
@@ -0,0 +1,172 @@
+package org.gcube.application.geoportal.service.rest;
+
+import java.time.LocalDateTime;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+
+import org.gcube.application.geoportal.common.model.legacy.Concessione;
+import org.gcube.application.geoportal.common.rest.AddSectionToConcessioneRequest;
+import org.gcube.application.geoportal.common.rest.InterfaceConstants;
+import org.gcube.application.geoportal.common.rest.TempFile;
+import org.gcube.application.geoportal.service.engine.WorkspaceManager;
+import org.gcube.application.geoportal.service.engine.WorkspaceManager.FolderOptions;
+import org.gcube.application.geoportal.service.engine.mongo.ConcessioniMongoManager;
+import org.gcube.application.geoportal.service.utils.Serialization;
+import org.gcube.common.storagehub.client.dsl.FolderContainer;
+import org.json.JSONArray;
+import org.json.JSONObject;
+
+import lombok.extern.slf4j.Slf4j;
+
+@Path(InterfaceConstants.Methods.MONGO_CONCESSIONI)
+@Slf4j
+public class ConcessioniOverMongo {
+
+
+ @PUT
+ @Consumes(MediaType.APPLICATION_JSON)
+ @Produces(MediaType.APPLICATION_JSON)
+ public String replace(String jsonString) {
+		return new GuardedMethod<String>() {
+ @Override
+ protected String run() throws Exception, WebApplicationException {
+ Concessione c=Serialization.read(jsonString, Concessione.class);
+ ConcessioniMongoManager manager=new ConcessioniMongoManager();
+ manager.replace(c);
+
+ return Serialization.write(manager.getById(c.getMongo_id()));
+ }
+ }.execute().getResult();
+ }
+
+ @POST
+ @Consumes(MediaType.APPLICATION_JSON)
+ @Produces(MediaType.APPLICATION_JSON)
+ public String createNew(String jsonString) {
+		return new GuardedMethod<String>() {
+ @Override
+ protected String run() throws Exception, WebApplicationException {
+ Concessione c=Serialization.read(jsonString, Concessione.class);
+ ConcessioniMongoManager manager=new ConcessioniMongoManager();
+ return Serialization.write(manager.registerNew(c));
+ }
+ }.execute().getResult();
+ }
+
+
+
+ @GET
+ @Produces(MediaType.APPLICATION_JSON)
+ public String list() {
+		return new GuardedMethod<String>() {
+ protected String run() throws Exception ,WebApplicationException {
+ ConcessioniMongoManager manager=new ConcessioniMongoManager();
+ JSONArray toReturn=new JSONArray();
+ manager.list().forEach((Concessione c) -> {
+ try{
+ toReturn.put(new JSONObject(Serialization.write(c)));
+ }catch(Throwable t) {
+ log.error("Unable to serialize "+c);
+ }
+ });
+ return toReturn.toString();
+
+ };
+ }.execute().getResult();
+
+
+ }
+
+
+ // BY ID
+ @GET
+ @Produces(MediaType.APPLICATION_JSON)
+ @Path("{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
+ public String getById(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id) {
+		return new GuardedMethod<String>() {
+ @Override
+ protected String run() throws Exception, WebApplicationException {
+ ConcessioniMongoManager manager=new ConcessioniMongoManager();
+ return Serialization.write(manager.getById(id));
+ }
+ }.execute().getResult();
+ }
+
+ @DELETE
+ @Produces(MediaType.APPLICATION_JSON)
+ @Path("{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
+ public void deleteById(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id) {
+		new GuardedMethod<Concessione>() {
+ @Override
+ protected Concessione run() throws Exception, WebApplicationException {
+ ConcessioniMongoManager manager=new ConcessioniMongoManager();
+ manager.deleteById(id);
+ return null;
+ }
+ }.execute();
+ }
+
+
+ @PUT
+ @Consumes(MediaType.APPLICATION_JSON)
+ @Produces(MediaType.APPLICATION_JSON)
+ @Path("{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
+ public String update(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id,String jsonString) {
+		return new GuardedMethod<String>() {
+ @Override
+ protected String run() throws Exception, WebApplicationException {
+// Concessione c=Serialization.read(jsonString, Concessione.class);
+// ConcessioniMongoManager manager=new ConcessioniMongoManager();
+// manager.update(c);
+//
+// return Serialization.write(manager.getById(c.getMongo_id()));
+ throw new RuntimeException("TO IMPLEMENT");
+ }
+ }.execute().getResult();
+ }
+
+
+ @PUT
+ @Produces(MediaType.APPLICATION_JSON)
+ @Path("/{"+InterfaceConstants.Methods.PUBLISH_PATH+"}/{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
+ public String publish(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id) {
+		return new GuardedMethod<String>() {
+ @Override
+ protected String run() throws Exception, WebApplicationException {
+ ConcessioniMongoManager manager=new ConcessioniMongoManager();
+ return Serialization.write(manager.publish(id));
+ }
+ }.execute().getResult();
+ }
+
+ @POST
+ @Consumes(MediaType.APPLICATION_JSON)
+ @Produces(MediaType.APPLICATION_JSON)
+ @Path("/"+InterfaceConstants.Methods.REGISTER_FILES_PATH+"/{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
+ public String registerFile(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id,String jsonRequest) {
+		return new GuardedMethod<String>() {
+ @Override
+ protected String run() throws Exception, WebApplicationException {
+ AddSectionToConcessioneRequest request=Serialization.read(jsonRequest,AddSectionToConcessioneRequest.class);
+ log.info("Registering {} file(s) for {} Concessione ID {}",
+ request.getStreams().size(),
+ request.getDestinationPath(),id);
+ ConcessioniMongoManager manager=new ConcessioniMongoManager();
+ Concessione toReturn= manager.persistContent(id, request.getDestinationPath(), request.getStreams());
+
+ log.debug("Returning "+toReturn);
+ return Serialization.write(toReturn);
+ }
+ }.execute().getResult();
+ }
+
+}
diff --git a/src/main/java/org/gcube/application/geoportal/service/utils/Serialization.java b/src/main/java/org/gcube/application/geoportal/service/utils/Serialization.java
index 66c9013..08dbaff 100644
--- a/src/main/java/org/gcube/application/geoportal/service/utils/Serialization.java
+++ b/src/main/java/org/gcube/application/geoportal/service/utils/Serialization.java
@@ -1,6 +1,7 @@
package org.gcube.application.geoportal.service.utils;
import java.io.IOException;
+import java.time.format.DateTimeFormatter;
import org.gcube.application.geoportal.model.Record;
import org.gcube.application.geoportal.model.concessioni.Concessione;
@@ -17,6 +18,9 @@ import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
public class Serialization {
+
+ public static final DateTimeFormatter FULL_FORMATTER=DateTimeFormatter.ofPattern("uuuuMMdd_HH-mm-ss");
+
public static ObjectMapper mapper;
static {
diff --git a/src/test/java/org/gcube/application/geoportal/service/BasicServiceTestUnit.java b/src/test/java/org/gcube/application/geoportal/service/BasicServiceTestUnit.java
index 61c9866..e05a8f9 100644
--- a/src/test/java/org/gcube/application/geoportal/service/BasicServiceTestUnit.java
+++ b/src/test/java/org/gcube/application/geoportal/service/BasicServiceTestUnit.java
@@ -2,17 +2,24 @@ package org.gcube.application.geoportal.service;
import javax.persistence.EntityManagerFactory;
import javax.ws.rs.core.Application;
+import javax.ws.rs.core.Response;
import org.gcube.application.geoportal.managers.AbstractRecordManager;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
+import org.gcube.application.geoportal.model.report.PublicationReport;
import org.gcube.application.geoportal.service.engine.ImplementationProvider;
import org.gcube.application.geoportal.service.engine.ScopedEMFProvider;
import org.gcube.application.geoportal.service.engine.StorageClientProvider;
+import org.gcube.application.geoportal.service.engine.cache.MongoClientProvider;
+import org.gcube.application.geoportal.service.engine.cache.MongoConnectionProvider;
import org.gcube.application.geoportal.service.legacy.TokenSetter;
+import org.gcube.application.geoportal.service.utils.Serialization;
import org.gcube.contentmanagement.blobstorage.service.IClient;
import org.glassfish.jersey.test.JerseyTest;
import org.junit.BeforeClass;
+import com.mongodb.MongoClient;
+
public class BasicServiceTestUnit extends JerseyTest {
@@ -33,7 +40,6 @@ public class BasicServiceTestUnit extends JerseyTest {
@Override
public EntityManagerFactory getFactory() {
-// System.err.println("***********************SETTING DEBUG CONTEXT******************");
TokenSetter.set(scope);
return super.getFactory();
}
@@ -47,5 +53,33 @@ public class BasicServiceTestUnit extends JerseyTest {
}
});
+
+ ImplementationProvider.get().setMongoConnectionProvider(new MongoConnectionProvider() {
+ @Override
+ public org.gcube.application.geoportal.service.model.internal.db.MongoConnection getObject() throws ConfigurationException {
+ TokenSetter.set(scope);
+ return super.getObject();
+ }
+ });
+
+ ImplementationProvider.get().setMongoClientProvider(new MongoClientProvider() {
+ @Override
+ public MongoClient getObject() throws ConfigurationException {
+ TokenSetter.set(scope);
+ return super.getObject();
+ }
+ });
+
+ }
+
+
+	protected static <T> T check(Response resp, Class<T> clazz) throws Exception {
+ String resString=resp.readEntity(String.class);
+ if(resp.getStatus()<200||resp.getStatus()>=300)
+ throw new Exception("RESP STATUS IS "+resp.getStatus()+". Message : "+resString);
+ System.out.println("Resp String is "+resString);
+ if(clazz!=null)
+ return Serialization.read(resString, clazz);
+ else return null;
}
}
diff --git a/src/test/java/org/gcube/application/geoportal/service/ConcessioniOverMongoTest.java b/src/test/java/org/gcube/application/geoportal/service/ConcessioniOverMongoTest.java
new file mode 100644
index 0000000..4b64d40
--- /dev/null
+++ b/src/test/java/org/gcube/application/geoportal/service/ConcessioniOverMongoTest.java
@@ -0,0 +1,190 @@
+package org.gcube.application.geoportal.service;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.FileInputStream;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.gcube.application.geoportal.common.model.legacy.Concessione;
+import org.gcube.application.geoportal.common.model.legacy.Concessione.Paths;
+import org.gcube.application.geoportal.common.model.legacy.LayerConcessione;
+import org.gcube.application.geoportal.common.model.legacy.report.ValidationReport.ValidationStatus;
+import org.gcube.application.geoportal.common.rest.AddSectionToConcessioneRequest;
+import org.gcube.application.geoportal.common.rest.InterfaceConstants;
+import org.gcube.application.geoportal.common.rest.TempFile;
+import org.gcube.application.geoportal.common.utils.Files;
+import org.gcube.application.geoportal.common.utils.StorageUtils;
+import org.gcube.application.geoportal.service.legacy.TokenSetter;
+import org.gcube.application.geoportal.service.utils.Serialization;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+public class ConcessioniOverMongoTest extends BasicServiceTestUnit{
+
+
+ private static final String PATH=InterfaceConstants.Methods.MONGO_CONCESSIONI;
+
+ private static final String PUBLISH_PATH=InterfaceConstants.Methods.PUBLISH_PATH;
+ private static final String FILES_PATH=InterfaceConstants.Methods.REGISTER_FILES_PATH;
+
+
+ @Before
+ public void setContext() {
+ TokenSetter.set("/gcube/devsec/devVRE");
+ }
+
+
+ private static Concessione upload(WebTarget target,String id, String path, String ...files) throws Exception {
+		ArrayList<TempFile> array=new ArrayList<>();
+ for(String file:files)
+ array.add(new StorageUtils().putOntoStorage(new FileInputStream(
+ Files.getFileFromResources("concessioni/"+file)), file));
+
+
+ AddSectionToConcessioneRequest request=new AddSectionToConcessioneRequest();
+ request.setDestinationPath(path);
+
+ request.setStreams(array);
+
+ return check(target.path(FILES_PATH).path(id).request(MediaType.APPLICATION_JSON).post(Entity.entity(Serialization.write(request), MediaType.APPLICATION_JSON)),Concessione.class);
+
+ }
+
+
+ private static Concessione publish(WebTarget target, Concessione conc) throws Exception {
+ Response resp=target.path(PUBLISH_PATH).path(conc.getMongo_id()).request(MediaType.APPLICATION_JSON).
+ put(Entity.entity(Serialization.write(conc), MediaType.APPLICATION_JSON));
+ return check(resp,Concessione.class);
+ }
+ private static Concessione register(WebTarget target, Concessione c) throws Exception {
+ Response resp=target.request(MediaType.APPLICATION_JSON).post(Entity.entity(Serialization.write(c), MediaType.APPLICATION_JSON));
+ return check(resp,Concessione.class);
+ }
+
+ private static Concessione get(WebTarget target) throws Exception {
+ return register(target,TestModel.prepareConcessione());
+ }
+
+ // ********** TESTS
+
+ @Test
+ public void list() {
+ WebTarget target=target(PATH);
+ System.out.println(target.request(MediaType.APPLICATION_JSON).get(List.class));
+ }
+
+ @Test
+ public void createNew() throws Exception {
+ WebTarget target=target(PATH);
+ Concessione c=register(target,TestModel.prepareConcessione());
+ Assert.assertTrue(c.getMongo_id()!=null&&!c.getMongo_id().isEmpty());
+ }
+
+
+ @Test
+ public void delete() throws Exception {
+ WebTarget target=target(PATH);
+ Concessione c = get(target);
+ check(target.path(c.getMongo_id()).request(MediaType.APPLICATION_JSON).delete(),null);
+ }
+
+
+ @Test
+ public void getById() throws Exception {
+ WebTarget target=target(PATH);
+ Concessione c = get(target);
+ Response resp=target.path(c.getMongo_id()).request(MediaType.APPLICATION_JSON).get();
+ Concessione loaded=check(resp,Concessione.class);
+ Assert.assertTrue(loaded.getMongo_id()!=null&&!loaded.getMongo_id().isEmpty());
+ System.out.println("Got by ID "+loaded);
+ }
+
+
+ @Test
+ public void update() throws Exception {
+ WebTarget target=target(PATH);
+ Concessione c = get(target);
+ String newTitle="Questo titolo l'ho modificato mo nel test quello proprio apposta pewr questa cosa'";
+ c.setNome(newTitle);
+ Response resp=target.request(MediaType.APPLICATION_JSON).put(Entity.entity(Serialization.write(c), MediaType.APPLICATION_JSON));
+ Assert.assertTrue(check(resp,Concessione.class).getNome().equals(newTitle));
+ }
+
+ @Test
+ public void uploadFile() throws Exception {
+ WebTarget target=target(PATH);
+ Response resp=target.request(MediaType.APPLICATION_JSON).post(Entity.entity(Serialization.write(TestModel.prepareEmptyConcessione()), MediaType.APPLICATION_JSON));
+ Concessione c=check(resp,Concessione.class);
+ Assert.assertTrue(c.getMongo_id()!=null&&!c.getMongo_id().isEmpty());
+ System.out.println("ID IS "+c.getMongo_id());
+
+ // Insert section
+ c.setRelazioneScavo(TestModel.prepareConcessione().getRelazioneScavo());
+ // c.getRelazioneScavo().setMongo_id(TestModel.rnd());
+
+ resp=target.request(MediaType.APPLICATION_JSON).put(Entity.entity(Serialization.write(c), MediaType.APPLICATION_JSON));
+
+
+
+
+ c=upload(target,c.getMongo_id(),Paths.RELAZIONE,"relazione.pdf");
+ assertNotNull(c.getRelazioneScavo().getActualContent());
+ assertTrue(c.getRelazioneScavo().getActualContent().size()>0);
+
+ System.out.println("File is "+c.getRelazioneScavo().getActualContent().get(0));
+ }
+
+
+
+ @Test
+ public void publish() throws Exception {
+ WebTarget target=target(PATH);
+ Concessione c=TestModel.prepareConcessione(1,2);
+
+ c.setNome("Concessione : publish test");
+
+
+
+ // Register new
+ c=register(target,c);
+
+ //Upload files
+ upload(target,c.getMongo_id(),Paths.RELAZIONE,"relazione.pdf");
+ upload(target,c.getMongo_id(),Paths.POSIZIONAMENTO,"pos.shp","pos.shx");
+
+ // Clash on workspaces
+ upload(target,c.getMongo_id(),Paths.piantaByIndex(0),"pianta.shp","pianta.shx");
+ upload(target,c.getMongo_id(),Paths.imgByIndex(0),"immagine.png");
+ upload(target,c.getMongo_id(),Paths.imgByIndex(1),"immagine2.png");
+
+
+
+ // Immagini
+ Concessione published=publish(target, c);
+ System.out.println("Published : "+published);
+ assertNotNull(published.getReport());
+ assertEquals(published.getReport().getStatus(),ValidationStatus.PASSED);
+
+ assertEquals(published.getImmaginiRappresentative().size(),2);
+ assertEquals(published.getPianteFineScavo().size(),1);
+ assertNotNull(published.getPosizionamentoScavo().getWmsLink());
+ for(LayerConcessione l : published.getPianteFineScavo())
+ assertNotNull(l.getWmsLink());
+ assertNotNull(published.getCentroidLat());
+ assertNotNull(published.getCentroidLong());
+ }
+
+
+
+
+}
diff --git a/src/test/java/org/gcube/application/geoportal/service/TestModel.java b/src/test/java/org/gcube/application/geoportal/service/TestModel.java
new file mode 100644
index 0000000..205ac09
--- /dev/null
+++ b/src/test/java/org/gcube/application/geoportal/service/TestModel.java
@@ -0,0 +1,129 @@
+package org.gcube.application.geoportal.service;
+
+import java.time.LocalDateTime;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.UUID;
+
+import org.bson.types.ObjectId;
+import org.gcube.application.geoportal.common.model.legacy.AccessPolicy;
+import org.gcube.application.geoportal.common.model.legacy.Concessione;
+import org.gcube.application.geoportal.common.model.legacy.LayerConcessione;
+import org.gcube.application.geoportal.common.model.legacy.RelazioneScavo;
+import org.gcube.application.geoportal.common.model.legacy.UploadedImage;
+
+
+
+public class TestModel {
+
+
+	/**
+	 * Builds a Concessione populated with every scalar/metadata field used by
+	 * the service tests, but with no attachments (relazione, layers, images).
+	 *
+	 * @return a new, fully described but attachment-free Concessione
+	 */
+	public static Concessione prepareEmptyConcessione() {
+		Concessione concessione=new Concessione();
+
+		// Generic fields
+
+		// Concessione fields
+
+		concessione.setNome("MONGO Italia, forse, ma su ");
+		concessione.setIntroduzione("This is my MONGO project");
+		concessione.setDescrizioneContenuto("It contains this and that");
+
+		// Arrays.asList is varargs: no need to wrap values in an explicit String[]
+		concessione.setAuthors(Arrays.asList("Some one","Some, oneelse"));
+
+		concessione.setContributore("Contrib 1");
+		concessione.setTitolari(Arrays.asList("Some one","Some, oneelse"));
+		concessione.setResponsabile("Someone");
+		concessione.setEditore("Editore");
+
+		concessione.setFontiFinanziamento(Arrays.asList("Big pharma","Pentagon"));
+
+		concessione.setSoggetto(Arrays.asList("Research Excavation","Archeology"));
+
+		concessione.setDataInizioProgetto(LocalDateTime.now());
+		concessione.setDataFineProgetto(LocalDateTime.now());
+
+		concessione.setLicenzaID("CC-BY");
+
+		concessione.setTitolareLicenza(Arrays.asList("Qualcun altro"));
+		concessione.setTitolareCopyright(Arrays.asList("Chiedilo in giro"));
+
+		concessione.setParoleChiaveLibere(Arrays.asList("Robba","Stuff"));
+		concessione.setParoleChiaveICCD(Arrays.asList("vattelapesca","somthing something"));
+
+		concessione.setCentroidLat(43.0); //N-S
+		concessione.setCentroidLong(9.0); //E-W
+
+		return concessione;
+	}
+
+	/**
+	 * Assigns a fresh random mongo id to each nested element of the given
+	 * Concessione (relazione scavo, posizionamento, every pianta and image)
+	 * and returns the same instance for chaining.
+	 * Note: the top-level Concessione id itself is intentionally left unset.
+	 */
+	public static final Concessione setIds(Concessione c) {
+		c.getRelazioneScavo().setMongo_id(rnd());
+		c.getPosizionamentoScavo().setMongo_id(rnd());
+		for(LayerConcessione pianta : c.getPianteFineScavo())
+			pianta.setMongo_id(rnd());
+		for(UploadedImage img : c.getImmaginiRappresentative())
+			img.setMongo_id(rnd());
+		return c;
+	}
+
+ /** Returns a freshly generated Mongo ObjectId rendered as its hex string. */
+ public static final String rnd() {
+ return new ObjectId().toHexString();
+ }
+ /** Convenience overload: default fixture with 4 piante and 2 immagini. */
+ public static Concessione prepareConcessione() {
+ return prepareConcessione(4,2);
+ }
+
+ public static Concessione prepareConcessione(int pianteCount ,int imgsCount) {
+
+ Concessione concessione=prepareEmptyConcessione();
+
+
+
+ // Attachments
+
+ // Relazione scavo
+ RelazioneScavo relScavo=new RelazioneScavo();
+
+ relScavo.setAbstractIta("simple abstract section");
+ relScavo.setResponsabili(concessione.getAuthors());
+
+ concessione.setRelazioneScavo(relScavo);
+ //Immagini rappresentative
+ ArrayList imgs=new ArrayList<>();
+ for(int i=0;i piante=new ArrayList();
+ for(int i=0;i