Compare commits

This repository was archived on 2021-09-09. You can view files and clone it, but you cannot push or open issues or pull requests.

22 Commits

Author SHA1 Message Date
Fabio Sinibaldi ab3bfe0cd3 Fixed Projections in queries 2021-09-09 13:35:09 +02:00
Fabio Sinibaldi 76d1113af0 TTL Object retrieving fix 2021-09-07 11:28:11 +02:00
Fabio Sinibaldi 3c129484d4 TTL Object retrieving fix 2021-09-07 10:30:27 +02:00
Fabio Sinibaldi ad56a91310 Clear FileSet Feature 2021-09-03 15:28:50 +02:00
Fabio Sinibaldi b5d72ddd82 Refactored Serialization means 2021-09-02 18:05:59 +02:00
Fabio Sinibaldi 9533313b67 Refactored test context handling 2021-09-02 12:24:07 +02:00
Fabio Sinibaldi da354a3e82 Refactored REST model to common library 2021-09-01 18:43:13 +02:00
Fabio Sinibaldi cb63acd47e Refactored REST model to common library 2021-09-01 18:36:50 +02:00
Fabio Sinibaldi bb622c3b45 Refactored REST model to common library 2021-09-01 18:16:36 +02:00
Fabio Sinibaldi bde5d820d9 Refactored REST model to common library 2021-09-01 18:14:18 +02:00
Fabio Sinibaldi 66b0f604f1 Query Interface 2021-08-06 18:33:34 +02:00
Fabio Sinibaldi 3ab16c1f3e Validated filter 2021-08-06 16:26:44 +02:00
Fabio Sinibaldi 351bc79324 Introduced Search Feature 2021-08-06 16:17:24 +02:00
Fabio Sinibaldi cd04e5f49e Fixed commit upon deletion of centroids 2021-08-04 17:09:30 +02:00
Fabio Sinibaldi 38a8b89963 Scoped Postgis DB Manager 2021-08-04 16:22:29 +02:00
Fabio Sinibaldi 5e9154d87e Fixed postgis de indexing 2021-08-03 17:44:53 +02:00
Fabio Sinibaldi 36d70987b7 Style imported from legacy library 2021-08-03 17:11:38 +02:00
Fabio Sinibaldi 4df61c0ca9 Exposed method unpublish 2021-08-03 16:24:49 +02:00
Fabio Sinibaldi d08bb8315a Fixed Layer deletion on multiple GS instances 2021-08-03 15:33:28 +02:00
Fabio Sinibaldi 1ec69d1ad0 Catched Deletion Exception 2021-08-03 12:50:02 +02:00
Fabio Sinibaldi b68257d150 Purge upon deletion (optional) 2021-08-02 17:43:30 +02:00
Fabio Sinibaldi 815a972c82 Removed geoportal-logic library dependency 2021-07-30 18:21:29 +02:00
59 changed files with 2105 additions and 1248 deletions

pom.xml (65 lines changed)

@ -46,17 +46,6 @@
<!-- SMARTGEARS -->
<!-- <dependency> -->
<!-- <groupId>org.gcube.core</groupId> -->
<!-- <artifactId>common-smartgears-app</artifactId> -->
<!-- <exclusions> -->
<!-- <exclusion> -->
<!-- <groupId>org.javassist</groupId> -->
<!-- <artifactId>javassist</artifactId> -->
<!-- </exclusion> -->
<!-- </exclusions> -->
<!-- </dependency> -->
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-smartgears</artifactId>
@ -90,23 +79,50 @@
</exclusions>
</dependency>
<!-- INTERNAL LOGIC -->
<dependency>
<groupId>org.gcube.application</groupId>
<artifactId>geoportal-common</artifactId>
<version>[1.0.0,2.0.0)</version>
</dependency>
<!-- SDI -->
<dependency>
<groupId>org.gcube.application</groupId>
<artifactId>geoportal-logic</artifactId>
<version>[1.0.14,2.0.0)</version>
<groupId>org.gcube.spatial.data</groupId>
<artifactId>gis-interface</artifactId>
<version>[2.4.6,3.0.0)</version>
</dependency>
<!-- POSTGRES DRIVERS -->
<dependency>
<groupId>net.postgis</groupId>
<artifactId>postgis-jdbc</artifactId>
<version>2.5.0</version>
</dependency>
<!-- GS communication -->
<dependency>
<groupId>org.gcube.spatial.data</groupId>
<artifactId>gcube-geoserver-client</artifactId>
<version>[1.0.0-SNAPSHOT,)</version>
</dependency>
<!-- DT -->
<dependency>
<groupId>org.gcube.data.transfer</groupId>
<artifactId>data-transfer-library</artifactId>
<version>[1.2.1,2.0.0]</version>
</dependency>
<!-- Storage HUB -->
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>storagehub-client-library</artifactId>
<version>[1.0.0,2.0.0)</version>
</dependency>
<!-- &lt;!&ndash; override gis-interface &ndash;&gt;-->
<!-- <dependency>-->
<!-- <groupId>org.gcube.spatial.data</groupId>-->
<!-- <artifactId>gis-interface</artifactId>-->
<!-- <version>[2.4.6,3.0.0)</version>-->
<!-- </dependency>-->
<!-- MONGO -->
@ -119,18 +135,9 @@
<!-- Used to write centroids -->
<!-- <dependency> <groupId>net.postgis</groupId> <artifactId>postgis-jdbc</artifactId>
<version>2.5.0</version> </dependency> -->
<!-- jackson java time -->
<!-- Serialization from library -->
<!-- <dependency> <groupId>com.fasterxml.jackson.datatype</groupId> <artifactId>jackson-datatype-jsr310</artifactId>
<version>2.8.8</version> </dependency> -->
<!-- GPKG -->
<!-- Read Geopackage -->
<!-- <dependency> <groupId>mil.nga.geopackage</groupId> <artifactId>geopackage</artifactId>

GeoPortalService.java

@ -1,15 +1,16 @@
package org.gcube.application.geoportal.service;
import javax.ws.rs.ApplicationPath;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import org.gcube.application.geoportal.common.rest.InterfaceConstants;
import org.gcube.application.geoportal.service.rest.Concessioni;
import org.gcube.application.geoportal.service.rest.ConcessioniOverMongo;
import org.gcube.application.geoportal.service.rest.Profiles;
import org.gcube.application.geoportal.service.rest.Projects;
import org.gcube.application.geoportal.service.rest.Sections;
import org.gcube.application.geoportal.service.utils.Serialization;
import org.glassfish.jersey.server.ResourceConfig;
import javax.ws.rs.ApplicationPath;
@ApplicationPath(InterfaceConstants.APPLICATION_PATH)
public class GeoPortalService extends ResourceConfig{
@ -18,13 +19,20 @@ public class GeoPortalService extends ResourceConfig{
public GeoPortalService() {
super();
//Register interrfaces
registerClasses(Concessioni.class);
// registerClasses(Concessioni.class);
registerClasses(ConcessioniOverMongo.class);
registerClasses(Projects.class);
registerClasses(Sections.class);
registerClasses(Profiles.class);
JacksonJaxbJsonProvider provider = new JacksonJaxbJsonProvider();
provider.setMapper(Serialization.mapper);
register(provider);
}

Engine.java

@ -1,11 +0,0 @@
package org.gcube.application.geoportal.service.engine;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
public interface Engine <T> {
public void init();
public void shustdown();
public T getObject() throws ConfigurationException;
}

ImplementationProvider.java

@ -1,13 +1,12 @@
package org.gcube.application.geoportal.service.engine;
import org.gcube.application.geoportal.managers.AbstractRecordManager;
import org.gcube.application.geoportal.managers.EMFProvider;
import org.gcube.application.geoportal.service.engine.cache.MongoClientProvider;
import org.gcube.application.geoportal.service.engine.cache.MongoConnectionProvider;
//import org.gcube.application.geoportal.managers.AbstractRecordManager;
//import org.gcube.application.geoportal.managers.EMFProvider;
import lombok.Getter;
import lombok.Setter;
import lombok.Synchronized;
import org.gcube.application.geoportal.service.engine.providers.*;
public class ImplementationProvider {
@ -35,10 +34,14 @@ public class ImplementationProvider {
@Setter
private StorageClientProvider storageProvider=new StorageClientProvider();
@Getter
@Setter
private EMFProvider emfProvider=new ScopedEMFProvider();
private PostgisConnectionProvider dbProvider=new PostgisConnectionProvider();
// @Getter
// @Setter
// private EMFProvider emfProvider=new ScopedEMFProvider();
@Getter
@ -47,13 +50,13 @@ public class ImplementationProvider {
public void shutdown() {
// Stop JPA
AbstractRecordManager.shutdown();
// AbstractRecordManager.shutdown();
mongoConnectionProvider.shustdown();
mongoClientProvider.shustdown();
}
public void startup() {
AbstractRecordManager.setDefaultProvider(emfProvider);
// AbstractRecordManager.setDefaultProvider(emfProvider);
mongoConnectionProvider.init();
mongoClientProvider.init();
}
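For orientation, the refactored provider chain above is consumed per gCube scope; a minimal sketch of a call site (the same chain appears verbatim in the PostgisIndex changes further down), with exception handling omitted:

// Resolve the scope-local Postgis manager through the ImplementationProvider singleton;
// getObject() returns (or lazily creates) the instance cached for the current scope and
// may throw ConfigurationException.
PostgisDBManagerI db = ImplementationProvider.get().getDbProvider().getObject();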

SDIManager.java

@ -7,17 +7,13 @@ import it.geosolutions.geoserver.rest.decoder.RESTFeatureType;
import it.geosolutions.geoserver.rest.decoder.RESTLayer;
import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder;
import it.geosolutions.geoserver.rest.encoder.datastore.GSPostGISDatastoreEncoder;
import it.geosolutions.geoserver.rest.encoder.feature.FeatureTypeAttribute;
import it.geosolutions.geoserver.rest.encoder.feature.GSAttributeEncoder;
import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.geoportal.common.model.legacy.*;
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.model.db.DBConstants;
import org.gcube.application.geoportal.model.db.DatabaseConnection;
import org.gcube.application.geoportal.model.db.PostgisTable;
import org.gcube.application.geoportal.service.engine.mongo.PostgisIndex;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable;
import org.gcube.application.geoportal.service.model.internal.faults.SDIInteractionException;
import org.gcube.common.storagehub.client.dsl.FileContainer;
import org.gcube.data.transfer.library.DataTransferClient;
@ -31,11 +27,7 @@ import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@ -315,7 +307,7 @@ public class SDIManager {
}
public String configureCentroidLayer(String name,String workspace,String storeName,PostgisTable table, DatabaseConnection connection) throws SDIInteractionException {
public String configureCentroidLayer(String name, String workspace, String storeName, PostgisTable table, DatabaseConnection connection) throws SDIInteractionException {
GSFeatureTypeEncoder fte=new GSFeatureTypeEncoder();
fte.setAbstract("Centroid layer for "+name);
@ -345,10 +337,27 @@ public class SDIManager {
//Checking layer
publishStyle(Files.getFileFromResources("styles/clustered_points.sld"),style);
log.info("Creating layer in {} : {} with FTE {} , LE {}",workspace,storeName,fte,layerEncoder);
if(currentGeoserver.getReader().getLayer(workspace, name)==null)
if(!currentGeoserver.getPublisher().publishDBLayer(workspace, storeName, fte, layerEncoder))
throw new SDIInteractionException("Unable to create layer "+name);
log.debug("layer "+name+" already exists");
String link=String.format("https://%1$s/geoserver/%2$s/wms?"
+"service=WMS&version=1.1.0&request=GetMap&layers=%2$s:%3$s&"
+ "styles=&bbox=%4$s,%5$s,%6$s,%7$s&srs=%8$s&format=application/openlayers&width=%9$d&height=%10$d",
geoserverHostName,
workspace,
name,
"-1563071.166172796",
"4789738.204048398",
"4334926.486925308",
"5828118.072551585",
EPSG_4326,
400,
400);
return name;
} catch (IllegalArgumentException | MalformedURLException e) {
throw new SDIInteractionException("Unable to create layer "+name,e);
@ -359,20 +368,42 @@ public class SDIManager {
public void deleteContent(GeoServerContent toDelete) throws IllegalArgumentException, MalformedURLException, RemoteServiceException {
log.debug("Deleting geoserver layer "+toDelete);
log.info("Deleting geoserver layer "+toDelete);
String geoserverHostName=toDelete.getGeoserverHostName();
log.debug("Looking for geoserver {}",geoserverHostName);
AbstractGeoServerDescriptor geoServerDescriptor=null;
for(AbstractGeoServerDescriptor gs :gis.getCurrentCacheElements(false)){
log.debug("Checking gs {}",gs);
if(new URL(gs.getUrl()).getHost().equals(geoserverHostName))
geoServerDescriptor=gs;
}
if(geoServerDescriptor == null) throw new IllegalArgumentException("Unable to find geoserver "+geoserverHostName);
GeoServerRESTPublisher publisher=geoServerDescriptor.getPublisher();
//delete layer
GeoServerRESTPublisher publisher=currentGeoserver.getPublisher();
//delete store
log.debug("Removing DS {} : {} ",toDelete.getWorkspace(),toDelete.getStore());
publisher.removeDatastore(toDelete.getWorkspace(), toDelete.getStore(), true);
//delete WS if empty
GeoServerRESTReader reader=currentGeoserver.getReader();
GeoServerRESTReader reader=geoServerDescriptor.getReader();
log.debug("Checking if WS {} is empty",toDelete.getWorkspace());
if(reader.getDatastores(toDelete.getWorkspace()).isEmpty()) {
log.debug("Deleting emtpy workspace "+toDelete.getWorkspace());
publisher.removeWorkspace(toDelete.getWorkspace(), true);
}
//delete file
dtGeoServer.getWebClient().delete(toDelete.getGeoserverPath());
// TODO REMOVE HARDCODED PATCH
String path=toDelete.getGeoserverPath().replace("/srv/geoserver_data","geoserver");
log.info("Deleting files at {} [{}]",path,toDelete.getGeoserverPath());
// path=toDelete.getGeoserverPath();
dtGeoServer.getWebClient().delete(path);
}
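For reference, a minimal sketch of the preview link produced by the GetMap template above. The host geoserver.example.org is hypothetical, the workspace and layer names are the ones used for the centroid index ("gna", "centroids_concessioni"), and the EPSG_4326 constant is assumed to resolve to "EPSG:4326":

String geoserverHostName = "geoserver.example.org";   // hypothetical host
String workspace = "gna";
String name = "centroids_concessioni";
String link = String.format("https://%1$s/geoserver/%2$s/wms?"
        + "service=WMS&version=1.1.0&request=GetMap&layers=%2$s:%3$s&"
        + "styles=&bbox=%4$s,%5$s,%6$s,%7$s&srs=%8$s&format=application/openlayers&width=%9$d&height=%10$d",
        geoserverHostName, workspace, name,
        "-1563071.166172796", "4789738.204048398", "4334926.486925308", "5828118.072551585",
        "EPSG:4326", 400, 400);
// link -> https://geoserver.example.org/geoserver/gna/wms?service=WMS&version=1.1.0&request=GetMap
//         &layers=gna:centroids_concessioni&styles=&bbox=-1563071.166172796,4789738.204048398,
//         4334926.486925308,5828118.072551585&srs=EPSG:4326&format=application/openlayers&width=400&height=400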

ScopedEMFProvider.java

@ -1,205 +0,0 @@
package org.gcube.application.geoportal.service.engine;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.URL;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import javax.persistence.EntityManagerFactory;
import javax.persistence.SharedCacheMode;
import javax.persistence.ValidationMode;
import javax.persistence.spi.ClassTransformer;
import javax.persistence.spi.PersistenceUnitInfo;
import javax.persistence.spi.PersistenceUnitTransactionType;
import javax.sql.DataSource;
import org.gcube.application.geoportal.managers.EMFProvider;
import org.gcube.application.geoportal.model.db.DatabaseConnection;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
import org.gcube.application.geoportal.utils.ISUtils;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.jpa.HibernatePersistenceProvider;
import jersey.repackaged.com.google.common.collect.ImmutableMap;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class ScopedEMFProvider extends AbstractScopedMap<EntityManagerFactory> implements EMFProvider {
public ScopedEMFProvider() {
super("EMF Cache");
// setTTL(Duration.of(10, ChronoUnit.MINUTES));
}
@Override
protected void dispose(EntityManagerFactory toDispose) {
if(toDispose!=null) {
if(toDispose.isOpen()) toDispose.close();
toDispose=null;
}
}
@Override
public EntityManagerFactory getFactory() {
try {
return getObject();
} catch (ConfigurationException e) {
throw new RuntimeException("Unable to get Factory ",e);
}
}
@Override
public void init() {
}
@Override
protected EntityManagerFactory retrieveObject() throws ConfigurationException {
DatabaseConnection conn=ISUtils.queryForDB("postgresql", "internal-db");
log.debug("Found Internal Database : "+conn);
return new HibernatePersistenceProvider().createContainerEntityManagerFactory(
archiverPersistenceUnitInfo(),
ImmutableMap.<String, Object>builder()
.put(AvailableSettings.JPA_JDBC_DRIVER, "org.postgresql.Driver")
.put(AvailableSettings.JPA_JDBC_URL, conn.getUrl())
.put(AvailableSettings.DIALECT, org.hibernate.dialect.PostgreSQLDialect.class)
.put(AvailableSettings.HBM2DDL_AUTO, org.hibernate.tool.schema.Action.UPDATE)
.put(AvailableSettings.SHOW_SQL, true)
.put(AvailableSettings.QUERY_STARTUP_CHECKING, false)
.put(AvailableSettings.GENERATE_STATISTICS, false)
.put(AvailableSettings.USE_REFLECTION_OPTIMIZER, false)
.put(AvailableSettings.USE_SECOND_LEVEL_CACHE, false)
.put(AvailableSettings.USE_QUERY_CACHE, false)
.put(AvailableSettings.USE_STRUCTURED_CACHE, false)
.put(AvailableSettings.STATEMENT_BATCH_SIZE, 20)
.put(AvailableSettings.JPA_JDBC_USER, conn.getUser())
.put(AvailableSettings.JPA_JDBC_PASSWORD, conn.getPwd())
.build());
}
@Override
public void shutdown() {
super.shustdown();
}
///** *
private static PersistenceUnitInfo archiverPersistenceUnitInfo() {
final List<String> MANAGED_CLASSES=Arrays.asList(new String[] {
"org.gcube.application.geoportal.model.Record",
"org.gcube.application.geoportal.model.concessioni.Concessione",
"org.gcube.application.geoportal.model.concessioni.LayerConcessione",
"org.gcube.application.geoportal.model.concessioni.RelazioneScavo",
"org.gcube.application.geoportal.model.content.AssociatedContent",
"org.gcube.application.geoportal.model.content.GeoServerContent",
"org.gcube.application.geoportal.model.content.OtherContent",
"org.gcube.application.geoportal.model.content.PersistedContent",
"org.gcube.application.geoportal.model.content.UploadedImage",
"org.gcube.application.geoportal.model.content.WorkspaceContent",
"org.gcube.application.geoportal.model.gis.ShapeFileLayerDescriptor",
"org.gcube.application.geoportal.model.gis.SDILayerDescriptor"});
return new PersistenceUnitInfo() {
@Override
public String getPersistenceUnitName() {
return "ApplicationPersistenceUnit";
}
@Override
public String getPersistenceProviderClassName() {
return "org.hibernate.jpa.HibernatePersistenceProvider";
}
@Override
public PersistenceUnitTransactionType getTransactionType() {
return PersistenceUnitTransactionType.RESOURCE_LOCAL;
}
@Override
public DataSource getJtaDataSource() {
return null;
}
@Override
public DataSource getNonJtaDataSource() {
return null;
}
@Override
public List<String> getMappingFileNames() {
return Collections.emptyList();
}
@Override
public List<URL> getJarFileUrls() {
try {
return Collections.list(this.getClass()
.getClassLoader()
.getResources(""));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
@Override
public URL getPersistenceUnitRootUrl() {
return null;
}
@Override
public List<String> getManagedClassNames() {
return MANAGED_CLASSES;
}
@Override
public boolean excludeUnlistedClasses() {
return true;
}
@Override
public SharedCacheMode getSharedCacheMode() {
return null;
}
@Override
public ValidationMode getValidationMode() {
return null;
}
@Override
public Properties getProperties() {
return new Properties();
}
@Override
public String getPersistenceXMLSchemaVersion() {
return null;
}
@Override
public ClassLoader getClassLoader() {
return null;
}
@Override
public void addTransformer(ClassTransformer transformer) {
}
@Override
public ClassLoader getNewTempClassLoader() {
return null;
}
};
}
}

WorkspaceManager.java

@ -1,25 +1,18 @@
package org.gcube.application.geoportal.service.engine;
import java.io.FileNotFoundException;
import java.io.InputStream;
import javax.validation.constraints.NotNull;
import lombok.*;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.geoportal.common.model.legacy.WorkspaceContent;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
import org.gcube.application.geoportal.service.model.internal.faults.ConfigurationException;
import org.gcube.common.storagehub.client.dsl.FileContainer;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.Setter;
import lombok.Synchronized;
import lombok.extern.slf4j.Slf4j;
import javax.validation.constraints.NotNull;
import java.io.FileNotFoundException;
import java.io.InputStream;
@Slf4j
public class WorkspaceManager {
@ -76,6 +69,10 @@ public class WorkspaceManager {
return sgClient.open(id).asFolder();
}
public void removeFolderById(String id) throws StorageHubException {
sgClient.open(id).asFolder().delete();
}
public FolderContainer getSubFolder(FolderContainer parentFolder,String path) throws StorageHubException {
try{
return parentFolder.openByRelativePath(path).asFolder();
@ -114,7 +111,7 @@ public class WorkspaceManager {
// STATIC SYNCH METHODS
@Synchronized
private static FolderContainer getApplicationBaseFolder(StorageHubClient sgClient) throws StorageHubException {
public static FolderContainer getApplicationBaseFolder(StorageHubClient sgClient) throws StorageHubException {
FolderContainer vre=sgClient.openVREFolder();
try {
return vre.openByRelativePath(APP_FOLDER).asFolder();

ConcessioniMongoManager.java

@ -1,48 +1,38 @@
package org.gcube.application.geoportal.service.engine.mongo;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.mongodb.client.MongoDatabase;
import lombok.Synchronized;
import lombok.extern.slf4j.Slf4j;
import org.bson.Document;
import org.bson.types.ObjectId;
import org.gcube.application.geoportal.common.model.legacy.*;
import org.gcube.application.geoportal.common.model.legacy.report.ValidationReport;
import org.gcube.application.geoportal.common.model.legacy.report.ValidationReport.ValidationStatus;
import org.gcube.application.geoportal.common.model.rest.QueryRequest;
import org.gcube.application.geoportal.common.rest.TempFile;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.service.engine.ImplementationProvider;
import org.gcube.application.geoportal.service.engine.SDIManager;
import org.gcube.application.geoportal.service.engine.WorkspaceManager;
import org.gcube.application.geoportal.service.engine.WorkspaceManager.FileOptions;
import org.gcube.application.geoportal.service.engine.WorkspaceManager.FolderOptions;
import org.gcube.application.geoportal.service.engine.postgis.PostgisIndex;
import org.gcube.application.geoportal.service.engine.providers.StorageClientProvider;
import org.gcube.application.geoportal.service.model.internal.faults.*;
import org.gcube.application.geoportal.service.utils.Serialization;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.data.transfer.library.faults.RemoteServiceException;
import java.io.IOException;
import java.net.MalformedURLException;
import java.sql.SQLException;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.function.Consumer;
import org.bson.Document;
import org.bson.types.ObjectId;
import org.gcube.application.geoportal.common.model.legacy.AssociatedContent;
import org.gcube.application.geoportal.common.model.legacy.Concessione;
import org.gcube.application.geoportal.common.model.legacy.GeoServerContent;
import org.gcube.application.geoportal.common.model.legacy.LayerConcessione;
import org.gcube.application.geoportal.common.model.legacy.OtherContent;
import org.gcube.application.geoportal.common.model.legacy.PersistedContent;
import org.gcube.application.geoportal.common.model.legacy.RelazioneScavo;
import org.gcube.application.geoportal.common.model.legacy.SDILayerDescriptor;
import org.gcube.application.geoportal.common.model.legacy.UploadedImage;
import org.gcube.application.geoportal.common.model.legacy.WorkspaceContent;
import org.gcube.application.geoportal.common.model.legacy.report.ValidationReport;
import org.gcube.application.geoportal.common.model.legacy.report.ValidationReport.ValidationStatus;
import org.gcube.application.geoportal.common.rest.TempFile;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
import org.gcube.application.geoportal.model.fault.PublishException;
import org.gcube.application.geoportal.service.engine.ImplementationProvider;
import org.gcube.application.geoportal.service.engine.SDIManager;
import org.gcube.application.geoportal.service.engine.StorageClientProvider;
import org.gcube.application.geoportal.service.engine.WorkspaceManager;
import org.gcube.application.geoportal.service.engine.WorkspaceManager.FileOptions;
import org.gcube.application.geoportal.service.engine.WorkspaceManager.FolderOptions;
import org.gcube.application.geoportal.service.model.internal.faults.InvalidStateException;
import org.gcube.application.geoportal.service.model.internal.faults.SDIInteractionException;
import org.gcube.application.geoportal.service.utils.Serialization;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.mongodb.client.MongoDatabase;
import lombok.Synchronized;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class ConcessioniMongoManager extends MongoManager{
@ -106,31 +96,92 @@ public class ConcessioniMongoManager extends MongoManager{
return asConcessione(replace(asDocument(toReturn),collectionName));
}
public List<Concessione> list(){
ArrayList<Concessione> toReturn=new ArrayList<>();
iterate(null, collectionName).forEach(
public Iterable<Concessione> list(){
LinkedBlockingQueue queue=new LinkedBlockingQueue<Concessione>();
iterate(null,null, collectionName).forEach(
new Consumer<Document>() {
@Override
public void accept(Document d) {
try {
toReturn.add(asConcessione(d));
queue.put(asConcessione(d));
}catch(Throwable t) {
log.error("Unable to read Document as concessione ",t);
log.debug("Document was "+d.toJson());
log.debug("Document was "+d.toJson());
}
}
});
return toReturn;
return queue;
}
public Concessione getById(String id) throws JsonProcessingException, IOException {
public Iterable<Concessione> search(Document filter){
log.info("Searching concessione for filter {} ",filter);
LinkedBlockingQueue queue=new LinkedBlockingQueue<Concessione>();
iterate(filter,null,collectionName).forEach(
(Consumer<? super Document>) (Document d)->{try{
queue.put(asConcessione(d));
}catch(Throwable t){log.warn("Unable to translate "+d);}});
log.info("Returned {} elements ",queue.size());
return queue;
}
public Iterable<Document> query(QueryRequest queryRequest){
log.info("Searching concessione for filter {} ",queryRequest);
LinkedBlockingQueue queue=new LinkedBlockingQueue<Concessione>();
query(queryRequest,collectionName).forEach(
(Consumer<? super Document>) (Document d)->{try{
queue.put(d);
}catch(Throwable t){log.warn("Unable to translate "+d);}});
log.info("Returned {} elements ",queue.size());
return queue;
}
public Concessione getById(String id)throws IOException {
log.debug("Loading by ID "+id);
return asConcessione(getById(asId(id),collectionName));
}
public void deleteById(String id) {
delete(asId(id), collectionName);
public void deleteById(String id,boolean force) throws DeletionException {
log.debug("Deleting by ID {}, force {}",id,force);
try{
Concessione concessione =unpublish(id);
try{
// UNPUBLISH
if (!concessione.getReport().getStatus().equals(ValidationStatus.PASSED)&&!force)
throw new DeletionException("Unable to unpublish "+concessione.getMongo_id());
//clean WS
concessione = removeContent(concessione);
if (!concessione.getReport().getStatus().equals(ValidationStatus.PASSED)&&!force)
throw new DeletionException("Unable to unpublish "+concessione.getMongo_id());
delete(asId(id), collectionName);
}catch(DeletionException e) {
//storing updated - partially deleted
replace(asDocument(concessione), collectionName);
throw e;
}
}catch(Throwable t){
throw new DeletionException("Unable to delete "+id,t);
}
}
public Concessione unpublish(String id) throws DeletionException {
try{
Concessione toReturn=asConcessione(getById(asId(id),collectionName));
removeFromIndex(toReturn);
log.debug("Removed from centroids "+toReturn.getMongo_id());
toReturn = unpublish(toReturn);
log.debug("Concessione after unpublishing is "+toReturn);
return asConcessione(replace(asDocument(toReturn),collectionName));
}catch(Throwable t){
throw new DeletionException("Unable to unpublish "+id,t);
}
}
public Concessione publish(String id) throws JsonProcessingException, IOException, InvalidStateException{
@ -150,6 +201,62 @@ public class ConcessioniMongoManager extends MongoManager{
}
private static Concessione removeContent(Concessione concessione) throws DeletionException {
if(concessione.getFolderId()==null) {
log.debug("No content for " + concessione.getMongo_id());
return concessione;
}
try {
log.debug("Removing content for " + concessione.getMongo_id());
WorkspaceManager manager = new WorkspaceManager();
manager.removeFolderById(concessione.getFolderId());
//Removing references from Object
concessione.setFolderId(null);
ArrayList<AssociatedContent> list = new ArrayList<>();
list.add(concessione.getPosizionamentoScavo());
list.addAll(concessione.getPianteFineScavo());
list.addAll(concessione.getImmaginiRappresentative());
list.addAll(concessione.getGenericContent());
for (AssociatedContent c : list) {
c.getActualContent().clear();
}
return concessione;
}catch(Throwable t){
throw new DeletionException("Unable to delete from WS ",t);
}
}
public Concessione unregisterFileset(String id, String toClearPath) throws Exception {
log.info("Clearing Fileset at {} for {} ",toClearPath,id);
try {
WorkspaceManager ws=new WorkspaceManager();
Concessione c = getById(id);
AssociatedContent toClearContent=c.getContentByPath(toClearPath);
log.debug("Found content {} for path {}",toClearContent,toClearPath);
//checking if published content
for(PersistedContent persisted : toClearContent.getActualContent()){
if(persisted instanceof GeoServerContent) throw new Exception ("Cannot clear concessione "+id+" at "+toClearContent+", because it is published.");
}
for(PersistedContent persisted : toClearContent.getActualContent()){
if(persisted instanceof WorkspaceContent) ws.deleteFromWS((WorkspaceContent) persisted);
}
toClearContent.getActualContent().clear();
log.debug("Updating dafults for {} ",c);
c.setDefaults();
return asConcessione(replace(asDocument(c),collectionName));
}catch(Exception e) {
throw new Exception("Unable to unregister files.",e);
}
}
public Concessione persistContent(String id, String destinationPath, List<TempFile> files) throws Exception{
log.info("Persisting {} files for path {} in concessione ",files.size(),destinationPath,id);
try{
@ -157,7 +264,7 @@ public class ConcessioniMongoManager extends MongoManager{
WorkspaceManager ws=new WorkspaceManager();
//Check Init Base folder
FolderContainer baseFolder=null;
if(c.getFolderId()==null) {
if(c.getFolderId()==null) {
String folderName=Files.fixFilename("mConcessione"+"_"+c.getNome()+"_"+Serialization.FULL_FORMATTER.format(LocalDateTime.now()));
log.info("Creating folder {} for Concessione ID {} ",folderName,id);
FolderContainer folder=ws.createFolder(new FolderOptions(folderName, "Base Folder for "+c.getNome(),null));
@ -193,7 +300,20 @@ public class ConcessioniMongoManager extends MongoManager{
return record;
}
private static Concessione removeFromIndex(Concessione record) {
log.info("Removing from index {} ",record.getMongo_id());
ValidationReport report= new ValidationReport("Remove From Index Report ");
PostgisIndex index;
try {
index = new PostgisIndex();
index.removeCentroid(record);
report.addMessage(ValidationStatus.PASSED, "Removed centroid");
} catch (SDIInteractionException | SQLException | ConfigurationException e) {
log.error("Unable to reove from index {} ",record,e);
report.addMessage(ValidationStatus.WARNING, "Internal error while removing from index.");
}
return record;
}
@ -207,7 +327,7 @@ public class ConcessioniMongoManager extends MongoManager{
ValidationReport report=new ValidationReport("Publish report");
try {
SDIManager sdiManager=new SDIManager();
ArrayList<AssociatedContent> list=new ArrayList<AssociatedContent>();
ArrayList<AssociatedContent> list=new ArrayList<AssociatedContent>();
//Concessione
String workspace= sdiManager.createWorkspace("gna_conc_"+conc.getMongo_id());
@ -242,24 +362,60 @@ public class ConcessioniMongoManager extends MongoManager{
return conc;
}
private static final Concessione unpublish(Concessione concessione){
ValidationReport report=new ValidationReport("Unpublish report");
try{
SDIManager sdi=new SDIManager();
ArrayList<AssociatedContent> list=new ArrayList<AssociatedContent>();
list.add(concessione.getPosizionamentoScavo());
list.addAll(concessione.getPianteFineScavo());
for(AssociatedContent c:list) {
if(c instanceof LayerConcessione) {
List<PersistedContent> contents=c.getActualContent();
List<PersistedContent> toRemove=new ArrayList<>();
for(PersistedContent p:contents){
if(p instanceof GeoServerContent){
try {
sdi.deleteContent((GeoServerContent) p);
toRemove.add(p);
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (RemoteServiceException e) {
e.printStackTrace();
}
}
}
c.getActualContent().removeAll(toRemove);
}
}
}catch(SDIInteractionException e){
report.addMessage(ValidationStatus.WARNING, "Unable to unpublish layers "+e.getMessage());
}
concessione.setReport(report);
return concessione;
}
private static final void store(AssociatedContent content,List<TempFile> files, WorkspaceManager ws, FolderContainer base) throws Exception {
FolderContainer sectionParent=null;
FolderContainer sectionParent=null;
if(content instanceof RelazioneScavo)
sectionParent = ws .createFolder(new FolderOptions(
"relazione","Relazione di scavo : "+content.getTitolo(),base));
else if (content instanceof UploadedImage)
else if (content instanceof UploadedImage)
sectionParent = ws .createFolder(new FolderOptions(
"imgs","Immagini rappresentative : "+content.getTitolo(),base));
else if (content instanceof SDILayerDescriptor)
//SDI Section
if(content instanceof LayerConcessione)
if(content instanceof LayerConcessione)
sectionParent = ws .createFolder(new FolderOptions(
content.getTitolo(),"Layer Concessione : "+content.getTitolo(),ws.getSubFolder(base,"layers")));
else throw new Exception("Invalid SDI Content "+content);
else if (content instanceof OtherContent )
else if (content instanceof OtherContent )
sectionParent = ws .createFolder(new FolderOptions(
content.getTitolo(),"Relazione di scavo : "+content.getTitolo(),ws.getSubFolder(base,"other")));
else throw new Exception("Invalid Content "+content);
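A minimal usage sketch of the new deletion flow above; the no-argument manager constructor and the identifier are illustrative assumptions:

try {
    ConcessioniMongoManager manager = new ConcessioniMongoManager();   // assumed accessible constructor
    // Unpublishes layers, removes the centroid, deletes the Workspace folder and finally drops the
    // Mongo document; with force=false any step not reporting ValidationStatus.PASSED aborts the deletion.
    manager.deleteById("6135e3a502ad3d05b5f81df3", false);             // hypothetical Mongo ObjectId
} catch (Exception e) {
    // on DeletionException the partially deleted state has already been stored back to Mongo;
    // the caller may decide to retry with force=true
}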

MongoManager.java

@ -1,13 +1,7 @@
package org.gcube.application.geoportal.service.engine.mongo;
import static com.mongodb.client.model.Filters.eq;
import org.bson.Document;
import org.bson.types.ObjectId;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
import org.gcube.application.geoportal.service.engine.ImplementationProvider;
import com.mongodb.Block;
import com.mongodb.MongoClient;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
@ -15,8 +9,16 @@ import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.FindOneAndReplaceOptions;
import com.mongodb.client.model.FindOneAndUpdateOptions;
import com.mongodb.client.model.ReturnDocument;
import lombok.extern.slf4j.Slf4j;
import org.bson.Document;
import org.bson.types.ObjectId;
import org.gcube.application.geoportal.common.model.rest.QueryRequest;
import org.gcube.application.geoportal.service.engine.ImplementationProvider;
import org.gcube.application.geoportal.service.model.internal.faults.ConfigurationException;
import static com.mongodb.client.model.Filters.eq;
import static com.mongodb.client.model.Sorts.ascending;
import static com.mongodb.client.model.Sorts.descending;
@Slf4j
public abstract class MongoManager {
@ -79,16 +81,45 @@ public abstract class MongoManager {
}
public FindIterable<Document> iterate(Document filter,String collectionName) {
public FindIterable<Document> iterate(Document filter, Document projection, String collectionName) {
log.debug("Iterate over {} ",collectionName);
MongoDatabase database=getDatabase();
MongoCollection<Document> coll=database.getCollection(collectionName);
if(filter==null)
return coll.find();
else
return coll.find(filter);
MongoCollection<Document> coll=database.getCollection(collectionName);
if(filter == null) filter=new Document();
log.debug("Applying Filter "+filter.toJson());
if(projection != null ) {
log.debug("Applying projection "+projection.toJson());
return coll.find(filter).projection(projection);
}else return coll.find(filter);
}
public FindIterable<Document> query(QueryRequest request, String collectionName){
FindIterable<Document> toReturn=iterate(request.getFilter(), request.getProjection(),collectionName);
if(request.getOrdering()!=null){
if(request.getOrdering().getDirection().equals(QueryRequest.OrderedRequest.Direction.ASCENDING))
toReturn=toReturn.sort(ascending(request.getOrdering().getFields()));
else toReturn=toReturn.sort(descending(request.getOrdering().getFields()));
}
//Paging
if(request.getPaging()!=null){
QueryRequest.PagedRequest paging=request.getPaging();
toReturn=toReturn.skip(paging.getOffset()).limit(paging.getLimit());
}
return toReturn;
}
public <T> FindIterable<T> iterateForClass(Document filter,String collectionName,Class<T> clazz) {
MongoDatabase database=getDatabase();
MongoCollection<Document> coll=database.getCollection(collectionName);
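To make the new query(...) plumbing above concrete, here is a minimal sketch of the driver chain it assembles for a request carrying a filter, a projection, a descending ordering and paging; the filter and projection values and the collection name are illustrative only:

MongoCollection<Document> coll = database.getCollection("concessioni");        // illustrative collection name
FindIterable<Document> results = coll.find(new Document("regione", "Lazio"))   // hypothetical filter
        .projection(new Document("nome", 1).append("anno", 1))                 // hypothetical projection
        .sort(com.mongodb.client.model.Sorts.descending("anno"))               // OrderedRequest with DESCENDING direction
        .skip(20)                                                              // PagedRequest offset
        .limit(10);                                                            // PagedRequest limit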

PostgisDBManager.java

@ -0,0 +1,194 @@
package org.gcube.application.geoportal.service.engine.postgis;
import lombok.Synchronized;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.geoportal.common.model.legacy.BBOX;
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable;
import org.gcube.application.geoportal.service.model.internal.faults.ConfigurationException;
import org.gcube.application.geoportal.service.model.internal.faults.DataParsingException;
import org.gcube.application.geoportal.service.utils.ISUtils;
import java.sql.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@Slf4j
public class PostgisDBManager implements PostgisDBManagerI {
@Synchronized
public static PostgisDBManager get() throws SQLException, ConfigurationException {
if(config==null) {
log.debug("Looking for Default Configuration.. ");
// TODO GENERIC
DatabaseConnection defaultConfiguration=
ISUtils.queryForDB("postgis", "Concessioni");
log.debug("Found configuration : "+defaultConfiguration);
config=defaultConfiguration;
}
return new PostgisDBManager();
}
public static PostgisDBManagerI get(boolean autocommit) throws SQLException, ConfigurationException {
PostgisDBManager toReturn=get();
toReturn.conn.setAutoCommit(autocommit);
return toReturn;
}
private static DatabaseConnection config;
private static Connection getConnection() throws SQLException {
Connection toReturn= DriverManager.getConnection(config.getUrl(),config.getUser(),config.getPwd());
//TODO configure behaviour
toReturn.setAutoCommit(false);
return toReturn;
}
private Connection conn=null;
private PostgisDBManager() throws SQLException {
conn=getConnection();
}
@Override
public void create(PostgisTable toCreate) throws SQLException {
String createStmt=toCreate.getCreateStatement();
log.debug("Executing create : "+createStmt);
conn.createStatement().executeUpdate(createStmt);
}
/* (non-Javadoc)
* @see org.gcube.application.geoportal.PostgisDBManagerI#commit()
*/
@Override
public void commit() throws SQLException {
conn.commit();
}
// /* (non-Javadoc)
// * @see org.gcube.application.geoportal.PostgisDBManagerI#evaluateBoundingBox(org.gcube.application.geoportal.model.PostgisTable)
// */
// @Override
// public BBOX evaluateBoundingBox(PostgisTable table) throws SQLException, DataParsingException {
// ResultSet rs=conn.createStatement().executeQuery("Select ST_Extent("+table.getGeometryColumn()+") as extent from "+table.getTablename());
// if(rs.next())
// return DBUtils.parseST_Extent(rs.getString("extent"));
// else throw new SQLException("No extent returned");
// }
/* (non-Javadoc)
* @see org.gcube.application.geoportal.PostgisDBManagerI#evaluateBoundingBox(org.gcube.application.geoportal.model.PostgisTable)
*/
@Override
public PostgisTable.POINT evaluateCentroid(PostgisTable table) throws SQLException, DataParsingException {
ResultSet rs=conn.createStatement().executeQuery("Select ST_AsText(ST_Centroid(ST_Collect("+table.getGeometryColumn()+"))) as centroid from "+table.getTablename());
if(rs.next())
return PostgisTable.POINT.parsePOINT(rs.getString("centroid"));
else throw new SQLException("No extent returned");
}
/* (non-Javadoc)
* @see org.gcube.application.geoportal.PostgisDBManagerI#prepareInsertStatement(org.gcube.application.geoportal.model.PostgisTable, boolean, boolean)
*/
@Override
public PreparedStatement prepareInsertStatement(PostgisTable target, boolean createTable, boolean geometryAsText) throws SQLException {
if(createTable) {
create(target);
}
String insertStmt=target.getInsertionStatement(geometryAsText);
log.debug("Preparing insert statement : "+insertStmt);
return conn.prepareStatement(insertStmt);
}
@Override
public int deleteByFieldValue(PostgisTable target, PostgisTable.Field field, Object value) throws SQLException {
String query=target.getDeleteByFieldStatement(field);
log.debug("Preparing DELETE SQL {} with field {} = {} ",query,field,value);
PreparedStatement stmt = conn.prepareStatement(query);
target.setObjectInPreparedStatement(field, value, stmt, 1);
int result=stmt.executeUpdate();
log.debug("DELETED {} rows ",result);
return result;
}
@Override
public DatabaseConnection getConnectionDescriptor() {
return config;
}
/* (non-Javadoc)
* @see org.gcube.application.geoportal.PostgisDBManagerI#deleteTable(java.lang.String)
*/
@Override
public void deleteTable(String tableName) throws SQLException {
conn.createStatement().executeUpdate("DROP TABLE "+tableName);
}
/* (non-Javadoc)
* @see org.gcube.application.geoportal.PostgisDBManagerI#truncate(java.lang.String)
*/
@Override
public void truncate(String tableName) throws SQLException{
conn.createStatement().executeUpdate("TRUNCATE Table "+tableName);
}
@Override
public ResultSet queryAll(PostgisTable table) throws SQLException {
// TODO Check schema
return conn.createStatement().executeQuery("Select * from "+table.getTablename());
}
// *********************** INNER UTILS CLASS
protected static class DBUtils {
private static Pattern pattern = Pattern.compile("(?!=\\d\\.\\d\\.)([\\d.]+)");
public static BBOX parseST_Extent(String extent) throws DataParsingException {
//BOX(11.9122574810083 44.2514144864263,11.9761128271586 44.2912342569845)
try {
log.debug("Parsing BBOX "+extent);
Matcher m=pattern.matcher(extent);
// Scanner sc = new Scanner(extent);
// double minLong = sc.nextDouble(),
// minLat = sc.nextDouble(),
// maxLong = sc.nextDouble(),
// maxLat= sc.nextDouble();
if(!m.find()) throw new DataParsingException("Unable to get minLong ");
Double minLong=Double.parseDouble(m.group(1));
if(!m.find()) throw new DataParsingException("Unable to get minLat ");
Double minLat=Double.parseDouble(m.group(1));
if(!m.find()) throw new DataParsingException("Unable to get maxLong ");
Double maxLong=Double.parseDouble(m.group(1));
if(!m.find()) throw new DataParsingException("Unable to get maxLat ");
Double maxLat=Double.parseDouble(m.group(1));
return new BBOX(maxLat, maxLong, minLat, minLong);
}catch(Throwable t) {
throw new DataParsingException("Invalid BBOX "+extent,t);
}
}
}
}
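For clarity, a worked trace of parseST_Extent above, using the sample extent already quoted in the code comment:

// Input (PostGIS ST_Extent output):
//   "BOX(11.9122574810083 44.2514144864263,11.9761128271586 44.2912342569845)"
// The regex extracts the four numeric groups in order of appearance:
//   minLong = 11.9122574810083, minLat = 44.2514144864263,
//   maxLong = 11.9761128271586, maxLat = 44.2912342569845
// and the method returns new BBOX(maxLat, maxLong, minLat, minLong), i.e.
//   new BBOX(44.2912342569845, 11.9761128271586, 44.2514144864263, 11.9122574810083)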

PostgisDBManagerI.java

@ -0,0 +1,37 @@
package org.gcube.application.geoportal.service.engine.postgis;
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable;
import org.gcube.application.geoportal.service.model.internal.faults.DataParsingException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
public interface PostgisDBManagerI {
void commit() throws SQLException;
PreparedStatement prepareInsertStatement(PostgisTable target, boolean createTable, boolean geometryAsText)
throws SQLException;
void deleteTable(String tableName) throws SQLException;
void truncate(String tableName) throws SQLException;
void create(PostgisTable toCreate) throws SQLException;
PostgisTable.POINT evaluateCentroid(PostgisTable table) throws SQLException, DataParsingException;
ResultSet queryAll(PostgisTable table) throws SQLException;
int deleteByFieldValue(PostgisTable target, PostgisTable.Field field, Object value) throws SQLException;
DatabaseConnection getConnectionDescriptor();
}

PostgisIndex.java

@ -1,4 +1,19 @@
package org.gcube.application.geoportal.service.engine.mongo;
package org.gcube.application.geoportal.service.engine.postgis;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.geoportal.common.model.legacy.Concessione;
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
import org.gcube.application.geoportal.common.model.rest.PostgisIndexDescriptor;
import org.gcube.application.geoportal.service.engine.ImplementationProvider;
import org.gcube.application.geoportal.service.engine.SDIManager;
import org.gcube.application.geoportal.service.model.internal.db.DBConstants;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable.Field;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable.FieldType;
import org.gcube.application.geoportal.service.model.internal.faults.ConfigurationException;
import org.gcube.application.geoportal.service.model.internal.faults.PublishException;
import org.gcube.application.geoportal.service.model.internal.faults.SDIInteractionException;
import org.gcube.application.geoportal.service.utils.Serialization;
import java.sql.PreparedStatement;
import java.sql.SQLException;
@ -6,22 +21,7 @@ import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.gcube.application.geoportal.common.model.legacy.Concessione;
import org.gcube.application.geoportal.model.db.DBConstants;
import org.gcube.application.geoportal.model.db.PostgisTable;
import org.gcube.application.geoportal.model.db.PostgisTable.Field;
import org.gcube.application.geoportal.model.db.PostgisTable.FieldType;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
import org.gcube.application.geoportal.model.fault.PublishException;
import org.gcube.application.geoportal.service.engine.SDIManager;
import org.gcube.application.geoportal.service.model.internal.faults.SDIInteractionException;
import org.gcube.application.geoportal.service.utils.Serialization;
import org.gcube.application.geoportal.storage.PostgisDBManager;
import org.gcube.application.geoportal.storage.PostgisDBManagerI;
import lombok.Getter;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
;
@Slf4j
public class PostgisIndex {
@ -37,28 +37,35 @@ public class PostgisIndex {
private SDIManager sdiManager;
private String wmsLink=null;
private static PostgisDBManager getDB() throws ConfigurationException {
return ImplementationProvider.get().getDbProvider().getObject();
};
public PostgisIndex() throws SDIInteractionException, SQLException, ConfigurationException {
super();
this.sdiManager=new SDIManager();
init();
this.wmsLink=init();
}
public PostgisIndexDescriptor getInfo() throws ConfigurationException, SDIInteractionException, SQLException {
DatabaseConnection conn=getDB().getConnectionDescriptor();
return new PostgisIndexDescriptor(conn,wmsLink);
}
protected PostgisTable getCentroidsTable() {
return DBConstants.Concessioni.CENTROIDS;
}
public void init() throws SQLException, ConfigurationException, SDIInteractionException {
public String init() throws SQLException, ConfigurationException, SDIInteractionException {
log.debug("Contacting postgis DB .. ");
PostgisDBManagerI db=PostgisDBManager.get();
PostgisDBManagerI db=ImplementationProvider.get().getDbProvider().getObject();
log.debug("Checking if centroids table exists..");
PostgisTable table=getCentroidsTable();
db.create(table);
db.commit();
sdiManager.configureCentroidLayer("centroids_concessioni", "gna", "gna_postgis",table,db.getConnectionDescriptor());
return sdiManager.configureCentroidLayer("centroids_concessioni", "gna", "gna_postgis",table,db.getConnectionDescriptor());
}
@ -70,7 +77,7 @@ public class PostgisIndex {
Map<String,String> centroidRow=evaluateCentroid(record);
log.debug("Contacting postgis DB .. ");
PostgisDBManagerI db=PostgisDBManager.get();
PostgisDBManagerI db=ImplementationProvider.get().getDbProvider().getObject();
PostgisTable centroidsTable=getCentroidsTable();
log.debug("Inserting / updated centroid Row {} ",centroidRow);
@ -100,10 +107,12 @@ public class PostgisIndex {
public void removeCentroid(Concessione record) {
try {
PostgisDBManagerI db=PostgisDBManager.get();
PostgisDBManagerI db=ImplementationProvider.get().getDbProvider().getObject();
PostgisTable centroidsTable=getCentroidsTable();
log.debug("Deleting centroid if present. ID is "+record.getId());
db.deleteByFieldValue(centroidsTable, new Field(DBConstants.Concessioni.PRODUCT_ID,FieldType.TEXT), record.getMongo_id());
log.debug("Deleting centroid if present. ID is "+record.getMongo_id());
int result= db.deleteByFieldValue(centroidsTable, new Field(DBConstants.Concessioni.PRODUCT_ID,FieldType.TEXT), record.getMongo_id());
db.commit();
log.info("Removed {} entries from gif Index with mongo id {} ",result,record.getMongo_id());
}catch(Exception e) {
log.warn("Unable to remove centroid ",e);
}

AbstractScopedMap.java

@ -1,25 +1,23 @@
package org.gcube.application.geoportal.service.engine;
import java.time.LocalDateTime;
import java.time.temporal.TemporalAmount;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiFunction;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
import org.gcube.application.geoportal.service.utils.ContextUtils;
package org.gcube.application.geoportal.service.engine.providers;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.Setter;
import lombok.Synchronized;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.geoportal.service.model.internal.faults.ConfigurationException;
import org.gcube.application.geoportal.service.utils.ContextUtils;
import java.time.LocalDateTime;
import java.time.temporal.TemporalAmount;
import java.util.concurrent.ConcurrentHashMap;
@Slf4j
@RequiredArgsConstructor
public abstract class AbstractScopedMap<T> implements Engine<T>{
// scope-> object
private ConcurrentHashMap<String,TTLObject<T>> scopeMap=new ConcurrentHashMap<String,TTLObject<T>>();
private ConcurrentHashMap<String, TTLObject<T>> scopeMap=new ConcurrentHashMap<String,TTLObject<T>>();
@Setter
private TemporalAmount TTL=null;
@ -30,16 +28,23 @@ public abstract class AbstractScopedMap<T> implements Engine<T>{
public T getObject() throws ConfigurationException {
String currentScope=ContextUtils.getCurrentScope();
log.debug(name+" : obtaining object for context "+currentScope);
scopeMap.putIfAbsent(currentScope, new TTLObject<T>(LocalDateTime.now(),retrieveObject()));
TTLObject<T> found=scopeMap.get(currentScope);
if(found== null){
log.debug(name+" : init object for context "+currentScope);
TTLObject<T> toPut=new TTLObject<T>(LocalDateTime.now(),retrieveObject());
scopeMap.put(currentScope, toPut);
return toPut.getTheObject();
}
if(TTL!=null) {
if(!found.getCreationTime().plus(TTL).isBefore(LocalDateTime.now())) {
log.debug(name+" : elapsed TTL, disposing..");
dispose(found.getTheObject());
found=scopeMap.put(currentScope, new TTLObject<T>(LocalDateTime.now(),retrieveObject()));
}
}
}else {log.debug(name+" : TTL is null, never disposing..");}
return found.getTheObject();
}

Engine.java

@ -0,0 +1,12 @@
package org.gcube.application.geoportal.service.engine.providers;
import org.gcube.application.geoportal.service.model.internal.faults.ConfigurationException;
public interface Engine <T> {
public void init();
public void shustdown();
public T getObject() throws ConfigurationException;
}

MongoClientProvider.java

@ -1,23 +1,21 @@
package org.gcube.application.geoportal.service.engine.cache;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
import org.gcube.application.geoportal.service.engine.AbstractScopedMap;
import org.gcube.application.geoportal.service.engine.ImplementationProvider;
import org.gcube.application.geoportal.service.model.internal.db.MongoConnection;
package org.gcube.application.geoportal.service.engine.providers;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.MongoCredential;
import com.mongodb.ServerAddress;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.geoportal.service.engine.ImplementationProvider;
import org.gcube.application.geoportal.service.model.internal.db.MongoConnection;
import org.gcube.application.geoportal.service.model.internal.faults.ConfigurationException;
@Slf4j
public class MongoClientProvider extends AbstractScopedMap<MongoClient>{
public MongoClientProvider() {
super("MongoClient cache");
// setTTL(Duration.of(10,ChronoUnit.MINUTES));
// setTTL(Duration.of(10, ChronoUnit.MINUTES));
}
@Override

MongoConnectionProvider.java

@ -1,14 +1,13 @@
package org.gcube.application.geoportal.service.engine.cache;
package org.gcube.application.geoportal.service.engine.providers;
import org.gcube.application.geoportal.service.ServiceConstants;
import org.gcube.application.geoportal.service.model.internal.db.MongoConnection;
import org.gcube.application.geoportal.service.model.internal.faults.ConfigurationException;
import org.gcube.application.geoportal.service.utils.ISUtils;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
import org.gcube.application.geoportal.service.ServiceConstants;
import org.gcube.application.geoportal.service.engine.AbstractScopedMap;
import org.gcube.application.geoportal.service.model.internal.db.MongoConnection;
import org.gcube.application.geoportal.service.utils.ISUtils;
public class MongoConnectionProvider extends AbstractScopedMap<MongoConnection>{
public MongoConnectionProvider() {

PostgisConnectionProvider.java

@ -0,0 +1,33 @@
package org.gcube.application.geoportal.service.engine.providers;
import org.gcube.application.geoportal.service.engine.postgis.PostgisDBManager;
import org.gcube.application.geoportal.service.model.internal.faults.ConfigurationException;
import java.sql.SQLException;
public class PostgisConnectionProvider extends AbstractScopedMap<PostgisDBManager>{
public PostgisConnectionProvider() {
super("Postgis connection descriptor cache");
}
@Override
protected PostgisDBManager retrieveObject() throws ConfigurationException {
try {
return PostgisDBManager.get();
} catch (SQLException throwables) {
throw new ConfigurationException(throwables);
}
}
@Override
protected void dispose(PostgisDBManager toDispose) {
// toDispose.close();
}
@Override
public void init() {
//
}
}

ProfileMapCache.java

@ -1,9 +1,10 @@
package org.gcube.application.geoportal.service.engine;
package org.gcube.application.geoportal.service.engine.providers;
import org.gcube.application.geoportal.common.model.profile.Profile;
import org.gcube.application.geoportal.service.model.internal.faults.ConfigurationException;
import java.util.Map;
import org.gcube.application.geoportal.common.model.profile.Profile;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
public class ProfileMapCache extends AbstractScopedMap<Map<String,Profile>> {

StorageClientProvider.java

@ -1,16 +1,8 @@
package org.gcube.application.geoportal.service.engine;
package org.gcube.application.geoportal.service.engine.providers;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
import org.eclipse.persistence.internal.sessions.remote.SequencingFunctionCall.GetNextValue;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.geoportal.common.rest.InterfaceConstants;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
import org.gcube.application.geoportal.service.model.internal.faults.ConfigurationException;
import org.gcube.application.geoportal.service.utils.ContextUtils;
import org.gcube.contentmanagement.blobstorage.service.IClient;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
@ -19,9 +11,14 @@ import org.gcube.contentmanager.storageclient.wrapper.MemoryType;
import org.gcube.contentmanager.storageclient.wrapper.StorageClient;
import org.gcube.data.transfer.library.utils.Utils;
import lombok.extern.slf4j.Slf4j;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
@Slf4j
public class StorageClientProvider extends AbstractScopedMap<IClient>{
public class StorageClientProvider extends AbstractScopedMap<IClient> {
@ -37,8 +34,9 @@ public class StorageClientProvider extends AbstractScopedMap<IClient>{
@Override
protected void dispose(IClient toDispose) {
try {
toDispose.close();
try {
//TODO ASK
// toDispose.close();
}catch (NullPointerException e) {
// expected if closed without uploading
}catch(Throwable t) {

StorageHubProvider.java

@ -1,9 +1,9 @@
package org.gcube.application.geoportal.service.engine;
package org.gcube.application.geoportal.service.engine.providers;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
import org.gcube.application.geoportal.service.model.internal.faults.ConfigurationException;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
public class StorageHubProvider implements Engine<StorageHubClient>{
public class StorageHubProvider implements Engine<StorageHubClient> {
@Override

TTLObject.java

@ -1,11 +1,11 @@
package org.gcube.application.geoportal.service.engine;
import java.time.LocalDateTime;
package org.gcube.application.geoportal.service.engine.providers;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.ToString;
import java.time.LocalDateTime;
@Getter
@ToString
@AllArgsConstructor

View File

@ -0,0 +1,138 @@
package org.gcube.application.geoportal.service.model.internal.db;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable.Field;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable.FieldType;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable.GeometryType;
import java.util.ArrayList;
public class DBConstants {
public static enum TYPE{
Concessioni,Mosi,Mopr
}
public static class Defaults{
public static final String DEFAULT_GEOMETRY_COLUMN_NAME="geom";
public static final String INTERNAL_ID="internal_id";
public static final String XCOORD_FIELD="xcoord";
public static final String YCOORD_FIELD="ycoord";
}
/**
* nome,anno,regione,xcentroid,ycentroid,csv,shp,
* geopackage,nome_csv,nome_shp,nome_geo,
* poligono,punti,linee,
* nome_progetto, descrizione_progetto,descrizione_contenuto,autore,contributore,
* titolare_dati,responsabile,editore,
* finanziamento,soggetto,
* risorse_correlate,
* date_scavo,data_archiviazione,
* versione,licenza,titolare_licenza_copyright,accesso_dati,parole_chiave
*
* @author FabioISTI
*
*/
public static class Concessioni{
public static final String PRODUCT_ID="product_id";
public static final String NOME="nome";
public static final String ANNO="anno";
public static final String REGIONE="regione";
public static final String GEOMETRY=Defaults.DEFAULT_GEOMETRY_COLUMN_NAME;
//Extension
public static final String DESCRIZIONE="descrizione";
public static final String CONTENUTO="contenuto";
public static final String AUTORE="autore";
public static final String CONTRIBUTORE="contributore";
public static final String TITOLARE="titolare";
public static final String RESPONSABILE="responsabile";
public static final String EDITORE="editore";
public static final String FINANZIAMENTO="finanziamento";
public static final String SOGGETTO="soggetto";
public static final String RISORSE="risorse";
public static final String DATE_SCAVO="date_scavo";
public static final String DATA_ARCHIVIAZIONE="data_archiviazione";
public static final String VERSIONE="versione";
public static final String LICENZA="licenza";
public static final String TITOLARE_LICENZA="titolare_licenza";
public static final String ACCESSO="accesso";
public static final String PAROLE_CHIAVE="parole_chiave";
public static final ArrayList<Field> COLUMNS=new ArrayList<PostgisTable.Field>();
public static final PostgisTable CENTROIDS=new PostgisTable("centroids_concessioni",
COLUMNS, GeometryType.POINT);
static {
CENTROIDS.getFields().add(new Field(Defaults.INTERNAL_ID,FieldType.AUTOINCREMENT));
CENTROIDS.getFields().add(new Field(PRODUCT_ID,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(NOME,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(ANNO,FieldType.INT));
CENTROIDS.getFields().add(new Field(REGIONE,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(GEOMETRY,FieldType.GEOMETRY));
// Extensions
CENTROIDS.getFields().add(new Field(DESCRIZIONE,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(CONTENUTO,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(AUTORE,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(CONTRIBUTORE,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(TITOLARE,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(RESPONSABILE,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(EDITORE,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(FINANZIAMENTO,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(SOGGETTO,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(RISORSE,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(DATE_SCAVO,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(DATA_ARCHIVIAZIONE,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(VERSIONE,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(LICENZA,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(TITOLARE_LICENZA,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(ACCESSO,FieldType.TEXT));
CENTROIDS.getFields().add(new Field(PAROLE_CHIAVE,FieldType.TEXT));
}
}
public static class MOSI{
public static final String CODE="code";
public static final String GEOMETRY=Defaults.DEFAULT_GEOMETRY_COLUMN_NAME;
public static final ArrayList<Field> COLUMNS=new ArrayList<PostgisTable.Field>();
public static final PostgisTable CENTROID_MOSI=new PostgisTable("centroids_mosi",
new ArrayList<Field>(), GeometryType.POINT);
static {
CENTROID_MOSI.getFields().add(new Field(Defaults.INTERNAL_ID,FieldType.AUTOINCREMENT));
CENTROID_MOSI.getFields().add(new Field(GEOMETRY,FieldType.GEOMETRY));
CENTROID_MOSI.getFields().add(new Field(CODE,FieldType.TEXT));
}
}
public static class MOPR{
public static final PostgisTable CENTROID_MOPR=new PostgisTable("centroids_mopr",
new ArrayList<Field>(), GeometryType.POINT);
}
public static class INTERNAL{
public static final String DB_NAME="gna_internal_db";
public static final String RECORD="RECORD";
// public static final String CONCESSIONE="CONCESSIONE";
// public static final String
}
}
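
For reference, a minimal sketch (not part of this compare) of the DDL the MOSI definition above expands to once passed through PostgisTable.getCreateStatement(), shown later in this compare. The wrapper class and main method are illustrative only.

import org.gcube.application.geoportal.service.model.internal.db.DBConstants;

public class CentroidsMosiDdlSketch {
    public static void main(String[] args) {
        // Prints, given the Field and GeometryType definitions in PostgisTable:
        // CREATE TABLE IF NOT EXISTS centroids_mosi( internal_id BIGSERIAL PRIMARY KEY,geom geometry (POINT,4326),code text)
        System.out.println(DBConstants.MOSI.CENTROID_MOSI.getCreateStatement());
    }
}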

View File

@ -1,10 +1,10 @@
package org.gcube.application.geoportal.service.model.internal.db;
import lombok.Data;
import java.util.ArrayList;
import java.util.List;
import lombok.Data;
@Data
public class MongoConnection {

View File

@ -0,0 +1,314 @@
package org.gcube.application.geoportal.service.model.internal.db;
import lombok.*;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.geoportal.common.model.legacy.BBOX;
import org.gcube.application.geoportal.service.model.internal.faults.DataParsingException;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@Slf4j
@RequiredArgsConstructor
@Getter
@ToString
public class PostgisTable {
@Getter
@AllArgsConstructor
public static enum GeometryType{
MULTIPOINT("4326","geometry (MULTIPOINT,4326)","",""),
POINT("4326","geometry (POINT,4326)","",""),
LINE("4326","geometry (MULTILINESTRING,4326)","",""),
POLYGON("4326","geometry (MULTIPOLYGON,4326)","","");
private final String SRID;
private final String definition;
private final String InsertWKT;
private final String insertWKB;
}
@RequiredArgsConstructor
@Getter
@ToString
public static class Field{
@NonNull
private String name;
@NonNull
private FieldType type;
private Boolean isIndexed;
private Object constantValue;
}
@Getter
@AllArgsConstructor
public enum FieldType{
INT("int",java.sql.Types.INTEGER),
TEXT("text",java.sql.Types.LONGVARCHAR),
FLOAT("float",java.sql.Types.FLOAT),
GEOMETRY("",0),
AUTOINCREMENT("BIGSERIAL PRIMARY KEY",java.sql.Types.BIGINT);
private String definition;
private int sqlType;
}
@RequiredArgsConstructor
@Getter
@ToString
public static class POINT{
private static Pattern pattern = Pattern.compile("(?!=\\d\\.\\d\\.)([\\d.]+)");
public static POINT parsePOINT(String point) throws DataParsingException {
//POINT(8.30230113965909 44.8011688237011)
// x,y
try {
log.debug("Parsing POINT "+point);
Matcher m=pattern.matcher(point);
if(!m.find()) throw new DataParsingException("Unable to get x ");
Double x=Double.parseDouble(m.group(1));
if(!m.find()) throw new DataParsingException("Unable to get y ");
Double y=Double.parseDouble(m.group(1));
return new POINT(x,y);
}catch(Throwable t) {
throw new DataParsingException("Invalid POINT "+point,t);
}
}
@NonNull
private Double x;
@NonNull
private Double y;
}
private static final NumberFormat DECIMAL_FORMAT=NumberFormat.getInstance(Locale.US);
static {
((DecimalFormat) DECIMAL_FORMAT).setGroupingUsed(false);
}
public String getGeometryColumn() {
for(Field f:fields)
if(f.getType().equals(FieldType.GEOMETRY)) return f.getName();
return null;
}
@NonNull
private String tablename;
@NonNull
private List<Field> fields;
@NonNull
private GeometryType geometryColumnType;
@Setter
private BBOX boundingBox=null;
@Setter
private POINT centroid=null;
public void setTablename(String tablename) {
this.tablename = sanitizeFieldName(tablename);
}
public String getCreateStatement() {
StringBuilder stmt=new StringBuilder();
stmt.append("CREATE TABLE IF NOT EXISTS "+tablename+"( ");
for(Field field:fields){
String fieldDefinition=field.getType().getDefinition();
if(field.getType().equals(FieldType.GEOMETRY))
fieldDefinition=this.getGeometryColumnType().definition;
stmt.append(field.getName()+" "+fieldDefinition+",");
}
stmt.deleteCharAt(stmt.lastIndexOf(","));
stmt.append(")");
return stmt.toString();
}
public String getDeleteByFieldStatement(Field field) {
return "DELETE FROM "+tablename+" WHERE "+field.getName()+" = ? ";
}
public String getInsertionStatement(boolean geometryText) {
StringBuilder fieldList=new StringBuilder();
StringBuilder fieldInsertion=new StringBuilder();
for(Field field:fields) {
switch(field.getType()) {
case AUTOINCREMENT : break;
case GEOMETRY : {
fieldList.append(field.getName()+",");
if(geometryText)
fieldInsertion.append("ST_GeomFromText(?, 4326),");
else
fieldInsertion.append("ST_GeomFromWKB(?, 4326),");
break;
}
default : {
fieldList.append(field.getName()+",");
fieldInsertion.append("?,");
}
}
}
fieldList.deleteCharAt(fieldList.lastIndexOf(","));
fieldInsertion.deleteCharAt(fieldInsertion.lastIndexOf(","));
return "Insert into "+tablename+" ("+fieldList+") VALUES ("+fieldInsertion+")";
}
public void fillObjectsPreparedStatement(Map<String,Object> row, PreparedStatement toFill) throws SQLException {
int psFieldIndex=0;
HashMap<String,Object> rowValues=new HashMap<String,Object>();
for(Map.Entry<String,Object> entry:row.entrySet())
rowValues.put(sanitizeFieldName(entry.getKey()), entry.getValue());
for(Field field:fields) {
if(!field.getType().equals(FieldType.AUTOINCREMENT)) {
psFieldIndex++;
Object value=rowValues.get(field.getName());
setObjectInPreparedStatement(field,value,toFill,psFieldIndex);
}
}
}
public void setObjectInPreparedStatement(Field field,Object value, PreparedStatement toFill, int psFieldIndex) throws SQLException {
if(value==null) {
try{
toFill.setNull(psFieldIndex, field.getType().sqlType);
}catch(SQLException e) {
log.error("Unable to set null for field "+field);
throw e;
}
}
else
switch(field.getType()) {
case FLOAT :{
toFill.setFloat(psFieldIndex, (Float)value);
break;
}
case INT : {
toFill.setInt(psFieldIndex, (Integer)value);
break;
}
case TEXT : {
toFill.setString(psFieldIndex, value.toString());
break;
}
case GEOMETRY : {
toFill.setBytes(psFieldIndex, (byte[])value);
}
}
}
public void fillCSVPreparedStatament(Map<String,String> row, PreparedStatement toFill,boolean explicitGeometry) throws SQLException {
int psFieldIndex=0;
HashMap<String,String> rowValues=new HashMap<String,String>();
for(Map.Entry<String,String> entry:row.entrySet())
rowValues.put(sanitizeFieldName(entry.getKey()), entry.getValue());
for(Field field:fields) {
if(!field.getType().equals(FieldType.AUTOINCREMENT)) {
psFieldIndex++;
String value=rowValues.get(field.getName());
// if(value==null||value.equalsIgnoreCase("null")) toFill.setNull(psFieldIndex, field.getType().sqlType);
// else
switch(field.getType()) {
case FLOAT :{
try{
toFill.setFloat(psFieldIndex, Float.parseFloat(value));
}catch(NumberFormatException e) {
throw new SQLException(field+" cannot be null. CSV Row is "+rowValues,e);
}
break;
}
case INT : {
try{
toFill.setInt(psFieldIndex, Integer.parseInt(value));
}catch(NumberFormatException e) {
log.warn("Skipping value for "+field+" row was "+rowValues,e);
toFill.setNull(psFieldIndex, java.sql.Types.INTEGER);
}
break;
}
case TEXT : {
toFill.setString(psFieldIndex, value.toString());
break;
}
case GEOMETRY : {
if(explicitGeometry) {
toFill.setString(psFieldIndex,value);
}else {
switch(geometryColumnType){
case POINT: {
String xRepresentation=DECIMAL_FORMAT.format(Double.parseDouble(rowValues.get(DBConstants.Defaults.XCOORD_FIELD)));
String yRepresentation=DECIMAL_FORMAT.format(Double.parseDouble(rowValues.get(DBConstants.Defaults.YCOORD_FIELD)));
toFill.setString(psFieldIndex, "POINT("+xRepresentation+" "+
yRepresentation+")");
break;
}
default :{
toFill.setString(psFieldIndex,rowValues.get("wkt"));
break;
}
}
}
break;
}
}
}
}
}
public static String sanitizeFieldName(String fieldName) {
// return fieldName.toLowerCase().replaceAll(" ", "_").replaceAll("\\.", "").replaceAll("-", "_").replaceAll("////","_");
return fieldName.toLowerCase().replaceAll("[^a-z0-9_\\\\]", "_");
}
}
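
A minimal usage sketch (not part of this compare) tying the pieces above together: create the table from its definition, then fill an INSERT prepared statement with the geometry bound as WKT. The JDBC Connection is assumed to be already opened against the Postgis database (e.g. via PostgisDBManager); the table and column names are placeholders.

import org.gcube.application.geoportal.service.model.internal.db.PostgisTable;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable.Field;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable.FieldType;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable.GeometryType;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class PostgisTableUsageSketch {

    // 'conn' is assumed to be an open JDBC connection towards the Postgis database.
    public static void createAndInsert(Connection conn) throws Exception {
        PostgisTable table = new PostgisTable(
                "sketch_centroids",                                  // placeholder table name
                Arrays.asList(
                        new Field("internal_id", FieldType.AUTOINCREMENT),
                        new Field("nome", FieldType.TEXT),
                        new Field("geom", FieldType.GEOMETRY)),
                GeometryType.POINT);

        // CREATE TABLE IF NOT EXISTS sketch_centroids( internal_id BIGSERIAL PRIMARY KEY,nome text,geom geometry (POINT,4326))
        conn.createStatement().execute(table.getCreateStatement());

        // Insert into sketch_centroids (nome,geom) VALUES (?,ST_GeomFromText(?, 4326))
        PreparedStatement ps = conn.prepareStatement(table.getInsertionStatement(true));
        Map<String, String> row = new HashMap<>();
        row.put("nome", "Sketch centroid");
        row.put("geom", "POINT(8.30230113965909 44.8011688237011)");
        // explicitGeometry = true : the WKT above is bound as-is to ST_GeomFromText
        table.fillCSVPreparedStatament(row, ps, true);
        ps.executeUpdate();
    }
}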

View File

@ -0,0 +1,38 @@
package org.gcube.application.geoportal.service.model.internal.faults;
public class ConfigurationException extends Exception {
/**
*
*/
private static final long serialVersionUID = -3810929853461018566L;
public ConfigurationException() {
super();
// TODO Auto-generated constructor stub
}
public ConfigurationException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
// TODO Auto-generated constructor stub
}
public ConfigurationException(String message, Throwable cause) {
super(message, cause);
// TODO Auto-generated constructor stub
}
public ConfigurationException(String message) {
super(message);
// TODO Auto-generated constructor stub
}
public ConfigurationException(Throwable cause) {
super(cause);
// TODO Auto-generated constructor stub
}
}

View File

@ -0,0 +1,30 @@
package org.gcube.application.geoportal.service.model.internal.faults;
public class DataParsingException extends Exception {
public DataParsingException() {
// TODO Auto-generated constructor stub
}
public DataParsingException(String message) {
super(message);
// TODO Auto-generated constructor stub
}
public DataParsingException(Throwable cause) {
super(cause);
// TODO Auto-generated constructor stub
}
public DataParsingException(String message, Throwable cause) {
super(message, cause);
// TODO Auto-generated constructor stub
}
public DataParsingException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
// TODO Auto-generated constructor stub
}
}

View File

@ -0,0 +1,23 @@
package org.gcube.application.geoportal.service.model.internal.faults;
public class DeletionException extends Exception {
public DeletionException() {
}
public DeletionException(String message) {
super(message);
}
public DeletionException(String message, Throwable cause) {
super(message, cause);
}
public DeletionException(Throwable cause) {
super(cause);
}
public DeletionException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}

View File

@ -0,0 +1,42 @@
package org.gcube.application.geoportal.service.model.internal.faults;
import lombok.NonNull;
import org.gcube.spatial.data.gis.model.report.PublishResponse;
public class PublishException extends Exception {
/**
*
*/
private static final long serialVersionUID = -1356876669436308224L;
@NonNull
private PublishResponse resp;
public PublishException(PublishResponse resp) {
super();
this.resp=resp;
}
public PublishException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace,PublishResponse resp) {
super(message, cause, enableSuppression, writableStackTrace);
this.resp=resp;
}
public PublishException(String message, Throwable cause,PublishResponse resp) {
super(message, cause);
this.resp=resp;
}
public PublishException(String message,PublishResponse resp) {
super(message);
this.resp=resp;
}
public PublishException(Throwable cause) {
super(cause);
// TODO Auto-generated constructor stub
}
}

View File

@ -1,46 +0,0 @@
package org.gcube.application.geoportal.service.model.internal.rest;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlRootElement;
import org.gcube.application.geoportal.model.concessioni.LayerConcessione;
import org.gcube.application.geoportal.model.concessioni.RelazioneScavo;
import org.gcube.application.geoportal.model.content.AssociatedContent;
import org.gcube.application.geoportal.model.content.OtherContent;
import org.gcube.application.geoportal.model.content.UploadedImage;
import org.gcube.application.geoportal.model.gis.SDILayerDescriptor;
import lombok.Data;
@XmlRootElement
@Data
public class AddSectionToConcessioneRequest {
public static enum Section{
RELAZIONE,UPLOADED_IMG,PIANTA,POSIZIONAMENTO,OTHER
}
@XmlRootElement
@Data
public static class SHUBFileDescriptor {
private String filename;
private String shubID;
}
private Section section;
@XmlElements({
@XmlElement(type=OtherContent.class),
@XmlElement(type=RelazioneScavo.class),
@XmlElement(type=SDILayerDescriptor.class),
@XmlElement(type=LayerConcessione.class),
@XmlElement(type=UploadedImage.class),
})
private AssociatedContent toRegister;
private List<SHUBFileDescriptor> streams=new ArrayList<AddSectionToConcessioneRequest.SHUBFileDescriptor>();
}

View File

@ -1,174 +0,0 @@
package org.gcube.application.geoportal.service.rest;
import java.util.Collection;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response.Status;
import org.gcube.application.geoportal.common.rest.InterfaceConstants;
import org.gcube.application.geoportal.managers.ConcessioneManager;
import org.gcube.application.geoportal.managers.ManagerFactory;
import org.gcube.application.geoportal.model.InputStreamDescriptor;
import org.gcube.application.geoportal.model.concessioni.Concessione;
import org.gcube.application.geoportal.model.concessioni.LayerConcessione;
import org.gcube.application.geoportal.model.concessioni.RelazioneScavo;
import org.gcube.application.geoportal.model.content.UploadedImage;
import org.gcube.application.geoportal.model.report.PublicationReport;
import org.gcube.application.geoportal.service.engine.ImplementationProvider;
import org.gcube.application.geoportal.service.engine.StorageClientProvider;
import org.gcube.application.geoportal.service.model.internal.rest.AddSectionToConcessioneRequest;
import org.gcube.application.geoportal.service.model.internal.rest.AddSectionToConcessioneRequest.SHUBFileDescriptor;
import org.gcube.application.geoportal.service.utils.Serialization;
import org.json.JSONArray;
import lombok.extern.slf4j.Slf4j;
@Path(InterfaceConstants.Methods.CONCESSIONI)
@Slf4j
public class Concessioni {
@PUT
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Path("publish/{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public String publish(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id) {
try {
log.info("Publishing Concessione by id {} ",id);
Concessione conc=(Concessione) ConcessioneManager.getByID(Long.parseLong(id));
ConcessioneManager manager=ManagerFactory.getByRecord(conc);
log.debug("Loaded object {} ",conc);
PublicationReport rep=manager.commitSafely(true);
String toReturn=rep.prettyPrint();
log.debug("Publication report to send is "+toReturn);
return toReturn;
}catch(WebApplicationException e){
log.warn("Unable to serve request",e);
throw e;
}catch(Throwable e){
log.warn("Unable to serve request",e);
throw new WebApplicationException("Unable to serve request", e);
}
}
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public String getById(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id) {
try {
log.info("Loading Concessione by id {} ",id);
Concessione toReturn=(Concessione) ConcessioneManager.getByID(Long.parseLong(id));
if(toReturn==null)
throw new WebApplicationException("Concessione non trovata",Status.NOT_FOUND);
log.debug("Loaded object {} ",toReturn);
return Serialization.write(toReturn);
}catch(WebApplicationException e){
log.warn("Unable to serve request",e);
throw e;
}catch(Throwable e){
log.warn("Unable to serve request",e);
throw new WebApplicationException("Unable to serve request", e);
}
}
@PUT
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public String registerNew(String toRegister) {
try {
log.info("Registering new Concessione "+toRegister);
Concessione conc=Serialization.read(toRegister, Concessione.class);
ConcessioneManager manager=ManagerFactory.registerNew(conc);
manager.commitSafely(false);
return Serialization.write(manager.getRecord());
}catch(WebApplicationException e){
log.warn("Unable to serve request",e);
throw e;
}catch(Throwable e){
log.warn("Unable to serve request",e);
throw new WebApplicationException("Unable to serve request", e);
}
}
@PUT
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Path("section/{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public String addSection(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id,
AddSectionToConcessioneRequest request) {
try {
log.info("Adding section to Concessione {} ",id);
Concessione toReturn=(Concessione) ConcessioneManager.getByID(Long.parseLong(id));
ConcessioneManager manager=ManagerFactory.getByRecord(toReturn);
log.debug("Loaded object {} ",toReturn);
log.debug("Request is {}",request);
InputStreamDescriptor[] streams=new InputStreamDescriptor[request.getStreams().size()];
StorageClientProvider storage=ImplementationProvider.get().getStorageProvider();
for(int i=0;i<streams.length;i++) {
SHUBFileDescriptor sent=request.getStreams().get(i);
streams[i]=new InputStreamDescriptor(storage.open(sent.getShubID()), sent.getFilename());
}
switch(request.getSection()) {
case PIANTA :manager.addPiantaFineScavo((LayerConcessione) request.getToRegister(),streams);
break;
case POSIZIONAMENTO : manager.setPosizionamento((LayerConcessione) request.getToRegister(),streams);
break;
case RELAZIONE : manager.setRelazioneScavo((RelazioneScavo)request.getToRegister(), streams[0]);
break;
case UPLOADED_IMG : manager.addImmagineRappresentativa((UploadedImage)request.getToRegister(), streams[0]);
break;
default : throw new Exception("Unrecognized section");
}
// PublicationReport report=manager.commitSafely(false);
Concessione c=manager.commit(false);
log.debug("Published "+Serialization.write(c));
return c.validate().prettyPrint();
}catch(WebApplicationException e){
log.warn("Unable to serve request",e);
throw e;
}catch(Throwable e){
log.warn("Unable to serve request",e);
throw new WebApplicationException("Unable to serve request", e);
}
}
@GET
@Produces(MediaType.APPLICATION_JSON)
public String getList(){
try {
Collection<Concessione> toReturn=ManagerFactory.getList(Concessione.class);
log.debug("Found "+toReturn.size()+" elements..");
JSONArray array=new JSONArray();
for(Concessione found:toReturn) {
array.put(Serialization.write(found));
}
return array.toString();
}catch(WebApplicationException e){
log.warn("Unable to serve request",e);
throw e;
}catch(Throwable e){
log.warn("Unable to serve request",e);
throw new WebApplicationException("Unable to serve request", e);
}
}
}

View File

@ -1,49 +1,56 @@
package org.gcube.application.geoportal.service.rest;
import java.time.LocalDateTime;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import org.gcube.application.geoportal.common.model.legacy.Concessione;
import org.gcube.application.geoportal.common.rest.AddSectionToConcessioneRequest;
import org.gcube.application.geoportal.common.rest.InterfaceConstants;
import org.gcube.application.geoportal.common.rest.TempFile;
import org.gcube.application.geoportal.service.engine.WorkspaceManager;
import org.gcube.application.geoportal.service.engine.WorkspaceManager.FolderOptions;
import org.gcube.application.geoportal.service.engine.mongo.ConcessioniMongoManager;
import org.gcube.application.geoportal.service.utils.Serialization;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.json.JSONArray;
import org.json.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.bson.Document;
import org.gcube.application.geoportal.common.model.legacy.Concessione;
import org.gcube.application.geoportal.common.model.rest.AddSectionToConcessioneRequest;
import org.gcube.application.geoportal.common.model.rest.Configuration;
import org.gcube.application.geoportal.common.model.rest.QueryRequest;
import org.gcube.application.geoportal.common.rest.InterfaceConstants;
import org.gcube.application.geoportal.service.engine.mongo.ConcessioniMongoManager;
import org.gcube.application.geoportal.service.engine.postgis.PostgisIndex;
import org.gcube.application.geoportal.service.model.internal.faults.DeletionException;
import org.gcube.application.geoportal.service.utils.Serialization;
import org.json.JSONArray;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
@Path(InterfaceConstants.Methods.MONGO_CONCESSIONI)
@Slf4j
public class ConcessioniOverMongo {
@GET
@Path(InterfaceConstants.Methods.CONFIGURATION_PATH)
@Produces(MediaType.APPLICATION_JSON)
public Configuration getConfiguration(){
return new GuardedMethod<Configuration>(){
@Override
protected Configuration run() throws Exception, WebApplicationException {
Configuration toReturn = new Configuration();
toReturn.setIndex(new PostgisIndex().getInfo());
log.info("Returning configuration {} ",toReturn);
return toReturn;
}
}.execute().getResult();
}
@PUT
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public String replace(String jsonString) {
return new GuardedMethod<String> () {
public Concessione replace(Concessione c) {
return new GuardedMethod<Concessione> () {
@Override
protected String run() throws Exception, WebApplicationException {
Concessione c=Serialization.read(jsonString, Concessione.class);
protected Concessione run() throws Exception, WebApplicationException {
//Concessione c=Serialization.read(jsonString, Concessione.class);
ConcessioniMongoManager manager=new ConcessioniMongoManager();
manager.replace(c);
return Serialization.write(manager.getById(c.getMongo_id()));
// return Serialization.write(manager.getById(c.getMongo_id()));
return manager.getById(c.getMongo_id());
}
}.execute().getResult();
}
@ -51,13 +58,13 @@ public class ConcessioniOverMongo {
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public String createNew(String jsonString) {
return new GuardedMethod<String> () {
public Concessione createNew(Concessione c) {
return new GuardedMethod<Concessione>() {
@Override
protected String run() throws Exception, WebApplicationException {
Concessione c=Serialization.read(jsonString, Concessione.class);
protected Concessione run() throws Exception, WebApplicationException {
ConcessioniMongoManager manager=new ConcessioniMongoManager();
return Serialization.write(manager.registerNew(c));
return manager.registerNew(c);
}
}.execute().getResult();
}
@ -66,19 +73,12 @@ public class ConcessioniOverMongo {
@GET
@Produces(MediaType.APPLICATION_JSON)
public String list() {
return new GuardedMethod<String> () {
protected String run() throws Exception ,WebApplicationException {
public Iterable<Concessione> list() {
return new GuardedMethod<Iterable<Concessione>>() {
protected Iterable<Concessione> run() throws Exception ,WebApplicationException {
ConcessioniMongoManager manager=new ConcessioniMongoManager();
JSONArray toReturn=new JSONArray();
manager.list().forEach((Concessione c) -> {
try{
toReturn.put(new JSONObject(Serialization.write(c)));
}catch(Throwable t) {
log.error("Unable to serialize "+c);
}
});
return toReturn.toString();
return manager.list();
};
}.execute().getResult();
@ -91,12 +91,12 @@ public class ConcessioniOverMongo {
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public String getById(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id) {
return new GuardedMethod<String> () {
public Concessione getById(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id) {
return new GuardedMethod<Concessione>() {
@Override
protected String run() throws Exception, WebApplicationException {
protected Concessione run() throws Exception, WebApplicationException {
ConcessioniMongoManager manager=new ConcessioniMongoManager();
return Serialization.write(manager.getById(id));
return manager.getById(id);
}
}.execute().getResult();
}
@ -104,13 +104,19 @@ public class ConcessioniOverMongo {
@DELETE
@Produces(MediaType.APPLICATION_JSON)
@Path("{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public void deleteById(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id) {
public void deleteById(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id,
@QueryParam(InterfaceConstants.Parameters.FORCE) Boolean forceOption) {
new GuardedMethod<Concessione> () {
@Override
protected Concessione run() throws Exception, WebApplicationException {
ConcessioniMongoManager manager=new ConcessioniMongoManager();
manager.deleteById(id);
return null;
try{
Boolean force=(forceOption!=null)?forceOption:false;
ConcessioniMongoManager manager=new ConcessioniMongoManager();
manager.deleteById(id,force);
return null;
}catch(DeletionException e){
throw new WebApplicationException("Unable to delete "+id,e, Response.Status.EXPECTATION_FAILED);
}
}
}.execute();
}
@ -120,16 +126,13 @@ public class ConcessioniOverMongo {
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Path("{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public String update(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id,String jsonString) {
return new GuardedMethod<String> () {
public Concessione update(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id,Concessione c) {
return new GuardedMethod<Concessione>() {
@Override
protected String run() throws Exception, WebApplicationException {
// Concessione c=Serialization.read(jsonString, Concessione.class);
// ConcessioniMongoManager manager=new ConcessioniMongoManager();
// manager.update(c);
//
// return Serialization.write(manager.getById(c.getMongo_id()));
throw new RuntimeException("TO IMPLEMENT");
protected Concessione run() throws Exception, WebApplicationException {
ConcessioniMongoManager manager=new ConcessioniMongoManager();
c.setMongo_id(id);
return manager.replace(c);
}
}.execute().getResult();
}
@ -138,35 +141,107 @@ public class ConcessioniOverMongo {
@PUT
@Produces(MediaType.APPLICATION_JSON)
@Path("/{"+InterfaceConstants.Methods.PUBLISH_PATH+"}/{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public String publish(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id) {
return new GuardedMethod<String> () {
public Concessione publish(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id) {
return new GuardedMethod<Concessione>() {
@Override
protected String run() throws Exception, WebApplicationException {
protected Concessione run() throws Exception, WebApplicationException {
ConcessioniMongoManager manager=new ConcessioniMongoManager();
return Serialization.write(manager.publish(id));
return manager.publish(id);
}
}.execute().getResult();
}
@DELETE
@Path("/{"+InterfaceConstants.Methods.PUBLISH_PATH+"}/{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public Concessione unpublish(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id) {
log.info("Unpublishing {} ",id);
return new GuardedMethod<Concessione>() {
@Override
protected Concessione run() throws Exception, WebApplicationException {
ConcessioniMongoManager manager=new ConcessioniMongoManager();
return manager.unpublish(id);
}
}.execute().getResult();
}
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Path("/"+InterfaceConstants.Methods.REGISTER_FILES_PATH+"/{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public String registerFile(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id,String jsonRequest) {
return new GuardedMethod<String> () {
public Concessione registerFile(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id, AddSectionToConcessioneRequest request) {
return new GuardedMethod<Concessione>() {
@Override
protected String run() throws Exception, WebApplicationException {
AddSectionToConcessioneRequest request=Serialization.read(jsonRequest,AddSectionToConcessioneRequest.class);
protected Concessione run() throws Exception, WebApplicationException {
log.info("Registering {} file(s) for {} Concessione ID {}",
request.getStreams().size(),
request.getDestinationPath(),id);
ConcessioniMongoManager manager=new ConcessioniMongoManager();
Concessione toReturn= manager.persistContent(id, request.getDestinationPath(), request.getStreams());
log.debug("Returning "+toReturn);
return Serialization.write(toReturn);
return manager.persistContent(id, request.getDestinationPath(), request.getStreams());
}
}.execute().getResult();
}
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Path("/"+InterfaceConstants.Methods.DELETE_FILES_PATH+"/{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public Concessione clearFileset(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id, String path) {
return new GuardedMethod<Concessione>() {
@Override
protected Concessione run() throws Exception, WebApplicationException {
log.info("Clearing files of {} Concessione ID {}",path,id);
ConcessioniMongoManager manager=new ConcessioniMongoManager();
return manager.unregisterFileset(id,path);
}
}.execute().getResult();
}
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Path("/"+InterfaceConstants.Methods.SEARCH_PATH)
public Iterable<Concessione> search(String filter){
return new GuardedMethod<Iterable<Concessione>>() {
@Override
protected Iterable<Concessione> run() throws Exception, WebApplicationException {
ConcessioniMongoManager manager=new ConcessioniMongoManager();
return manager.search(Document.parse(filter));
}
}.execute().getResult();
}
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Path("/"+InterfaceConstants.Methods.QUERY_PATH)
public String query(String queryString){
return new GuardedMethod<String>() {
@Override
protected String run() throws Exception, WebApplicationException {
ConcessioniMongoManager manager=new ConcessioniMongoManager();
Document queryDocument=Document.parse(queryString);
QueryRequest req=new QueryRequest();
if(queryDocument.containsKey("ordering"))
req.setOrdering(Serialization.read(((Document)queryDocument.get("ordering")).toJson(),QueryRequest.OrderedRequest.class));
if(queryDocument.containsKey("paging"))
req.setPaging(Serialization.read(((Document)queryDocument.get("paging")).toJson(),QueryRequest.PagedRequest.class));
req.setProjection(queryDocument.get("projection",Document.class));
req.setFilter(queryDocument.get("filter",Document.class));
StringBuilder builder=new StringBuilder("[");
manager.query(req).forEach(d->{builder.append(d.toJson()+",");});
builder.deleteCharAt(builder.length()-1);
builder.append("]");
return builder.toString();
}
}.execute().getResult();
}
}
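
A minimal client-side sketch of the query endpoint above, not part of this compare. The request body keys mirror what query() parses ("filter", "projection", plus optional "ordering"/"paging", whose inner fields belong to geoportal-common's QueryRequest and are not shown here); the base URL, the literal path segments and the filter contents are placeholders.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;

public class QueryEndpointSketch {
    public static void main(String[] args) {
        // Keys handled by ConcessioniOverMongo.query(): "filter", "projection",
        // and optionally "ordering" / "paging".
        String body = "{ \"filter\" : { \"nome\" : { \"$regex\" : \"^Concessione.*\" } },"
                    + "  \"projection\" : { \"nome\" : 1 } }";

        Client client = ClientBuilder.newClient();
        String result = client
                .target("http://localhost:8080/geoportal-service")   // placeholder base address
                .path("concessioni-mongo")                            // placeholder for InterfaceConstants.Methods.MONGO_CONCESSIONI
                .path("query")                                        // placeholder for InterfaceConstants.Methods.QUERY_PATH
                .request(MediaType.APPLICATION_JSON)
                .post(Entity.entity(body, MediaType.APPLICATION_JSON), String.class);

        System.out.println(result); // JSON array of projected documents, as built by query()
    }
}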

View File

@ -1,18 +1,34 @@
package org.gcube.application.geoportal.service.rest;
import lombok.extern.slf4j.Slf4j;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response.Status;
import lombok.extern.slf4j.Slf4j;
import java.util.ArrayList;
import java.util.List;
@Slf4j
public abstract class GuardedMethod<T> {
private static List<Runnable> preoperations=new ArrayList<>();
public static void addPreoperation(Runnable preoperation){
preoperations.add(preoperation);
}
private T result=null;
public GuardedMethod<T> execute() throws WebApplicationException{
try {
if(!preoperations.isEmpty()) {
log.debug("Running preops (size : {} )", preoperations.size());
for (Runnable r : preoperations)
r.run();
}
log.debug("Executing actual method..");
result=run();
return this;
}catch(WebApplicationException e) {
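
A minimal sketch of the pattern used by the REST resources in this compare: wrap the operation in a GuardedMethod so that registered pre-operations (e.g. the TokenSetter added in the test unit below) run before it and failures are translated into WebApplicationExceptions. The sketch is placed in the same package only so that getResult() resolves exactly as in the resource classes; everything else follows the usages shown above.

package org.gcube.application.geoportal.service.rest;

import javax.ws.rs.WebApplicationException;

public class GuardedMethodSketch {
    public static String guardedHello() {
        return new GuardedMethod<String>() {
            @Override
            protected String run() throws Exception, WebApplicationException {
                // At this point all pre-operations registered via
                // GuardedMethod.addPreoperation(...) have already been executed.
                return "hello";
            }
        }.execute().getResult();
    }
}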

View File

@ -1,24 +1,14 @@
package org.gcube.application.geoportal.service.rest;
import java.util.Collections;
import java.util.List;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import lombok.extern.slf4j.Slf4j;
import org.bson.Document;
import org.gcube.application.geoportal.common.model.project.Project;
import org.gcube.application.geoportal.common.rest.InterfaceConstants;
import lombok.extern.slf4j.Slf4j;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import java.util.Collections;
import java.util.List;
@Path(InterfaceConstants.Methods.PROJECTS)
@Slf4j

View File

@ -1,12 +1,11 @@
package org.gcube.application.geoportal.service.rest;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.geoportal.common.rest.InterfaceConstants;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import org.gcube.application.geoportal.common.rest.InterfaceConstants;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Path(InterfaceConstants.Methods.SECTIONS+"/{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public class Sections {

View File

@ -1,12 +1,11 @@
package org.gcube.application.geoportal.service.utils;
import static org.gcube.common.authorization.client.Constants.authorizationService;
import lombok.extern.slf4j.Slf4j;
import org.gcube.common.authorization.library.AuthorizationEntry;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import lombok.extern.slf4j.Slf4j;
import static org.gcube.common.authorization.client.Constants.authorizationService;
@Slf4j
public class ContextUtils {

View File

@ -1,15 +1,9 @@
package org.gcube.application.geoportal.service.utils;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.util.List;
import java.util.Map;
import org.gcube.application.geoportal.model.db.DatabaseConnection;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
import org.gcube.application.geoportal.service.ServiceConstants;
import org.gcube.application.geoportal.service.model.internal.db.MongoConnection;
import org.gcube.application.geoportal.service.model.internal.faults.ConfigurationException;
import org.gcube.common.encryption.encrypter.StringEncrypter;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
@ -17,9 +11,15 @@ import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import java.util.List;
import java.util.Map;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
public class ISUtils {
public static DatabaseConnection queryForDB(String platform,String flag) throws ConfigurationException {
public static DatabaseConnection queryForDB(String platform, String flag) throws ConfigurationException {
List<AccessPoint> found=getAP(platform, flag);
if(found.size()>1) {
@ -29,8 +29,11 @@ public class ISUtils {
}
AccessPoint point=found.get(0);
return new DatabaseConnection(point.username(),decryptString(point.password()),point.address());
DatabaseConnection toReturn=new DatabaseConnection();
toReturn.setPwd(decryptString(point.password()));
toReturn.setUser(point.username());
toReturn.setUrl(point.address());
return toReturn;
}

View File

@ -1,21 +1,15 @@
package org.gcube.application.geoportal.service.utils;
import java.io.IOException;
import java.time.format.DateTimeFormatter;
import org.gcube.application.geoportal.model.Record;
import org.gcube.application.geoportal.model.concessioni.Concessione;
import org.gcube.application.geoportal.model.concessioni.LayerConcessione;
import org.gcube.application.geoportal.model.content.AssociatedContent;
import org.gcube.application.geoportal.model.content.OtherContent;
import org.gcube.application.geoportal.model.content.PersistedContent;
import org.gcube.application.geoportal.model.content.UploadedImage;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import java.io.IOException;
import java.time.format.DateTimeFormatter;
import java.util.Iterator;
public class Serialization {
@ -26,69 +20,26 @@ public class Serialization {
static {
mapper=new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES,false);
mapper.registerModule(new JavaTimeModule());
mapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
mapper.registerModule(new JavaTimeModule());
}
public static <T> T read(String jsonString,Class<T> clazz) throws JsonProcessingException, IOException {
return mapper.readerFor(clazz).readValue(jsonString);
}
public static <T> Iterator<T> readCollection(String jsonString, Class<T> clazz) throws IOException {
return mapper.readerFor(clazz).readValues(jsonString);
}
public static String write(Object toWrite) throws JsonProcessingException {
if(toWrite instanceof Concessione)
detach((Concessione) toWrite);
String toReturn= mapper.writeValueAsString(toWrite);
if(toWrite instanceof Concessione)
reattach((Concessione) toWrite);
return toReturn;
}
// Avoid infiniteLoop in JPA
private static void detach(Concessione c) {
if (c!=null) {
detach(c.getRelazioneScavo());
detach(c.getPosizionamentoScavo());
if(c.getPianteFineScavo()!=null)
c.getPianteFineScavo().forEach((LayerConcessione l)->{detach(l);});
if(c.getImmaginiRappresentative()!=null)
c.getImmaginiRappresentative().forEach(((UploadedImage u)->{detach(u);}));
if(c.getGenericContent()!=null)
c.getGenericContent().forEach(((OtherContent u)->{detach(u);}));
}
}
private static void detach(AssociatedContent a) {
if(a!=null) {
a.setRecord(null);
if(a.getActualContent()!=null)
a.getActualContent().forEach((PersistedContent p)->{p.setAssociated(null);});
}
}
private static void reattach(Concessione c) {
if(c!=null) {
reattach(c.getRelazioneScavo(),c);
reattach(c.getPosizionamentoScavo(),c);
if(c.getPianteFineScavo()!=null)
c.getPianteFineScavo().forEach((LayerConcessione l)->{reattach(l,c);});
if(c.getImmaginiRappresentative()!=null)
c.getImmaginiRappresentative().forEach(((UploadedImage u)->{reattach(u,c);}));
if(c.getGenericContent()!=null)
c.getGenericContent().forEach(((OtherContent u)->{reattach(u,c);}));
}
}
private static void reattach(AssociatedContent a,Record r) {
if(a!=null) {
a.setRecord(r);
if(a.getActualContent()!=null)
a.getActualContent().forEach((PersistedContent p)->{p.setAssociated(a);});
}
}
}
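
A small round-trip sketch for the mapper configured above, not part of this compare: unknown JSON properties are ignored on read and dates are written in ISO-8601 form thanks to the JavaTimeModule. The no-arg Concessione constructor and the nome accessors are assumptions taken from the test code in this compare.

import org.gcube.application.geoportal.common.model.legacy.Concessione;
import org.gcube.application.geoportal.service.utils.Serialization;

public class SerializationSketch {
    public static void main(String[] args) throws Exception {
        Concessione c = new Concessione();          // no-arg constructor assumed
        c.setNome("Round-trip test");               // setter used by the tests in this compare
        String json = Serialization.write(c);       // dates serialized as ISO strings, not timestamps
        Concessione back = Serialization.read(json, Concessione.class); // unknown properties ignored
        System.out.println(back.getNome());         // getter assumed
    }
}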

View File

@ -0,0 +1,173 @@
<?xml version="1.0" encoding="ISO-8859-1"?>
<StyledLayerDescriptor version="1.0.0"
xsi:schemaLocation="http://www.opengis.net/sld StyledLayerDescriptor.xsd"
xmlns="http://www.opengis.net/sld"
xmlns:ogc="http://www.opengis.net/ogc"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<NamedLayer>
<Name>vol_stacked_point</Name>
<UserStyle>
<!-- Styles can have names, titles and abstracts -->
<Title>Stacked Point</Title>
<Abstract>Styles archeomar using stacked points</Abstract>
<FeatureTypeStyle>
<Transformation>
<ogc:Function name="vec:PointStacker">
<ogc:Function name="parameter">
<ogc:Literal>data</ogc:Literal>
</ogc:Function>
<ogc:Function name="parameter">
<ogc:Literal>cellSize</ogc:Literal>
<ogc:Literal>30</ogc:Literal>
</ogc:Function>
<ogc:Function name="parameter">
<ogc:Literal>outputBBOX</ogc:Literal>
<ogc:Function name="env">
<ogc:Literal>wms_bbox</ogc:Literal>
</ogc:Function>
</ogc:Function>
<ogc:Function name="parameter">
<ogc:Literal>outputWidth</ogc:Literal>
<ogc:Function name="env">
<ogc:Literal>wms_width</ogc:Literal>
</ogc:Function>
</ogc:Function>
<ogc:Function name="parameter">
<ogc:Literal>outputHeight</ogc:Literal>
<ogc:Function name="env">
<ogc:Literal>wms_height</ogc:Literal>
</ogc:Function>
</ogc:Function>
</ogc:Function>
</Transformation>
<Rule>
<Name>rule1</Name>
<Title>Site</Title>
<ogc:Filter>
<ogc:PropertyIsLessThanOrEqualTo>
<ogc:PropertyName>count</ogc:PropertyName>
<ogc:Literal>1</ogc:Literal>
</ogc:PropertyIsLessThanOrEqualTo>
</ogc:Filter>
<PointSymbolizer>
<Graphic>
<Mark>
<WellKnownName>circle</WellKnownName>
<Fill>
<CssParameter name="fill">#FF0000</CssParameter>
</Fill>
</Mark>
<Size>8</Size>
</Graphic>
</PointSymbolizer>
</Rule>
<Rule>
<Name>rule29</Name>
<Title>2-9 Sites</Title>
<ogc:Filter>
<ogc:PropertyIsBetween>
<ogc:PropertyName>count</ogc:PropertyName>
<ogc:LowerBoundary>
<ogc:Literal>2</ogc:Literal>
</ogc:LowerBoundary>
<ogc:UpperBoundary>
<ogc:Literal>9</ogc:Literal>
</ogc:UpperBoundary>
</ogc:PropertyIsBetween>
</ogc:Filter>
<PointSymbolizer>
<Graphic>
<Mark>
<WellKnownName>circle</WellKnownName>
<Fill>
<CssParameter name="fill">#AA0000</CssParameter>
</Fill>
</Mark>
<Size>14</Size>
</Graphic>
</PointSymbolizer>
<TextSymbolizer>
<Label>
<ogc:PropertyName>count</ogc:PropertyName>
</Label>
<Font>
<CssParameter name="font-family">Arial</CssParameter>
<CssParameter name="font-size">12</CssParameter>
<CssParameter name="font-weight">bold</CssParameter>
</Font>
<LabelPlacement>
<PointPlacement>
<AnchorPoint>
<AnchorPointX>0.5</AnchorPointX>
<AnchorPointY>0.8</AnchorPointY>
</AnchorPoint>
</PointPlacement>
</LabelPlacement>
<Halo>
<Radius>2</Radius>
<Fill>
<CssParameter name="fill">#AA0000</CssParameter>
<CssParameter name="fill-opacity">0.9</CssParameter>
</Fill>
</Halo>
<Fill>
<CssParameter name="fill">#FFFFFF</CssParameter>
<CssParameter name="fill-opacity">1.0</CssParameter>
</Fill>
</TextSymbolizer>
</Rule>
<Rule>
<Name>rule10</Name>
<Title>10 Sites</Title>
<ogc:Filter>
<ogc:PropertyIsGreaterThan>
<ogc:PropertyName>count</ogc:PropertyName>
<ogc:Literal>9</ogc:Literal>
</ogc:PropertyIsGreaterThan>
</ogc:Filter>
<PointSymbolizer>
<Graphic>
<Mark>
<WellKnownName>circle</WellKnownName>
<Fill>
<CssParameter name="fill">#AA0000</CssParameter>
</Fill>
</Mark>
<Size>22</Size>
</Graphic>
</PointSymbolizer>
<TextSymbolizer>
<Label>
<ogc:PropertyName>count</ogc:PropertyName>
</Label>
<Font>
<CssParameter name="font-family">Arial</CssParameter>
<CssParameter name="font-size">12</CssParameter>
<CssParameter name="font-weight">bold</CssParameter>
</Font>
<LabelPlacement>
<PointPlacement>
<AnchorPoint>
<AnchorPointX>0.5</AnchorPointX>
<AnchorPointY>0.8</AnchorPointY>
</AnchorPoint>
</PointPlacement>
</LabelPlacement>
<Halo>
<Radius>2</Radius>
<Fill>
<CssParameter name="fill">#AA0000</CssParameter>
<CssParameter name="fill-opacity">0.9</CssParameter>
</Fill>
</Halo>
<Fill>
<CssParameter name="fill">#FFFFFF</CssParameter>
<CssParameter name="fill-opacity">1.0</CssParameter>
</Fill>
</TextSymbolizer>
</Rule>
</FeatureTypeStyle>
</UserStyle>
</NamedLayer>
</StyledLayerDescriptor>

View File

@ -1,24 +1,12 @@
package org.gcube.application.geoportal.service;
import javax.persistence.EntityManagerFactory;
import javax.ws.rs.core.Application;
import javax.ws.rs.core.Response;
import org.gcube.application.geoportal.managers.AbstractRecordManager;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
import org.gcube.application.geoportal.model.report.PublicationReport;
import org.gcube.application.geoportal.service.engine.ImplementationProvider;
import org.gcube.application.geoportal.service.engine.ScopedEMFProvider;
import org.gcube.application.geoportal.service.engine.StorageClientProvider;
import org.gcube.application.geoportal.service.engine.cache.MongoClientProvider;
import org.gcube.application.geoportal.service.engine.cache.MongoConnectionProvider;
import org.gcube.application.geoportal.service.legacy.TokenSetter;
import org.gcube.application.geoportal.service.rest.GuardedMethod;
import org.gcube.application.geoportal.service.utils.Serialization;
import org.gcube.contentmanagement.blobstorage.service.IClient;
import org.glassfish.jersey.test.JerseyTest;
import org.junit.BeforeClass;
import com.mongodb.MongoClient;
import javax.ws.rs.core.Application;
import javax.ws.rs.core.Response;
public class BasicServiceTestUnit extends JerseyTest {
@ -29,22 +17,20 @@ public class BasicServiceTestUnit extends JerseyTest {
return new GeoPortalService();
}
protected static String scope="/gcube/devsec/devVRE";
@BeforeClass
public static void init() {
String scope="/gcube/devsec/devVRE";
AbstractRecordManager.setDefaultProvider(
new ScopedEMFProvider(){
GuardedMethod.addPreoperation(new Runnable() {
@Override
public EntityManagerFactory getFactory() {
public void run() {
TokenSetter.set(scope);
return super.getFactory();
}
});
/*
ImplementationProvider.get().setStorageProvider(new StorageClientProvider() {
@Override
public IClient getObject() throws ConfigurationException {
@ -52,8 +38,8 @@ public class BasicServiceTestUnit extends JerseyTest {
return super.getObject();
}
});
ImplementationProvider.get().setMongoConnectionProvider(new MongoConnectionProvider() {
@Override
public org.gcube.application.geoportal.service.model.internal.db.MongoConnection getObject() throws ConfigurationException {
@ -61,7 +47,7 @@ public class BasicServiceTestUnit extends JerseyTest {
return super.getObject();
}
});
ImplementationProvider.get().setMongoClientProvider(new MongoClientProvider() {
@Override
public MongoClient getObject() throws ConfigurationException {
@ -69,7 +55,14 @@ public class BasicServiceTestUnit extends JerseyTest {
return super.getObject();
}
});
ImplementationProvider.get().setDbProvider(new PostgisConnectionProvider() {
@Override
public PostgisDBManager getObject() throws ConfigurationException {
TokenSetter.set(scope);
return super.getObject();
}
});*/
}
@ -79,7 +72,10 @@ public class BasicServiceTestUnit extends JerseyTest {
throw new Exception("RESP STATUS IS "+resp.getStatus()+". Message : "+resString);
System.out.println("Resp String is "+resString);
if(clazz!=null)
return Serialization.read(resString, clazz);
if (clazz==String.class)
return (T) resString;
else
return Serialization.read(resString, clazz);
else return null;
}
}

View File

@ -1,34 +1,34 @@
package org.gcube.application.geoportal.service;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.gcube.application.geoportal.common.model.legacy.Concessione;
import org.gcube.application.geoportal.common.model.legacy.Concessione.Paths;
import org.gcube.application.geoportal.common.model.legacy.LayerConcessione;
import org.gcube.application.geoportal.common.model.legacy.report.ValidationReport.ValidationStatus;
import org.gcube.application.geoportal.common.rest.AddSectionToConcessioneRequest;
import org.gcube.application.geoportal.common.model.rest.AddSectionToConcessioneRequest;
import org.gcube.application.geoportal.common.rest.InterfaceConstants;
import org.gcube.application.geoportal.common.rest.TempFile;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.common.utils.StorageUtils;
import org.gcube.application.geoportal.service.legacy.TokenSetter;
import org.gcube.application.geoportal.service.utils.Serialization;
import org.json.JSONObject;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.FileInputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.atomic.AtomicLong;
import static org.junit.Assert.*;
public class ConcessioniOverMongoTest extends BasicServiceTestUnit{
@ -37,14 +37,14 @@ public class ConcessioniOverMongoTest extends BasicServiceTestUnit{
private static final String PUBLISH_PATH=InterfaceConstants.Methods.PUBLISH_PATH;
private static final String FILES_PATH=InterfaceConstants.Methods.REGISTER_FILES_PATH;
@Before
public void setContext() {
TokenSetter.set("/gcube/devsec/devVRE");
public void setContext(){
TokenSetter.set(scope);
}
private static Concessione upload(WebTarget target,String id, String path, String ...files) throws Exception {
ArrayList<TempFile> array=new ArrayList<TempFile>();
for(String file:files)
array.add(new StorageUtils().putOntoStorage(new FileInputStream(
@ -66,6 +66,12 @@ public class ConcessioniOverMongoTest extends BasicServiceTestUnit{
put(Entity.entity(Serialization.write(conc), MediaType.APPLICATION_JSON));
return check(resp,Concessione.class);
}
private static Concessione unpublish(WebTarget target, String id) throws Exception {
Response resp=target.path(PUBLISH_PATH).path(id).request(MediaType.APPLICATION_JSON).
delete();
return check(resp,Concessione.class);
}
private static Concessione register(WebTarget target, Concessione c) throws Exception {
Response resp=target.request(MediaType.APPLICATION_JSON).post(Entity.entity(Serialization.write(c), MediaType.APPLICATION_JSON));
return check(resp,Concessione.class);
@ -74,7 +80,37 @@ public class ConcessioniOverMongoTest extends BasicServiceTestUnit{
private static Concessione get(WebTarget target) throws Exception {
return register(target,TestModel.prepareConcessione());
}
private static Concessione getById(WebTarget target ,String id) throws Exception {
return check(target.path(id).request(MediaType.APPLICATION_JSON).get(),Concessione.class);
}
private static Iterator<Concessione> search(String query, WebTarget target) throws Exception {
String result= check(target.path(InterfaceConstants.Methods.SEARCH_PATH).request(MediaType.APPLICATION_JSON_TYPE).post(
Entity.entity(query,MediaType.APPLICATION_JSON)),String.class);
return Serialization.readCollection(result,Concessione.class);
}
private static Iterator<Concessione> searchFile(String filename, WebTarget target) throws Exception {
String query= Files.readFileAsString(Files.getFileFromResources(
"concessioni/jsonFilters/"+filename).getAbsolutePath(), Charset.defaultCharset());
return search(query,target);
}
private static <T> Iterator<T> queryFile(String filename, WebTarget target, Class<T> clazz) throws Exception {
String queryString= Files.readFileAsString(Files.getFileFromResources(
"concessioni/jsonQueries/"+filename).getAbsolutePath(), Charset.defaultCharset());
String result = check(target.path(InterfaceConstants.Methods.QUERY_PATH).request(MediaType.APPLICATION_JSON_TYPE).post(
Entity.entity(queryString,MediaType.APPLICATION_JSON)),String.class);
return Serialization.readCollection(result,clazz);
}
private static Iterator<Concessione> queryFile(String filename, WebTarget target) throws Exception {
return queryFile(filename,target,Concessione.class);
}
// ********** TESTS
@Test
@ -83,6 +119,37 @@ public class ConcessioniOverMongoTest extends BasicServiceTestUnit{
System.out.println(target.request(MediaType.APPLICATION_JSON).get(List.class));
}
@Test
public void search() throws Exception {
WebTarget target=target(PATH);
AtomicLong validatedCount= new AtomicLong(0);
Iterator<Concessione> it=searchFile("validated.json",target);
it.forEachRemaining(concessione -> {validatedCount.incrementAndGet();});
System.out.println("Validated : "+ validatedCount.get());
}
@Test
public void query() throws Exception {
WebTarget target=target(PATH);
try {
// System.out.println("Last Registered : " + queryFile("lastRegistered.json", target).next().getCreationTime());
// System.out.println("First Registered : " + queryFile("firstRegistered.json", target).next().getCreationTime());
System.out.println("Last Names by Fabio : ");
queryFile("lastNamesRegisteredByFabio.json", target, JSONObject.class).forEachRemaining(c -> {System.out.println(c);});
System.out.println("Publication warning messages : ");
queryFile("publicationWarningMessages.json", target, JSONObject.class).forEachRemaining(c -> {System.out.println(c);});
}catch(NoSuchElementException e){
System.out.println("NO element found, probably empty DB");
}
}
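	// Fetches and prints the configuration exposed at the CONFIGURATION path.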
@Test
public void getConfiguration() throws Exception {
WebTarget target=target(PATH);
System.out.println(check(target.path(InterfaceConstants.Methods.CONFIGURATION_PATH).request(MediaType.APPLICATION_JSON).get(),String.class));
}
@Test
public void createNew() throws Exception {
WebTarget target=target(PATH);
@ -95,10 +162,38 @@ public class ConcessioniOverMongoTest extends BasicServiceTestUnit{
public void delete() throws Exception {
WebTarget target=target(PATH);
Concessione c = get(target);
check(target.path(c.getMongo_id()).request(MediaType.APPLICATION_JSON).delete(),null);
Concessione published=getFullPublished(target);
check(target.path(published.getMongo_id()).request(MediaType.APPLICATION_JSON).delete(),null);
}
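	// Publishes a full Concessione, unpublishes it, then publishes it again and expects the report status to be PASSED.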
@Test
public void republish() throws Exception{
WebTarget target=target(PATH);
Concessione published=getFullPublished(target);
published = unpublish(target,published.getMongo_id());
System.out.println("Republishing..");
published=publish(target,published);
Assert.assertEquals(ValidationStatus.PASSED,published.getReport().getStatus());
}
// @Test
// public void handlePrecise() throws Exception {
// WebTarget target=target(PATH);
// String id="610415af02ad3d05b5f81ee3";
// publish(target,unpublish(target,id));
// target.path(id).queryParam(InterfaceConstants.Parameters.FORCE,true).request(MediaType.APPLICATION_JSON).delete();
// }
@Test
public void getById() throws Exception {
WebTarget target=target(PATH);
@ -144,33 +239,43 @@ public class ConcessioniOverMongoTest extends BasicServiceTestUnit{
System.out.println("File is "+c.getRelazioneScavo().getActualContent().get(0));
}
@Test
public void testClearFileSet() throws Exception {
WebTarget target=target(PATH);
Concessione published=getFullPublished(target);
String path=Paths.POSIZIONAMENTO;
Response resp=
target.path(InterfaceConstants.Methods.DELETE_FILES_PATH).path(published.getMongo_id()).
request(MediaType.APPLICATION_JSON).put(Entity.entity(path, MediaType.APPLICATION_JSON));
//Expecting error for deletion
assertTrue(resp.getStatus()>=300);
System.out.println("Error for deletion is "+resp.readEntity(String.class));
resp=
target.path(InterfaceConstants.Methods.PUBLISH_PATH).path(published.getMongo_id()).
request(MediaType.APPLICATION_JSON).delete();
check(resp,null);
//Actually cleaning posizionamento
published=check(
target.path(InterfaceConstants.Methods.DELETE_FILES_PATH).path(published.getMongo_id()).
request(MediaType.APPLICATION_JSON).post(Entity.entity(path, MediaType.APPLICATION_JSON)),Concessione.class);
assertTrue(published.getPosizionamentoScavo().getActualContent().isEmpty());
path=Paths.piantaByIndex(0);
published=check(
target.path(InterfaceConstants.Methods.DELETE_FILES_PATH).path(published.getMongo_id()).
request(MediaType.APPLICATION_JSON).post(Entity.entity(path, MediaType.APPLICATION_JSON)),Concessione.class);
assertTrue(published.getPianteFineScavo().get(0).getActualContent().isEmpty());
}
@Test
public void publish() throws Exception {
WebTarget target=target(PATH);
Concessione c=TestModel.prepareConcessione(1,2);
c.setNome("Concessione : publish test");
// Register new
c=register(target,c);
//Upload files
upload(target,c.getMongo_id(),Paths.RELAZIONE,"relazione.pdf");
upload(target,c.getMongo_id(),Paths.POSIZIONAMENTO,"pos.shp","pos.shx");
// Clash on workspaces
upload(target,c.getMongo_id(),Paths.piantaByIndex(0),"pianta.shp","pianta.shx");
upload(target,c.getMongo_id(),Paths.imgByIndex(0),"immagine.png");
upload(target,c.getMongo_id(),Paths.imgByIndex(1),"immagine2.png");
// Immagini
Concessione published=publish(target, c);
Concessione published=getFullPublished(target);
System.out.println("Published : "+published);
System.out.println("Report is : "+published.getReport());
assertNotNull(published.getReport());
@ -185,7 +290,31 @@ public class ConcessioniOverMongoTest extends BasicServiceTestUnit{
assertNotNull(published.getCentroidLong());
}
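	// Registers a new Concessione, uploads the required filesets and publishes it, returning the published instance.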
private Concessione getFullPublished(WebTarget target) throws Exception {
Concessione c=TestModel.prepareConcessione(1,2);
c.setNome("Concessione : publish test");
// Register new
c=register(target,c);
//Upload files
upload(target,c.getMongo_id(),Paths.RELAZIONE,"relazione.pdf");
upload(target,c.getMongo_id(),Paths.POSIZIONAMENTO,"pos.shp","pos.shx");
// Clash on workspaces
upload(target,c.getMongo_id(),Paths.piantaByIndex(0),"pianta.shp","pianta.shx");
upload(target,c.getMongo_id(),Paths.imgByIndex(0),"immagine.png");
upload(target,c.getMongo_id(),Paths.imgByIndex(1),"immagine2.png");
// Immagini
Concessione published=publish(target, c);
return published;
}
}

View File

@ -1,30 +1,43 @@
package org.gcube.application.geoportal.service.legacy;
import java.io.IOException;
import org.bson.Document;
import org.gcube.application.geoportal.common.model.profile.Profile;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
import org.gcube.application.geoportal.service.engine.ImplementationProvider;
import org.gcube.application.geoportal.service.engine.cache.MongoClientProvider;
import org.gcube.application.geoportal.service.engine.mongo.MongoManager;
import org.gcube.application.geoportal.service.utils.Serialization;
import org.junit.BeforeClass;
import org.junit.Test;
package org.gcube.application.geoportal.service;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.mongodb.Block;
import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.Projections;
import org.bson.Document;
import org.gcube.application.geoportal.service.engine.ImplementationProvider;
import org.gcube.application.geoportal.service.engine.mongo.MongoManager;
import org.gcube.application.geoportal.service.engine.providers.MongoClientProvider;
import org.gcube.application.geoportal.service.model.internal.faults.ConfigurationException;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
public class MongoTests {
public static class MongoTester extends MongoManager {
public MongoTester() throws ConfigurationException {
}
public MongoCollection<Document> getCollection(){return getDatabase().getCollection("legacyConcessioni");}
@Override
protected MongoDatabase getDatabase() {
return client.getDatabase("gna_dev");
}
}
@BeforeClass
public static final void init() {
ImplementationProvider.get().setMongoClientProvider(new MongoClientProvider() {
@Override
public MongoClient getObject() throws ConfigurationException {
TokenSetter.set("/gcube/devNext/NextNext");
TokenSetter.set("/gcube/devsec/devVRE");
return super.getObject();
}
});
@ -43,10 +56,27 @@ public class MongoTests {
// MongoManager manager=new MongoManager();
// Profile f=Serialization.mapper.readerFor(Profile.class).readValue(
// Files.getFileFromResources("fakeProfile.json"));
//
//
// manager.iterate(new Document(),f).forEach(printBlock);
}
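	// Compares building the projection with the Projections helper against parsing the equivalent JSON document directly.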
@Test
public void queries() throws ConfigurationException {
MongoTester tester=new MongoTester();
System.out.println("Using builders..");
tester.getCollection().find(Document.parse("{\"report.status\" : \"WARNING\"}")).
projection(Projections.include("nome")).forEach(printBlock);
System.out.println("Deserializing documents");
tester.getCollection().find(Document.parse("{\"report.status\" : \"WARNING\"}")).
projection(Document.parse("{\"nome\" : 1}")).forEach(printBlock);
}
@Test
public void checkQuerySerialization(){
System.out.println(Projections.include("nome"));
}
// @Test
// public void writeProject() {
// MongoManager manager=new MongoManager();

View File

@ -1,18 +1,14 @@
package org.gcube.application.geoportal.service;
import java.util.Collections;
import java.util.List;
import org.bson.Document;
import org.gcube.application.geoportal.common.rest.InterfaceConstants;
import org.junit.Test;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import org.bson.Document;
import org.gcube.application.geoportal.common.model.project.Project;
import org.gcube.application.geoportal.common.rest.InterfaceConstants;
import org.junit.Test;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Collections;
import java.util.List;
public class ProjectTests extends BasicServiceTestUnit{

View File

@ -1,15 +1,14 @@
package org.gcube.application.geoportal.service;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
import org.gcube.application.geoportal.service.engine.SDIManager;
import org.gcube.application.geoportal.service.engine.mongo.PostgisIndex;
import org.gcube.application.geoportal.service.legacy.TokenSetter;
import org.gcube.application.geoportal.service.engine.postgis.PostgisIndex;
import org.gcube.application.geoportal.service.model.internal.faults.ConfigurationException;
import org.gcube.application.geoportal.service.model.internal.faults.SDIInteractionException;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import javax.validation.constraints.AssertTrue;
import java.sql.SQLException;
import java.util.regex.Matcher;

View File

@ -1,16 +1,11 @@
package org.gcube.application.geoportal.service;
import org.bson.types.ObjectId;
import org.gcube.application.geoportal.common.model.legacy.*;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.UUID;
import org.bson.types.ObjectId;
import org.gcube.application.geoportal.common.model.legacy.AccessPolicy;
import org.gcube.application.geoportal.common.model.legacy.Concessione;
import org.gcube.application.geoportal.common.model.legacy.LayerConcessione;
import org.gcube.application.geoportal.common.model.legacy.RelazioneScavo;
import org.gcube.application.geoportal.common.model.legacy.UploadedImage;

View File

@ -1,10 +1,10 @@
package org.gcube.application.geoportal.service.legacy;
import java.util.Properties;
package org.gcube.application.geoportal.service;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import java.util.Properties;
public class TokenSetter {

View File

@ -1,188 +0,0 @@
package org.gcube.application.geoportal.service.legacy;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Collection;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.gcube.application.geoportal.common.rest.InterfaceConstants;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.model.concessioni.Concessione;
import org.gcube.application.geoportal.model.concessioni.LayerConcessione;
import org.gcube.application.geoportal.model.content.AssociatedContent;
import org.gcube.application.geoportal.model.content.UploadedImage;
import org.gcube.application.geoportal.model.fault.ConfigurationException;
import org.gcube.application.geoportal.model.report.PublicationReport;
import org.gcube.application.geoportal.model.report.ValidationReport.ValidationStatus;
import org.gcube.application.geoportal.service.BasicServiceTestUnit;
import org.gcube.application.geoportal.service.engine.ImplementationProvider;
import org.gcube.application.geoportal.service.model.internal.rest.AddSectionToConcessioneRequest;
import org.gcube.application.geoportal.service.model.internal.rest.AddSectionToConcessioneRequest.SHUBFileDescriptor;
import org.gcube.application.geoportal.service.model.internal.rest.AddSectionToConcessioneRequest.Section;
import org.gcube.application.geoportal.service.utils.Serialization;
import org.gcube.com.fasterxml.jackson.core.JsonProcessingException;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.junit.Test;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class ConcessioniTest extends BasicServiceTestUnit {
@Test
public void readId() {
WebTarget target=target(InterfaceConstants.Methods.CONCESSIONI);
try{
System.out.println(target.path("9").request(MediaType.APPLICATION_JSON).get(String.class));
}catch(WebApplicationException e) {
if(e.getResponse().getStatus()==404)
System.out.println("Object not found");
else throw e;
}
}
@Test
public void list() throws JsonProcessingException, IOException{
WebTarget target=target(InterfaceConstants.Methods.CONCESSIONI);
System.err.println(target.getUri());
Collection coll=target.request(MediaType.APPLICATION_JSON).get(Collection.class);
System.out.println("Size of collection is "+coll.size());
System.out.println("Iterating through objects.. ");
for(Object o:coll) {
System.out.println("Object class is "+o.getClass());
Concessione c=Serialization.read(o.toString(), Concessione.class);
System.out.println("Concessione is : "+Serialization.write(c));
}
}
@Test
public void failPublish() throws com.fasterxml.jackson.core.JsonProcessingException, IOException {
Concessione toCreate=OLDTestModel.prepareEmptyConcessione();
Concessione conc=pushConcessione(toCreate);
System.out.println(publish(conc.getId()+"").prettyPrint());
}
@Test
public void createNew() throws IOException {
Concessione toCreate=OLDTestModel.prepareEmptyConcessione();
pushConcessione(toCreate);
}
@Test
public void publishNew() throws IOException, RemoteBackendException, ConfigurationException {
Concessione toCreate=OLDTestModel.prepareEmptyConcessione();
Concessione registered = pushConcessione(toCreate);
System.out.println("Registered at "+Serialization.write(registered));
Concessione fullTemplate=OLDTestModel.prepareConcessione();
//Push Relazione
publishSection(registered.getId()+"",formRequest(Section.RELAZIONE,fullTemplate.getRelazioneScavo(),"concessioni/relazione.pdf"));
assertNotNull(getById(registered.getId()+"").getRelazioneScavo());
//Push Immagini
for(UploadedImage img:fullTemplate.getImmaginiRappresentative())
publishSection(registered.getId()+"",formRequest(Section.UPLOADED_IMG,img,"concessioni/immagine.png"));
assertNotNull(getById(registered.getId()+"").getImmaginiRappresentative());
assertTrue(getById(registered.getId()+"").getImmaginiRappresentative().size()==fullTemplate.getImmaginiRappresentative().size());
//Push Posizionamento
publishSection(registered.getId()+"",formRequest(Section.POSIZIONAMENTO,fullTemplate.getPosizionamentoScavo(),"concessioni/pos.dbf","concessioni/pos.shp"));
assertNotNull(getById(registered.getId()+"").getPosizionamentoScavo());
//Push piante
for(LayerConcessione l:fullTemplate.getPianteFineScavo())
publishSection(registered.getId()+"",formRequest(Section.PIANTA,l,"concessioni/pos.dbf","concessioni/pos.shp"));
assertNotNull(getById(registered.getId()+"").getPianteFineScavo());
assertTrue(getById(registered.getId()+"").getPianteFineScavo().size()==fullTemplate.getPianteFineScavo().size());
Concessione reloaded = getById(registered.getId()+"");
System.out.println("Goind to publish "+Serialization.write(reloaded));
PublicationReport report=publish(registered.getId()+"");
System.out.println("REPORT IS "+report.prettyPrint()+"");
// System.out.println("Concessione is "+report.getTheRecord().asJson());
assertTrue(report.getStatus().equals(ValidationStatus.PASSED));
}
// ACTUAL METHODS
private PublicationReport publish(String id) throws com.fasterxml.jackson.core.JsonProcessingException, IOException {
WebTarget target=target(InterfaceConstants.Methods.CONCESSIONI);
Response resp=target.path("publish").path(id).request(MediaType.APPLICATION_JSON).put(Entity.entity("sopmething", MediaType.APPLICATION_JSON));
if(resp.getStatus()<200||resp.getStatus()>=300)
System.err.println("RESP STATUS IS "+resp.getStatus());
String resString=resp.readEntity(String.class);
System.out.println("Resp String is "+resString);
PublicationReport registered=Serialization.read(resString, PublicationReport.class);
System.out.println("Registered concessione at : "+registered);
return registered;
}
private AddSectionToConcessioneRequest formRequest(Section section,AssociatedContent content,String... files) throws RemoteBackendException, FileNotFoundException, ConfigurationException {
AddSectionToConcessioneRequest toReturn=new AddSectionToConcessioneRequest();
toReturn.setSection(section);
toReturn.setToRegister(content);
for(String f:files) {
SHUBFileDescriptor desc=new SHUBFileDescriptor();
desc.setFilename(f.substring(f.lastIndexOf("/")+1));
String sId=ImplementationProvider.get().getStorageProvider().store(
new FileInputStream(Files.getFileFromResources(f)));
desc.setShubID(sId);
toReturn.getStreams().add(desc);
}
return toReturn;
}
private Response publishSection(String id, AddSectionToConcessioneRequest request) {
WebTarget target=target(InterfaceConstants.Methods.CONCESSIONI);
Response resp=target.path("section").path(id).request(MediaType.APPLICATION_JSON).put(Entity.entity(request, MediaType.APPLICATION_JSON));
if(resp.getStatus()<200||resp.getStatus()>=300) {
System.err.println("RESP STATUS IS "+resp.getStatus());
System.err.println("RESP IS "+resp.readEntity(String.class));
}
return resp;
}
private Concessione pushConcessione(Concessione c) throws com.fasterxml.jackson.core.JsonProcessingException, IOException {
WebTarget target=target(InterfaceConstants.Methods.CONCESSIONI);
Response resp=target.request(MediaType.APPLICATION_JSON).put(Entity.entity(Serialization.write(c), MediaType.APPLICATION_JSON));
String resString=resp.readEntity(String.class);
System.out.println("Resp String is "+resString);
Concessione registered=Serialization.read(resString, Concessione.class);
System.out.println("Registered concessione at : "+registered);
return registered;
}
private Concessione getById(String id) throws com.fasterxml.jackson.core.JsonProcessingException, IOException {
WebTarget target=target(InterfaceConstants.Methods.CONCESSIONI);
Response resp=target.path(id).request(MediaType.APPLICATION_JSON).get();
if(resp.getStatus()<200||resp.getStatus()>=300)
System.err.println("RESP STATUS IS "+resp.getStatus());
String resString=resp.readEntity(String.class);
System.out.println("Resp String is "+resString);
return Serialization.read(resString, Concessione.class);
}
}

View File

@ -1,108 +0,0 @@
package org.gcube.application.geoportal.service.legacy;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import org.gcube.application.geoportal.model.AccessPolicy;
import org.gcube.application.geoportal.model.concessioni.Concessione;
import org.gcube.application.geoportal.model.concessioni.LayerConcessione;
import org.gcube.application.geoportal.model.concessioni.RelazioneScavo;
import org.gcube.application.geoportal.model.content.UploadedImage;
public class OLDTestModel {
public static Concessione prepareEmptyConcessione() {
Concessione concessione=new Concessione();
// Generic fields
// Concessione fields
concessione.setNome("Italia, forse");
concessione.setIntroduzione("This is my project");
concessione.setDescrizioneContenuto("It contains this and that");
concessione.setAuthors(Arrays.asList(new String[] {"Some one","Some, oneelse"}));
concessione.setContributore("Contrib 1");
concessione.setTitolari(Arrays.asList(new String[] {"Some one","Some, oneelse"}));
concessione.setResponsabile("Someone");
concessione.setEditore("Editore");
concessione.setFontiFinanziamento(Arrays.asList(new String[] {"Big pharma","Pentagon"}));
concessione.setSoggetto(Arrays.asList(new String[] {"Research Excavation","Archeology"}));
concessione.setDataInizioProgetto(LocalDateTime.now());
concessione.setDataFineProgetto(LocalDateTime.now());
concessione.setLicenzaID("CC-BY");
concessione.setTitolareLicenza(Arrays.asList(new String[] {"Qualcun altro"}));
concessione.setTitolareCopyright(Arrays.asList(new String[] {"Chiedilo in giro"}));
concessione.setParoleChiaveLibere(Arrays.asList(new String[] {"Robba","Stuff"}));
concessione.setParoleChiaveICCD(Arrays.asList(new String[] {"vattelapesca","somthing something"}));
concessione.setCentroidLat(43.0); //N-S
concessione.setCentroidLong(9.0); //E-W
return concessione;
}
public static Concessione prepareConcessione() {
Concessione concessione=prepareEmptyConcessione();
// Attachments
// Relazione scavo
RelazioneScavo relScavo=new RelazioneScavo();
relScavo.setAbstractSection("simple abstract section");
relScavo.setResponsabili(concessione.getAuthors());
concessione.setRelazioneScavo(relScavo);
//Immagini rappresentative
ArrayList<UploadedImage> imgs=new ArrayList<>();
for(int i=0;i<5;i++) {
UploadedImage img=new UploadedImage();
img.setTitolo("My image number "+i);
img.setDidascalia("You can see my image number "+i);
img.setFormat("TIFF");
img.setCreationTime(LocalDateTime.now());
img.setResponsabili(concessione.getAuthors());
imgs.add(img);
}
concessione.setImmaginiRappresentative(imgs);
//Posizionamento
LayerConcessione posizionamento=new LayerConcessione();
posizionamento.setValutazioneQualita("Secondo me si");
posizionamento.setMetodoRaccoltaDati("Fattobbene");
posizionamento.setScalaAcquisizione("1:10000");
posizionamento.setAuthors(concessione.getAuthors());
concessione.setPosizionamentoScavo(posizionamento);
// Piante fine scavo
ArrayList<LayerConcessione> piante=new ArrayList<LayerConcessione>();
for(int i=0;i<4;i++) {
LayerConcessione pianta=new LayerConcessione();
pianta.setValutazioneQualita("Secondo me si");
pianta.setMetodoRaccoltaDati("Fattobbene");
pianta.setScalaAcquisizione("1:10000");
pianta.setAuthors(concessione.getAuthors());
pianta.setPolicy(AccessPolicy.RESTRICTED);
piante.add(pianta);
}
concessione.setPianteFineScavo(piante);
return concessione;
}
}

View File

@ -0,0 +1,31 @@
package org.gcube.application.geoportal.service.ws;
import org.gcube.application.geoportal.service.TokenSetter;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.common.storagehub.model.items.*;
public class DescribeWSFolder {
public static void main(String[] args) throws StorageHubException {
String context="/gcube/devsec/devVRE";
String folderID="fea4a885-7e60-4294-83d0-82162e7462f4";
TokenSetter.set(context);
StorageHubClient shc = new StorageHubClient();
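		// Open the folder by id, print its metadata, then list its children and report each item's type.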
FolderItem folder=shc.open(folderID).asFolder().get();
System.out.println("PATH : "+folder.getPath());
System.out.println("HIDDEN : "+folder.isHidden());
System.out.println("Description : "+folder.getDescription());
System.out.println("Listing... ");
for (Item item : shc.open(folderID).asFolder().list().getItems()) {
System.out.println("name:" + item.getName() + " is a File?: " + (item instanceof AbstractFileItem));
System.out.println("name:" + item.getName() + " is a folder?: " + (item instanceof FolderItem));
System.out.println("name:" + item.getName() + " is a shared folder?: " + (item instanceof SharedFolder));
System.out.println("name:" + item.getName() + " is a VRE folder?: " + (item instanceof VreFolder));
}
}
}

View File

@ -0,0 +1,12 @@
package org.gcube.application.geoportal.service.ws;
public class DisplayWorkspaceTree {
public static void main(String[] args) {
String context="/gcube/devsec/devVRE";
String folderId=null; // NB null ==
}
}

View File

@ -0,0 +1,28 @@
package org.gcube.application.geoportal.service.ws;
import org.gcube.application.geoportal.service.TokenSetter;
import org.gcube.application.geoportal.service.engine.WorkspaceManager;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
public class SetWSFolderVisibility {
public static void main(String[] args) throws StorageHubException {
String context="/gcube/devsec/devVRE";
Boolean setVisible=true;
TokenSetter.set(context);
StorageHubClient shc = new StorageHubClient();
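		// Resolve the application base folder and toggle its visibility in the workspace GUI.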
FolderContainer folderContainer = WorkspaceManager.getApplicationBaseFolder(shc);
if(setVisible)
folderContainer.setVisible();
else
folderContainer.setHidden(); //will not appear in the workspace GUI
System.out.println("Done, children count "+folderContainer.list().getItems().size());
}
}

View File

@ -0,0 +1 @@
{}

View File

@ -0,0 +1,3 @@
{
"id" : {$gt : 0}
}

View File

@ -0,0 +1,4 @@
{
"centroidLat" : 0
}

View File

@ -0,0 +1,3 @@
{
"creationUser" : {$ne : "fabio.sinibaldi"}
}

View File

@ -0,0 +1,3 @@
{
"report.status": {$eq : "PASSED"}
}

View File

@ -0,0 +1,10 @@
{
"paging" : {
"offset" : 0,
"limit" : 1
},
"ordering" : {
"direction" : "ASCENDING",
"fields" : ["creationTime","nome"]
}
}

View File

@ -0,0 +1,18 @@
{
"paging" : {
"offset" : 0,
"limit" : 1
},
"ordering" : {
"direction" : "DESCENDING",
"fields" : ["creationTime","nome"]
},
"filter" : {
"creationUser" : {$eq : "fabio.sinibaldi"}
},
"projection" : {
"nome" : 1
}
}

View File

@ -0,0 +1,11 @@
{
"paging" : {
"offset" : 0,
"limit" : 1
},
"ordering" : {
"direction" : "DESCENDING",
"fields" : ["creationTime","nome"]
}
}

View File

@ -0,0 +1,10 @@
{
"ordering" : {
"direction" : "DESCENDING",
"fields" : ["nome"]
},
"filter" : {"report.status" : {"$eq" : "WARNING"}},
"projection" : {"report.warningMessages" : 1}
}