Removed legacy

This commit is contained in:
Fabio Sinibaldi 2022-09-20 14:46:22 +02:00
parent 26e6350178
commit 71d383e162
13 changed files with 44 additions and 1858 deletions

View File

@ -4,21 +4,22 @@ import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.cms.caches.Engine;
import org.gcube.application.cms.implementations.ImplementationProvider;
import org.gcube.application.cms.implementations.StorageHubProvider;
import org.gcube.application.cms.serialization.Serialization;
import org.gcube.application.geoportal.common.rest.InterfaceConstants;
import org.gcube.application.geoportal.common.utils.StorageUtils;
import org.gcube.application.geoportal.service.engine.mongo.UCDManagerI;
import org.gcube.application.geoportal.service.engine.postgis.PostgisDBManagerI;
import org.gcube.application.geoportal.service.engine.providers.*;
import org.gcube.application.geoportal.service.engine.providers.ConfigurationCache;
import org.gcube.application.geoportal.service.engine.providers.MongoClientProvider;
import org.gcube.application.geoportal.service.engine.providers.PluginManager;
import org.gcube.application.geoportal.service.engine.providers.StorageClientProvider;
import org.gcube.application.geoportal.service.engine.providers.ucd.ProfileMap;
import org.gcube.application.geoportal.service.engine.providers.ucd.SingleISResourceUCDProvider;
import org.gcube.application.geoportal.service.engine.providers.ucd.UCDManager;
import org.gcube.application.geoportal.service.model.internal.db.Mongo;
import org.gcube.application.geoportal.service.rest.*;
import org.gcube.application.cms.serialization.Serialization;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import org.gcube.application.geoportal.service.rest.DocsGenerator;
import org.gcube.application.geoportal.service.rest.Plugins;
import org.gcube.application.geoportal.service.rest.ProfiledDocuments;
import org.gcube.application.geoportal.service.rest.UseCaseDescriptors;
import org.glassfish.jersey.server.ResourceConfig;
import javax.ws.rs.ApplicationPath;
@ -43,7 +44,6 @@ public class GeoPortalService extends ResourceConfig{
register(provider);
registerClasses(ConcessioniOverMongo.class);
registerClasses(ProfiledDocuments.class);
registerClasses(UseCaseDescriptors.class);
registerClasses(Plugins.class);
@ -53,7 +53,6 @@ public class GeoPortalService extends ResourceConfig{
ImplementationProvider.get().setEngine(new MongoClientProvider(), Mongo.class);
ImplementationProvider.get().setEngine(new StorageClientProvider(), StorageUtils.class);
ImplementationProvider.get().setEngine(new PostgisConnectionProvider(), PostgisDBManagerI.class);
ImplementationProvider.get().setEngine(new SingleISResourceUCDProvider(), ProfileMap.class);
ImplementationProvider.get().setEngine(new PluginManager(), PluginManager.PluginMap.class);
ImplementationProvider.get().setEngine(new UCDManager(),UCDManagerI.class);

View File

@ -1,433 +0,0 @@
package org.gcube.application.geoportal.service.engine.materialization;
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher;
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher.UploadMethod;
import it.geosolutions.geoserver.rest.GeoServerRESTReader;
import it.geosolutions.geoserver.rest.decoder.RESTFeatureType;
import it.geosolutions.geoserver.rest.decoder.RESTLayer;
import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder;
import it.geosolutions.geoserver.rest.encoder.datastore.GSPostGISDatastoreEncoder;
import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.geoportal.common.model.legacy.*;
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.cms.implementations.WorkspaceManager;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable;
import org.gcube.application.geoportal.service.model.internal.faults.SDIInteractionException;
import org.gcube.common.storagehub.client.dsl.FileContainer;
import org.gcube.data.transfer.library.DataTransferClient;
import org.gcube.data.transfer.library.TransferResult;
import org.gcube.data.transfer.library.faults.RemoteServiceException;
import org.gcube.data.transfer.model.Destination;
import org.gcube.data.transfer.model.DestinationClashPolicy;
import org.gcube.data.transfer.model.RemoteFileDescriptor;
import org.gcube.spatial.data.gis.GISInterface;
import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Facade towards the SDI (Spatial Data Infrastructure).
 *
 * Publishes shapefile layer file-sets and Postgis-backed centroid layers on the
 * VRE's current GeoServer, transferring the underlying files through the gCube
 * Data Transfer service. Instances are bound at construction time to the
 * GeoServer returned by {@code GISInterface.getCurrentGeoServer()}.
 */
@Slf4j
public class SDIManager {

	static private final String EPSG_4326="EPSG:4326";
	// Full WKT definition of WGS84, used as native CRS for centroid layers.
	static private final String WGS84_FULL="GEOGCS[\"WGS 84\", DATUM[\"World Geodetic System 1984\", SPHEROID[\"WGS 84\", 6378137.0, 298.257223563, AUTHORITY[\"EPSG\",\"7030\"]],"+
			"AUTHORITY[\"EPSG\",\"6326\"]], PRIMEM[\"Greenwich\", 0.0, AUTHORITY[\"EPSG\",\"8901\"]], UNIT[\"degree\", 0.017453292519943295],"+
			"AXIS[\"Geodetic longitude\", EAST], AXIS[\"Geodetic latitude\", NORTH], AUTHORITY[\"EPSG\",\"4326\"]]";

	// Patterns to dissect a JDBC-style connection URL (e.g. jdbc:postgresql://host:5432/db).
	public static final Pattern HOSTNAME_PATTERN=Pattern.compile("(?<=\\:\\/\\/)[^\\:]*");
	public static final Pattern PORT_PATTERN=Pattern.compile("(?<=\\:)[\\d]+");
	public static final Pattern DB_NAME_PATTERN=Pattern.compile("(?<=\\/)[^\\/]*(?=$)");

	private final GISInterface gis;
	@Getter
	private final DataTransferClient dtGeoServer;
	private final String geoserverHostName;
	private final AbstractGeoServerDescriptor currentGeoserver;

	/**
	 * Resolves the current GeoServer from the infrastructure, verifies it is
	 * reachable and initializes a Data Transfer client towards the same host.
	 *
	 * @throws SDIInteractionException if any SDI component cannot be contacted
	 */
	public SDIManager() throws SDIInteractionException {
		try{
			log.debug("Initializing GIS Interface..");
			gis=GISInterface.get();
			currentGeoserver=gis.getCurrentGeoServer();
			if(currentGeoserver==null)
				throw new Exception("Unable to contact data transfer for geoserver ");
			log.debug("Found geoserver descriptor "+currentGeoserver);
			geoserverHostName=new URL(currentGeoserver.getUrl()).getHost();
			log.debug("Contacting Data Transfer from geoserver {} ",geoserverHostName);
			dtGeoServer=DataTransferClient.getInstanceByEndpoint("https://"+geoserverHostName);
			if(!currentGeoserver.getReader().existGeoserver())
				throw new Exception("Geoserver not reachable");
		}catch(Exception e) {
			throw new SDIInteractionException("Unable to initialize SDI Manager",e);
		}
	}

	/**
	 * @return descriptor of the remote "geoserver/GNA" folder on the data-transfer node
	 * @throws RemoteServiceException if the data-transfer service call fails
	 */
	public RemoteFileDescriptor getGeoServerRemoteFolder() throws RemoteServiceException {
		return dtGeoServer.getWebClient().getInfo("geoserver/GNA");
	}

	/**
	 * Creates the workspace on GeoServer if it does not already exist (idempotent).
	 *
	 * @param toCreate the workspace name
	 * @return the workspace name, for chaining
	 * @throws SDIInteractionException if the workspace cannot be created
	 */
	public String createWorkspace(String toCreate) throws SDIInteractionException {
		try {
			if(!currentGeoserver.getReader().getWorkspaceNames().contains(toCreate)) {
				log.debug("Creating workspace : "+toCreate);
				if(!currentGeoserver.getPublisher().createWorkspace(toCreate))
					throw new SDIInteractionException("Unable to create workspace "+toCreate);
			}else log.debug("Workspace "+toCreate+" exists.");
			return toCreate;
		} catch (IllegalArgumentException | MalformedURLException e) {
			throw new SDIInteractionException("Unable to create workspace "+toCreate,e);
		}
	}

	// Remote layout: GEOSERVER-PERSISTENCE-ID / GNA / PROJECT-ID / LAYER-ID / FILENAME(no extension)/...
	/**
	 * Transfers a shapefile file-set to the GeoServer host and publishes it as a
	 * layer in the given workspace. The layer name is derived from the .shp base
	 * name and de-duplicated against layers already present in the workspace.
	 *
	 * @param currentElement descriptor holding the workspace content to publish; updated
	 *                       in place with layer name, WMS link, workspace and bbox
	 * @param workspace      target GeoServer workspace (created if missing)
	 * @param projectId      project identifier used to build the remote folder path
	 * @return the GeoServerContent describing what was materialized
	 * @throws SDIInteractionException if there is nothing to publish or publication fails
	 */
	public GeoServerContent pushShapeLayerFileSet(SDILayerDescriptor currentElement,String workspace, String projectId) throws SDIInteractionException{
		try {
			log.debug("Publishing "+currentElement+" files to geoserver @ "+geoserverHostName);
			if(currentElement.getActualContent()==null||currentElement.getActualContent().isEmpty())
				throw new SDIInteractionException("Nothing to publish");

			GeoServerContent content=new GeoServerContent();
			content.setGeoserverHostName(geoserverHostName);
			content.setWorkspace(workspace);
			WorkspaceManager wsManager=new WorkspaceManager();

			// IDENTIFY LAYER NAME: must be unique under the workspace, equal to the shp base name
			String baseName= "";
			for(PersistedContent p:currentElement.getActualContent()){
				if(p instanceof WorkspaceContent) {
					WorkspaceContent w= (WorkspaceContent) p;
					if(w.getName().endsWith(".shp")) {
						log.debug("SHP is {}",w.getName());
						baseName=Files.fixFilename(w.getName().substring(0,w.getName().lastIndexOf('.')));
						break;
					}
				}
			}

			// De-duplicate against existing layers by appending a numeric suffix
			String toSetLayerName=baseName;
			int count=0;
			GeoServerRESTReader gsReader=currentGeoserver.getReader();
			while(gsReader.getLayer(workspace,toSetLayerName)!=null){
				count++;
				toSetLayerName=baseName+"_"+count;
				log.debug("layer for "+baseName+" already existing, trying "+toSetLayerName);
			}

			String folderRelativePath="GNA/" + projectId + "/" +
					currentElement.getMongo_id() + "/" + toSetLayerName;

			// Transfer every workspace file of the set, renaming it after the chosen layer name
			for (PersistedContent c : currentElement.getActualContent()) {
				if (c instanceof WorkspaceContent) {
					WorkspaceContent wc = (WorkspaceContent) c;
					FileContainer fc = wsManager.getFileById(wc.getStorageID());
					String completeFilename = Files.fixFilename(fc.get().getName());
					// FIX: use literal replace() — replaceAll() would interpret baseName as a regex
					completeFilename=completeFilename.replace(baseName, toSetLayerName);

					Destination destination = new Destination(completeFilename);
					destination.setCreateSubfolders(true);
					destination.setOnExistingFileName(DestinationClashPolicy.REWRITE);
					destination.setOnExistingSubFolder(DestinationClashPolicy.APPEND);
					destination.setPersistenceId("geoserver");
					destination.setSubFolder(folderRelativePath);

					log.debug("Sending " + wc + " to " + destination);
					TransferResult result = dtGeoServer.httpSource(fc.getPublicLink(), destination);
					log.debug("Transferred " + result);

					content.getFileNames().add(completeFilename);
					content.setGeoserverPath(result.getRemotePath().substring(0, result.getRemotePath().lastIndexOf("/")));
				}
			}

			String storeName=toSetLayerName+"_store";
			content.setStore(storeName);
			content.setFeatureType(toSetLayerName);

			GeoServerRESTPublisher publisher=currentGeoserver.getPublisher();
			log.debug("Trying to create remote workspace : "+workspace);
			createWorkspace(workspace);

			String folderAbsolutePath=content.getGeoserverPath();
			log.debug("Publishing remote folder "+folderAbsolutePath);
			URL directoryPath=new URL("file:"+folderAbsolutePath+"/"+toSetLayerName+".shp");

			//TODO Evaluate SRS
			boolean published=publisher.publishShp(
					workspace,
					storeName,
					null,
					toSetLayerName,
					UploadMethod.EXTERNAL, // needs shp (UploadMethod.FILE would need a zip)
					directoryPath.toURI(),
					EPSG_4326, //SRS
					""); // default style
			if(!published) {
				throw new SDIInteractionException("Unable to publish layer "+toSetLayerName+" under "+workspace+". Unknown Geoserver fault.");
			}

			currentElement.setLayerName(toSetLayerName);
			RESTLayer l=gsReader.getLayer(workspace, toSetLayerName);
			RESTFeatureType f= gsReader.getFeatureType(l);
			/* Example WMS GetMap:
			   https://geoserver1.dev.d4science.org/geoserver/gna_conc_18/wms?
			   service=WMS&version=1.1.0&request=GetMap&layers=gna_conc_18:pos&
			   styles=&bbox=...&width=392&height=768&srs=EPSG:4326&format=application/openlayers */
			currentElement.setWmsLink(
					String.format("https://%1$s/geoserver/%2$s/wms?"
									+"service=WMS&version=1.1.0&request=GetMap&layers=%2$s:%3$s&"
									+ "styles=&bbox=%4$f,%5$f,%6$f,%7$f&srs=%8$s&format=application/openlayers&width=%9$d&height=%10$d",
							geoserverHostName,
							workspace,
							toSetLayerName,
							f.getMinX(),
							f.getMinY(),
							f.getMaxX(),
							f.getMaxY(),
							EPSG_4326,
							400,
							400));
			currentElement.setWorkspace(workspace);
			currentElement.setBbox(new BBOX(f.getMaxY(), f.getMaxX(), f.getMinY(), f.getMinX()));
			// TODO Metadata
			return content;
		} catch (SDIInteractionException e) {
			throw e;
		} catch (Throwable t) {
			throw new SDIInteractionException("Unexpected internal fault while interacting with SDI.",t);
		}
	}

	/**
	 * Creates the datastore described by {@code encoder} under {@code workspace}
	 * unless it already exists (idempotent).
	 *
	 * @return the store name
	 * @throws SDIInteractionException if creation fails
	 */
	private String createStore(GSPostGISDatastoreEncoder encoder, String workspace) throws SDIInteractionException {
		String storeName=encoder.getName();
		try {
			log.debug("Looking for datastore "+storeName+" under "+workspace);
			if(currentGeoserver.getReader().getDatastore(workspace,storeName)==null) {
				if(!currentGeoserver.getDataStoreManager().create(workspace, encoder))
					throw new SDIInteractionException("Unable to create store "+storeName+" in "+workspace);
			// FIX: log "exists" only when the store was actually found (was logged unconditionally)
			} else log.debug("Store "+storeName+" exists under "+workspace);
			return storeName;
		} catch (IllegalArgumentException | MalformedURLException e) {
			throw new SDIInteractionException("Unable to create store "+storeName,e);
		}
	}

	/**
	 * Builds a Postgis datastore encoder out of a DatabaseConnection, parsing
	 * host, port and database name from its connection URL, then creates it.
	 *
	 * @throws SDIInteractionException if the URL cannot be parsed or creation fails
	 */
	private String createStoreFromPostgisDB(String workspace, String storeName, DatabaseConnection connection) throws SDIInteractionException{
		String connectionUrl=connection.getUrl();
		Matcher hostname=HOSTNAME_PATTERN.matcher(connectionUrl);
		if (!hostname.find()) throw new SDIInteractionException("Unable to get Hostname from "+connection);
		Matcher port = PORT_PATTERN.matcher(connectionUrl);
		if (!port.find()) throw new SDIInteractionException("Unable to get PORT from "+connection);
		Matcher db = DB_NAME_PATTERN.matcher(connectionUrl);
		if (!db.find()) throw new SDIInteractionException("Unable to get DB from "+connection);

		GSPostGISDatastoreEncoder encoder=new GSPostGISDatastoreEncoder(storeName);
		encoder.setHost(hostname.group());
		encoder.setPort(Integer.parseInt(port.group()));
		encoder.setDatabase(db.group());
		encoder.setSchema("public");
		encoder.setUser(connection.getUser());
		encoder.setPassword(connection.getPwd());
		encoder.setLooseBBox(true);
		encoder.setDatabaseType("postgis");
		encoder.setEnabled(true);
		encoder.setFetchSize(1000);
		encoder.setValidateConnections(true);
		return createStore(encoder,workspace);
	}

	/**
	 * Creates a Postgis datastore bound to the container's JNDI datasource
	 * (connection parameters are set by provisioning).
	 */
	private String createStoreFromJNDIDB(String workspace,String storeName) throws SDIInteractionException {
		//SET BY PROVISIONING
		GSPostGISDatastoreEncoder encoder=new GSPostGISDatastoreEncoder(storeName);
		encoder.setJndiReferenceName("java:comp/env/jdbc/postgres");
		encoder.setLooseBBox(true);
		encoder.setDatabaseType("postgis");
		encoder.setEnabled(true);
		encoder.setFetchSize(1000);
		encoder.setValidateConnections(true);
		return createStore(encoder,workspace);
	}

	/**
	 * Registers the given SLD style on GeoServer if not already present (idempotent).
	 *
	 * @param sldFile the SLD file to upload
	 * @param name    the style name
	 * @return the style name
	 * @throws SDIInteractionException if registration fails
	 */
	private String publishStyle(File sldFile,String name) throws SDIInteractionException {
		try {
			if(!currentGeoserver.getReader().existsStyle(name)) {
				log.debug("Registering style "+name);
				if(!currentGeoserver.getPublisher().publishStyle(sldFile, name))
					throw new SDIInteractionException("Unable to register style "+name);
			}else log.debug("Style "+name+" already existing");
			return name;
		} catch (IllegalArgumentException | MalformedURLException e) {
			throw new SDIInteractionException("Unable to create style "+name,e);
		}
	}

	/**
	 * Publishes (idempotently) the centroid layer backed by the given Postgis table:
	 * ensures workspace, datastore and clustered-centroid style exist, then creates
	 * the DB layer if missing.
	 *
	 * @param name       layer (and feature-type) name
	 * @param workspace  target workspace
	 * @param storeName  Postgis datastore name
	 * @param table      backing table descriptor (currently unused here; GeoServer loads all fields)
	 * @param connection DB connection used to create the datastore
	 * @return the layer name
	 * @throws SDIInteractionException on any GeoServer interaction failure
	 */
	public String configureCentroidLayer(String name, String workspace, String storeName, PostgisTable table, DatabaseConnection connection) throws SDIInteractionException {
		GSFeatureTypeEncoder fte=new GSFeatureTypeEncoder();
		fte.setAbstract("Centroid layer for "+name);
		fte.setEnabled(true);
		fte.setNativeCRS(WGS84_FULL);
		fte.setTitle(name);
		fte.setName(name);
		// GeoServer loads all fields
		fte.setLatLonBoundingBox(-180.0, -90.0, 180.0, 90.0, WGS84_FULL);

		String style="clustered_centroids";
		GSLayerEncoder layerEncoder=new GSLayerEncoder();
		layerEncoder.setDefaultStyle(style);
		layerEncoder.setEnabled(true);
		layerEncoder.setQueryable(true);
		try {
			//Checking workspace
			createWorkspace(workspace);
			//Checking store
			createStoreFromPostgisDB(workspace, storeName,connection);
			//Checking style
			publishStyle(Files.getFileFromResources("styles/clustered_points.sld"),style);

			log.info("Creating layer in {} : {} with FTE {} , LE {}",workspace,storeName,fte,layerEncoder);
			if(currentGeoserver.getReader().getLayer(workspace, name)==null) {
				if(!currentGeoserver.getPublisher().publishDBLayer(workspace, storeName, fte, layerEncoder))
					throw new SDIInteractionException("Unable to create layer "+name);
			// FIX: log "already exists" only in the existing-layer branch (was logged unconditionally)
			} else log.debug("layer "+name+" already exists");
			// NB: removed an unused local that built a WMS GetMap link but never used it
			return name;
		} catch (IllegalArgumentException | MalformedURLException e) {
			throw new SDIInteractionException("Unable to create layer "+name,e);
		}
	}

	/**
	 * Deletes everything materialized for the given content: the datastore (with
	 * its layers), the workspace when left empty, and the transferred files.
	 *
	 * @param toDelete descriptor of the content to remove
	 * @throws IllegalArgumentException if the owning GeoServer cannot be found
	 * @throws MalformedURLException    if a cached GeoServer URL is malformed
	 * @throws RemoteServiceException   if the remote file deletion fails
	 */
	public void deleteContent(GeoServerContent toDelete) throws IllegalArgumentException, MalformedURLException, RemoteServiceException {
		log.info("Deleting geoserver layer "+toDelete);
		String geoserverHostName=toDelete.getGeoserverHostName();
		log.debug("Looking for geoserver {}",geoserverHostName);
		AbstractGeoServerDescriptor geoServerDescriptor=null;
		// Look the owning GeoServer up by host name among the cached descriptors
		for(AbstractGeoServerDescriptor gs :gis.getCurrentCacheElements(false)){
			log.debug("Checking gs {}",gs);
			if(new URL(gs.getUrl()).getHost().equals(geoserverHostName))
				geoServerDescriptor=gs;
		}
		if(geoServerDescriptor == null) throw new IllegalArgumentException("Unable to find geoserver "+geoserverHostName);

		GeoServerRESTPublisher publisher=geoServerDescriptor.getPublisher();
		// Removing the datastore recursively also drops its layers
		log.debug("Removing DS {} : {} ",toDelete.getWorkspace(),toDelete.getStore());
		publisher.removeDatastore(toDelete.getWorkspace(), toDelete.getStore(), true);

		//delete WS if empty
		GeoServerRESTReader reader=geoServerDescriptor.getReader();
		log.debug("Checking if WS {} is empty",toDelete.getWorkspace());
		if(reader.getDatastores(toDelete.getWorkspace()).isEmpty()) {
			log.debug("Deleting empty workspace "+toDelete.getWorkspace());
			publisher.removeWorkspace(toDelete.getWorkspace(), true);
		}

		//delete transferred files
		// TODO REMOVE HARDCODED PATCH
		String path=toDelete.getGeoserverPath().replace("/srv/geoserver_data","geoserver");
		log.info("Deleting files at {} [{}]",path,toDelete.getGeoserverPath());
		dtGeoServer.getWebClient().delete(path);
	}
}

View File

@ -1,510 +0,0 @@
package org.gcube.application.geoportal.service.engine.mongo;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import org.bson.Document;
import org.bson.types.ObjectId;
import org.gcube.application.geoportal.common.faults.PathException;
import org.gcube.application.geoportal.common.faults.StorageException;
import org.gcube.application.geoportal.common.model.legacy.*;
import org.gcube.application.geoportal.common.model.legacy.report.ValidationReport;
import org.gcube.application.geoportal.common.model.legacy.report.ValidationReport.ValidationStatus;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
import org.gcube.application.geoportal.common.model.rest.QueryRequest;
import org.gcube.application.geoportal.common.model.rest.TempFile;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.common.utils.StorageUtils;
import org.gcube.application.cms.implementations.ImplementationProvider;
import org.gcube.application.geoportal.service.engine.materialization.SDIManager;
import org.gcube.application.cms.implementations.WorkspaceManager;
import org.gcube.application.cms.implementations.WorkspaceManager.FileOptions;
import org.gcube.application.cms.implementations.WorkspaceManager.FolderOptions;
import org.gcube.application.geoportal.service.engine.postgis.PostgisIndex;
import org.gcube.application.geoportal.service.model.internal.faults.*;
import org.gcube.application.cms.serialization.Serialization;
import org.gcube.application.geoportal.service.utils.UserUtils;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.sql.SQLException;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.function.Consumer;
@Slf4j
public class ConcessioniMongoManager extends MongoManager{
/**
 * Binds this manager to the legacy Concessione collection.
 *
 * @throws ConfigurationException if the underlying Mongo connection cannot be configured
 */
public ConcessioniMongoManager() throws ConfigurationException {
	super();
	init(collectionName);
}
// Name of the Mongo collection holding legacy Concessione documents.
private static final String collectionName="legacyConcessioni";
// NOTE(review): a commented-out getDatabase() override (per-manager MongoDatabase cache)
// previously lived here; it appears superseded by the base-class implementation.
/**
 * Serializes a Concessione into its BSON Document form, carrying the
 * object's mongo id (when set) over into the document's ID field.
 *
 * @param c the Concessione to serialize
 * @return the BSON representation of {@code c}
 * @throws JsonProcessingException if JSON serialization fails
 */
protected static Document asDocument (Concessione c) throws JsonProcessingException {
	Document doc = Document.parse(Serialization.write(c));
	String mongoId = c.getMongo_id();
	if (mongoId != null && !mongoId.isEmpty()) {
		doc.append(ID, asId(mongoId));
	}
	return doc;
}
/**
 * Deserializes a BSON Document back into a Concessione.
 *
 * @param d the stored document
 * @return the deserialized Concessione
 * @throws IOException or JsonProcessingException on deserialization failures
 */
protected static Concessione asConcessione (Document d) throws JsonProcessingException, IOException {
	String json = d.toJson();
	return Serialization.read(json, Concessione.class);
}
/** @return the BSON field name used as primary key by this manager. */
@Override
protected String mongoIDFieldName() {
	return ID;
}
/****************************** PUBLIC METHODS ***********************/
/**
 * Registers a brand-new Concessione: applies accounting data and defaults,
 * inserts the document, then re-stores it so that the generated mongo id is
 * also embedded in the document body.
 *
 * @param toRegister the Concessione to persist
 * @return the persisted Concessione carrying its generated mongo id
 * @throws IOException on (de)serialization failures
 */
public Concessione registerNew(Concessione toRegister) throws IOException {
	log.trace("Going to register {} ",toRegister);
	toRegister=onUpdate(toRegister);
	log.trace("Concessione with defaults is {}",toRegister);
	ObjectId id=insertDoc(asDocument(toRegister));
	log.trace("Obtained id {}",id);
	// Re-read the inserted document and copy the generated id into the object...
	Concessione toReturn=asConcessione(getDocById(id));
	toReturn.setMongo_id(asString(id));
	// ...then replace the stored document so the id is persisted inside the body too.
	toReturn = asConcessione(replaceDoc(asDocument(toReturn),id));
	log.debug("Registered {} ",toReturn);
	return toReturn;
}
/**
 * Replaces the stored document matching {@code toRegister}'s mongo id with the
 * given instance, after refreshing its accounting fields and defaults.
 *
 * @param toRegister the Concessione to store; must carry a valid mongo id
 * @return the stored Concessione as re-read from the database
 * @throws IOException on (de)serialization failures
 */
public Concessione replace(Concessione toRegister) throws IOException {
	log.trace("Replacing {} ",toRegister);
	Concessione updated = onUpdate(toRegister);
	ObjectId targetId = new ObjectId(updated.getMongo_id());
	Document stored = replaceDoc(asDocument(updated), targetId);
	return asConcessione(stored);
}
/**
 * Lists every document of the collection as a Concessione.
 * Documents that cannot be deserialized are logged and skipped,
 * so the returned iterable only contains readable records.
 *
 * @return the successfully-deserialized Concessione instances
 */
public Iterable<Concessione> list(){
	// Typed queue (the original used a raw LinkedBlockingQueue, losing type safety)
	LinkedBlockingQueue<Concessione> queue=new LinkedBlockingQueue<>();
	iterateDoc(null,null).forEach((Consumer<Document>) d -> {
		try {
			queue.put(asConcessione(d));
		}catch(Throwable t) {
			// best effort: skip unreadable documents instead of failing the whole listing
			log.error("Unable to read Document as concessione ",t);
			log.debug("Document was "+d.toJson());
		}
	});
	return queue;
}
/**
 * Searches the collection with the given BSON filter, returning deserialized
 * Concessione records. Unreadable documents are logged and skipped.
 *
 * @param filter the Mongo query filter
 * @return the matching, successfully-deserialized Concessione instances
 */
public Iterable<Concessione> search(Document filter){
	log.info("Searching concessione for filter {} ",filter);
	// Typed queue (the original used a raw LinkedBlockingQueue, losing type safety)
	LinkedBlockingQueue<Concessione> queue=new LinkedBlockingQueue<>();
	iterateDoc(filter,null).forEach((Consumer<? super Document>) (Document d)->{try{
		queue.put(asConcessione(d));
	}catch(Throwable t){log.warn("Unable to translate "+d);}});
	log.info("Returned {} elements ",queue.size());
	return queue;
}
/**
 * Runs a raw query and returns the matching documents untranslated.
 *
 * @param queryRequest the query descriptor
 * @return the matching raw BSON documents
 */
public Iterable<Document> query(QueryRequest queryRequest){
	log.info("Searching concessione for filter {} ",queryRequest);
	// FIX: queue must hold Document — the original declared LinkedBlockingQueue<Concessione>
	// but enqueued raw Documents; the raw-typed variable hid the mismatch.
	LinkedBlockingQueue<Document> queue=new LinkedBlockingQueue<>();
	queryDoc(queryRequest).forEach((Consumer<? super Document>) (Document d)->{try{
		queue.put(d);
	}catch(Throwable t){log.warn("Unable to translate "+d);}});
	log.info("Returned {} elements ",queue.size());
	return queue;
}
/**
 * Loads a single Concessione by its mongo id.
 *
 * @param id the mongo id (hex string)
 * @return the deserialized Concessione
 * @throws IOException on deserialization failures
 */
public Concessione getById(String id)throws IOException {
	log.debug("Loading by ID "+id);
	Document stored = getDocById(asId(id));
	return asConcessione(stored);
}
/**
 * Deletes a Concessione: un-publishes its layers, cleans its workspace content,
 * then drops the document. If any stage leaves the record in a non-PASSED state
 * and {@code force} is false, the partially-updated record is stored back and a
 * DeletionException is raised.
 *
 * @param id    mongo id of the record to delete
 * @param force when true, proceed even if unpublish/cleanup did not fully succeed
 * @throws DeletionException if deletion cannot be completed
 */
public void deleteById(String id,boolean force) throws DeletionException {
	log.debug("Deleting by ID {}, force {}",id,force);
	try{
		Concessione concessione =unpublish(id);
		try{
			// UNPUBLISH: refuse to continue on a non-PASSED report unless forced
			if (!concessione.getReport().getStatus().equals(ValidationStatus.PASSED)&&!force)
				throw new DeletionException("Unable to unpublish "+concessione.getMongo_id());
			//clean WS
			concessione = removeContent(concessione);
			if (!concessione.getReport().getStatus().equals(ValidationStatus.PASSED)&&!force)
				throw new DeletionException("Unable to clean "+concessione.getMongo_id());
			deleteDoc(asId(id));
		}catch(DeletionException e) {
			// storing updated - partially deleted record, so progress is not lost
			log.error("Error while trying to delete",e);
			concessione=onUpdate(concessione);
			replaceDoc(asDocument(concessione),new ObjectId(concessione.getMongo_id()));
			throw e;
		}
	}catch(Throwable t){
		log.error("Unable to delete "+id,t);
		throw new DeletionException("Unable to delete "+id,t);
	}
}
/**
 * Un-publishes a Concessione by id: removes its centroid from the index,
 * removes its GIS materializations, then stores the updated record back.
 *
 * @param id mongo id of the record
 * @return the updated, stored Concessione
 * @throws DeletionException if any step fails
 */
public Concessione unpublish(String id) throws DeletionException {
	try{
		Concessione toReturn=asConcessione(getDocById(asId(id)));
		removeFromIndex(toReturn);
		log.debug("Removed from centroids "+toReturn.getMongo_id());
		// Delegate to the static routine that strips GIS materializations
		toReturn = unpublish(toReturn);
		log.debug("Concessione after unpublishing is "+toReturn);
		toReturn = onUpdate(toReturn);
		return asConcessione(replaceDoc(asDocument(toReturn),new ObjectId(toReturn.getMongo_id())));
	}catch(Throwable t){
		throw new DeletionException("Unable to unpublish "+id,t);
	}
}
/**
 * Publishes a Concessione by id: refreshes accounting/defaults, un-publishes any
 * previous materialization, validates, materializes layers on the SDI and indexes
 * the centroid, then stores the updated record back.
 *
 * @param id mongo id of the record
 * @return the published, stored Concessione
 * @throws IOException on (de)serialization failures
 */
public Concessione publish(String id) throws IOException{
	Concessione toReturn=asConcessione(getDocById(asId(id)));
	toReturn=onUpdate(toReturn);
	// try unpublish first, so re-publishing starts from a clean state
	toReturn = unpublish(toReturn);
	toReturn.validate();
	// MATERIALIZE LAYERS
	toReturn=publish(toReturn);
	// CREATE INDEXES
	toReturn=index(toReturn);
	return asConcessione(replaceDoc(asDocument(toReturn),new ObjectId(toReturn.getMongo_id())));
}
/**
 * Clears the file-set registered at the given content path, deleting the backing
 * workspace files. Refuses to operate on published content (GeoServerContent).
 *
 * @param id          mongo id of the Concessione
 * @param toClearPath content path within the Concessione to clear
 * @return the updated, stored Concessione
 * @throws Exception if the content is published or clearing fails
 */
public Concessione unregisterFileset(String id, String toClearPath) throws Exception {
	log.info("Clearing Fileset at {} for {} ",toClearPath,id);
	try {
		WorkspaceManager ws=new WorkspaceManager();
		Concessione c = getById(id);
		AssociatedContent toClearContent=c.getContentByPath(toClearPath);
		log.debug("Found content {} for path {}",toClearContent,toClearPath);
		// Published content must be un-published before its files can be removed
		for(PersistedContent persisted : toClearContent.getActualContent()){
			if(persisted instanceof GeoServerContent) throw new Exception ("Cannot clear concessione "+id+" at "+toClearContent+", because it is published.");
		}
		// Delete the workspace copies of the files
		for(PersistedContent persisted : toClearContent.getActualContent()){
			if(persisted instanceof WorkspaceContent) ws.deleteFromWS((WorkspaceContent) persisted);
		}
		toClearContent.getActualContent().clear();
		c=onUpdate(c);
		return asConcessione(replaceDoc(asDocument(c),new ObjectId(c.getMongo_id())));
	}catch(Exception e) {
		throw new Exception("Unable to unregister files.",e);
	}
}
/**
 * Stores the given temp files under the content section addressed by
 * {@code destinationPath}, creating the Concessione's base workspace folder
 * on first use, then persists the updated record.
 *
 * @param id              mongo id of the Concessione
 * @param destinationPath content path identifying the target section
 * @param files           temp files (already uploaded to storage) to attach
 * @return the updated, stored Concessione
 * @throws Exception if storing or persisting fails
 */
public Concessione persistContent(String id, String destinationPath, List<TempFile> files) throws Exception{
	// FIX: the log format had only two placeholders for three arguments, dropping the id
	log.info("Persisting {} files for path {} in concessione {}",files.size(),destinationPath,id);
	try{
		Concessione c = getById(id);
		StorageUtils storage=ImplementationProvider.get().getProvidedObjectByClass(StorageUtils.class);
		WorkspaceManager ws=new WorkspaceManager();
		// Check / init the base folder, created lazily on first persisted content
		FolderContainer baseFolder=null;
		if(c.getFolderId()==null) {
			String folderName=Files.fixFilename("mConcessione"+"_"+c.getNome()+"_"+Serialization.FULL_FORMATTER.format(LocalDateTime.now()));
			log.info("Creating folder {} for Concessione ID {} ",folderName,id);
			FolderContainer folder=ws.createFolder(new FolderOptions(folderName, "Base Folder for "+c.getNome(),null));
			c.setFolderId(folder.getId());
		}
		log.debug("Folder id is : "+c.getFolderId());
		baseFolder=ws.getFolderById(c.getFolderId());
		AssociatedContent section=c.getContentByPath(destinationPath);
		log.debug("Found section {} for path {}",section,destinationPath);
		store(section,files,ws,storage,baseFolder);
		c=onUpdate(c);
		return asConcessione(replaceDoc(asDocument(c),new ObjectId(c.getMongo_id())));
	}catch(Exception e) {
		throw new Exception("Unable to save file.",e);
	}
}
/************************** STATIC ROUTINES *******************************/
/**
 * Sets accounting data (last-update time and user) and applies defaults.
 * The user lookup is best-effort: failures are logged and ignored.
 *
 * @param c the Concessione to refresh (mutated in place)
 * @return the same instance, for chaining
 */
private static Concessione onUpdate(Concessione c){
	log.debug("Updating Account data for {} ",c);
	// SET ACCOUNTING
	c.setLastUpdateTime(LocalDateTime.now());
	try{
		c.setLastUpdateUser(UserUtils.getCurrent().getUser().getId());
	}catch(Throwable t){
		// a missing caller identity must not block the update
		log.warn("Unable to get User details ",t);
	}
	log.debug("Updating defaults for {}",c);
	// Set Defaults
	c.setDefaults();
	return c;
}
/**
 * Registers the record's centroid in the Postgis index. Failures are downgraded
 * to a WARNING so indexing never blocks publication.
 *
 * NOTE(review): the local report is assembled but never attached to the returned
 * record — confirm whether this is intentional.
 *
 * @param record the Concessione to index
 * @return the same record
 */
private static Concessione index(Concessione record) {
	log.info("Indexing {} ",record.getId());
	ValidationReport report= new ValidationReport("Index Report ");
	PostgisIndex index;
	try {
		index = new PostgisIndex();
		index.registerCentroid(record);
		report.addMessage(ValidationStatus.PASSED, "Registered centroid");
	} catch (SDIInteractionException | PublishException | SQLException | ConfigurationException e) {
		log.error("Unable to index {} ",record,e);
		report.addMessage(ValidationStatus.WARNING, "Internal error while indexing.");
	}
	return record;
}
/**
 * Removes the record's centroid from the Postgis index. Failures are downgraded
 * to a WARNING so de-indexing never blocks the caller.
 *
 * NOTE(review): as in index(), the local report is assembled but never attached
 * to the returned record — confirm whether this is intentional.
 *
 * @param record the Concessione to de-index
 * @return the same record
 */
private static Concessione removeFromIndex(Concessione record) {
	log.info("Removing from index {} ",record.getMongo_id());
	ValidationReport report= new ValidationReport("Remove From Index Report ");
	PostgisIndex index;
	try {
		index = new PostgisIndex();
		index.removeCentroid(record);
		report.addMessage(ValidationStatus.PASSED, "Removed centroid");
	} catch (SDIInteractionException | SQLException | ConfigurationException e) {
		log.error("Unable to remove from index {} ",record,e);
		report.addMessage(ValidationStatus.WARNING, "Internal error while removing from index.");
	}
	return record;
}
/**
 * Materializes the record's layers (posizionamento scavo + piante fine scavo)
 * on the SDI under a per-record workspace, attaching the resulting
 * GeoServerContent to each layer and a publish report to the record.
 *
 * @param conc the Concessione to publish
 * @return the same record, with its report set
 */
private static Concessione publish(Concessione conc) {
	// CHECK CONDITION BY PROFILE
	log.debug("Publishing "+conc.getNome());
	ValidationReport report=new ValidationReport("Publish report");
	try {
		SDIManager sdiManager=new SDIManager();
		ArrayList<AssociatedContent> list=new ArrayList<AssociatedContent>();
		//Concessione
		String workspace= sdiManager.createWorkspace("gna_conc_"+conc.getMongo_id());
		list.add(conc.getPosizionamentoScavo());
		list.addAll(conc.getPianteFineScavo());
		for(AssociatedContent c:list) {
			if(c instanceof LayerConcessione) {
				try {
					List<PersistedContent> p=c.getActualContent();
					GeoServerContent geoserverPersisted=sdiManager.pushShapeLayerFileSet((SDILayerDescriptor)c, workspace, conc.getMongo_id());
					p.add(geoserverPersisted);
					c.setActualContent(p);
					// FIX: report PASSED only when the push succeeded — previously this
					// message was added even after a WARNING for the same failed layer.
					report.addMessage(ValidationStatus.PASSED, "Pubblicato layer "+c.getTitolo());
				}catch(SDIInteractionException e) {
					log.warn("Unable to publish layers.",e);
					report.addMessage(ValidationStatus.WARNING, "Layer "+c.getTitolo()+" non pubblicato.");
				}
			}
		}
	} catch (SDIInteractionException e1) {
		report.addMessage(ValidationStatus.WARNING, "Unable to publish layers "+e1.getMessage());
	}
	conc.setReport(report);
	return conc;
}
/**
 * Removes all GIS materializations for the record's layers: deletes the
 * GeoServer content, resets the layers' GIS coordinates (bbox, WMS link,
 * workspace, layer name) and the record's centroid, and attaches an
 * unpublish report to the record. Per-content failures become WARNINGs.
 *
 * @param concessione the record to strip of GIS materializations (mutated in place)
 * @return the same record, with its report set
 */
private static final Concessione unpublish(Concessione concessione){
	ValidationReport report=new ValidationReport("Unpublish report");
	try{
		SDIManager sdi=new SDIManager();
		ArrayList<AssociatedContent> list=new ArrayList<AssociatedContent>();
		list.add(concessione.getPosizionamentoScavo());
		list.addAll(concessione.getPianteFineScavo());
		for(AssociatedContent c:list) {
			if(c instanceof LayerConcessione) {
				List<PersistedContent> contents=c.getActualContent();
				List<PersistedContent> toRemove=new ArrayList<>();
				for(PersistedContent p:contents){
					if(p instanceof GeoServerContent){
						try {
							sdi.deleteContent((GeoServerContent) p);
							toRemove.add(p);
						} catch (Throwable t) {
							// keep going: one undeletable layer must not abort the whole unpublish
							report.addMessage(ValidationStatus.WARNING,"Cannot delete "+((GeoServerContent) p).getFeatureType());
						}
					}
				}
				// Remove GIS coordinates
				((LayerConcessione) c).setLayerID(null);
				((LayerConcessione) c).setBbox(null);
				((LayerConcessione) c).setWmsLink(null);
				((LayerConcessione) c).setWorkspace(null);
				((LayerConcessione) c).setLayerName(null);
				//Remove reference to removed content
				c.getActualContent().removeAll(toRemove);
			}
		}
		concessione.setCentroidLat(null);
		concessione.setCentroidLong(null);
	}catch(SDIInteractionException e){
		report.addMessage(ValidationStatus.WARNING, "Unable to unpublish layers "+e.getMessage());
	}
	concessione.setReport(report);
	return concessione;
}
/**
 * Persists the given temp files into the caller's workspace, under a folder
 * chosen by the concrete {@link AssociatedContent} section type, then registers
 * the resulting {@link WorkspaceContent} entries on the section and assigns it
 * a freshly generated mongo id.
 *
 * @param content the section being stored (mutated: actual content list + mongo id)
 * @param files   temp files previously uploaded to the storage service
 * @param ws      workspace facade used to create folders and store files
 * @param storage storage service used to resolve temp-file download URLs
 * @param base    parent folder of the whole record on the workspace
 * @throws PathException       if the content is not one of the supported section types
 * @throws StorageException    if a file cannot be transferred to the workspace
 * @throws StorageHubException on workspace (storagehub) folder failures
 */
private static final void store(AssociatedContent content,List<TempFile> files, WorkspaceManager ws, StorageUtils storage, FolderContainer base) throws StorageHubException, StorageException, PathException {
FolderContainer sectionParent=null;
// Pick the destination folder according to the section type
if(content instanceof RelazioneScavo)
sectionParent = ws .createFolder(new FolderOptions(
"relazione","Relazione di scavo : "+content.getTitolo(),base));
else if(content instanceof AbstractRelazione)
sectionParent = ws .createFolder(new FolderOptions(
"abstract_relazione","Abstract relazione di scavo : "+content.getTitolo(),base));
else if (content instanceof UploadedImage)
sectionParent = ws .createFolder(new FolderOptions(
"imgs","Immagini rappresentative : "+content.getTitolo(),base));
else if (content instanceof SDILayerDescriptor)
//SDI Section
// NOTE: un-braced nested if — the next "else" binds to the inner
// LayerConcessione check: any other SDI content type is rejected here
if(content instanceof LayerConcessione)
sectionParent = ws .createFolder(new FolderOptions(
content.getTitolo(),"Layer Concessione : "+content.getTitolo(),ws.getSubFolder(base,"layers")));
else throw new PathException("Invalid SDI Content "+content);
else if (content instanceof OtherContent )
sectionParent = ws .createFolder(new FolderOptions(
content.getTitolo(),"Relazione di scavo : "+content.getTitolo(),ws.getSubFolder(base,"other")));
else throw new PathException("Invalid Content "+content);
// Reset the registered contents before re-filling them below
content.setActualContent(new ArrayList<PersistedContent>());
for(TempFile f : files) {
InputStream is=null;
try{
log.debug("Opening temp file {}",f);
// Temp files are fetched back from the storage service by URL
String fileUrl=storage.getURL(f.getId());
log.debug("Got URL {} from ID {}",fileUrl,f.getId());
is=new URL(fileUrl).openStream();
WorkspaceContent wsContent=ws.storeToWS(new FileOptions(f.getFilename(),is,
"Imported via GeoPortal", sectionParent));
log.debug("Registered "+wsContent+" for "+content);
content.getActualContent().add(wsContent);
}catch(StorageHubException | IOException e){
throw new StorageException("Unable to store "+f,e);
}finally{
if(is!=null)
IOUtils.closeQuietly(is);
}
}
// New identity for the stored section
content.setMongo_id(asString(new ObjectId()));
}
/**
 * Deletes the workspace folder backing the given Concessione and clears every
 * reference to the removed content from the object itself.
 *
 * @param concessione the record whose materialized content must be removed
 * @return the same instance, with folder id nulled and section contents cleared
 * @throws DeletionException if the workspace folder cannot be removed
 */
private static Concessione removeContent(Concessione concessione) throws DeletionException {
    String folderId = concessione.getFolderId();
    if (folderId == null) {
        // Nothing was ever materialized for this record
        log.debug("No content for " + concessione.getMongo_id());
        return concessione;
    }
    try {
        log.debug("Removing content for " + concessione.getMongo_id());
        new WorkspaceManager().removeFolderById(folderId);
        //Removing references from Object
        concessione.setFolderId(null);
        ArrayList<AssociatedContent> sections = new ArrayList<>();
        sections.add(concessione.getPosizionamentoScavo());
        sections.addAll(concessione.getPianteFineScavo());
        sections.addAll(concessione.getImmaginiRappresentative());
        sections.addAll(concessione.getGenericContent());
        for (AssociatedContent section : sections) {
            section.getActualContent().clear();
        }
        return concessione;
    } catch (Throwable t) {
        throw new DeletionException("Unable to delete from WS ", t);
    }
}
}

View File

@ -1,202 +0,0 @@
package org.gcube.application.geoportal.service.engine.postgis;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.cms.implementations.ISInterface;
import org.gcube.application.cms.implementations.ImplementationProvider;
import org.gcube.application.geoportal.common.model.legacy.BBOX;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable;
import org.gcube.application.geoportal.service.model.internal.faults.DataParsingException;
import java.sql.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@Slf4j
public class PostgisDBManager implements PostgisDBManagerI {

	/**
	 * Factory: binds a manager to the "Concessioni" postgis database registered
	 * in the IS. Auto-commit is disabled: callers must invoke {@link #commit()}.
	 */
	public static PostgisDBManagerI get() throws SQLException, ConfigurationException {
		return get(false);
	}

	private static PostgisDBManagerI get(boolean autocommit) throws SQLException, ConfigurationException {
		// Resolve connection parameters from the Information System
		DatabaseConnection config = ImplementationProvider.get().getProvidedObjectByClass(ISInterface.class)
				.queryForDatabase("Database","postgis", "GNA_DB","Concessioni");
		log.debug("Found configuration : "+config);
		PostgisDBManager toReturn=new PostgisDBManager(config);
		toReturn.conn.setAutoCommit(autocommit);
		return toReturn;
	}

	/** Live JDBC connection, opened in the constructor and kept for the manager's lifetime. */
	private Connection conn=null;
	/** Descriptor this manager was created from (exposed via {@link #getConnectionDescriptor()}). */
	private DatabaseConnection config;

	private PostgisDBManager(DatabaseConnection config) throws SQLException {
		this.config=config;
		log.info("Opening connection to {}",config);
		conn= DriverManager.getConnection(config.getUrl(),config.getUser(),config.getPwd());
		//TODO configure behaviour
		conn.setAutoCommit(false);
	}

	/** Creates the given table (DDL comes from the table descriptor). */
	@Override
	public void create(PostgisTable toCreate) throws SQLException {
		String createStmt=toCreate.getCreateStatement();
		log.debug("Executing create : "+createStmt);
		// try-with-resources: the Statement was previously leaked
		try (Statement stmt = conn.createStatement()) {
			stmt.executeUpdate(createStmt);
		}
	}

	/** Commits the pending transaction (auto-commit is off by default). */
	@Override
	public void commit() throws SQLException {
		conn.commit();
	}

	/** Evaluates the centroid of all geometries stored in the given table. */
	@Override
	public PostgisTable.POINT evaluateCentroid(PostgisTable table) throws SQLException, DataParsingException {
		try (Statement stmt = conn.createStatement();
			 ResultSet rs = stmt.executeQuery("Select ST_AsText(ST_Centroid(ST_Collect("+table.getGeometryColumn()+"))) as centroid from "+table.getTablename())) {
			if(rs.next())
				return PostgisTable.POINT.parsePOINT(rs.getString("centroid"));
			else throw new SQLException("No extent returned");
		}
	}

	/**
	 * Prepares the INSERT statement for the given table, optionally creating the
	 * table first. The caller owns (and must close) the returned statement.
	 */
	@Override
	public PreparedStatement prepareInsertStatement(PostgisTable target, boolean createTable, boolean geometryAsText) throws SQLException {
		if(createTable) {
			create(target);
		}
		String insertStmt=target.getInsertionStatement(geometryAsText);
		log.debug("Preparing insert statement : "+insertStmt);
		return conn.prepareStatement(insertStmt);
	}

	/** Deletes all rows whose {@code field} equals {@code value}; returns the deleted count. */
	@Override
	public int deleteByFieldValue(PostgisTable target, PostgisTable.Field field, Object value) throws SQLException {
		String query=target.getDeleteByFieldStatement(field);
		log.debug("Preparing DELETE SQL {} with field {} = {} ",query,field,value);
		// try-with-resources: the PreparedStatement was previously leaked
		try (PreparedStatement stmt = conn.prepareStatement(query)) {
			target.setObjectInPreparedStatement(field, value, stmt, 1);
			int result=stmt.executeUpdate();
			log.debug("DELETED {} rows ",result);
			return result;
		}
	}

	@Override
	public DatabaseConnection getConnectionDescriptor() {
		return config;
	}

	/** Drops the given table. */
	@Override
	public void deleteTable(String tableName) throws SQLException {
		try (Statement stmt = conn.createStatement()) {
			stmt.executeUpdate("DROP TABLE "+tableName);
		}
	}

	/** Truncates the given table. */
	@Override
	public void truncate(String tableName) throws SQLException{
		try (Statement stmt = conn.createStatement()) {
			stmt.executeUpdate("TRUNCATE Table "+tableName);
		}
	}

	/**
	 * Returns all rows of the given table.
	 * NOTE: the backing Statement cannot be closed here because the ResultSet is
	 * handed to the caller; closing the ResultSet is the caller's responsibility.
	 */
	@Override
	public ResultSet queryAll(PostgisTable table) throws SQLException {
		// TODO Check schema
		return conn.createStatement().executeQuery("Select * from "+table.getTablename());
	}

	// *********************** INNER UTILS CLASS

	protected static class DBUtils {

		// Extracts numeric tokens (e.g. "11.912257") from a PostGIS BOX(...) literal.
		// NOTE(review): "(?!=...)" is a negative lookahead on a literal '=' — it looks
		// like a typo, but the trailing "([\d.]+)" group does all the matching; confirm.
		private static Pattern pattern = Pattern.compile("(?!=\\d\\.\\d\\.)([\\d.]+)");

		/**
		 * Parses a PostGIS ST_Extent textual result into a {@link BBOX}.
		 * Expected shape: BOX(minLong minLat,maxLong maxLat).
		 *
		 * @throws DataParsingException if fewer than four numbers are found or parsing fails
		 */
		public static BBOX parseST_Extent(String extent) throws DataParsingException {
			//BOX(11.9122574810083 44.2514144864263,11.9761128271586 44.2912342569845)
			try {
				log.debug("Parsing BBOX "+extent);
				Matcher m=pattern.matcher(extent);
				if(!m.find()) throw new DataParsingException("Unable to get minLong ");
				Double minLong=Double.parseDouble(m.group(1));
				if(!m.find()) throw new DataParsingException("Unable to get minLat ");
				Double minLat=Double.parseDouble(m.group(1));
				if(!m.find()) throw new DataParsingException("Unable to get maxLong ");
				Double maxLong=Double.parseDouble(m.group(1));
				if(!m.find()) throw new DataParsingException("Unable to get maxLat ");
				Double maxLat=Double.parseDouble(m.group(1));
				return new BBOX(maxLat, maxLong, minLat, minLong);
			}catch(Throwable t) {
				throw new DataParsingException("Invalid BBOX "+extent,t);
			}
		}
	}
}

View File

@ -1,37 +0,0 @@
package org.gcube.application.geoportal.service.engine.postgis;
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable;
import org.gcube.application.geoportal.service.model.internal.faults.DataParsingException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
 * Contract for managers of the postgis database backing the Concessioni
 * centroids index. Implementations hold a live connection with auto-commit
 * disabled, so callers must invoke {@link #commit()} after write operations.
 */
public interface PostgisDBManagerI {
/** Commits the pending transaction. */
void commit() throws SQLException;
/**
 * Prepares the INSERT statement for the given table, optionally creating the
 * table first. The caller owns (and must close) the returned statement.
 */
PreparedStatement prepareInsertStatement(PostgisTable target, boolean createTable, boolean geometryAsText)
throws SQLException;
/** Drops the given table. */
void deleteTable(String tableName) throws SQLException;
/** Truncates the given table. */
void truncate(String tableName) throws SQLException;
/** Creates the given table (DDL comes from the descriptor). */
void create(PostgisTable toCreate) throws SQLException;
/** Evaluates the centroid of all geometries stored in the given table. */
PostgisTable.POINT evaluateCentroid(PostgisTable table) throws SQLException, DataParsingException;
/** Returns all rows of the given table; the caller must close the ResultSet. */
ResultSet queryAll(PostgisTable table) throws SQLException;
/** Deletes rows whose {@code field} equals {@code value}; returns the deleted count. */
int deleteByFieldValue(PostgisTable target, PostgisTable.Field field, Object value) throws SQLException;
/** Returns the connection descriptor this manager was created from. */
DatabaseConnection getConnectionDescriptor();
}

View File

@ -1,187 +0,0 @@
package org.gcube.application.geoportal.service.engine.postgis;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.cms.implementations.ImplementationProvider;
import org.gcube.application.cms.serialization.Serialization;
import org.gcube.application.geoportal.common.model.legacy.Concessione;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
import org.gcube.application.geoportal.common.model.rest.PostgisIndexDescriptor;
import org.gcube.application.geoportal.service.engine.materialization.SDIManager;
import org.gcube.application.geoportal.service.model.internal.db.DBConstants;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable.Field;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable.FieldType;
import org.gcube.application.geoportal.service.model.internal.faults.PublishException;
import org.gcube.application.geoportal.service.model.internal.faults.SDIInteractionException;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
;
@Slf4j
public class PostgisIndex {
static{
try {
Class.forName("org.postgresql.Driver");
Class.forName("org.postgis.DriverWrapper");
} catch (Exception var2) {
throw new RuntimeException(var2);
}
}
private SDIManager sdiManager;
private String wmsLink=null;
private static PostgisDBManagerI getDB() throws ConfigurationException {
return ImplementationProvider.get().getProvidedObjectByClass(PostgisDBManagerI.class);
};
public PostgisIndex() throws SDIInteractionException, SQLException, ConfigurationException {
super();
this.sdiManager=new SDIManager();
this.wmsLink=init();
}
public PostgisIndexDescriptor getInfo() throws ConfigurationException, SDIInteractionException, SQLException {
DatabaseConnection conn=getDB().getConnectionDescriptor();
return new PostgisIndexDescriptor(conn,wmsLink);
}
protected PostgisTable getCentroidsTable() {
return DBConstants.Concessioni.CENTROIDS;
}
public String init() throws SQLException, ConfigurationException, SDIInteractionException {
log.debug("Contacting postgis DB .. ");
PostgisDBManagerI db=ImplementationProvider.get().getProvidedObjectByClass(PostgisDBManagerI.class);
log.debug("Checking if centroids table exists..");
PostgisTable table=getCentroidsTable();
db.create(table);
db.commit();
return sdiManager.configureCentroidLayer("centroids_concessioni", "gna", "gna_postgis",table,db.getConnectionDescriptor());
}
public void registerCentroid(Concessione record) throws PublishException{
try {
log.debug("Evaluating Centroid");
Map<String,String> centroidRow=evaluateCentroid(record);
log.debug("Contacting postgis DB .. ");
PostgisDBManagerI db=ImplementationProvider.get().getProvidedObjectByClass(PostgisDBManagerI.class);
PostgisTable centroidsTable=getCentroidsTable();
log.debug("Inserting / updated centroid Row {} ",centroidRow);
PreparedStatement ps = db.prepareInsertStatement(centroidsTable, true, true);
log.debug("Deleting centroid if present. ID is "+record.getId());
db.deleteByFieldValue(centroidsTable, new Field(DBConstants.Concessioni.PRODUCT_ID,FieldType.TEXT), record.getMongo_id());
centroidsTable.fillCSVPreparedStatament(centroidRow, ps, false);
ps.executeUpdate();
db.commit();
}catch(SQLException e) {
log.warn("Unable to publish Centroid for record "+record,e);
throw new PublishException("Unable to publish centroid.",e, null);
} catch (ConfigurationException e) {
log.warn("Unable to contact centroids db "+record.getRecordType(),e);
throw new PublishException("Unable to publish centroid.",e, null);
}
}
public void removeCentroid(Concessione record) {
try {
PostgisDBManagerI db=ImplementationProvider.get().getProvidedObjectByClass(PostgisDBManagerI.class);
PostgisTable centroidsTable=getCentroidsTable();
log.debug("Deleting centroid if present. ID is "+record.getMongo_id());
int result= db.deleteByFieldValue(centroidsTable, new Field(DBConstants.Concessioni.PRODUCT_ID,FieldType.TEXT), record.getMongo_id());
db.commit();
log.info("Removed {} entries from gif Index with mongo id {} ",result,record.getMongo_id());
}catch(Exception e) {
log.warn("Unable to remove centroid ",e);
}
}
protected static Map<String,String> evaluateCentroid(Concessione record){
// CENTROID
Map<String,String> centroidsRow=new HashMap<String, String>();
centroidsRow.put(DBConstants.Concessioni.PRODUCT_ID, record.getMongo_id());
centroidsRow.put(DBConstants.Concessioni.ANNO, record.getDataInizioProgetto().getYear()+"");
centroidsRow.put(DBConstants.Concessioni.NOME, record.getNome());
centroidsRow.put(DBConstants.Concessioni.REGIONE, ""); //TODO
if(record.getCentroidLat()==null||record.getCentroidLat()==0)
try {
log.debug("Evaluating Centroid latitude for record "+record);
record.setCentroidLat((record.getPosizionamentoScavo().getBbox().getMaxLat()+
record.getPosizionamentoScavo().getBbox().getMinLat())/2);
}catch (Throwable t) {
log.warn("Unable to evaluate centroid latitude "+t);
}
if(record.getCentroidLong()==null||record.getCentroidLong()==0)
try {
log.debug("Evaluating Centroid Longituted for record "+record);
record.setCentroidLong((record.getPosizionamentoScavo().getBbox().getMaxLong()+
record.getPosizionamentoScavo().getBbox().getMinLong())/2);
}catch (Throwable t) {
log.warn("Unable to evaluate centroid latitude "+t);
}
centroidsRow.put(DBConstants.Defaults.XCOORD_FIELD, record.getCentroidLong()+"");
centroidsRow.put(DBConstants.Defaults.YCOORD_FIELD, record.getCentroidLat()+"");
//Updated Schema
centroidsRow.put(DBConstants.Concessioni.DESCRIZIONE,record.getIntroduzione());
centroidsRow.put(DBConstants.Concessioni.CONTENUTO,record.getDescrizioneContenuto());
centroidsRow.put(DBConstants.Concessioni.AUTORE,asString(record.getAuthors()));
centroidsRow.put(DBConstants.Concessioni.CONTRIBUTORE,record.getContributore());
centroidsRow.put(DBConstants.Concessioni.TITOLARE,asString(record.getTitolari()));
centroidsRow.put(DBConstants.Concessioni.RESPONSABILE,record.getResponsabile());
centroidsRow.put(DBConstants.Concessioni.EDITORE,record.getEditore());
centroidsRow.put(DBConstants.Concessioni.FINANZIAMENTO,asString(record.getFontiFinanziamento()));
centroidsRow.put(DBConstants.Concessioni.SOGGETTO,asString(record.getSoggetto()));
centroidsRow.put(DBConstants.Concessioni.RISORSE,asString(record.getRisorseCorrelate()));
centroidsRow.put(DBConstants.Concessioni.DATE_SCAVO, Serialization.FULL_FORMATTER.format(record.getDataFineProgetto()));
centroidsRow.put(DBConstants.Concessioni.DATA_ARCHIVIAZIONE,Serialization.FULL_FORMATTER.format(record.getLastUpdateTime()));
centroidsRow.put(DBConstants.Concessioni.VERSIONE,record.getVersion());
centroidsRow.put(DBConstants.Concessioni.LICENZA,record.getLicenzaID());
centroidsRow.put(DBConstants.Concessioni.TITOLARE_LICENZA,asString(record.getTitolareLicenza()));
centroidsRow.put(DBConstants.Concessioni.ACCESSO,record.getPolicy().toString());
centroidsRow.put(DBConstants.Concessioni.PAROLE_CHIAVE,asString(record.getParoleChiaveLibere()));
return centroidsRow;
}
private static String asString(Collection<?> coll) {
if(coll==null||coll.isEmpty()) return "";
StringBuilder builder=new StringBuilder();
for(Object t : coll) {
builder.append(t.toString() +",");
}
return builder.substring(0, builder.lastIndexOf(","));
}
}

View File

@ -1,38 +0,0 @@
package org.gcube.application.geoportal.service.engine.providers;
import org.gcube.application.cms.caches.AbstractScopedMap;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
import org.gcube.application.geoportal.service.engine.postgis.PostgisDBManager;
import org.gcube.application.geoportal.service.engine.postgis.PostgisDBManagerI;
import java.sql.SQLException;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
/**
 * Scoped cache of {@link PostgisDBManagerI} instances, one per scope,
 * refreshed every 30 seconds.
 */
public class PostgisConnectionProvider extends AbstractScopedMap<PostgisDBManagerI> {

	public PostgisConnectionProvider() {
		super("Postgis connection descriptor cache");
		setTTL(Duration.of(30, ChronoUnit.SECONDS));
	}

	/** Creates a fresh manager for the scope identified by {@code key}. */
	@Override
	protected PostgisDBManagerI retrieveObject(String key) throws ConfigurationException {
		try {
			return PostgisDBManager.get();
		} catch (SQLException e) {
			// Surface DB failures as configuration problems to the cache layer
			throw new ConfigurationException(e);
		}
	}

	/** Nothing to release: the underlying connection lifecycle is managed elsewhere. */
	@Override
	protected void dispose(PostgisDBManagerI toDispose) {
	}

	/** No eager initialization required. */
	@Override
	public void init() {
	}
}

View File

@ -1,42 +0,0 @@
package org.gcube.application.geoportal.service.model.internal.faults;
import lombok.NonNull;
import org.gcube.spatial.data.gis.model.report.PublishResponse;
/**
 * Raised when publishing material to the GIS layer fails; optionally carries
 * the {@link PublishResponse} returned by the publishing backend.
 */
public class PublishException extends Exception {

	private static final long serialVersionUID = -1356876669436308224L;

	/**
	 * Response returned by the GIS publishing layer at failure time.
	 * NOTE(review): despite the @NonNull annotation, the Throwable-only
	 * constructor leaves this field null — confirm callers tolerate that.
	 */
	@NonNull
	private PublishResponse resp;

	public PublishException(PublishResponse resp) {
		super();
		this.resp=resp;
	}

	public PublishException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace,PublishResponse resp) {
		super(message, cause, enableSuppression, writableStackTrace);
		this.resp=resp;
	}

	public PublishException(String message, Throwable cause,PublishResponse resp) {
		super(message, cause);
		this.resp=resp;
	}

	public PublishException(String message,PublishResponse resp) {
		super(message);
		this.resp=resp;
	}

	public PublishException(Throwable cause) {
		super(cause);
	}

	/**
	 * Returns the publish response associated with this failure, if any.
	 * Added accessor: the field was previously write-only and unreachable by callers.
	 */
	public PublishResponse getResp() {
		return resp;
	}
}

View File

@ -1,38 +0,0 @@
package org.gcube.application.geoportal.service.model.internal.faults;
/**
 * Signals a failure while interacting with the Spatial Data Infrastructure
 * (e.g. GeoServer layer publication or deletion).
 */
public class SDIInteractionException extends Exception {

	private static final long serialVersionUID = 1L;

	public SDIInteractionException() {
		super();
	}

	public SDIInteractionException(String message) {
		super(message);
	}

	public SDIInteractionException(Throwable cause) {
		super(cause);
	}

	public SDIInteractionException(String message, Throwable cause) {
		super(message, cause);
	}

	public SDIInteractionException(String message, Throwable cause, boolean enableSuppression,
			boolean writableStackTrace) {
		super(message, cause, enableSuppression, writableStackTrace);
	}
}

View File

@ -1,236 +0,0 @@
package org.gcube.application.geoportal.service.rest;
import lombok.extern.slf4j.Slf4j;
import org.bson.Document;
import org.gcube.application.geoportal.common.model.legacy.Concessione;
import org.gcube.application.geoportal.common.model.rest.AddSectionToConcessioneRequest;
import org.gcube.application.geoportal.common.model.configuration.Configuration;
import org.gcube.application.geoportal.common.rest.InterfaceConstants;
import org.gcube.application.geoportal.service.engine.mongo.ConcessioniMongoManager;
import org.gcube.application.geoportal.service.model.internal.faults.DeletionException;
import org.gcube.application.cms.serialization.Serialization;
import org.json.JSONArray;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
@Path(InterfaceConstants.Methods.MONGO_CONCESSIONI)
@Slf4j
public class ConcessioniOverMongo {
@GET
@Path(InterfaceConstants.Methods.CONFIGURATION_PATH)
@Produces(MediaType.APPLICATION_JSON)
public Configuration getConfiguration(){
return new GuardedMethod<Configuration>(){
@Override
protected Configuration run() throws Exception, WebApplicationException {
Configuration toReturn = new Configuration();
//toReturn.setIndex(new PostgisIndex().getInfo());
log.info("Returning configuration {} ",toReturn);
return toReturn;
}
}.execute().getResult();
}
@PUT
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Concessione replace(Concessione c) {
return new GuardedMethod<Concessione> () {
@Override
protected Concessione run() throws Exception, WebApplicationException {
//Concessione c=Serialization.read(jsonString, Concessione.class);
ConcessioniMongoManager manager=new ConcessioniMongoManager();
manager.replace(c);
// return Serialization.write(manager.getById(c.getMongo_id()));
return manager.getById(c.getMongo_id());
}
}.execute().getResult();
}
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Concessione createNew(Concessione c) {
return new GuardedMethod<Concessione>() {
@Override
protected Concessione run() throws Exception, WebApplicationException {
ConcessioniMongoManager manager=new ConcessioniMongoManager();
return manager.registerNew(c);
}
}.execute().getResult();
}
@GET
@Produces(MediaType.APPLICATION_JSON)
public Iterable<Concessione> list() {
return new GuardedMethod<Iterable<Concessione>>() {
protected Iterable<Concessione> run() throws Exception ,WebApplicationException {
ConcessioniMongoManager manager=new ConcessioniMongoManager();
JSONArray toReturn=new JSONArray();
return manager.list();
};
}.execute().getResult();
}
// BY ID
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public Concessione getById(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id) {
return new GuardedMethod<Concessione>() {
@Override
protected Concessione run() throws Exception, WebApplicationException {
ConcessioniMongoManager manager=new ConcessioniMongoManager();
return manager.getById(id);
}
}.execute().getResult();
}
@DELETE
@Produces(MediaType.APPLICATION_JSON)
@Path("{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public void deleteById(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id,
@QueryParam(InterfaceConstants.Parameters.FORCE) Boolean forceOption) {
new GuardedMethod<Concessione> () {
@Override
protected Concessione run() throws Exception, WebApplicationException {
try{
Boolean force=(forceOption!=null)?forceOption:false;
ConcessioniMongoManager manager=new ConcessioniMongoManager();
manager.deleteById(id,force);
return null;
}catch(DeletionException e){
throw new WebApplicationException("Unable to delete "+id,e, Response.Status.EXPECTATION_FAILED);
}
}
}.execute();
}
@PUT
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Path("{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public Concessione update(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id,Concessione c) {
return new GuardedMethod<Concessione>() {
@Override
protected Concessione run() throws Exception, WebApplicationException {
ConcessioniMongoManager manager=new ConcessioniMongoManager();
c.setMongo_id(id);
return manager.replace(c);
}
}.execute().getResult();
}
@PUT
@Produces(MediaType.APPLICATION_JSON)
@Path("/{"+InterfaceConstants.Methods.PUBLISH_PATH+"}/{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public Concessione publish(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id) {
return new GuardedMethod<Concessione>() {
@Override
protected Concessione run() throws Exception, WebApplicationException {
ConcessioniMongoManager manager=new ConcessioniMongoManager();
return manager.publish(id);
}
}.execute().getResult();
}
@DELETE
@Path("/{"+InterfaceConstants.Methods.PUBLISH_PATH+"}/{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public Concessione unpublish(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id) {
log.info("Unpublishing {} ",id);
return new GuardedMethod<Concessione>() {
@Override
protected Concessione run() throws Exception, WebApplicationException {
ConcessioniMongoManager manager=new ConcessioniMongoManager();
return manager.unpublish(id);
}
}.execute().getResult();
}
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Path("/"+InterfaceConstants.Methods.REGISTER_FILES_PATH+"/{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public Concessione registerFile(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id, AddSectionToConcessioneRequest request) {
return new GuardedMethod<Concessione>() {
@Override
protected Concessione run() throws Exception, WebApplicationException {
log.info("Registering {} file(s) for {} Concessione ID {}",
request.getStreams().size(),
request.getDestinationPath(),id);
request.validate();
ConcessioniMongoManager manager=new ConcessioniMongoManager();
return manager.persistContent(id, request.getDestinationPath(), request.getStreams());
}
}.execute().getResult();
}
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Path("/"+InterfaceConstants.Methods.DELETE_FILES_PATH+"/{"+InterfaceConstants.Parameters.PROJECT_ID+"}")
public Concessione clearFileset(@PathParam(InterfaceConstants.Parameters.PROJECT_ID) String id, String path) {
return new GuardedMethod<Concessione>() {
@Override
protected Concessione run() throws Exception, WebApplicationException {
log.info("Clearing files of {} Concessione ID {}",path,id);
ConcessioniMongoManager manager=new ConcessioniMongoManager();
return manager.unregisterFileset(id,path);
}
}.execute().getResult();
}
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Path("/"+InterfaceConstants.Methods.SEARCH_PATH)
public Iterable<Concessione> search(String filter){
return new GuardedMethod<Iterable<Concessione>>() {
@Override
protected Iterable<Concessione> run() throws Exception, WebApplicationException {
ConcessioniMongoManager manager=new ConcessioniMongoManager();
return manager.search(Document.parse(filter));
}
}.execute().getResult();
}
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Path("/"+InterfaceConstants.Methods.QUERY_PATH)
public String query(String queryString){
return new GuardedMethod<String>() {
@Override
protected String run() throws Exception, WebApplicationException {
ConcessioniMongoManager manager=new ConcessioniMongoManager();
StringBuilder builder=new StringBuilder("[");
manager.query(Serialization.parseQuery(queryString))
.forEach(d->{builder.append(d.toJson()+",");});
if(builder.length()>1)
builder.deleteCharAt(builder.length()-1);
builder.append("]");
return builder.toString();
}
}.execute().getResult();
}
}

View File

@ -2,15 +2,13 @@ package org.gcube.application.geoportal.service.engine.caches;
import com.mongodb.MongoWaitQueueFullException;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.cms.implementations.ImplementationProvider;
import org.gcube.application.cms.tests.TokenSetter;
import org.gcube.application.cms.tests.model.concessioni.TestConcessioniModel;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
import org.gcube.application.geoportal.common.utils.StorageUtils;
import org.gcube.application.geoportal.common.utils.tests.GCubeTest;
import org.gcube.application.geoportal.service.BasicServiceTestUnit;
import org.gcube.application.cms.implementations.ImplementationProvider;
import org.gcube.application.geoportal.service.engine.mongo.ConcessioniMongoManager;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
import org.junit.Test;
import java.io.File;
@ -69,39 +67,39 @@ public class Caches extends BasicServiceTestUnit {
// @Test
/**
 * Stress test (disabled): hammers ConcessioniMongoManager.list() from up to
 * 1000 concurrent workers for 100 seconds to probe mongo connection-pool
 * exhaustion, then waits for all launched tasks to finish.
 * Requires the test infrastructure, hence the assumeTrue guard.
 */
public void mongoconnections() throws ConfigurationException, InterruptedException {
assumeTrue(GCubeTest.isTestInfrastructureEnabled());
TokenSetter.set(GCubeTest.getContext());
ExecutorService service = Executors.newFixedThreadPool(1000);
LocalDateTime start=LocalDateTime.now();
// Counters to reconcile submitted vs completed tasks at the end
AtomicLong executed = new AtomicLong(0);
AtomicLong launched = new AtomicLong(0);
//for 100 secs
while(Duration.between(start,LocalDateTime.now()).
compareTo(Duration.of(100, ChronoUnit.SECONDS))<0){
service.execute(new Runnable() {
@Override
public void run() {
try {
new ConcessioniMongoManager().list();
} catch (ConfigurationException e) {
e.printStackTrace();
} catch (MongoWaitQueueFullException e) {
// Expected under load: the pool's wait queue is saturated
log.info("Too many connections... ");
}finally{
executed.incrementAndGet();
// NOTE(review): InterruptedException is swallowed without re-interrupting — confirm acceptable for this stress test
try {Thread.sleep(500);} catch (InterruptedException i) {}
}
}
});
launched.incrementAndGet();
}
// NOTE(review): awaitTermination is polled BEFORE shutdown() is ever called,
// so each poll times out (2 min) until all launched tasks complete — confirm intended.
while (!service.awaitTermination(2, TimeUnit.MINUTES)) {
log.info("Waiting .. completed {}, out of {} ",executed.get(),launched.get());
if(executed.get()==launched.get()) service.shutdown();
}
}
// public void mongoconnections() throws ConfigurationException, InterruptedException {
// assumeTrue(GCubeTest.isTestInfrastructureEnabled());
// TokenSetter.set(GCubeTest.getContext());
// ExecutorService service = Executors.newFixedThreadPool(1000);
// LocalDateTime start=LocalDateTime.now();
// AtomicLong executed = new AtomicLong(0);
// AtomicLong launched = new AtomicLong(0);
// //for 100 secs
// while(Duration.between(start,LocalDateTime.now()).
// compareTo(Duration.of(100, ChronoUnit.SECONDS))<0){
// service.execute(new Runnable() {
// @Override
// public void run() {
// try {
// new ConcessioniMongoManager().list();
// } catch (ConfigurationException e) {
// e.printStackTrace();
// } catch (MongoWaitQueueFullException e) {
// log.info("Too many connections... ");
// }finally{
// executed.incrementAndGet();
// try {Thread.sleep(500);} catch (InterruptedException i) {}
// }
// }
// });
// launched.incrementAndGet();
// }
//
// while (!service.awaitTermination(2, TimeUnit.MINUTES)) {
// log.info("Waiting .. completed {}, out of {} ",executed.get(),launched.get());
// if(executed.get()==launched.get()) service.shutdown();
// }
// }
//@Test

View File

@ -1,79 +0,0 @@
package org.gcube.application.geoportal.service.engine.materialization;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.cms.tests.TokenSetter;
import org.gcube.application.cms.tests.model.concessioni.TestConcessioniModel;
import org.gcube.application.geoportal.common.model.legacy.Concessione;
import org.gcube.application.geoportal.common.model.legacy.GeoServerContent;
import org.gcube.application.geoportal.common.model.legacy.LayerConcessione;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.common.utils.tests.GCubeTest;
import org.gcube.application.geoportal.service.engine.postgis.PostgisIndex;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
import org.gcube.application.geoportal.service.model.internal.faults.SDIInteractionException;
import org.gcube.application.cms.serialization.Serialization;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import static org.junit.Assume.assumeTrue;
@Slf4j
public class SDITests extends GCubeTest {

    /** Sample JDBC connection string exercised by the pattern-extraction test. */
    private static final String JDBC_URL =
            "jdbc:postgresql://postgresql-srv-dev.d4science.org:5432/geoserver_dev_db";

    /**
     * Smoke test: constructing a {@link PostgisIndex} against the test
     * infrastructure must succeed without throwing (connection and
     * configuration lookup happen during construction).
     *
     * @throws SDIInteractionException on SDI access failure
     * @throws SQLException            on database access failure
     * @throws ConfigurationException  on IS configuration failure
     */
    @Test
    public void registerCentroidsLayer() throws SDIInteractionException, SQLException, ConfigurationException {
        assumeTrue(isTestInfrastructureEnabled());
        PostgisIndex index = new PostgisIndex();
        // new never returns null; the assertion documents that construction is the test
        Assert.assertNotNull(index);
    }

    /**
     * Verifies that {@code SDIManager}'s hostname, port and database-name
     * patterns each find a match inside a typical postgres JDBC URL.
     */
    @Test
    public void testRegexp() {
        Matcher hostMatcher = SDIManager.HOSTNAME_PATTERN.matcher(JDBC_URL);
        Assert.assertTrue(hostMatcher.find());
        log.info("HOST :\t{}", hostMatcher.group());

        Matcher portMatcher = SDIManager.PORT_PATTERN.matcher(JDBC_URL);
        Assert.assertTrue(portMatcher.find());
        log.info("PORT :\t{}", portMatcher.group());

        Matcher dbMatcher = SDIManager.DB_NAME_PATTERN.matcher(JDBC_URL);
        Assert.assertTrue(dbMatcher.find());
        log.info("DB :\t{}", dbMatcher.group());
    }

    // @Test
    // Disabled because dependent on external state: reads a previously
    // transferred Concessione from disk and pushes its layers to the SDI.
    public void registerLayer() throws IOException, SDIInteractionException {
        Concessione toTest = Serialization.read(Files.readFileAsString(
                new File(TestConcessioniModel.getBaseFolder(), "transferTest.json").getAbsolutePath(),
                Charset.defaultCharset()), Concessione.class);
        SDIManager manager = new SDIManager();

        // Layers to publish: the excavation positioning plus all end-of-dig plans
        List<LayerConcessione> toPush = new ArrayList<>();
        toPush.add(toTest.getPosizionamentoScavo());
        toPush.addAll(toTest.getPianteFineScavo());
        for (LayerConcessione l : toPush) {
            // timestamp suffix keeps repeated runs from colliding on workspace names
            GeoServerContent content = manager.pushShapeLayerFileSet(
                    l, toTest.getFolderId(), toTest.getMongo_id() + "_" + System.currentTimeMillis());
            log.info("Published {}", content);
        }
    }
}

View File

@ -1,29 +1,20 @@
package org.gcube.application.geoportal.service.engine.providers;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.FindOneAndReplaceOptions;
import com.mongodb.client.model.ReturnDocument;
import org.bson.Document;
import org.bson.types.ObjectId;
import org.gcube.application.cms.serialization.Serialization;
import org.gcube.application.cms.tests.TokenSetter;
import org.gcube.application.cms.tests.model.BasicTests;
import org.gcube.application.geoportal.common.model.JSONPathWrapper;
import org.gcube.application.geoportal.common.model.legacy.Concessione;
import org.gcube.application.geoportal.common.model.legacy.report.ValidationReport;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
import org.gcube.application.geoportal.common.utils.tests.GCubeTest;
import org.gcube.application.geoportal.service.engine.mongo.MongoManager;
import org.gcube.application.geoportal.service.engine.postgis.PostgisIndex;
import org.gcube.application.geoportal.service.model.internal.db.Mongo;
import org.junit.Test;
import java.util.function.Consumer;
import static com.mongodb.client.model.Filters.eq;
import static com.mongodb.client.model.Updates.combine;
import static com.mongodb.client.model.Updates.set;
import static junit.framework.TestCase.assertNotNull;
import static org.junit.Assume.assumeTrue;