SDI Indexing

parent 02ce058348
commit 2a41352748
@@ -6,14 +6,13 @@ import org.bson.BsonDocument;
 import org.bson.BsonString;
 import org.bson.BsonValue;
 import org.bson.Document;
-import org.gcube.application.cms.concessioni.sdi.SDIManager;
-import org.gcube.application.cms.concessioni.sdi.faults.SDIInteractionException;
-import org.gcube.application.cms.plugins.InitializablePlugin;
+import org.gcube.application.cms.plugins.IndexerPluginInterface;
 import org.gcube.application.cms.plugins.MaterializationPlugin;
 import org.gcube.application.cms.plugins.PluginManagerInterface;
 import org.gcube.application.cms.plugins.faults.EventException;
 import org.gcube.application.cms.plugins.reports.*;
 import org.gcube.application.cms.plugins.requests.EventExecutionRequest;
+import org.gcube.application.cms.plugins.requests.IndexDocumentRequest;
 import org.gcube.application.cms.plugins.requests.MaterializationRequest;
 import org.gcube.application.cms.serialization.Serialization;
 import org.gcube.application.cms.custom.gna.concessioni.model.ProfiledConcessione;
@@ -38,6 +37,11 @@ import org.gcube.application.geoportal.common.utils.Files;
 @Slf4j
 public class ConcessioniLifeCycleManager implements LifecycleManager {

+    private static class Phases {
+        public static final String PENDING_APPROVAL="Pending Approval";
+    }
+
+
     private static final PluginDescriptor DESCRIPTOR=new PluginDescriptor("GNA-CONCESSIONI-LC", PluginDescriptor.BaseTypes.LIFECYCLE_MANAGER);
     static {
         DESCRIPTOR.setDescription("GNA Concessioni. This plugin supports custom lifecycle management for the GNA Concessioni UseCase.");
@@ -50,6 +54,8 @@ public class ConcessioniLifeCycleManager implements LifecycleManager {
     // TODO DISCOVER
     MaterializationPlugin plugin;

+    IndexerPluginInterface indexerPlugin;
+
     @Override
     public InitializationReport initInContext() throws InitializationException {
         InitializationReport report = new InitializationReport();
@@ -69,7 +75,7 @@ public class ConcessioniLifeCycleManager implements LifecycleManager {
         try{
             report.setStatus(Report.Status.OK);
             plugin= (MaterializationPlugin) pluginManager.getById("SDI-Default-Materializer");
-            if(plugin == null ) throw new InitializationException("SDI-Default-Materializer not found");
+            indexerPlugin = (IndexerPluginInterface) pluginManager.getById("SDI-Indexer-Plugin");
         } catch (Throwable e) {
             log.error("Unable to initialize plugins ",e);
             report.setStatus(Report.Status.WARNING);
@@ -114,11 +120,14 @@ public class ConcessioniLifeCycleManager implements LifecycleManager {
         MaterializationReport matRep = plugin.materialize(matReq);

+        report.setResultingDocument(matRep.getResultingDocument());
         switch(matRep.getStatus()){
             case OK : {
-                info.setPhase("Pending Approval");
+                info.setPhase(Phases.PENDING_APPROVAL);

                 //TODO Index-confidential

                 //TODO Notifications

                 break;
@@ -145,6 +154,38 @@ public class ConcessioniLifeCycleManager implements LifecycleManager {

             case "APPROVE DRAFT":{
                 // Index-published
+
+                if(!request.getDocument().getLifecycleInformation().getPhase()
+                        .equals(Phases.PENDING_APPROVAL))
+                    throw new StepException("Document is not in "+Phases.PENDING_APPROVAL+" phase");
+
+                IndexDocumentRequest indexRequest = new IndexDocumentRequest();
+                indexRequest.setDocument(request.getDocument());
+                indexRequest.setProfile(request.getProfile());
+                Document callParameters = new Document();
+                callParameters.put("workspace","gna_concessioni_"+request.getContext());
+                callParameters.put("indexName","gna_concessioni_centroids_"+request.getContext());
+                indexRequest.setCallParameters(callParameters);
+
+                IndexDocumentReport indexReport = indexerPlugin.index(indexRequest);
+
+                switch(indexReport.getStatus()){
+                    case OK : {
+                        info.setPhase("PUBLISHED");
+                        break;
+                    }
+                    case ERROR : {
+                        info.setLastOperationStatus(LifecycleInformation.Status.ERROR);
+                        indexReport.getMessages().forEach(s -> info.addErrorMessage(s));
+                        break;
+                    }
+                    case WARNING : {
+                        info.setLastOperationStatus(LifecycleInformation.Status.WARNING);
+                        indexReport.getMessages().forEach(s -> info.addWarningMessage(s));
+                        break;
+                    }
+                }
                 break;
             }
@@ -170,6 +211,11 @@ public class ConcessioniLifeCycleManager implements LifecycleManager {
         log.info("Executing Event {}",request);
         EventExecutionReport report=new EventExecutionReport();
         report.setTheRequest(request);
+        if(report.getToSetLifecycleInformation()==null){
+            LifecycleInformation lc=new LifecycleInformation();
+            report.setStatus(Report.Status.OK);
+            report.setToSetLifecycleInformation(lc);
+        }

         TriggeredEvents info=new TriggeredEvents();
@@ -1,282 +0,0 @@
(removed file: org.gcube.application.cms.concessioni.sdi.SDIManager)

package org.gcube.application.cms.concessioni.sdi;

import it.geosolutions.geoserver.rest.GeoServerRESTPublisher;
import it.geosolutions.geoserver.rest.GeoServerRESTReader;
import it.geosolutions.geoserver.rest.decoder.RESTFeatureType;
import it.geosolutions.geoserver.rest.decoder.RESTLayer;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.bson.Document;
import org.gcube.application.cms.concessioni.sdi.faults.SDIInteractionException;
import org.gcube.application.cms.plugins.requests.BaseExecutionRequest;
import org.gcube.application.cms.serialization.Serialization;
import org.gcube.application.geoportal.common.model.document.filesets.GCubeSDILayer;
import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFile;
import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFileSet;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.data.transfer.library.DataTransferClient;
import org.gcube.data.transfer.library.TransferResult;
import org.gcube.data.transfer.model.Destination;
import org.gcube.data.transfer.model.DestinationClashPolicy;
import org.gcube.spatial.data.gis.GISInterface;
import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

@Slf4j
public class SDIManager {

    static private final String EPSG_4326="EPSG:4326";
    static private final String WGS84_FULL="GEOGCS[\"WGS 84\", DATUM[\"World Geodetic System 1984\", SPHEROID[\"WGS 84\", 6378137.0, 298.257223563, AUTHORITY[\"EPSG\",\"7030\"]],"+
            "AUTHORITY[\"EPSG\",\"6326\"]], PRIMEM[\"Greenwich\", 0.0, AUTHORITY[\"EPSG\",\"8901\"]], UNIT[\"degree\", 0.017453292519943295],"+
            "AXIS[\"Geodetic longitude\", EAST], AXIS[\"Geodetic latitude\", NORTH], AUTHORITY[\"EPSG\",\"4326\"]]";

    public static final Pattern HOSTNAME_PATTERN=Pattern.compile("(?<=\\:\\/\\/)[^\\:]*");
    public static final Pattern PORT_PATTERN=Pattern.compile("(?<=\\:)[\\d]+");
    public static final Pattern DB_NAME_PATTERN=Pattern.compile("(?<=\\/)[^\\/]*(?=$)");

    private final GISInterface gis;
    @Getter
    private final DataTransferClient dtGeoServer;
    @Getter
    private final String geoserverHostName;
    @Getter
    private final AbstractGeoServerDescriptor currentGeoserver;

    public SDIManager() throws SDIInteractionException {
        try{
            log.debug("Initializing GIS Interface..");
            gis=GISInterface.get();
            currentGeoserver=gis.getCurrentGeoServer();
            if(currentGeoserver==null)
                throw new Exception("Unable to contact data transfer for geoserver ");

            log.debug("Found geoserver descriptor "+currentGeoserver);
            geoserverHostName=new URL(currentGeoserver.getUrl()).getHost();

            log.debug("Contacting Data Transfer from geoserver {} ",geoserverHostName);
            dtGeoServer=DataTransferClient.getInstanceByEndpoint("https://"+geoserverHostName);
            if(!currentGeoserver.getReader().existGeoserver())
                throw new Exception("Geoserver not reachable");
        }catch(Exception e) {
            throw new SDIInteractionException("Unable to initialize SDI Manager",e);
        }
    }

    /**
     * Expected parameters :
     * - "workspace"
     * - "layerTitle"
     * - "documentID"
     * - "basePersistencePath" (profile specific, e.g. "GNA")
     *
     * @param fileSet
     * @param params
     * @return
     * @throws SDIInteractionException
     */
    public RegisteredFileSet materializeLayer(RegisteredFileSet fileSet, Document params) throws SDIInteractionException{
        try {
            log.debug("Materializing FS {} on {} ", fileSet, geoserverHostName);

            // validate parameters
            String workspace = BaseExecutionRequest.getMandatory("workspace", params);
            String documentID = BaseExecutionRequest.getMandatory("documentID", params);
            String basePersistencePath = BaseExecutionRequest.getMandatory("basePersistencePath", params);

            // check if empty
            if (fileSet.getPayloads().isEmpty()) throw new SDIInteractionException("No payload to materialize");

            Document geoserverInfo = new Document();
            geoserverInfo.put("_type", "Geoserver");
            geoserverInfo.put("workspace", workspace);

            // Evaluate Layer Name
            String baseName = getToUseBaseLayerName(fileSet);
            log.debug("Base layer name is {}, checking conflicts.. ", baseName);
            String toSetLayerName = baseName;
            //Check if layer already exists
            int count = 0;
            GeoServerRESTReader gsReader = currentGeoserver.getReader();
            while (gsReader.getLayer(workspace, toSetLayerName) != null) {
                count++;
                toSetLayerName = baseName + "_" + count;
                log.debug("layer for " + baseName + " already existing, trying " + toSetLayerName);
            }
            geoserverInfo.put("layerName", toSetLayerName);
            log.debug("Layer name will be {} ", toSetLayerName);

            String folderRelativePath = basePersistencePath + "/" + documentID + "/" + fileSet.getUUID() + "/" + toSetLayerName;
            log.debug("GS Relative destination path is {} ", folderRelativePath);
            geoserverInfo.put("persistencePath", folderRelativePath);

            List<String> filenames = new ArrayList<>();
            String absolutePath = null;

            for (Object o : fileSet.getPayloads()) {
                RegisteredFile file = Serialization.convert(o, RegisteredFile.class);
                log.info("Sending {} to GS {} at {} ", file, geoserverHostName, folderRelativePath);
                String completeFilename = Files.fixFilename(file.getName());
                completeFilename = completeFilename.replaceAll(baseName, toSetLayerName);

                Destination destination = new Destination(completeFilename);
                destination.setCreateSubfolders(true);
                destination.setOnExistingFileName(DestinationClashPolicy.REWRITE);
                destination.setOnExistingSubFolder(DestinationClashPolicy.APPEND);

                destination.setPersistenceId("geoserver");
                destination.setSubFolder(folderRelativePath);

                log.debug("Sending {} to {}", file, destination);
                TransferResult result = getDtGeoServer().httpSource(new URL(file.getLink()), destination);
                log.debug("Transferred " + result);

                filenames.add(completeFilename);
                // NB Clash policy on subfolder is APPEND, thus FOLDER is expected to be the one specified by caller
                //geoserverInfo.put(""result.getRemotePath().substring(0, result.getRemotePath().lastIndexOf("/")));
                absolutePath = result.getRemotePath().substring(0, result.getRemotePath().lastIndexOf("/"));
            }
            geoserverInfo.put("files", filenames);

            // Publishing layer in GS
            String storeName = toSetLayerName + "_store";
            geoserverInfo.put("storeName", storeName);

            GeoServerRESTPublisher publisher = currentGeoserver.getPublisher();
            log.debug("Trying to create remote workspace : " + workspace);
            createWorkspace(workspace);

            log.debug("Publishing remote folder " + absolutePath);

            URL directoryPath = new URL("file:" + absolutePath + "/" + toSetLayerName + ".shp");

            //TODO Evaluate SRS

            boolean published = publisher.publishShp(
                    workspace,
                    storeName,
                    null,
                    toSetLayerName,
                    // UploadMethod.FILE, // needs zip
                    GeoServerRESTPublisher.UploadMethod.EXTERNAL, // needs shp
                    directoryPath.toURI(),
                    EPSG_4326, //SRS
                    ""); // default style

            if (!published) {
                throw new SDIInteractionException("Unable to publish layer " + toSetLayerName + " under " + workspace + ". Unknown Geoserver fault.");
            }

            RESTLayer l = gsReader.getLayer(workspace, toSetLayerName);
            RESTFeatureType f = gsReader.getFeatureType(l);
            /*https://geoserver1.dev.d4science.org/geoserver/gna_conc_18/wms?
              service=WMS&version=1.1.0&request=GetMap&layers=gna_conc_18:pos&
              styles=&bbox=8.62091913167495,40.62975046683799,8.621178639172953,40.630257904721645&
              width=392&height=768&srs=EPSG:4326&format=application/openlayers */

            List<Document> ogcLinks = new ArrayList<>();

            Document wmsLink = new Document();
            wmsLink.put("wms", String.format("https://%1$s/geoserver/%2$s/wms?"
                            + "service=WMS&version=1.1.0&request=GetMap&layers=%2$s:%3$s&"
                            + "styles=&bbox=%4$f,%5$f,%6$f,%7$f&srs=%8$s&format=application/openlayers&width=%9$d&height=%10$d",
                    geoserverHostName,
                    workspace,
                    toSetLayerName,
                    f.getMinX(),
                    f.getMinY(),
                    f.getMaxX(),
                    f.getMaxY(),
                    EPSG_4326,
                    400,
                    400));
            ogcLinks.add(wmsLink);

            List<Document> platformInfo = new ArrayList<>();
            platformInfo.add(geoserverInfo);
            // TODO Metadata

            // Materialization object
            GCubeSDILayer materialization = new GCubeSDILayer();
            materialization.put(GCubeSDILayer.OGC_LINKS, ogcLinks);
            materialization.put(GCubeSDILayer.B_BOX, new GCubeSDILayer.BBOX(f.getMaxX(), f.getMaxY(), f.getMinX(), f.getMinY()));
            materialization.put(GCubeSDILayer.PLATFORM_INFO, platformInfo);

            log.info("Generated Materialization {}", materialization);

            //Add Materialization to registered file set
            List materializations = fileSet.getMaterializations();
            if (materializations == null) materializations = new ArrayList();
            materializations.add(materialization);
            fileSet.put(RegisteredFileSet.MATERIALIZATIONS, materializations);

            return fileSet;
        }catch(SDIInteractionException e){
            throw e;
        }catch (Throwable t){
            throw new SDIInteractionException("Unexpected exception while trying to materialize File Set "+t.getMessage(),t);
        }
    }

    public String createWorkspace(String toCreate) throws SDIInteractionException {
        try {
            if(!currentGeoserver.getReader().getWorkspaceNames().contains(toCreate)) {
                log.debug("Creating workspace : "+toCreate);
                if(!currentGeoserver.getPublisher().createWorkspace(toCreate))
                    throw new SDIInteractionException("Unable to create workspace "+toCreate);
            }else log.debug("Workspace "+toCreate+" exists.");
            return toCreate;
        } catch (IllegalArgumentException | MalformedURLException e) {
            throw new SDIInteractionException("Unable to create workspace "+toCreate,e);
        }
    }

    private static final String getToUseBaseLayerName(RegisteredFileSet fileset){
        // ******** IDENTIFY LAYER NAME correct layer name
        // Must be unique under same WS
        // equal to shp base name

        String baseName= "";

        // Choose layer name by first identifying the filename
        for(Object p:fileset.getPayloads()){
            RegisteredFile f = Serialization.convert(p,RegisteredFile.class);
            String name=f.getName();
            if(name.endsWith(".shp")) {
                log.debug("SHP is {}",name);
                baseName= Files.fixFilename(name.substring(0,name.lastIndexOf('.')));
                break;
            }
        }
        return baseName;
    }

}
@@ -0,0 +1,139 @@
(new file: org.gcube.application.cms.sdi.engine.DBConstants)

package org.gcube.application.cms.sdi.engine;

import static org.gcube.application.cms.sdi.engine.PostgisTable.*;

import java.util.ArrayList;

public class DBConstants {

    public static enum TYPE{
        Concessioni,Mosi,Mopr
    }

    public static class Defaults{
        public static final String DEFAULT_GEOMETRY_COLUMN_NAME="geom";
        public static final String INTERNAL_ID="internal_id";

        public static final String XCOORD_FIELD="xcoord";
        public static final String YCOORD_FIELD="ycoord";
    }

    /**
     * nome,anno,regione,xcentroid,ycentroid,csv,shp,
     * geopackage,nome_csv,nome_shp,nome_geo,
     * poligono,punti,linee,
     * nome_progetto, descrizione_progetto,descrizione_contenuto,autore,contributore,
     * titolare_dati,responsabile,editore,
     * finanziamento,soggetto,
     * risorse_correlate,
     * date_scavo,data_archiviazione,
     * versione,licenza,titolare_licenza_copyright,accesso_dati,parole_chiave
     *
     * @author FabioISTI
     */
    public static class Concessioni{

        public static final String PRODUCT_ID="product_id";
        public static final String NOME="nome";
        public static final String ANNO="anno";
        public static final String REGIONE="regione";
        public static final String GEOMETRY= Defaults.DEFAULT_GEOMETRY_COLUMN_NAME;

        //Extension
        public static final String DESCRIZIONE="descrizione";
        public static final String CONTENUTO="contenuto";
        public static final String AUTORE="autore";
        public static final String CONTRIBUTORE="contributore";
        public static final String TITOLARE="titolare";
        public static final String RESPONSABILE="responsabile";
        public static final String EDITORE="editore";
        public static final String FINANZIAMENTO="finanziamento";
        public static final String SOGGETTO="soggetto";
        public static final String RISORSE="risorse";
        public static final String DATE_SCAVO="date_scavo";
        public static final String DATA_ARCHIVIAZIONE="data_archiviazione";
        public static final String VERSIONE="versione";
        public static final String LICENZA="licenza";
        public static final String TITOLARE_LICENZA="titolare_licenza";
        public static final String ACCESSO="accesso";
        public static final String PAROLE_CHIAVE="parole_chiave";

        public static final ArrayList<Field> COLUMNS=new ArrayList<PostgisTable.Field>();
        public static final PostgisTable CENTROIDS=new PostgisTable("centroids_concessioni",
                COLUMNS, GeometryType.POINT);

        static {
            CENTROIDS.getFields().add(new Field(Defaults.INTERNAL_ID,FieldType.AUTOINCREMENT));
            CENTROIDS.getFields().add(new Field(PRODUCT_ID,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(NOME,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(ANNO,FieldType.INT));
            CENTROIDS.getFields().add(new Field(REGIONE,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(GEOMETRY,FieldType.GEOMETRY));

            //Extensions
            CENTROIDS.getFields().add(new Field(DESCRIZIONE,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(CONTENUTO,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(AUTORE,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(CONTRIBUTORE,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(TITOLARE,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(RESPONSABILE,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(EDITORE,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(FINANZIAMENTO,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(SOGGETTO,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(RISORSE,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(DATE_SCAVO,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(DATA_ARCHIVIAZIONE,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(VERSIONE,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(LICENZA,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(TITOLARE_LICENZA,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(ACCESSO,FieldType.TEXT));
            CENTROIDS.getFields().add(new Field(PAROLE_CHIAVE,FieldType.TEXT));
        }
    }

    public static class MOSI{

        public static final String CODE="code";
        public static final String GEOMETRY= Defaults.DEFAULT_GEOMETRY_COLUMN_NAME;

        public static final ArrayList<Field> COLUMNS=new ArrayList<PostgisTable.Field>();
        public static final PostgisTable CENTROID_MOSI=new PostgisTable("centroids_mosi",
                new ArrayList<Field>(), GeometryType.POINT);

        static {
            CENTROID_MOSI.getFields().add(new Field(Defaults.INTERNAL_ID,FieldType.AUTOINCREMENT));
            CENTROID_MOSI.getFields().add(new Field(GEOMETRY,FieldType.GEOMETRY));
            CENTROID_MOSI.getFields().add(new Field(CODE,FieldType.TEXT));
        }
    }

    public static class MOPR{
        public static final PostgisTable CENTROID_MOPR=new PostgisTable("centroids_mopr",
                new ArrayList<Field>(), GeometryType.POINT);
    }

    public static class INTERNAL{
        public static final String DB_NAME="gna_internal_db";

        public static final String RECORD="RECORD";
        // public static final String CONCESSIONE="CONCESSIONE";
        // public static final String
    }

}
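Note, not part of the commit: combining the field list registered above with the getCreateStatement() builder introduced in PostgisTable later in this commit, the DDL for the centroids index is expected to look roughly as follows (illustrative sketch only).

// Illustrative sketch (assumption, not committed code):
String ddl = DBConstants.Concessioni.CENTROIDS.getCreateStatement();
// expected to be of the form:
// CREATE TABLE IF NOT EXISTS centroids_concessioni( internal_id BIGSERIAL PRIMARY KEY,product_id text,nome text,anno int,regione text,geom geometry (POINT,4326),descrizione text, ... ,parole_chiave text)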
@@ -0,0 +1,148 @@
(new file: org.gcube.application.cms.sdi.engine.PostgisDBManager)

package org.gcube.application.cms.sdi.engine;

import lombok.Synchronized;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.cms.sdi.faults.DataParsingException;
import org.gcube.application.geoportal.common.model.legacy.BBOX;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;

import java.sql.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

@Slf4j
public class PostgisDBManager implements PostgisDBManagerI {

    private Connection conn=null;

    public PostgisDBManager(Connection conn) throws SQLException {
        this.conn=conn;
    }

    @Override
    public void create(PostgisTable toCreate) throws SQLException {
        String createStmt=toCreate.getCreateStatement();
        log.debug("Executing create : "+createStmt);
        conn.createStatement().executeUpdate(createStmt);
    }

    /* (non-Javadoc)
     * @see org.gcube.application.geoportal.PostgisDBManagerI#commit()
     */
    @Override
    public void commit() throws SQLException {
        conn.commit();
    }

    // /* (non-Javadoc)
    //  * @see org.gcube.application.geoportal.PostgisDBManagerI#evaluateBoundingBox(org.gcube.application.geoportal.model.PostgisTable)
    //  */
    // @Override
    // public BBOX evaluateBoundingBox(PostgisTable table) throws SQLException, DataParsingException {
    //     ResultSet rs=conn.createStatement().executeQuery("Select ST_Extent("+table.getGeometryColumn()+") as extent from "+table.getTablename());
    //     if(rs.next())
    //         return DBUtils.parseST_Extent(rs.getString("extent"));
    //     else throw new SQLException("No extent returned");
    // }

    /* (non-Javadoc)
     * @see org.gcube.application.geoportal.PostgisDBManagerI#evaluateBoundingBox(org.gcube.application.geoportal.model.PostgisTable)
     */
    // @Override
    // public PostgisTable.POINT evaluateCentroid(PostgisTable table) throws SQLException, DataParsingException {
    //     ResultSet rs=conn.createStatement().executeQuery("Select ST_AsText(ST_Centroid(ST_Collect("+table.getGeometryColumn()+"))) as centroid from "+table.getTablename());
    //     if(rs.next())
    //         return PostgisTable.POINT.parsePOINT(rs.getString("centroid"));
    //     else throw new SQLException("No extent returned");
    // }

    /* (non-Javadoc)
     * @see org.gcube.application.geoportal.PostgisDBManagerI#prepareInsertStatement(org.gcube.application.geoportal.model.PostgisTable, boolean, boolean)
     */
    @Override
    public PreparedStatement prepareInsertStatement(PostgisTable target, boolean createTable, boolean geometryAsText) throws SQLException {
        if(createTable) {
            create(target);
        }
        String insertStmt=target.getInsertionStatement(geometryAsText);
        log.debug("Preparing insert statement : "+insertStmt);
        return conn.prepareStatement(insertStmt);
    }

    @Override
    public int deleteByFieldValue(PostgisTable target, PostgisTable.Field field, Object value) throws SQLException {
        String query=target.getDeleteByFieldStatement(field);
        log.debug("Preparing DELETE SQL {} with field {} = {} ",query,field,value);

        PreparedStatement stmt = conn.prepareStatement(query);
        target.setObjectInPreparedStatement(field, value, stmt, 1);
        int result=stmt.executeUpdate();
        log.debug("DELETED {} rows ",result);
        return result;
    }

    /* (non-Javadoc)
     * @see org.gcube.application.geoportal.PostgisDBManagerI#deleteTable(java.lang.String)
     */
    @Override
    public void deleteTable(String tableName) throws SQLException {
        conn.createStatement().executeUpdate("DROP TABLE "+tableName);
    }

    /* (non-Javadoc)
     * @see org.gcube.application.geoportal.PostgisDBManagerI#truncate(java.lang.String)
     */
    @Override
    public void truncate(String tableName) throws SQLException{
        conn.createStatement().executeUpdate("TRUNCATE Table "+tableName);
    }

    // @Override
    // public ResultSet queryAll(PostgisTable table) throws SQLException {
    //     // TODO Check schema
    //     return conn.createStatement().executeQuery("Select * from "+table.getTablename());
    // }

    // *********************** INNER UTILS CLASS

    protected static class DBUtils {

        private static Pattern pattern = Pattern.compile("(?!=\\d\\.\\d\\.)([\\d.]+)");

        public static BBOX parseST_Extent(String extent) throws DataParsingException {
            //BOX(11.9122574810083 44.2514144864263,11.9761128271586 44.2912342569845)
            try {
                log.debug("Parsing BBOX "+extent);
                Matcher m=pattern.matcher(extent);

                // Scanner sc = new Scanner(extent);
                // double minLong = sc.nextDouble(),
                //        minLat = sc.nextDouble(),
                //        maxLong = sc.nextDouble(),
                //        maxLat= sc.nextDouble();

                if(!m.find()) throw new DataParsingException("Unable to get minLong ");
                Double minLong=Double.parseDouble(m.group(1));

                if(!m.find()) throw new DataParsingException("Unable to get minLat ");
                Double minLat=Double.parseDouble(m.group(1));

                if(!m.find()) throw new DataParsingException("Unable to get maxLong ");
                Double maxLong=Double.parseDouble(m.group(1));

                if(!m.find()) throw new DataParsingException("Unable to get maxLat ");
                Double maxLat=Double.parseDouble(m.group(1));
                return new BBOX(maxLat, maxLong, minLat, minLong);
            }catch(Throwable t) {
                throw new DataParsingException("Invalid BBOX "+extent,t);
            }
        }
    }
}
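A minimal usage sketch of the manager above together with PostgisTable, not part of the commit; the JDBC URL, credentials, table and field names are placeholders.

// Sketch only: assumes a reachable PostGIS instance; URL/credentials are placeholders.
Connection c = DriverManager.getConnection("jdbc:postgresql://localhost:5432/gna_internal_db", "user", "pwd");
PostgisDBManagerI db = new PostgisDBManager(c);

List<PostgisTable.Field> fields = new ArrayList<>();
fields.add(new PostgisTable.Field("internal_id", PostgisTable.FieldType.AUTOINCREMENT));
fields.add(new PostgisTable.Field("nome", PostgisTable.FieldType.TEXT));
fields.add(new PostgisTable.Field("geom", PostgisTable.FieldType.GEOMETRY));
PostgisTable table = new PostgisTable("centroids_example", fields, PostgisTable.GeometryType.POINT);

// creates the table if missing, then prepares an insert with the geometry bound as text (ST_GeomFromText)
PreparedStatement ps = db.prepareInsertStatement(table, true, true);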
@@ -0,0 +1,38 @@
(new file: org.gcube.application.cms.sdi.engine.PostgisDBManagerI)

package org.gcube.application.cms.sdi.engine;

import org.gcube.application.cms.sdi.faults.DataParsingException;
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public interface PostgisDBManagerI {

    void commit() throws SQLException;

    void create(PostgisTable toCreate) throws SQLException;
    PreparedStatement prepareInsertStatement(PostgisTable target, boolean createTable, boolean geometryAsText)
            throws SQLException;
    void deleteTable(String tableName) throws SQLException;
    void truncate(String tableName) throws SQLException;
    int deleteByFieldValue(PostgisTable target, PostgisTable.Field field, Object value) throws SQLException;

    // DatabaseConnection getConnectionDescriptor();

    // PostgisTable.POINT evaluateCentroid(PostgisTable table) throws SQLException, DataParsingException;

    // ResultSet queryAll(PostgisTable table) throws SQLException;

}
@@ -0,0 +1,105 @@
(new file: org.gcube.application.cms.sdi.engine.PostgisIndexer)

package org.gcube.application.cms.sdi.engine;

import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder;
import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder;
import lombok.Data;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.bson.Document;
import org.gcube.application.cms.sdi.faults.SDIInteractionException;
import org.gcube.application.geoportal.common.model.profile.Profile;
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
import org.gcube.application.geoportal.common.utils.Files;

import java.net.MalformedURLException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

@Slf4j
public class PostgisIndexer {

    public static void init() throws ClassNotFoundException {
        Class.forName("org.postgresql.Driver");
        Class.forName("org.postgis.DriverWrapper");
    }

    @NonNull
    SDIManagerWrapper manager;
    @NonNull
    Profile profile;

    @NonNull
    DatabaseConnection connectionParameters;

    PostgisDBManagerI dbManager=null;

    public PostgisIndexer(SDIManagerWrapper manager, Profile profile,
                          DatabaseConnection postgisConnection) throws SQLException {
        log.info("POSTGIS Index for {} Connecting to {} ",profile.getId(),postgisConnection);
        this.connectionParameters=postgisConnection;
        dbManager=new PostgisDBManager(DriverManager.
                getConnection(connectionParameters.getUrl(),
                        connectionParameters.getUser(),
                        connectionParameters.getPwd()));
        this.manager=manager;
        this.profile = profile;
    }

    PostgisTable table = null;

    public void initIndex(String indexName, List<PostgisTable.Field> fields, String workspace,String storeName) throws SQLException, SDIInteractionException {
        log.info("Check/init index for {} ",profile.getId());
        table = new PostgisTable(indexName,fields, PostgisTable.GeometryType.POINT);
        log.trace("Index Postgis Table is {} ",table);
        log.debug("Create if missing..");
        // Check if table exists
        dbManager.create(table);

        log.debug("Checking/ registering index layer in GS ");
        manager.configureCentroidLayer(indexName,workspace,storeName,table,connectionParameters);

        // TODO Additional layers
        // Create layer
        // Publish cross related layers
    }

    public void insert(Document toInsertRecord)throws SDIInteractionException {
        log.info("Inserting {} in index {}",toInsertRecord,table.getTablename());
        try {
            PreparedStatement ps = dbManager.prepareInsertStatement(table, false, true);
            table.fillObjectsPreparedStatement(toInsertRecord, ps);
            ps.execute();
        }catch (Throwable t ){
            log.error("Unable to insert {} into {} ",toInsertRecord,table.getTablename(),t);
            throw new SDIInteractionException("Unable to insert record in postgis index "+table.getTablename(),t);
        }
    }

    public void deleteByStringValue(PostgisTable.Field field, String id) throws SDIInteractionException {
        log.info("Deleting {}={} from index {}",field.getName(), id,table.getTablename());
        try {
            dbManager.deleteByFieldValue(table,field,id);
        }catch (Throwable t ){
            log.error("Unable to delete {}={} from index {}",field.getName(), id,table.getTablename(),t);
            throw new SDIInteractionException("Unable to delete record in postgis index "+table.getTablename(),t);
        }
    }

}
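Illustrative wiring of the indexer above, not part of the commit: the SDIManagerWrapper, Profile and DatabaseConnection instances are assumed to come from the plugin context, and the index, workspace and store names below are placeholders mirroring those used by the lifecycle manager.

// Sketch only: names and values are assumptions for illustration.
PostgisIndexer.init();                                       // registers the JDBC drivers
PostgisIndexer indexer = new PostgisIndexer(sdiWrapper, profile, postgisConnection);
indexer.initIndex("gna_concessioni_centroids_devVRE",
        DBConstants.Concessioni.CENTROIDS.getFields(),       // reuse the centroid field definitions
        "gna_concessioni_devVRE", "gna_postgis_store");      // workspace / store names are placeholders
Document record = new Document("product_id", "123").append("nome", "Example");
// the geometry column, when present, is bound by PostgisTable.setObjectInPreparedStatement
indexer.insert(record);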
@@ -0,0 +1,316 @@
(new file: org.gcube.application.cms.sdi.engine.PostgisTable)

package org.gcube.application.cms.sdi.engine;

import lombok.*;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.cms.sdi.faults.DataParsingException;
import org.gcube.application.geoportal.common.model.legacy.BBOX;

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

@Slf4j
@RequiredArgsConstructor
@Getter
@ToString
public class PostgisTable {

    @Getter
    @AllArgsConstructor
    public static enum GeometryType{
        MULTIPOINT("4326","geometry (MULTIPOINT,4326)","",""),
        POINT("4326","geometry (POINT,4326)","",""),
        LINE("4326","geometry (MULTILINESTRING,4326)","",""),
        POLYGON("4326","geometry (MULTIPOLYGON,4326)","","");

        private final String SRID;
        private final String definition;
        private final String InsertWKT;
        private final String insertWKB;
    }

    @RequiredArgsConstructor
    @Getter
    @Setter
    @ToString
    public static class Field{
        @NonNull
        private String name;
        @NonNull
        private FieldType type;
        private Boolean isIndexed;
        private Object constantValue;
    }

    @Getter
    @AllArgsConstructor
    public enum FieldType{
        INT("int",java.sql.Types.INTEGER),
        TEXT("text",java.sql.Types.LONGVARCHAR),
        FLOAT("float",java.sql.Types.FLOAT),
        GEOMETRY("",0),
        AUTOINCREMENT("BIGSERIAL PRIMARY KEY",java.sql.Types.BIGINT);

        private String definition;
        private int sqlType;
    }

    @RequiredArgsConstructor
    @Getter
    @ToString
    public static class POINT{

        private static Pattern pattern = Pattern.compile("(?!=\\d\\.\\d\\.)([\\d.]+)");

        public static POINT parsePOINT(String point) throws DataParsingException {
            //POINT(8.30230113965909 44.8011688237011)
            // x,y
            try {
                log.debug("Parsing POINT "+point);
                Matcher m=pattern.matcher(point);

                if(!m.find()) throw new DataParsingException("Unable to get x ");
                Double x=Double.parseDouble(m.group(1));

                if(!m.find()) throw new DataParsingException("Unable to get y ");
                Double y=Double.parseDouble(m.group(1));

                return new POINT(x,y);
            }catch(Throwable t) {
                throw new DataParsingException("Invalid POINT "+point,t);
            }
        }

        @NonNull
        private Double x;
        @NonNull
        private Double y;
    }

    private static final NumberFormat DECIMAL_FORMAT=NumberFormat.getInstance(Locale.US);

    static {
        ((DecimalFormat) DECIMAL_FORMAT).setGroupingUsed(false);
    }

    public String getGeometryColumn() {
        for(Field f:fields)
            if(f.getType().equals(FieldType.GEOMETRY)) return f.getName();
        return null;
    }

    @NonNull
    private String tablename;

    @NonNull
    private List<Field> fields;

    @NonNull
    private GeometryType geometryColumnType;

    @Setter
    private BBOX boundingBox=null;

    @Setter
    private POINT centroid=null;

    public void setTablename(String tablename) {
        this.tablename = sanitizeFieldName(tablename);
    }

    public String getCreateStatement() {
        StringBuilder stmt=new StringBuilder();
        stmt.append("CREATE TABLE IF NOT EXISTS "+tablename+"( ");
        for(Field field:fields){

            String fieldDefinition=field.getType().getDefinition();
            if(field.getType().equals(FieldType.GEOMETRY))
                fieldDefinition=this.getGeometryColumnType().definition;

            stmt.append(field.getName()+" "+fieldDefinition+",");
        }
        stmt.deleteCharAt(stmt.lastIndexOf(","));
        stmt.append(")");
        return stmt.toString();
    }

    public String getDeleteByFieldStatement(Field field) {
        return "DELETE FROM "+tablename+" WHERE "+field.getName()+" = ? ";
    }

    public String getInsertionStatement(boolean geometryText) {
        StringBuilder fieldList=new StringBuilder();
        StringBuilder fieldInsertion=new StringBuilder();

        for(Field field:fields) {
            switch(field.getType()) {
                case AUTOINCREMENT : break;
                case GEOMETRY : {
                    fieldList.append(field.getName()+",");
                    if(geometryText)
                        fieldInsertion.append("ST_GeomFromText(?, 4326),");
                    else
                        fieldInsertion.append("ST_GeomFromWKB(?, 4326),");
                    break;
                }
                default : {
                    fieldList.append(field.getName()+",");
                    fieldInsertion.append("?,");
                }
            }
        }

        fieldList.deleteCharAt(fieldList.lastIndexOf(","));
        fieldInsertion.deleteCharAt(fieldInsertion.lastIndexOf(","));

        return "Insert into "+tablename+" ("+fieldList+") VALUES ("+fieldInsertion+")";
    }

    public void fillObjectsPreparedStatement(Map<String,Object> row, PreparedStatement toFill) throws SQLException {
        int psFieldIndex=0;
        HashMap<String,Object> rowValues=new HashMap<String,Object>();
        for(Map.Entry<String,Object> entry:row.entrySet())
            rowValues.put(sanitizeFieldName(entry.getKey()), entry.getValue());

        for(Field field:fields) {
            if(!field.getType().equals(FieldType.AUTOINCREMENT)) {
                psFieldIndex++;

                Object value=rowValues.get(field.getName());
                setObjectInPreparedStatement(field,value,toFill,psFieldIndex);
            }
        }
    }

    public void setObjectInPreparedStatement(Field field,Object value, PreparedStatement toFill, int psFieldIndex) throws SQLException {
        if(value==null) {
            try{
                toFill.setNull(psFieldIndex, field.getType().sqlType);
            }catch(SQLException e) {
                log.error("Unable to set null for field "+field);
                throw e;
            }
        }
        else
            switch(field.getType()) {
                case FLOAT :{
                    toFill.setFloat(psFieldIndex, (Float)value);
                    break;
                }
                case INT : {
                    toFill.setInt(psFieldIndex, (Integer)value);
                    break;
                }
                case TEXT : {
                    toFill.setString(psFieldIndex, value.toString());
                    break;
                }
                case GEOMETRY : {
                    toFill.setBytes(psFieldIndex, (byte[])value);
                }
            }
    }

    public void fillCSVPreparedStatament(Map<String,String> row, PreparedStatement toFill,boolean explicitGeometry) throws SQLException {
        int psFieldIndex=0;

        HashMap<String,String> rowValues=new HashMap<String,String>();
        for(Map.Entry<String,String> entry:row.entrySet())
            rowValues.put(sanitizeFieldName(entry.getKey()), entry.getValue());

        for(Field field:fields) {

            if(!field.getType().equals(FieldType.AUTOINCREMENT)) {
                psFieldIndex++;

                String value=rowValues.get(field.getName());

                // if(value==null||value.equalsIgnoreCase("null")) toFill.setNull(psFieldIndex, field.getType().sqlType);
                // else
                switch(field.getType()) {
                    case FLOAT :{
                        try{
                            toFill.setFloat(psFieldIndex, Float.parseFloat(value));
                        }catch(NumberFormatException e) {
                            throw new SQLException(field+" cannot be null. CSV Row is "+rowValues,e);
                        }
                        break;
                    }
                    case INT : {
                        try{
                            toFill.setInt(psFieldIndex, Integer.parseInt(value));
                        }catch(NumberFormatException e) {
                            log.warn("Skipping value for "+field+" row was "+rowValues,e);
                            toFill.setNull(psFieldIndex, java.sql.Types.INTEGER);
                        }
                        break;
                    }
                    case TEXT : {
                        toFill.setString(psFieldIndex, value.toString());
                        break;
                    }
                    case GEOMETRY : {
                        if(explicitGeometry) {
                            toFill.setString(psFieldIndex,value);
                        }else {
                            switch(geometryColumnType){
                                case POINT: {
                                    String xRepresentation=DECIMAL_FORMAT.format(Double.parseDouble(rowValues.get(DBConstants.Defaults.XCOORD_FIELD)));
                                    String yRepresentation=DECIMAL_FORMAT.format(Double.parseDouble(rowValues.get(DBConstants.Defaults.YCOORD_FIELD)));

                                    toFill.setString(psFieldIndex, "POINT("+xRepresentation+" "+
                                            yRepresentation+")");
                                    break;
                                }
                                default :{
                                    toFill.setString(psFieldIndex,rowValues.get("wkt"));
                                    break;
                                }
                            }
                        }
                        break;
                    }
                }
            }
        }
    }

    public static String sanitizeFieldName(String fieldName) {
        // return fieldName.toLowerCase().replaceAll(" ", "_").replaceAll("\\.", "").replaceAll("-", "_").replaceAll("////","_");
        return fieldName.toLowerCase().replaceAll("[^a-z0-9_\\\\]", "_");
    }

}
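For reference (not part of the commit), the statement builders above produce SQL of the following shape for a three-field table; the concrete table and field names are illustrative.

// Assuming fields (internal_id AUTOINCREMENT, nome TEXT, geom GEOMETRY) on a table named "centroids_example":
// getCreateStatement()            -> CREATE TABLE IF NOT EXISTS centroids_example( internal_id BIGSERIAL PRIMARY KEY,nome text,geom geometry (POINT,4326))
// getInsertionStatement(true)     -> Insert into centroids_example (nome,geom) VALUES (?,ST_GeomFromText(?, 4326))
// getDeleteByFieldStatement(nome) -> DELETE FROM centroids_example WHERE nome = ?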
@ -0,0 +1,180 @@
|
||||||
|
package org.gcube.application.cms.sdi.engine;
|
||||||
|
|
||||||
|
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher;
|
||||||
|
import it.geosolutions.geoserver.rest.GeoServerRESTReader;
|
||||||
|
import it.geosolutions.geoserver.rest.decoder.RESTFeatureType;
|
||||||
|
import it.geosolutions.geoserver.rest.decoder.RESTLayer;
|
||||||
|
import it.geosolutions.geoserver.rest.encoder.datastore.GSPostGISDatastoreEncoder;
|
||||||
|
import lombok.Getter;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.bson.Document;
|
||||||
|
import org.gcube.application.cms.sdi.faults.SDIInteractionException;
|
||||||
|
import org.gcube.application.cms.plugins.requests.BaseExecutionRequest;
|
||||||
|
import org.gcube.application.cms.serialization.Serialization;
|
||||||
|
import org.gcube.application.geoportal.common.model.document.filesets.GCubeSDILayer;
|
||||||
|
|
||||||
|
import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFile;
|
||||||
|
import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFileSet;
|
||||||
|
|
||||||
|
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
|
||||||
|
import org.gcube.application.geoportal.common.utils.Files;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
import org.gcube.data.transfer.library.DataTransferClient;
|
||||||
|
import org.gcube.data.transfer.library.TransferResult;
|
||||||
|
import org.gcube.data.transfer.model.Destination;
|
||||||
|
import org.gcube.data.transfer.model.DestinationClashPolicy;
|
||||||
|
import org.gcube.spatial.data.gis.GISInterface;
|
||||||
|
import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;
|
||||||
|
|
||||||
|
import java.io.File;
|
||||||
|
import java.net.MalformedURLException;
|
||||||
|
import java.net.URL;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.regex.Matcher;
|
||||||
|
import java.util.regex.Pattern;
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
public class SDIManager {
|
||||||
|
|
||||||
|
|
||||||
|
static protected final String EPSG_4326="EPSG:4326";
|
||||||
|
static final String WGS84_FULL="GEOGCS[\"WGS 84\", DATUM[\"World Geodetic System 1984\", SPHEROID[\"WGS 84\", 6378137.0, 298.257223563, AUTHORITY[\"EPSG\",\"7030\"]],"+
|
||||||
|
"AUTHORITY[\"EPSG\",\"6326\"]], PRIMEM[\"Greenwich\", 0.0, AUTHORITY[\"EPSG\",\"8901\"]], UNIT[\"degree\", 0.017453292519943295],"+
|
||||||
|
"AXIS[\"Geodetic longitude\", EAST], AXIS[\"Geodetic latitude\", NORTH], AUTHORITY[\"EPSG\",\"4326\"]]";
|
||||||
|
|
||||||
|
|
||||||
|
public static final Pattern HOSTNAME_PATTERN=Pattern.compile("(?<=\\:\\/\\/)[^\\:]*");
|
||||||
|
public static final Pattern PORT_PATTERN=Pattern.compile("(?<=\\:)[\\d]+");
|
||||||
|
public static final Pattern DB_NAME_PATTERN=Pattern.compile("(?<=\\/)[^\\/]*(?=$)");
|
||||||
|
|
||||||
|
|
||||||
|
private final GISInterface gis;
|
||||||
|
@Getter
|
||||||
|
private final DataTransferClient dtGeoServer;
|
||||||
|
@Getter
|
||||||
|
private final String geoserverHostName;
|
||||||
|
@Getter
|
||||||
|
private final AbstractGeoServerDescriptor currentGeoserver;
|
||||||
|
|
||||||
|
|
||||||
|
public SDIManager() throws SDIInteractionException {
|
||||||
|
try{
|
||||||
|
log.debug("Initializing GIS Interface..");
|
||||||
|
gis=GISInterface.get();
|
||||||
|
currentGeoserver=gis.getCurrentGeoServer();
|
||||||
|
if(currentGeoserver==null)
|
||||||
|
throw new Exception("Unable to contact data transfer for geoserver ");
|
		log.debug("Found geoserver descriptor "+currentGeoserver);
		geoserverHostName=new URL(currentGeoserver.getUrl()).getHost();

		log.debug("Contacting Data Transfer from geoserver {} ",geoserverHostName);
		dtGeoServer=DataTransferClient.getInstanceByEndpoint("https://"+geoserverHostName);

		if(!currentGeoserver.getReader().existGeoserver())
			throw new Exception("Geoserver not reachable");
	}catch(Exception e) {
		throw new SDIInteractionException("Unable to initialize SDI Manager",e);
	}
}

public String createWorkspace(String toCreate) throws SDIInteractionException {
	try {
		if(!currentGeoserver.getReader().getWorkspaceNames().contains(toCreate)) {
			log.debug("Creating workspace : "+toCreate);
			if(!currentGeoserver.getPublisher().createWorkspace(toCreate))
				throw new SDIInteractionException("Unable to create workspace "+toCreate);
		}else log.debug("Workspace "+toCreate+" exists.");
		return toCreate;
	} catch (IllegalArgumentException | MalformedURLException e) {
		throw new SDIInteractionException("Unable to create workspace "+toCreate,e);
	}
}

protected static final String getToUseBaseLayerName(RegisteredFileSet fileset){
	// Identify the correct layer name:
	// - must be unique under the same workspace
	// - equal to the shp base name
	String baseName= "";

	// Choose the layer name by first identifying the shp filename
	for(Object p:fileset.getPayloads()){
		RegisteredFile f = Serialization.convert(p,RegisteredFile.class);
		String name=f.getName();
		if(name.endsWith(".shp")) {
			log.debug("SHP is {}",name);
			baseName= Files.fixFilename(name.substring(0,name.lastIndexOf('.')));
			break;
		}
	}
	return baseName;
}

protected String createStore(GSPostGISDatastoreEncoder encoder, String workspace) throws SDIInteractionException {
	String storeName=encoder.getName();
	try {
		log.debug("Looking for datastore "+storeName+" under "+workspace);
		if(currentGeoserver.getReader().getDatastore(workspace,storeName)==null)
			if(!currentGeoserver.getDataStoreManager().create(workspace, encoder))
				throw new SDIInteractionException("Unable to create store "+storeName+" in "+workspace);
		log.debug("Store "+storeName+" exists under "+workspace);
		return storeName;
	} catch (IllegalArgumentException | MalformedURLException e) {
		throw new SDIInteractionException("Unable to create store "+storeName,e);
	}
}

protected String createStoreFromPostgisDB(String workspace, String storeName, DatabaseConnection connection) throws SDIInteractionException{
	String connectionUrl=connection.getUrl();

	Matcher hostname=HOSTNAME_PATTERN.matcher(connectionUrl);
	if (!hostname.find()) throw new SDIInteractionException("Unable to get Hostname from "+connection);

	Matcher port = PORT_PATTERN.matcher(connectionUrl);
	if (!port.find()) throw new SDIInteractionException("Unable to get PORT from "+connection);

	Matcher db = DB_NAME_PATTERN.matcher(connectionUrl);
	if (!db.find()) throw new SDIInteractionException("Unable to get DB from "+connection);

	GSPostGISDatastoreEncoder encoder=new GSPostGISDatastoreEncoder(storeName);
	encoder.setHost(hostname.group());
	encoder.setPort(Integer.parseInt(port.group()));
	encoder.setDatabase(db.group());
	encoder.setSchema("public");
	encoder.setUser(connection.getUser());
	encoder.setPassword(connection.getPwd());
	encoder.setLooseBBox(true);
	encoder.setDatabaseType("postgis");
	encoder.setEnabled(true);
	encoder.setFetchSize(1000);
	encoder.setValidateConnections(true);

	return createStore(encoder,workspace);
}

protected String publishStyle(File sldFile, String name) throws SDIInteractionException {
	try {
		if(!currentGeoserver.getReader().existsStyle(name)) {
			log.debug("Registering style "+name);
			if(!currentGeoserver.getPublisher().publishStyle(sldFile, name))
				throw new SDIInteractionException("Unable to register style "+name);
		}else log.debug("Style "+name+" already existing");
		return name;
	} catch (IllegalArgumentException | MalformedURLException e) {
		throw new SDIInteractionException("Unable to create style "+name,e);
	}
}

}
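The regex constants (HOSTNAME_PATTERN, PORT_PATTERN, DB_NAME_PATTERN) are declared elsewhere in SDIManager and are not part of this hunk. As a hedged, standalone sketch of the same idea — pulling host, port and database name out of a PostGIS JDBC URL — something like the following would work; the pattern strings and the sample URL are assumptions, not the ones actually used by the class:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Minimal sketch: parse "jdbc:postgresql://host:5432/dbname"-style URLs.
// The patterns used by SDIManager may differ.
public class JdbcUrlSketch {
    private static final Pattern HOST = Pattern.compile("//([^:/]+)");
    private static final Pattern PORT = Pattern.compile(":(\\d+)/");
    private static final Pattern DB   = Pattern.compile("/([^/]+)$");

    public static void main(String[] args) {
        String url = "jdbc:postgresql://postgis.example.org:5432/gna_db";
        Matcher h = HOST.matcher(url);
        Matcher p = PORT.matcher(url);
        Matcher d = DB.matcher(url);
        if (h.find() && p.find() && d.find())
            System.out.println(h.group(1) + " " + p.group(1) + " " + d.group(1));
    }
}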
@@ -0,0 +1,257 @@
package org.gcube.application.cms.sdi.engine;

import it.geosolutions.geoserver.rest.GeoServerRESTPublisher;
import it.geosolutions.geoserver.rest.GeoServerRESTReader;
import it.geosolutions.geoserver.rest.decoder.RESTFeatureType;
import it.geosolutions.geoserver.rest.decoder.RESTLayer;
import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder;
import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder;
import lombok.extern.slf4j.Slf4j;
import org.bson.Document;
import org.gcube.application.cms.caches.AbstractScopedMap;
import org.gcube.application.cms.plugins.requests.BaseExecutionRequest;
import org.gcube.application.cms.sdi.faults.SDIInteractionException;
import org.gcube.application.cms.serialization.Serialization;
import org.gcube.application.geoportal.common.model.document.filesets.GCubeSDILayer;
import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFile;
import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFileSet;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.data.transfer.library.TransferResult;
import org.gcube.data.transfer.model.Destination;
import org.gcube.data.transfer.model.DestinationClashPolicy;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;

@Slf4j
public class SDIManagerWrapper extends SDIManager{

    public SDIManagerWrapper() throws SDIInteractionException {
    }

    /**
     * Expected parameters :
     * - "workspace"
     * - "layerTitle"
     * - "documentID"
     * - "basePersistencePath" (profile specific, e.g. "GNA")
     *
     * @param fileSet
     * @param params
     * @return
     * @throws SDIInteractionException
     */
    public RegisteredFileSet materializeLayer(RegisteredFileSet fileSet, Document params) throws SDIInteractionException{
        try {
            log.debug("Materializing FS {} on {} ", fileSet, getGeoserverHostName());

            // validate parameters
            String workspace = BaseExecutionRequest.getMandatory("workspace", params);
            String documentID = BaseExecutionRequest.getMandatory("documentID", params);
            String basePersistencePath = BaseExecutionRequest.getMandatory("basePersistencePath", params);

            // check if empty
            if (fileSet.getPayloads().isEmpty()) throw new SDIInteractionException("No payload to materialize");

            Document geoserverInfo = new Document();
            geoserverInfo.put("_type", "Geoserver");
            geoserverInfo.put("workspace", workspace);

            // Evaluate Layer Name
            String baseName = getToUseBaseLayerName(fileSet);
            log.debug("Base layer name is {}, checking conflicts.. ", baseName);
            String toSetLayerName = baseName;

            // Check if layer already exists
            int count = 0;
            GeoServerRESTReader gsReader = getCurrentGeoserver().getReader();
            while (gsReader.getLayer(workspace, toSetLayerName) != null) {
                count++;
                toSetLayerName = baseName + "_" + count;
                log.debug("layer for " + baseName + " already existing, trying " + toSetLayerName);
            }
            geoserverInfo.put("layerName", toSetLayerName);
            log.debug("Layer name will be {}", toSetLayerName);

            String folderRelativePath = basePersistencePath + "/" + documentID + "/" + fileSet.getUUID() + "/" + toSetLayerName;
            log.debug("GS Relative destination path is {}", folderRelativePath);
            geoserverInfo.put("persistencePath", folderRelativePath);

            List<String> filenames = new ArrayList<>();
            String absolutePath = null;

            for (Object o : fileSet.getPayloads()) {
                RegisteredFile file = Serialization.convert(o, RegisteredFile.class);
                log.info("Sending {} to GS {} at {} ", file, getGeoserverHostName(), folderRelativePath);
                String completeFilename = Files.fixFilename(file.getName());
                completeFilename = completeFilename.replaceAll(baseName, toSetLayerName);

                Destination destination = new Destination(completeFilename);
                destination.setCreateSubfolders(true);
                destination.setOnExistingFileName(DestinationClashPolicy.REWRITE);
                destination.setOnExistingSubFolder(DestinationClashPolicy.APPEND);
                destination.setPersistenceId("geoserver");
                destination.setSubFolder(folderRelativePath);

                log.debug("Sending {} to {}", file, destination);
                TransferResult result = getDtGeoServer().httpSource(new URL(file.getLink()), destination);
                log.debug("Transferred " + result);

                filenames.add(completeFilename);
                // NB clash policy on subfolder is APPEND, thus FOLDER is expected to be the one specified by the caller
                absolutePath = result.getRemotePath().substring(0, result.getRemotePath().lastIndexOf("/"));
            }
            geoserverInfo.put("files", filenames);

            // Publishing layer in GS
            String storeName = toSetLayerName + "_store";
            geoserverInfo.put("storeName", storeName);

            GeoServerRESTPublisher publisher = getCurrentGeoserver().getPublisher();
            log.debug("Trying to create remote workspace : " + workspace);
            createWorkspace(workspace);

            log.debug("Publishing remote folder " + absolutePath);

            URL directoryPath = new URL("file:" + absolutePath + "/" + toSetLayerName + ".shp");

            //TODO Evaluate SRS
            boolean published = publisher.publishShp(
                    workspace,
                    storeName,
                    null,
                    toSetLayerName,
                    // UploadMethod.FILE, // needs zip
                    GeoServerRESTPublisher.UploadMethod.EXTERNAL, // needs shp
                    directoryPath.toURI(),
                    EPSG_4326, // SRS
                    ""); // default style

            if (!published) {
                throw new SDIInteractionException("Unable to publish layer " + toSetLayerName + " under " + workspace + ". Unknown Geoserver fault.");
            }

            RESTLayer l = gsReader.getLayer(workspace, toSetLayerName);
            RESTFeatureType f = gsReader.getFeatureType(l);

            List<Document> ogcLinks = new ArrayList<>();

            Document wmsLink = new Document();
            wmsLink.put("wms", String.format("https://%1$s/geoserver/%2$s/wms?"
                            + "service=WMS&version=1.1.0&request=GetMap&layers=%2$s:%3$s&"
                            + "styles=&bbox=%4$f,%5$f,%6$f,%7$f&srs=%8$s&format=application/openlayers&width=%9$d&height=%10$d",
                    getGeoserverHostName(),
                    workspace,
                    toSetLayerName,
                    f.getMinX(),
                    f.getMinY(),
                    f.getMaxX(),
                    f.getMaxY(),
                    EPSG_4326,
                    400,
                    400));
            ogcLinks.add(wmsLink);

            List<Document> platformInfo = new ArrayList<>();
            platformInfo.add(geoserverInfo);
            // TODO Metadata

            // Materialization object
            GCubeSDILayer materialization = new GCubeSDILayer();
            materialization.put(GCubeSDILayer.OGC_LINKS, ogcLinks);
            materialization.put(GCubeSDILayer.B_BOX, new GCubeSDILayer.BBOX(f.getMaxX(), f.getMaxY(), f.getMinX(), f.getMinY()));
            materialization.put(GCubeSDILayer.PLATFORM_INFO, platformInfo);

            log.info("Generated Materialization {}", materialization);

            // Add Materialization to registered file set
            List materializations = fileSet.getMaterializations();
            if (materializations == null) materializations = new ArrayList();
            materializations.add(materialization);
            fileSet.put(RegisteredFileSet.MATERIALIZATIONS, materializations);

            return fileSet;
        }catch(SDIInteractionException e){
            throw e;
        }catch (Throwable t){
            throw new SDIInteractionException("Unexpected exception while trying to materialize File Set "+t.getMessage(),t);
        }
    }

    public String configureCentroidLayer(String name, String workspace, String storeName, PostgisTable table, DatabaseConnection connection) throws SDIInteractionException {

        GSFeatureTypeEncoder fte=new GSFeatureTypeEncoder();
        fte.setAbstract("Centroid layer for "+name);
        fte.setEnabled(true);
        fte.setNativeCRS(WGS84_FULL);
        fte.setTitle(name);
        fte.setName(name);

        // GeoServer loads all fields
        // fte.setAttribute(attrs);

        fte.setLatLonBoundingBox(-180.0, -90.0, 180.0, 90.0, WGS84_FULL);

        String style="clustered_centroids";

        GSLayerEncoder layerEncoder=new GSLayerEncoder();
        layerEncoder.setDefaultStyle(style);
        layerEncoder.setEnabled(true);
        layerEncoder.setQueryable(true);
        try {
            //Checking workspace
            createWorkspace(workspace);
            //Checking store
            createStoreFromPostgisDB(workspace, storeName,connection);
            //Checking layer
            publishStyle(Files.getFileFromResources("styles/clustered_points.sld"),style);

            log.info("Creating layer in {} : {} with FTE {} , LE {}",workspace,storeName,fte,layerEncoder);
            if(getCurrentGeoserver().getReader().getLayer(workspace, name)==null)
                if(!getCurrentGeoserver().getPublisher().publishDBLayer(workspace, storeName, fte, layerEncoder))
                    throw new SDIInteractionException("Unable to create layer "+name);
            log.debug("layer "+name+" already exists");

            String link=String.format("https://%1$s/geoserver/%2$s/wms?"
                            +"service=WMS&version=1.1.0&request=GetMap&layers=%2$s:%3$s&"
                            + "styles=&bbox=%4$s,%5$s,%6$s,%7$s&srs=%8$s&format=application/openlayers&width=%9$d&height=%10$d",
                    getGeoserverHostName(),
                    workspace,
                    name,
                    "-1563071.166172796",
                    "4789738.204048398",
                    "4334926.486925308",
                    "5828118.072551585",
                    EPSG_4326,
                    400,
                    400);

            return name;
        } catch (IllegalArgumentException | MalformedURLException e) {
            throw new SDIInteractionException("Unable to create layer "+name,e);
        }
    }
}
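The params Document consumed by materializeLayer is only described by its Javadoc and by the BaseExecutionRequest.getMandatory(...) lookups above. As a hedged illustration of the caller side — key names taken from that Javadoc, values invented — it might be built like this:

import org.bson.Document;

public class MaterializeParamsSketch {
    public static void main(String[] args) {
        // Hypothetical values; only the key names come from the method's Javadoc and
        // from the getMandatory(...) lookups inside materializeLayer.
        Document params = new Document()
                .append("workspace", "gna_concessioni")        // target GeoServer workspace
                .append("layerTitle", "Posizionamento scavo")  // listed in the Javadoc; not read by this version of the method
                .append("documentID", "some-document-id")      // owning ProfiledDocument id
                .append("basePersistencePath", "GNA");         // profile-specific base folder on the GeoServer host
        System.out.println(params.toJson());
        // With an SDIManagerWrapper instance at hand one would then call:
        // RegisteredFileSet materialized = wrapper.materializeLayer(fileSet, params);
    }
}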
@@ -0,0 +1,26 @@
package org.gcube.application.cms.sdi.faults;

public class DataParsingException extends Exception {

    public DataParsingException() {
    }

    public DataParsingException(String message) {
        super(message);
    }

    public DataParsingException(Throwable cause) {
        super(cause);
    }

    public DataParsingException(String message, Throwable cause) {
        super(message, cause);
    }

    public DataParsingException(String message, Throwable cause, boolean enableSuppression,
                                boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }

}
@@ -1,4 +1,4 @@
package org.gcube.application.cms.concessioni.sdi.faults;
package org.gcube.application.cms.sdi.faults;

public class SDIInteractionException extends Exception {
@@ -1,4 +1,4 @@
package org.gcube.application.cms.concessioni.sdi.model;
package org.gcube.application.cms.sdi.model;

import org.bson.Document;
@@ -0,0 +1,82 @@
package org.gcube.application.cms.sdi.plugins;

import lombok.Synchronized;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.cms.caches.AbstractScopedMap;
import org.gcube.application.cms.implementations.ISInterface;
import org.gcube.application.cms.implementations.ImplementationProvider;
import org.gcube.application.cms.plugins.AbstractPlugin;
import org.gcube.application.cms.plugins.InitializablePlugin;
import org.gcube.application.cms.plugins.faults.InitializationException;
import org.gcube.application.cms.plugins.faults.ShutDownException;
import org.gcube.application.cms.plugins.model.PluginDescriptor;
import org.gcube.application.cms.plugins.reports.InitializationReport;
import org.gcube.application.cms.plugins.reports.Report;
import org.gcube.application.cms.sdi.engine.SDIManager;
import org.gcube.application.cms.sdi.engine.SDIManagerWrapper;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;

import java.time.Duration;
import java.time.temporal.ChronoUnit;

@Slf4j
public abstract class SDIAbstractPlugin extends AbstractPlugin implements InitializablePlugin {

    protected static AbstractScopedMap<SDIManagerWrapper> sdiCache;

    protected static AbstractScopedMap<DatabaseConnection> postgisCache;

    @Synchronized
    private static void initCache(){
        if(sdiCache==null) {
            log.info("Creating internal caches.. ");
            sdiCache = new AbstractScopedMap<SDIManagerWrapper>("SDI-CACHE") {
                @Override
                protected SDIManagerWrapper retrieveObject() throws ConfigurationException {
                    try {
                        return new SDIManagerWrapper();
                    } catch (Exception e) {
                        throw new ConfigurationException(e);
                    }
                }
            };
            sdiCache.setTTL(Duration.of(10, ChronoUnit.MINUTES));
        }
        if(postgisCache==null) {
            postgisCache = new AbstractScopedMap<DatabaseConnection>("POSTGIS-CREDENTIALS") {
                @Override
                protected DatabaseConnection retrieveObject() throws ConfigurationException {
                    try {
                        return ImplementationProvider.get().getEngineByClass(ISInterface.class).
                                queryForDatabase("postgis", "Concessioni");
                    } catch (Exception e) {
                        throw new ConfigurationException(e);
                    }
                }
            };
            postgisCache.setTTL(Duration.of(10, ChronoUnit.MINUTES));
        }
    }

    @Override
    public InitializationReport init() throws InitializationException {
        InitializationReport report = new InitializationReport();
        try{
            initCache();
            report.setStatus(Report.Status.OK);
        } catch (Throwable e) {
            throw new InitializationException("SDI Plugins : Unable to initialize Internal Caches ",e);
        }
        return report;
    }

    @Override
    public void shutdown() throws ShutDownException {

    }
}
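AbstractScopedMap is a gCube-internal cache keyed by the calling scope and refreshed after a TTL; its implementation is not part of this commit. The snippet below is a hedged, self-contained sketch of that idea (a per-key cache with expiry and a loader standing in for retrieveObject()), not the real class, so all names and behaviour here are assumptions:

import java.time.Duration;
import java.time.Instant;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Supplier;

// Illustrative only: one cached object per scope, rebuilt when its TTL elapses,
// loosely mirroring how SDIAbstractPlugin keeps one SDIManagerWrapper / DatabaseConnection per scope.
public class ScopedTtlCacheSketch<T> {
    private static final class Entry<T> {
        final T value; final Instant expiry;
        Entry(T v, Instant e) { value = v; expiry = e; }
    }

    private final Map<String, Entry<T>> entries = new ConcurrentHashMap<>();
    private final Duration ttl;
    private final Supplier<T> loader;   // stands in for retrieveObject()

    public ScopedTtlCacheSketch(Duration ttl, Supplier<T> loader) {
        this.ttl = ttl;
        this.loader = loader;
    }

    public T getObject(String scope) {
        Entry<T> e = entries.get(scope);
        if (e == null || Instant.now().isAfter(e.expiry)) {
            e = new Entry<>(loader.get(), Instant.now().plus(ttl));
            entries.put(scope, e);
        }
        return e.value;
    }

    public static void main(String[] args) {
        ScopedTtlCacheSketch<String> cache =
                new ScopedTtlCacheSketch<>(Duration.ofMinutes(10), () -> "expensive object");
        System.out.println(cache.getObject("/gcube/devsec/devVRE"));
    }
}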
@@ -0,0 +1,214 @@
package org.gcube.application.cms.sdi.plugins;

import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.bson.Document;
import org.gcube.application.cms.caches.AbstractScopedMap;
import org.gcube.application.cms.implementations.ISInterface;
import org.gcube.application.cms.implementations.ImplementationProvider;
import org.gcube.application.cms.plugins.AbstractPlugin;
import org.gcube.application.cms.plugins.IndexerPluginInterface;
import org.gcube.application.cms.plugins.InitializablePlugin;
import org.gcube.application.cms.plugins.faults.InitializationException;
import org.gcube.application.cms.plugins.faults.ShutDownException;
import org.gcube.application.cms.plugins.model.PluginDescriptor;
import org.gcube.application.cms.plugins.reports.IndexDocumentReport;
import org.gcube.application.cms.plugins.reports.InitializationReport;
import org.gcube.application.cms.plugins.reports.MaterializationReport;
import org.gcube.application.cms.plugins.reports.Report;
import org.gcube.application.cms.plugins.requests.BaseExecutionRequest;
import org.gcube.application.cms.plugins.requests.IndexDocumentRequest;
import org.gcube.application.cms.sdi.engine.PostgisIndexer;
import org.gcube.application.cms.sdi.engine.PostgisTable;
import org.gcube.application.cms.sdi.engine.SDIManager;
import org.gcube.application.cms.sdi.engine.SDIManagerWrapper;
import org.gcube.application.cms.sdi.faults.SDIInteractionException;
import org.gcube.application.cms.serialization.Serialization;
import org.gcube.application.geoportal.common.model.JSONPathWrapper;
import org.gcube.application.geoportal.common.model.document.ComparableVersion;
import org.gcube.application.geoportal.common.model.document.ProfiledDocument;
import org.gcube.application.geoportal.common.model.document.filesets.GCubeSDILayer;
import org.gcube.application.geoportal.common.model.profile.HandlerDeclaration;
import org.gcube.application.geoportal.common.model.profile.Profile;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
import org.gcube.application.geoportal.common.utils.ContextUtils;

import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Slf4j
public class SDIIndexerPlugin extends SDIAbstractPlugin implements IndexerPluginInterface {

    @Data
    private static class MappingObject{
        private String name;
        private String type;
        private String path;
    }

    private static final PluginDescriptor DESCRIPTOR=new PluginDescriptor("SDI-Indexer-Plugin",
            PluginDescriptor.BaseTypes.INDEXER);

    static {
        DESCRIPTOR.setDescription("SDI Indexer. " +
                "Manage Centroids layers.");
        DESCRIPTOR.setVersion(new ComparableVersion("1.0.0"));
    }

    @Override
    public PluginDescriptor getDescriptor() {
        return DESCRIPTOR;
    }

    @Override
    public InitializationReport initInContext() throws InitializationException {
        InitializationReport report = new InitializationReport();
        report.setStatus(Report.Status.OK);
        return report;
    }

    /**
     * Expected parameters :
     * - indexName (unique)
     * - workspace
     *
     * @param request
     * @return
     */
    @Override
    public IndexDocumentReport index(IndexDocumentRequest request) {

        log.info("Indexer {} : Performing {} ",this.getDescriptor().getId(),request);

        ProfiledDocument profiledDocument=request.getDocument();
        Profile profile = request.getProfile();
        Document requestArguments=request.getCallParameters();
        Document profileConfiguration =getConfigurationFromProfile(profile).getConfiguration();

        IndexDocumentReport report= new IndexDocumentReport();

        log.debug("Profile Configuration is {} ",profileConfiguration);

        try{
            // ********* INIT INDEX
            // TODO CACHE
            PostgisIndexer indexer = new PostgisIndexer(sdiCache.getObject(),profile,postgisCache.getObject());

            // SCHEMA
            log.debug("Profile {} : Evaluating Index schema.. ",profile.getId());
            List<PostgisTable.Field> fields = new ArrayList<>(); // TODO From Profile
            fields.add(new PostgisTable.Field("geom", PostgisTable.FieldType.GEOMETRY));
            fields.add(new PostgisTable.Field("projectid", PostgisTable.FieldType.TEXT));
            List mappingObjs= profileConfiguration.get("explicitFieldMapping",List.class);
            if(mappingObjs!=null){
                mappingObjs.forEach(o -> {
                    log.trace("Mapping is {} ",o);
                    MappingObject m = Serialization.convert(o,MappingObject.class);
                    fields.add(new PostgisTable.Field(m.getName(), PostgisTable.FieldType.valueOf(m.getType())));
                });
            }

            indexer.initIndex(requestArguments.getString("indexName"),
                    fields,
                    requestArguments.getString("workspace"),
                    requestArguments.getString("indexName"));

            // ************* PREPARE RECORD

            JSONPathWrapper documentNavigator=new JSONPathWrapper(Serialization.write(profiledDocument));

            Document doc = requestArguments;
            // DEFAULT VALUES
            doc.put("projectid",profiledDocument.get_id());

            // ********************** EVALUATE POSITION
            log.debug("Indexing Profile {} : Evaluating Centroid... ",profile.getId());
            if(profiledDocument.getSpatialReference()!=null){
                log.debug("Using user defined spatial reference "+profiledDocument.getSpatialReference());
                //TODO USE GEOJSON Position
                throw new Exception("Not yet implemented");
            }else {
                log.debug("Profile {} : Getting evaluation paths from profile.. ",profile.getId());
                List bboxEvaluationPaths = profileConfiguration.get("bboxEvaluation",List.class);
                if(bboxEvaluationPaths==null || bboxEvaluationPaths.isEmpty())
                    throw new Exception("Missing configuration bboxEvaluation");

                GCubeSDILayer.BBOX toSet = null;
                for(Object pathObj : bboxEvaluationPaths){
                    log.debug("Profile {} : Evaluating path {} ",profile.getId(),pathObj);
                    for(String path : documentNavigator.getMatchingPaths(pathObj.toString())) {
                        Object bboxObject = documentNavigator.getByPath(path);
                        log.info("Matched path {}, value is {} ",path,bboxObject);
                        GCubeSDILayer.BBOX box = Serialization.convert(bboxObject, GCubeSDILayer.BBOX.class);

                        if(toSet == null) toSet = box;
                        if(box.getMaxX()>toSet.getMaxX()) toSet.setMaxX(box.getMaxX());
                        if(box.getMaxY()>toSet.getMaxY()) toSet.setMaxY(box.getMaxY());

                        if(box.getMinX()<toSet.getMinX()) toSet.setMinX(box.getMinX());
                        if(box.getMinY()<toSet.getMinY()) toSet.setMinY(box.getMinY());
                    }
                }

                log.info("Evaluated BBOX {} ",toSet);
                // Centroid as the midpoint of the evaluated BBOX
                String wkt = String.format("POINT (%1$f %2$f) ",
                        (toSet.getMinX()+toSet.getMaxX())/2,
                        (toSet.getMinY()+toSet.getMaxY())/2);

                // TODO SET Spatial reference

                doc.put("geom",wkt);
            }

            //*********** Additional Values from profile

            log.info("Setting additional values");
            if(mappingObjs!=null){
                mappingObjs.forEach(o -> {
                    log.trace("Mapping is {} ",o);
                    MappingObject m = Serialization.convert(o,MappingObject.class);
                    doc.put(m.getName(),documentNavigator.getByPath(m.getPath()).get(0));
                });
            }

            indexer.insert(doc);

            String finalDocument = documentNavigator.getValueCTX().jsonString();
            log.debug("Final document after indexing is {} ",finalDocument);
            report.setResultingDocument(Document.parse(finalDocument));
            report.setStatus(Report.Status.OK);
        }catch (SDIInteractionException e){
            log.error("Unable to index "+request,e);
            report.setStatus(Report.Status.ERROR);
            report.putMessage(e.getMessage());
        }catch (Throwable t){
            log.error("Unable to index "+request,t);
            report.setStatus(Report.Status.ERROR);
            report.putMessage(t.getMessage());
        }
        return report;
    }

}
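The centroid evaluation above merges every matched BBOX and then derives a single point for the "geom" field. A hedged, self-contained sketch of that arithmetic follows; a plain double[] stands in for GCubeSDILayer.BBOX (an assumption for illustration, not the real type):

// Sketch of the BBOX-merge + centroid logic used by index(); boxes are {minX, minY, maxX, maxY}.
public class CentroidSketch {
    public static void main(String[] args) {
        double[][] boxes = { {12.48, 41.88, 12.51, 41.90}, {12.47, 41.87, 12.49, 41.89} };
        double minX = Double.MAX_VALUE, minY = Double.MAX_VALUE;
        double maxX = -Double.MAX_VALUE, maxY = -Double.MAX_VALUE;
        for (double[] b : boxes) {              // merge: keep the extremes of all matched boxes
            minX = Math.min(minX, b[0]);
            minY = Math.min(minY, b[1]);
            maxX = Math.max(maxX, b[2]);
            maxY = Math.max(maxY, b[3]);
        }
        // centroid of the merged extent, serialized as WKT for the "geom" field
        String wkt = String.format("POINT (%f %f)", (minX + maxX) / 2, (minY + maxY) / 2);
        System.out.println(wkt);
    }
}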
@@ -1,12 +1,13 @@
package org.gcube.application.cms.concessioni.plugins;
package org.gcube.application.cms.sdi.plugins;

import lombok.Data;
import lombok.Getter;
import lombok.Synchronized;
import lombok.extern.slf4j.Slf4j;
import org.bson.Document;
import org.gcube.application.cms.concessioni.sdi.SDIManager;
import org.gcube.application.cms.plugins.AbstractPlugin;
import org.gcube.application.cms.concessioni.sdi.faults.SDIInteractionException;
import org.gcube.application.cms.sdi.engine.SDIManager;
import org.gcube.application.cms.sdi.engine.SDIManagerWrapper;
import org.gcube.application.cms.sdi.faults.SDIInteractionException;
import org.gcube.application.cms.plugins.MaterializationPlugin;
import org.gcube.application.cms.plugins.faults.InitializationException;
import org.gcube.application.cms.plugins.faults.MaterializationException;
@@ -31,7 +32,7 @@ import java.util.List;
import java.util.Map;

@Slf4j
public class SDIMaterializerPlugin implements MaterializationPlugin {
public class SDIMaterializerPlugin extends AbstractPlugin implements MaterializationPlugin {

@Data
public static class MaterializationConfig{
@@ -39,10 +40,10 @@ public class SDIMaterializerPlugin implements MaterializationPlugin {
	private String documentPath;
}

Map<String,SDIManager> sdiManagerMap=null;
Map<String, SDIManagerWrapper> sdiManagerMap=null;

private SDIManager getSDIManager(){
private SDIManagerWrapper getSDIManager(){
	return sdiManagerMap.get(ContextUtils.getCurrentScope());
}

@@ -54,7 +55,7 @@ public class SDIMaterializerPlugin implements MaterializationPlugin {
String context = ContextUtils.getCurrentScope();
if(getSDIManager()==null) {
	log.info("Initializing in " + context);
	sdiManagerMap.put(context,new SDIManager());
	sdiManagerMap.put(context,new SDIManagerWrapper());
}
report.setStatus(Report.Status.OK);
report.putMessage("Initialized SDI Manager in "+context+" pointing to "+getSDIManager().getGeoserverHostName());
@@ -93,12 +94,7 @@ public class SDIMaterializerPlugin implements MaterializationPlugin {
ProfiledDocument profiledDocument=request.getDocument();
Profile profile = request.getProfile();
Document requestArguments=request.getCallParameters();
Document profileConfiguration =null;
Document profileConfiguration =getConfigurationFromProfile(profile).getConfiguration();
for (HandlerDeclaration h : profile.getHandlers()) {
	if (h.getId().equals("SDI-Default-Materializer")) {
		profileConfiguration = h.getConfiguration();
	}
}

MaterializationReport report= new MaterializationReport();

@@ -141,8 +137,9 @@ public class SDIMaterializerPlugin implements MaterializationPlugin {
		}
	}

	String finalDocument = documentNavigator.getValueCTX().jsonString();
	report.setResultingDocument(Document.parse(documentNavigator.getValueCTX().jsonString()));
	log.debug("Final document after materialization is {} ",finalDocument);
	report.setResultingDocument(Document.parse(finalDocument));
	report.setStatus(Report.Status.OK);
}catch (SDIInteractionException e){
	log.error("Unable to materialize "+request,e);
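The hunk above replaces the hand-rolled loop over profile.getHandlers() with getConfigurationFromProfile(profile), presumably provided by the new AbstractPlugin base class. Its real implementation is not part of this commit; the following is only a hedged sketch of the lookup it likely performs, with types simplified to plain maps and the handler id hard-coded purely for illustration:

import java.util.List;
import java.util.Map;
import java.util.Optional;

// Illustrative stand-in: pick the handler declaration whose id matches this plugin
// and return its configuration. The real Profile/HandlerDeclaration types are gCube classes.
public class HandlerConfigSketch {
    @SuppressWarnings("unchecked")
    static Optional<Map<String, Object>> configurationFor(List<Map<String, Object>> handlers, String pluginId) {
        return handlers.stream()
                .filter(h -> pluginId.equals(h.get("id")))
                .map(h -> (Map<String, Object>) h.get("configuration"))
                .findFirst();
    }

    public static void main(String[] args) {
        List<Map<String, Object>> handlers = List.of(
                Map.of("id", "SDI-Default-Materializer", "configuration", Map.of("documentPath", "$.spatial")));
        System.out.println(configurationFor(handlers, "SDI-Default-Materializer").orElse(Map.of()));
    }
}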