2022-02-18 18:11:12 +01:00
|
|
|
package org.gcube.application.cms.sdi.engine;
|
|
|
|
|
|
|
|
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher;
|
|
|
|
import it.geosolutions.geoserver.rest.GeoServerRESTReader;
|
2022-10-18 18:21:39 +02:00
|
|
|
import it.geosolutions.geoserver.rest.decoder.RESTFeatureType;
|
|
|
|
import it.geosolutions.geoserver.rest.decoder.RESTLayer;
|
2022-02-18 18:11:12 +01:00
|
|
|
import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder;
|
|
|
|
import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder;
|
|
|
|
import lombok.extern.slf4j.Slf4j;
|
|
|
|
import org.bson.Document;
|
|
|
|
import org.gcube.application.cms.plugins.requests.BaseExecutionRequest;
|
|
|
|
import org.gcube.application.cms.sdi.faults.SDIInteractionException;
|
2022-03-17 17:54:00 +01:00
|
|
|
import org.gcube.application.cms.sdi.model.GCubeSDILayerBuilder;
|
2022-08-03 18:15:10 +02:00
|
|
|
import org.gcube.application.cms.sdi.model.SupportedFormat;
|
2022-02-18 18:11:12 +01:00
|
|
|
import org.gcube.application.cms.serialization.Serialization;
|
|
|
|
import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFile;
|
|
|
|
import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFileSet;
|
2022-09-26 16:11:20 +02:00
|
|
|
import org.gcube.application.geoportal.common.model.document.filesets.sdi.GCubeSDILayer;
|
2022-03-30 12:57:14 +02:00
|
|
|
import org.gcube.application.geoportal.common.model.document.filesets.sdi.GeoServerPlatform;
|
|
|
|
import org.gcube.application.geoportal.common.model.document.filesets.sdi.PlatformInfo;
|
2022-02-18 18:11:12 +01:00
|
|
|
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
|
|
|
|
import org.gcube.application.geoportal.common.utils.Files;
|
|
|
|
import org.gcube.data.transfer.library.TransferResult;
|
2022-08-03 18:15:10 +02:00
|
|
|
import org.gcube.data.transfer.library.faults.*;
|
2022-02-18 18:11:12 +01:00
|
|
|
import org.gcube.data.transfer.model.Destination;
|
|
|
|
import org.gcube.data.transfer.model.DestinationClashPolicy;
|
2022-03-30 12:57:14 +02:00
|
|
|
import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;
|
2022-02-18 18:11:12 +01:00
|
|
|
|
2022-08-03 18:15:10 +02:00
|
|
|
import java.io.File;
|
2022-02-18 18:11:12 +01:00
|
|
|
import java.net.MalformedURLException;
|
|
|
|
import java.net.URL;
|
|
|
|
import java.util.ArrayList;
|
|
|
|
import java.util.List;
|
|
|
|
|
2022-08-03 18:15:10 +02:00
|
|
|
import static it.geosolutions.geoserver.rest.encoder.GSResourceEncoder.ProjectionPolicy.REPROJECT_TO_DECLARED;
|
2022-03-17 17:54:00 +01:00
|
|
|
import static org.gcube.application.cms.sdi.model.GCubeSDILayerBuilder.EPSG_4326;
|
|
|
|
import static org.gcube.application.cms.sdi.model.GCubeSDILayerBuilder.WGS84_FULL;
|
|
|
|
|
2022-02-18 18:11:12 +01:00
|
|
|
@Slf4j
|
|
|
|
public class SDIManagerWrapper extends SDIManager{
|
|
|
|
|
|
|
|
/**
 * Creates a wrapper around the default {@link SDIManager}.
 *
 * @throws SDIInteractionException if the underlying SDIManager initialization fails
 *         (propagated from the implicit super() call).
 */
public SDIManagerWrapper() throws SDIInteractionException {
}
|
|
|
|
|
|
|
|
/**
 * Materializes the given file set as a GeoServer layer and records the resulting
 * materialization back into the file set.
 *
 * Workflow: validates parameters, selects a supported format (.tif or .shp) from the
 * payloads, computes a conflict-free layer name, publishes the data to GeoServer,
 * reads back the bounding box and appends the resulting {@link GCubeSDILayer} to the
 * file set's materializations.
 *
 * Expected parameters:
 * - "workspace"
 * - "layerTitle"
 * - "documentID"
 * - "basePersistencePath" (useCaseDescriptor specific, e.g. "GNA")
 *
 * @param fileSet the registered file set whose payloads are to be published
 * @param params  execution parameters (see expected keys above)
 * @return the same {@code fileSet} instance, with the new materialization appended
 * @throws SDIInteractionException on validation failure, unsupported format, or any
 *         publishing error (unexpected Throwables are wrapped)
 */
public RegisteredFileSet materializeLayer(RegisteredFileSet fileSet, Document params) throws SDIInteractionException{
    try {
        log.debug("Materializing FS {} on {} ", fileSet, getGeoserverHostName());

        // validate parameters (getMandatory throws if a key is missing)
        String workspace = BaseExecutionRequest.getMandatory("workspace", params);
        String documentID = BaseExecutionRequest.getMandatory("documentID", params);
        String basePersistencePAth = BaseExecutionRequest.getMandatory("basePersistencePath", params);

        // check if empty
        if (fileSet.getPayloads().isEmpty()) throw new SDIInteractionException("No payload to materialize");

        // Builder accumulates layer metadata (workspace, host, name, store, bbox, files)
        // Document geoserverInfo = new Document();
        GCubeSDILayerBuilder layerBuilder=new GCubeSDILayerBuilder();
        layerBuilder.setWorkspace(workspace);
        layerBuilder.setHost(getGeoserverHostName());

        // Evaluate layer data in filesets: first format whose proposed file set is
        // valid wins (formats checked in the order returned by getByExtension).
        // TODO optimize cycles
        List<SupportedFormat> toCheckFormats=SupportedFormat.getByExtension(".tif",".shp");
        SupportedFormat selectedFormat = null;
        String baseName=null;
        for(SupportedFormat format: toCheckFormats){
            log.debug("Checking for {}",format);
            for (Object o : fileSet.getPayloads()) {
                RegisteredFile file = Serialization.convert(o, RegisteredFile.class);
                format.consider(file);
            }
            if(format.getIsProposedFilesetValid()){
                log.debug("Selected format is {}. Fileset is {}",format,format.getToUseFileSet());
                selectedFormat = format;
                // get basename for layer: first file's name, extension stripped, sanitized
                baseName = format.getToUseFileSet().get(0).getName();
                baseName= Files.fixFilename(baseName.substring(0,baseName.lastIndexOf('.')));
                break;
            }
        }

        if(selectedFormat==null)
            throw new SDIInteractionException("Unable to identify layer format. Configured formats are "+toCheckFormats);

        // Evaluate Layer Name: append "_<count>" until no layer with that name exists
        log.debug("Base layer name is {}, checking conflicts.. ",baseName);
        String toSetLayerName = baseName;

        //Checking if layer already exists
        int count = 0;
        GeoServerRESTReader gsReader = getCurrentGeoserver().getReader();
        while (gsReader.getLayer(workspace, toSetLayerName) != null) {
            count++;
            toSetLayerName = baseName + "_" + count;
            log.debug("layer for " + baseName + " already existing, trying " + toSetLayerName);
        }
        log.debug("Layer name will be {}", toSetLayerName);
        layerBuilder.setLayerName(toSetLayerName);

        // Publishing layer in GS: one dedicated store per layer
        String storeName = toSetLayerName + "_store";
        layerBuilder.setStoreName(storeName);

        GeoServerRESTPublisher publisher = getCurrentGeoserver().getPublisher();
        log.debug("Trying to create remote workspace : " + workspace);
        createWorkspace(workspace);

        // Actually publishing files into GS
        Boolean published = null;

        switch(selectedFormat.getFileExtension()){
            case ".tif" : {
                // GeoTIFF: download locally, push via REST, then delete the temp file
                RegisteredFile f = selectedFormat.getToUseFileSet().get(0);
                File temp= null;
                try {
                    temp = Files.downloadFromUrl(f.getName(),f.getLink());
                    published = publisher.publishGeoTIFF(workspace, storeName, toSetLayerName,temp,EPSG_4326,REPROJECT_TO_DECLARED,"raster");
                }finally{ if (temp!=null) java.nio.file.Files.deleteIfExists(temp.toPath());}
                break;
            }
            case ".shp" :{
                // Shapefile: transfer all sidecar files to the geoserver host first,
                // then publish referencing them in place (UploadMethod.EXTERNAL).
                //TODO Evaluate SRS
                String absolutePath = transferFilesetToGS(basePersistencePAth,documentID,selectedFormat.getToUseFileSet(),
                        fileSet.getUUID(),toSetLayerName,baseName,layerBuilder);
                URL directoryPath = new URL("file:" + absolutePath + "/" + toSetLayerName + ".shp");
                published = publisher.publishShp(
                        workspace,
                        storeName,
                        null,
                        toSetLayerName,
                        // UploadMethod.FILE, // needs zip
                        GeoServerRESTPublisher.UploadMethod.EXTERNAL, // needs shp
                        directoryPath.toURI(),
                        EPSG_4326, //SRS
                        "");
                break;
            }
            default: {
                throw new SDIInteractionException("Unsupported data format");
            }
        }

        // check if success (published cannot be null here: both cases assign it,
        // default throws)
        if (!published) {
            throw new SDIInteractionException("Unable to publish layer " + toSetLayerName + " under " + workspace + ". Unknown Geoserver fault.");
        }

        // Read back the published layer to extract its bounding box.
        // NOTE(review): getFeatureType is a vector-layer accessor — presumably returns
        // null for the GeoTIFF (coverage) branch; confirm against GS REST client docs.
        RESTLayer l = gsReader.getLayer(workspace, toSetLayerName);
        RESTFeatureType f = gsReader.getFeatureType(l);

        // NOTE(review): argument order assumed to be (maxX, minX, maxY, minY, maxZ, minZ)
        // per GCubeSDILayerBuilder — confirm; Z extents are hardcoded to 0.
        layerBuilder.setBBOX(f.getMaxX(),f.getMinX(),f.getMaxY(),f.getMinY(),0d,0d);

        GCubeSDILayer materialization = layerBuilder.getLayer();
        log.info("Generated Materialization {}", materialization);

        //Add Materialization to registered file set
        List materializations = fileSet.getMaterializations();
        if (materializations == null) materializations = new ArrayList();
        materializations.add(materialization);
        fileSet.put(RegisteredFileSet.MATERIALIZATIONS, materializations);

        return fileSet;
    }catch(SDIInteractionException e){
        throw e;
    }catch (Throwable t){
        throw new SDIInteractionException("Unexpected exception while trying to materialize File Set "+t.getMessage(),t);
    }
}
|
|
|
|
|
2022-08-03 18:15:10 +02:00
|
|
|
private String transferFilesetToGS(String basePersistencePAth,String documentID,List<RegisteredFile> fileSet,
|
|
|
|
String fileSetUUID,
|
|
|
|
String toSetLayerName, String baseName, GCubeSDILayerBuilder layerBuilder)
|
|
|
|
throws MalformedURLException, InvalidSourceException, SourceNotSetException, InvalidDestinationException,
|
|
|
|
DestinationNotSetException, InitializationException, FailedTransferException {
|
|
|
|
String folderRelativePath = basePersistencePAth + "/" + documentID + "/" + fileSetUUID + "/" + toSetLayerName;
|
|
|
|
log.debug("GS Relative destination path is {}", folderRelativePath);
|
|
|
|
layerBuilder.setPersistencePath(folderRelativePath);
|
|
|
|
|
|
|
|
List<String> filenames = new ArrayList<>();
|
|
|
|
|
|
|
|
String absolutePath = null;
|
|
|
|
|
|
|
|
for (RegisteredFile file : fileSet) {
|
|
|
|
log.info("Sending {} to GS {} at {} ", file, getGeoserverHostName(), folderRelativePath);
|
|
|
|
String completeFilename = Files.fixFilename(file.getName());
|
|
|
|
completeFilename = completeFilename.replaceAll(baseName, toSetLayerName);
|
|
|
|
|
|
|
|
|
|
|
|
Destination destination = new Destination(completeFilename);
|
|
|
|
destination.setCreateSubfolders(true);
|
|
|
|
destination.setOnExistingFileName(DestinationClashPolicy.REWRITE);
|
|
|
|
destination.setOnExistingSubFolder(DestinationClashPolicy.APPEND);
|
|
|
|
|
|
|
|
destination.setPersistenceId("geoserver");
|
|
|
|
destination.setSubFolder(folderRelativePath);
|
|
|
|
|
|
|
|
log.debug("Sending {} to {}", file, destination);
|
|
|
|
TransferResult result = getDtGeoServer().httpSource(new URL(file.getLink()), destination);
|
|
|
|
log.debug("Transferred " + result);
|
|
|
|
|
|
|
|
|
|
|
|
filenames.add(completeFilename);
|
|
|
|
// NB Clash con subfolder is APPEND, thus FOLDER is expected to be the one specified by caller
|
|
|
|
//geoserverInfo.put(""result.getRemotePath().substring(0, result.getRemotePath().lastIndexOf("/")));
|
|
|
|
absolutePath = result.getRemotePath().substring(0, result.getRemotePath().lastIndexOf("/"));
|
|
|
|
}
|
|
|
|
layerBuilder.setFiles(filenames);
|
|
|
|
return absolutePath;
|
|
|
|
}
|
2022-02-18 18:11:12 +01:00
|
|
|
|
2022-03-30 12:57:14 +02:00
|
|
|
public void deleteLayer(GCubeSDILayer toDelete) throws SDIInteractionException {
|
|
|
|
log.trace("Deleting {}",toDelete);
|
|
|
|
try{
|
|
|
|
AbstractGeoServerDescriptor gs=getCurrentGeoserver();
|
|
|
|
GeoServerRESTPublisher publisher = gs.getPublisher();
|
|
|
|
for(Object platformObj : toDelete.getPlatformInfo()){
|
|
|
|
PlatformInfo info =Serialization.convert(platformObj, PlatformInfo.class);
|
|
|
|
switch(info.getType()){
|
|
|
|
case GeoServerPlatform.GS_PLATFORM:{
|
|
|
|
GeoServerPlatform gsInfo = Serialization.convert(info,GeoServerPlatform.class);
|
|
|
|
log.trace("Deleting {} ",gsInfo);
|
|
|
|
// remove store (recursion deletes related layers)
|
|
|
|
log.trace("Removing datastore {}:{}",gsInfo.getWorkspace(),gsInfo.getStoreName());
|
|
|
|
if(!publisher.removeDatastore(gsInfo.getWorkspace(),gsInfo.getStoreName(),true))
|
|
|
|
throw new SDIInteractionException("Unable to remove store "+gsInfo.getWorkspace()+":"+gsInfo.getStoreName());
|
|
|
|
// remove ws if empty
|
|
|
|
log.trace("Checking if empty WS {}",gsInfo.getWorkspace());
|
|
|
|
if(gs.getReader().getDatastores(gsInfo.getWorkspace()).isEmpty())
|
|
|
|
if(!publisher.removeWorkspace(gsInfo.getWorkspace(),true))
|
|
|
|
throw new SDIInteractionException("Unable to remove WS "+gsInfo.getWorkspace());
|
|
|
|
|
|
|
|
// remove actual files data
|
|
|
|
// TODO REMOVE HARDCODED PATCH
|
|
|
|
String path=gsInfo.getPersistencePath().replace("/srv/geoserver_data","geoserver");
|
|
|
|
log.info("Deleting files at {} [{}]",path,gsInfo.getPersistencePath());
|
|
|
|
getDtGeoServer().getWebClient().delete(path);
|
|
|
|
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
default : {
|
|
|
|
throw new SDIInteractionException("Unable to manage platform "+info);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}catch(SDIInteractionException e){
|
|
|
|
throw e;
|
|
|
|
}catch (Throwable t){
|
|
|
|
throw new SDIInteractionException("Unexpected exception while trying to materialize File Set "+t.getMessage(),t);
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
2022-03-17 17:54:00 +01:00
|
|
|
public GCubeSDILayer configureCentroidLayer(String name, String workspace, String storeName, PostgisTable table, DatabaseConnection connection) throws SDIInteractionException {
|
|
|
|
|
|
|
|
GCubeSDILayerBuilder builder = new GCubeSDILayerBuilder()
|
|
|
|
.setWorkspace(workspace)
|
|
|
|
.setStoreName(storeName)
|
2022-10-03 15:32:59 +02:00
|
|
|
.setHost(getGeoserverHostName())
|
|
|
|
.setLayerName(name);
|
2022-02-18 18:11:12 +01:00
|
|
|
|
|
|
|
GSFeatureTypeEncoder fte=new GSFeatureTypeEncoder();
|
|
|
|
fte.setAbstract("Centroid layer for "+name);
|
|
|
|
fte.setEnabled(true);
|
|
|
|
fte.setNativeCRS(WGS84_FULL);
|
|
|
|
fte.setTitle(name);
|
|
|
|
fte.setName(name);
|
|
|
|
|
|
|
|
fte.setLatLonBoundingBox(-180.0, -90.0, 180.0, 90.0, WGS84_FULL);
|
|
|
|
|
|
|
|
String style="clustered_centroids";
|
|
|
|
|
|
|
|
GSLayerEncoder layerEncoder=new GSLayerEncoder();
|
|
|
|
layerEncoder.setDefaultStyle(style);
|
|
|
|
layerEncoder.setEnabled(true);
|
|
|
|
layerEncoder.setQueryable(true);
|
|
|
|
try {
|
|
|
|
//Checking workspace
|
|
|
|
createWorkspace(workspace);
|
|
|
|
//Checking store
|
|
|
|
createStoreFromPostgisDB(workspace, storeName,connection);
|
|
|
|
//Checking layer
|
|
|
|
publishStyle(Files.getFileFromResources("styles/clustered_points.sld"),style);
|
|
|
|
|
|
|
|
log.info("Creating layer in {} : {} with FTE {} , LE {}",workspace,storeName,fte,layerEncoder);
|
|
|
|
if(getCurrentGeoserver().getReader().getLayer(workspace, name)==null)
|
|
|
|
if(!getCurrentGeoserver().getPublisher().publishDBLayer(workspace, storeName, fte, layerEncoder))
|
|
|
|
throw new SDIInteractionException("Unable to create layer "+name);
|
|
|
|
log.debug("layer "+name+" already exists");
|
|
|
|
|
|
|
|
|
2022-03-17 17:54:00 +01:00
|
|
|
return builder.getLayer();
|
2022-02-18 18:11:12 +01:00
|
|
|
} catch (IllegalArgumentException | MalformedURLException e) {
|
|
|
|
throw new SDIInteractionException("Unable to create layer "+name,e);
|
|
|
|
}
|
2022-03-17 17:54:00 +01:00
|
|
|
}
|
2022-02-18 18:11:12 +01:00
|
|
|
}
|