package org.gcube.application.cms.sdi.engine; import it.geosolutions.geoserver.rest.GeoServerRESTPublisher; import it.geosolutions.geoserver.rest.GeoServerRESTReader; import it.geosolutions.geoserver.rest.decoder.RESTFeatureType; import it.geosolutions.geoserver.rest.decoder.RESTLayer; import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder; import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder; import lombok.extern.slf4j.Slf4j; import org.bson.Document; import org.gcube.application.cms.plugins.requests.BaseExecutionRequest; import org.gcube.application.cms.sdi.faults.SDIInteractionException; import org.gcube.application.cms.sdi.model.GCubeSDILayerBuilder; import org.gcube.application.cms.serialization.Serialization; import org.gcube.application.geoportal.common.model.document.filesets.sdi.GCubeSDILayer; import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFile; import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFileSet; import org.gcube.application.geoportal.common.model.document.filesets.sdi.GeoServerPlatform; import org.gcube.application.geoportal.common.model.document.filesets.sdi.PlatformInfo; import org.gcube.application.geoportal.common.model.rest.DatabaseConnection; import org.gcube.application.geoportal.common.utils.Files; import org.gcube.data.transfer.library.TransferResult; import org.gcube.data.transfer.model.Destination; import org.gcube.data.transfer.model.DestinationClashPolicy; import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.List; import static org.gcube.application.cms.sdi.model.GCubeSDILayerBuilder.EPSG_4326; import static org.gcube.application.cms.sdi.model.GCubeSDILayerBuilder.WGS84_FULL; @Slf4j public class SDIManagerWrapper extends SDIManager{ public SDIManagerWrapper() throws SDIInteractionException { } /** * Expected paramters : * - "workspace" * - "layerTitle" * - "documentID" * - "basePersistencePath" (useCaseDescriptor specific, e.g. "GNA") * * @param fileSet * @param params * @return * @throws SDIInteractionException */ public RegisteredFileSet materializeLayer(RegisteredFileSet fileSet, Document params) throws SDIInteractionException{ try { log.debug("Materializing FS {} on {} ", fileSet, getGeoserverHostName()); // validate parameters String workspace = BaseExecutionRequest.getMandatory("workspace", params); String documentID = BaseExecutionRequest.getMandatory("documentID", params); String basePersistencePAth = BaseExecutionRequest.getMandatory("basePersistencePath", params); // check if empty if (fileSet.getPayloads().isEmpty()) throw new SDIInteractionException("No payload to materialize"); // Document geoserverInfo = new Document(); GCubeSDILayerBuilder layerBuilder=new GCubeSDILayerBuilder(); layerBuilder.setWorkspace(workspace); layerBuilder.setHost(getGeoserverHostName()); // Evaluate Layer Name String baseName = getToUseBaseLayerName(fileSet); log.debug("Base layer name is {}, checking conflicts.. ",baseName); String toSetLayerName = baseName; //Check if layer already exists int count = 0; GeoServerRESTReader gsReader = getCurrentGeoserver().getReader(); while (gsReader.getLayer(workspace, toSetLayerName) != null) { count++; toSetLayerName = baseName + "_" + count; log.debug("layer for " + baseName + " already existing, trying " + toSetLayerName); } log.debug("Layer name will be {}", toSetLayerName); layerBuilder.setLayerName(toSetLayerName); String folderRelativePath = basePersistencePAth + "/" + documentID + "/" + fileSet.getUUID() + "/" + toSetLayerName; log.debug("GS Relative destination path is {}", folderRelativePath); layerBuilder.setPersistencePath(folderRelativePath); List filenames = new ArrayList<>(); String absolutePath = null; for (Object o : fileSet.getPayloads()) { RegisteredFile file = Serialization.convert(o, RegisteredFile.class); log.info("Sending {} to GS {} at {} ", file, getGeoserverHostName(), folderRelativePath); String completeFilename = Files.fixFilename(file.getName()); completeFilename = completeFilename.replaceAll(baseName, toSetLayerName); Destination destination = new Destination(completeFilename); destination.setCreateSubfolders(true); destination.setOnExistingFileName(DestinationClashPolicy.REWRITE); destination.setOnExistingSubFolder(DestinationClashPolicy.APPEND); destination.setPersistenceId("geoserver"); destination.setSubFolder(folderRelativePath); log.debug("Sending {} to {}", file, destination); TransferResult result = getDtGeoServer().httpSource(new URL(file.getLink()), destination); log.debug("Transferred " + result); filenames.add(completeFilename); // NB Clash con subfolder is APPEND, thus FOLDER is expected to be the one specified by caller //geoserverInfo.put(""result.getRemotePath().substring(0, result.getRemotePath().lastIndexOf("/"))); absolutePath = result.getRemotePath().substring(0, result.getRemotePath().lastIndexOf("/")); } layerBuilder.setFiles(filenames); // Publishing layer in GS String storeName = toSetLayerName + "_store"; layerBuilder.setStoreName(storeName); GeoServerRESTPublisher publisher = getCurrentGeoserver().getPublisher(); log.debug("Trying to create remote workspace : " + workspace); createWorkspace(workspace); log.debug("Publishing remote folder " + absolutePath); URL directoryPath = new URL("file:" + absolutePath + "/" + toSetLayerName + ".shp"); //TODO Evaluate SRS boolean published = publisher.publishShp( workspace, storeName, null, toSetLayerName, // UploadMethod.FILE, // neeeds zip GeoServerRESTPublisher.UploadMethod.EXTERNAL, // needs shp directoryPath.toURI(), EPSG_4326, //SRS ""); // default style if (!published) { throw new SDIInteractionException("Unable to publish layer " + toSetLayerName + " under " + workspace + ". Unknown Geoserver fault."); } RESTLayer l = gsReader.getLayer(workspace, toSetLayerName); RESTFeatureType f = gsReader.getFeatureType(l); GCubeSDILayer materialization = layerBuilder.getLayer(); log.info("Generated Materialization {}", materialization); //Add Materialization to registered file set List materializations = fileSet.getMaterializations(); if (materializations == null) materializations = new ArrayList(); materializations.add(materialization); fileSet.put(RegisteredFileSet.MATERIALIZATIONS, materializations); return fileSet; }catch(SDIInteractionException e){ throw e; }catch (Throwable t){ throw new SDIInteractionException("Unexpected exception while trying to materialize File Set "+t.getMessage(),t); } } public void deleteLayer(GCubeSDILayer toDelete) throws SDIInteractionException { log.trace("Deleting {}",toDelete); try{ AbstractGeoServerDescriptor gs=getCurrentGeoserver(); GeoServerRESTPublisher publisher = gs.getPublisher(); for(Object platformObj : toDelete.getPlatformInfo()){ PlatformInfo info =Serialization.convert(platformObj, PlatformInfo.class); switch(info.getType()){ case GeoServerPlatform.GS_PLATFORM:{ GeoServerPlatform gsInfo = Serialization.convert(info,GeoServerPlatform.class); log.trace("Deleting {} ",gsInfo); // remove store (recursion deletes related layers) log.trace("Removing datastore {}:{}",gsInfo.getWorkspace(),gsInfo.getStoreName()); if(!publisher.removeDatastore(gsInfo.getWorkspace(),gsInfo.getStoreName(),true)) throw new SDIInteractionException("Unable to remove store "+gsInfo.getWorkspace()+":"+gsInfo.getStoreName()); // remove ws if empty log.trace("Checking if empty WS {}",gsInfo.getWorkspace()); if(gs.getReader().getDatastores(gsInfo.getWorkspace()).isEmpty()) if(!publisher.removeWorkspace(gsInfo.getWorkspace(),true)) throw new SDIInteractionException("Unable to remove WS "+gsInfo.getWorkspace()); // remove actual files data // TODO REMOVE HARDCODED PATCH String path=gsInfo.getPersistencePath().replace("/srv/geoserver_data","geoserver"); log.info("Deleting files at {} [{}]",path,gsInfo.getPersistencePath()); getDtGeoServer().getWebClient().delete(path); break; } default : { throw new SDIInteractionException("Unable to manage platform "+info); } } } }catch(SDIInteractionException e){ throw e; }catch (Throwable t){ throw new SDIInteractionException("Unexpected exception while trying to materialize File Set "+t.getMessage(),t); } } public GCubeSDILayer configureCentroidLayer(String name, String workspace, String storeName, PostgisTable table, DatabaseConnection connection) throws SDIInteractionException { GCubeSDILayerBuilder builder = new GCubeSDILayerBuilder() .setWorkspace(workspace) .setStoreName(storeName) .setHost(getGeoserverHostName()); GSFeatureTypeEncoder fte=new GSFeatureTypeEncoder(); fte.setAbstract("Centroid layer for "+name); fte.setEnabled(true); fte.setNativeCRS(WGS84_FULL); fte.setTitle(name); fte.setName(name); fte.setLatLonBoundingBox(-180.0, -90.0, 180.0, 90.0, WGS84_FULL); String style="clustered_centroids"; GSLayerEncoder layerEncoder=new GSLayerEncoder(); layerEncoder.setDefaultStyle(style); layerEncoder.setEnabled(true); layerEncoder.setQueryable(true); try { //Checking workspace createWorkspace(workspace); //Checking store createStoreFromPostgisDB(workspace, storeName,connection); //Checking layer publishStyle(Files.getFileFromResources("styles/clustered_points.sld"),style); log.info("Creating layer in {} : {} with FTE {} , LE {}",workspace,storeName,fte,layerEncoder); if(getCurrentGeoserver().getReader().getLayer(workspace, name)==null) if(!getCurrentGeoserver().getPublisher().publishDBLayer(workspace, storeName, fte, layerEncoder)) throw new SDIInteractionException("Unable to create layer "+name); log.debug("layer "+name+" already exists"); return builder.getLayer(); } catch (IllegalArgumentException | MalformedURLException e) { throw new SDIInteractionException("Unable to create layer "+name,e); } } }