258 lines
11 KiB
Java
258 lines
11 KiB
Java
package org.gcube.application.cms.sdi.engine;
|
|
|
|
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher;
|
|
import it.geosolutions.geoserver.rest.GeoServerRESTReader;
|
|
import it.geosolutions.geoserver.rest.decoder.RESTFeatureType;
|
|
import it.geosolutions.geoserver.rest.decoder.RESTLayer;
|
|
import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder;
|
|
import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder;
|
|
import lombok.extern.slf4j.Slf4j;
|
|
import org.bson.Document;
|
|
import org.gcube.application.cms.caches.AbstractScopedMap;
|
|
import org.gcube.application.cms.plugins.requests.BaseExecutionRequest;
|
|
import org.gcube.application.cms.sdi.faults.SDIInteractionException;
|
|
import org.gcube.application.cms.serialization.Serialization;
|
|
import org.gcube.application.geoportal.common.model.document.filesets.GCubeSDILayer;
|
|
import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFile;
|
|
import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFileSet;
|
|
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
|
|
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
|
|
import org.gcube.application.geoportal.common.utils.Files;
|
|
import org.gcube.data.transfer.library.TransferResult;
|
|
import org.gcube.data.transfer.model.Destination;
|
|
import org.gcube.data.transfer.model.DestinationClashPolicy;
|
|
import sun.reflect.generics.scope.AbstractScope;
|
|
|
|
import java.net.MalformedURLException;
|
|
import java.net.URL;
|
|
import java.util.ArrayList;
|
|
import java.util.List;
|
|
|
|
@Slf4j
|
|
public class SDIManagerWrapper extends SDIManager{
|
|
|
|
/**
 * Creates a wrapper around the base SDI manager, delegating all
 * initialization to the parent {@code SDIManager} constructor.
 *
 * @throws SDIInteractionException if the parent constructor fails to set up
 *         the SDI/GeoServer interaction (propagated as-is)
 */
public SDIManagerWrapper() throws SDIInteractionException {
}
|
|
|
|
/**
|
|
* Expected paramters :
|
|
* - "workspace"
|
|
* - "layerTitle"
|
|
* - "documentID"
|
|
* - "basePersistencePath" (profile specific, e.g. "GNA")
|
|
*
|
|
* @param fileSet
|
|
* @param params
|
|
* @return
|
|
* @throws SDIInteractionException
|
|
*/
|
|
public RegisteredFileSet materializeLayer(RegisteredFileSet fileSet, Document params) throws SDIInteractionException{
|
|
try {
|
|
log.debug("Materializing FS {} on {} ", fileSet, getGeoserverHostName());
|
|
|
|
// validate parameters
|
|
String workspace = BaseExecutionRequest.getMandatory("workspace", params);
|
|
String documentID = BaseExecutionRequest.getMandatory("documentID", params);
|
|
String basePersistencePAth = BaseExecutionRequest.getMandatory("basePersistencePath", params);
|
|
|
|
|
|
// check if empty
|
|
if (fileSet.getPayloads().isEmpty()) throw new SDIInteractionException("No payload to materialize");
|
|
|
|
|
|
Document geoserverInfo = new Document();
|
|
geoserverInfo.put("_type", "Geoserver");
|
|
geoserverInfo.put("workspace", workspace);
|
|
|
|
|
|
// Evaluate Layer Name
|
|
String baseName = getToUseBaseLayerName(fileSet);
|
|
log.debug("Base layer name is {}, checking conflicts.. ",baseName);
|
|
String toSetLayerName = baseName;
|
|
//Check if layer already exists
|
|
int count = 0;
|
|
GeoServerRESTReader gsReader = getCurrentGeoserver().getReader();
|
|
while (gsReader.getLayer(workspace, toSetLayerName) != null) {
|
|
count++;
|
|
toSetLayerName = baseName + "_" + count;
|
|
log.debug("layer for " + baseName + " already existing, trying " + toSetLayerName);
|
|
}
|
|
geoserverInfo.put("layerName", toSetLayerName);
|
|
log.debug("Layer name will be {}", toSetLayerName);
|
|
|
|
|
|
String folderRelativePath = basePersistencePAth + "/" + documentID + "/" + fileSet.getUUID() + "/" + toSetLayerName;
|
|
log.debug("GS Relative destination path is {}", folderRelativePath);
|
|
geoserverInfo.put("persistencePath", folderRelativePath);
|
|
|
|
List<String> filenames = new ArrayList<>();
|
|
|
|
String absolutePath = null;
|
|
|
|
for (Object o : fileSet.getPayloads()) {
|
|
RegisteredFile file = Serialization.convert(o, RegisteredFile.class);
|
|
log.info("Sending {} to GS {} at {} ", file, getGeoserverHostName(), folderRelativePath);
|
|
String completeFilename = Files.fixFilename(file.getName());
|
|
completeFilename = completeFilename.replaceAll(baseName, toSetLayerName);
|
|
|
|
|
|
Destination destination = new Destination(completeFilename);
|
|
destination.setCreateSubfolders(true);
|
|
destination.setOnExistingFileName(DestinationClashPolicy.REWRITE);
|
|
destination.setOnExistingSubFolder(DestinationClashPolicy.APPEND);
|
|
|
|
destination.setPersistenceId("geoserver");
|
|
destination.setSubFolder(folderRelativePath);
|
|
|
|
log.debug("Sending {} to {}", file, destination);
|
|
TransferResult result = getDtGeoServer().httpSource(new URL(file.getLink()), destination);
|
|
log.debug("Transferred " + result);
|
|
|
|
|
|
filenames.add(completeFilename);
|
|
// NB Clash con subfolder is APPEND, thus FOLDER is expected to be the one specified by caller
|
|
//geoserverInfo.put(""result.getRemotePath().substring(0, result.getRemotePath().lastIndexOf("/")));
|
|
absolutePath = result.getRemotePath().substring(0, result.getRemotePath().lastIndexOf("/"));
|
|
}
|
|
geoserverInfo.put("files", filenames);
|
|
|
|
// Publishing layer in GS
|
|
String storeName = toSetLayerName + "_store";
|
|
geoserverInfo.put("storeName", storeName);
|
|
|
|
GeoServerRESTPublisher publisher = getCurrentGeoserver().getPublisher();
|
|
log.debug("Trying to create remote workspace : " + workspace);
|
|
createWorkspace(workspace);
|
|
|
|
|
|
log.debug("Publishing remote folder " + absolutePath);
|
|
|
|
URL directoryPath = new URL("file:" + absolutePath + "/" + toSetLayerName + ".shp");
|
|
|
|
//TODO Evaluate SRS
|
|
|
|
boolean published = publisher.publishShp(
|
|
workspace,
|
|
storeName,
|
|
null,
|
|
toSetLayerName,
|
|
// UploadMethod.FILE, // neeeds zip
|
|
GeoServerRESTPublisher.UploadMethod.EXTERNAL, // needs shp
|
|
directoryPath.toURI(),
|
|
EPSG_4326, //SRS
|
|
""); // default style
|
|
|
|
if (!published) {
|
|
throw new SDIInteractionException("Unable to publish layer " + toSetLayerName + " under " + workspace + ". Unknown Geoserver fault.");
|
|
}
|
|
|
|
RESTLayer l = gsReader.getLayer(workspace, toSetLayerName);
|
|
RESTFeatureType f = gsReader.getFeatureType(l);
|
|
|
|
|
|
List<Document> ogcLinks = new ArrayList<>();
|
|
|
|
Document wmsLink = new Document();
|
|
wmsLink.put("wms", String.format("https://%1$s/geoserver/%2$s/wms?"
|
|
+ "service=WMS&version=1.1.0&request=GetMap&layers=%2$s:%3$s&"
|
|
+ "styles=&bbox=%4$f,%5$f,%6$f,%7$f&srs=%8$s&format=application/openlayers&width=%9$d&height=%10$d",
|
|
getGeoserverHostName(),
|
|
workspace,
|
|
toSetLayerName,
|
|
f.getMinX(),
|
|
f.getMinY(),
|
|
f.getMaxX(),
|
|
f.getMaxY(),
|
|
EPSG_4326,
|
|
400,
|
|
400));
|
|
ogcLinks.add(wmsLink);
|
|
|
|
List<Document> platformInfo = new ArrayList<>();
|
|
platformInfo.add(geoserverInfo);
|
|
// TODO Metadata
|
|
|
|
|
|
// Materialization object
|
|
GCubeSDILayer materialization = new GCubeSDILayer();
|
|
materialization.put(GCubeSDILayer.OGC_LINKS, ogcLinks);
|
|
materialization.put(GCubeSDILayer.B_BOX, new GCubeSDILayer.BBOX(f.getMaxX(), f.getMaxY(), f.getMinX(), f.getMinY()));
|
|
materialization.put(GCubeSDILayer.PLATFORM_INFO, platformInfo);
|
|
|
|
log.info("Generated Materialization {}", materialization);
|
|
|
|
//Add Materialization to registered file set
|
|
List materializations = fileSet.getMaterializations();
|
|
if (materializations == null) materializations = new ArrayList();
|
|
materializations.add(materialization);
|
|
fileSet.put(RegisteredFileSet.MATERIALIZATIONS, materializations);
|
|
|
|
return fileSet;
|
|
}catch(SDIInteractionException e){
|
|
throw e;
|
|
}catch (Throwable t){
|
|
throw new SDIInteractionException("Unexpected exception while trying to materialize File Set "+t.getMessage(),t);
|
|
}
|
|
}
|
|
|
|
|
|
public String configureCentroidLayer(String name, String workspace, String storeName, PostgisTable table, DatabaseConnection connection) throws SDIInteractionException {
|
|
|
|
GSFeatureTypeEncoder fte=new GSFeatureTypeEncoder();
|
|
fte.setAbstract("Centroid layer for "+name);
|
|
fte.setEnabled(true);
|
|
fte.setNativeCRS(WGS84_FULL);
|
|
fte.setTitle(name);
|
|
fte.setName(name);
|
|
|
|
|
|
// GeoServer loads all fields
|
|
// fte.setAttribute(attrs);
|
|
|
|
|
|
fte.setLatLonBoundingBox(-180.0, -90.0, 180.0, 90.0, WGS84_FULL);
|
|
|
|
String style="clustered_centroids";
|
|
|
|
GSLayerEncoder layerEncoder=new GSLayerEncoder();
|
|
layerEncoder.setDefaultStyle(style);
|
|
layerEncoder.setEnabled(true);
|
|
layerEncoder.setQueryable(true);
|
|
try {
|
|
//Checking workspace
|
|
createWorkspace(workspace);
|
|
//Checking store
|
|
createStoreFromPostgisDB(workspace, storeName,connection);
|
|
//Checking layer
|
|
publishStyle(Files.getFileFromResources("styles/clustered_points.sld"),style);
|
|
|
|
log.info("Creating layer in {} : {} with FTE {} , LE {}",workspace,storeName,fte,layerEncoder);
|
|
if(getCurrentGeoserver().getReader().getLayer(workspace, name)==null)
|
|
if(!getCurrentGeoserver().getPublisher().publishDBLayer(workspace, storeName, fte, layerEncoder))
|
|
throw new SDIInteractionException("Unable to create layer "+name);
|
|
log.debug("layer "+name+" already exists");
|
|
|
|
|
|
String link=String.format("https://%1$s/geoserver/%2$s/wms?"
|
|
+"service=WMS&version=1.1.0&request=GetMap&layers=%2$s:%3$s&"
|
|
+ "styles=&bbox=%4$s,%5$s,%6$s,%7$s&srs=%8$s&format=application/openlayers&width=%9$d&height=%10$d",
|
|
getGeoserverHostName(),
|
|
workspace,
|
|
name,
|
|
"-1563071.166172796",
|
|
"4789738.204048398",
|
|
"4334926.486925308",
|
|
"5828118.072551585",
|
|
EPSG_4326,
|
|
400,
|
|
400);
|
|
|
|
return name;
|
|
} catch (IllegalArgumentException | MalformedURLException e) {
|
|
throw new SDIInteractionException("Unable to create layer "+name,e);
|
|
}
|
|
|
|
|
|
}
|
|
}
|