Introduced GeoTIFF support, moved providers into frameworks

Fabio Sinibaldi 2022-08-03 18:15:10 +02:00
parent 32ab7c7cfc
commit c601dcdda6
23 changed files with 406 additions and 105 deletions
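
For orientation, the core of the new GeoTIFF path, condensed from the SDIManagerWrapper hunk further down; publisher, workspace, storeName and layerName stand for values resolved there, and EPSG_4326 / REPROJECT_TO_DECLARED are the static imports used in that file:

File temp = null;
try {
// stage the registered .tif locally, then publish it to GeoServer as a coverage
temp = Files.downloadFromUrl(file.getName(), file.getLink());
boolean published = publisher.publishGeoTIFF(workspace, storeName, layerName, temp, EPSG_4326, REPROJECT_TO_DECLARED, "raster");
if (!published) throw new SDIInteractionException("Unable to publish layer " + layerName);
} finally {
if (temp != null) java.nio.file.Files.deleteIfExists(temp.toPath());
}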

View File

@ -80,6 +80,12 @@
<artifactId>jackson-datatype-jsr310</artifactId>
</dependency>
<!-- Storage HUB -->
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>storagehub-client-library</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/de.grundid.opendatalab/geojson-jackson -->
<dependency>

View File

@ -6,6 +6,7 @@ import lombok.extern.slf4j.Slf4j;
import org.gcube.application.cms.caches.Cache;
import org.gcube.application.cms.caches.Engine;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import java.util.HashMap;
import java.util.HashSet;
@ -43,6 +44,7 @@ public class ImplementationProvider {
private ImplementationProvider(){
//Defaults
setEngine(new DefaultISProvider(),ISInterface.class);
setEngine(new StorageHubProvider(), StorageHubClient.class);
}
public Map<Class,String> getManagerList(){
HashMap<Class,String> toReturn=new HashMap<>();

View File

@ -1,4 +1,4 @@
package org.gcube.application.geoportal.service.engine.providers;
package org.gcube.application.cms.implementations;
import org.gcube.application.cms.caches.Engine;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;

View File

@ -1,4 +1,4 @@
package org.gcube.application.geoportal.service.engine;
package org.gcube.application.cms.implementations;
import lombok.*;
import lombok.extern.slf4j.Slf4j;
@ -13,7 +13,6 @@ import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import javax.validation.constraints.NotNull;
import java.io.FileNotFoundException;
import java.io.InputStream;
@ -32,7 +31,7 @@ public class WorkspaceManager {
@AllArgsConstructor
@RequiredArgsConstructor
public static class FolderOptions{
@NotNull
@NonNull
private String folderName;
private String folderDescription;
private FolderContainer parent;
@ -43,7 +42,7 @@ public class WorkspaceManager {
@AllArgsConstructor
@RequiredArgsConstructor
public static class FileOptions{
@NotNull
@NonNull
private String fileName;
@NonNull
private InputStream is;

View File

@ -1,10 +1,15 @@
package org.gcube.application.cms.tests.plugins;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.cms.caches.Engine;
import org.gcube.application.cms.implementations.ImplementationProvider;
import org.gcube.application.cms.plugins.Plugin;
import org.gcube.application.cms.plugins.PluginsReflections;
import org.gcube.application.cms.tests.TokenSetter;
import org.gcube.application.cms.tests.model.BasicTests;
import org.gcube.application.geoportal.common.model.document.accounting.Context;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
import org.gcube.application.geoportal.common.utils.StorageUtils;
import org.gcube.application.geoportal.common.utils.tests.GCubeTest;
import org.junit.Assert;
import org.junit.BeforeClass;
@ -37,6 +42,23 @@ public abstract class BasicPluginTest extends BasicTests {
plugins.forEach(PluginCheks.descriptor);
plugins.forEach(PluginCheks.init);
System.out.println("Plugin Loading OK");
ImplementationProvider.get().setEngine(new Engine<StorageUtils>() {
@Override
public void init() {}
@Override
public void shutdown() {}
@Override
public StorageUtils getObject() throws ConfigurationException {
TokenSetter.set(GCubeTest.getContext());
return new StorageUtils();
}
},StorageUtils.class);
}

View File

@ -64,9 +64,9 @@ public class GCubeSDILayer extends Materialization {
return toReturn;
}
public static final BBOX WORLD=new BBOX(180d,90d,-180d,-90d);
public static final BBOX WORLD=new BBOX(180d,-180d,90d,-90d);
public static final BBOX WORLD_3D=new BBOX(180d,90d,-180d,-90d);
public static final BBOX WORLD_3D=new BBOX(180d,-180d,90d,-90d);
public static final String MAX_X="_maxX";
public static final String MAX_Y="_maxY";

View File

@ -1,11 +1,14 @@
package org.gcube.application.geoportal.common.utils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.charset.Charset;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
@ -98,4 +101,15 @@ public class Files {
return toFix.toLowerCase().
replaceAll("[\\-\\*\\+\\/\\\\ \\[\\]\\(\\)\\.\\\"\\:\\;\\|\\=]","_")+extension;
}
public static final File downloadFromUrl(String name, String url) throws IOException {
Path dest = java.nio.file.Files.createTempDirectory("downloads_").resolve(name);
InputStream is = null;
try {
is = new URL(url).openStream();
java.nio.file.Files.copy(is, dest);
return dest.toFile();
} finally {
if (is != null) IOUtils.closeQuietly(is);
}
}
}
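
A brief usage sketch of the new helper, mirroring the MaterializerTests.download() test added further down (the RegisteredFile comes from a fileset payload there; cleaning up the downloaded file and its temp directory is left to the caller):

RegisteredFile f = Serialization.convert(payload, RegisteredFile.class);
File tif = Files.downloadFromUrl(f.getName(), f.getLink());
// ... use the local copy, then remove it
java.nio.file.Files.deleteIfExists(tif.toPath());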

View File

@ -1,11 +1,11 @@
package org.gcube.application.geoportal.common.utils;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.UUID;
import org.gcube.application.geoportal.common.faults.StorageException;
import org.gcube.application.geoportal.common.rest.InterfaceConstants;
import org.gcube.application.geoportal.common.model.rest.TempFile;
import org.gcube.contentmanagement.blobstorage.service.IClient;
@ -74,4 +74,13 @@ public class StorageUtils {
public static final String getUniqueString(){
return UUID.randomUUID().toString();
}
public File download(String id, String name) throws IOException, StorageException {
Path p = Files.createTempDirectory(id).resolve(name);
client.get().LFile(p.toAbsolutePath().toString()).RFile(id);
File toReturn = p.toFile();
if (!toReturn.exists()) throw new StorageException("Unable to download " + id);
return toReturn;
}
}
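
A minimal usage sketch of the new StorageUtils.download helper; the storage ID and file name below are taken from the registeredGeoTif.json sample at the end of this commit, and a valid gCube context token is assumed to be set beforehand (as the test engine registered in BasicPluginTest does):

StorageUtils storage = new StorageUtils();
// resolves the payload by storage ID into a fresh temp directory named after that ID
File local = storage.download("06ff5d37-5015-46f8-91a2-527ffab9a30f", "sample.tif");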

View File

@ -84,11 +84,6 @@
<artifactId>storage-manager-wrapper</artifactId>
</dependency>
<!-- Storage HUB -->
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>storagehub-client-library</artifactId>
</dependency>
<!-- enunciate deps -->

View File

@ -4,14 +4,13 @@ import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.cms.caches.Engine;
import org.gcube.application.cms.implementations.ImplementationProvider;
import org.gcube.application.cms.implementations.StorageHubProvider;
import org.gcube.application.geoportal.common.rest.InterfaceConstants;
import org.gcube.application.geoportal.common.utils.StorageUtils;
import org.gcube.application.geoportal.service.engine.mongo.UCDManagerI;
import org.gcube.application.geoportal.service.engine.postgis.PostgisDBManager;
import org.gcube.application.geoportal.service.engine.postgis.PostgisDBManagerI;
import org.gcube.application.geoportal.service.engine.providers.*;
import org.gcube.application.geoportal.service.engine.providers.ucd.ProfileMap;
import org.gcube.application.geoportal.service.engine.providers.ucd.LocalFolderProfileMapCache;
import org.gcube.application.geoportal.service.engine.providers.ucd.SingleISResourceUCDProvider;
import org.gcube.application.geoportal.service.engine.providers.ucd.UCDManager;
import org.gcube.application.geoportal.service.model.internal.db.Mongo;
@ -20,7 +19,6 @@ import org.gcube.application.geoportal.service.rest.*;
import org.gcube.application.cms.serialization.Serialization;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import org.geotoolkit.internal.sql.table.Entry;
import org.glassfish.jersey.server.ResourceConfig;
import javax.ws.rs.ApplicationPath;
@ -52,7 +50,6 @@ public class GeoPortalService extends ResourceConfig{
ImplementationProvider.get().setEngine(new PostgisConnectionProvider(), PostgisDBManagerI.class);
ImplementationProvider.get().setEngine(new SingleISResourceUCDProvider(), ProfileMap.class);
ImplementationProvider.get().setEngine(new PluginManager(), PluginManager.PluginMap.class);
ImplementationProvider.get().setEngine(new StorageHubProvider(), StorageHubClient.class);
ImplementationProvider.get().setEngine(new UCDManager(),UCDManagerI.class);
ImplementationProvider.get().setEngine(new ConfigurationCache(), ConfigurationCache.ConfigurationMap.class);

View File

@ -13,7 +13,7 @@ import lombok.extern.slf4j.Slf4j;
import org.gcube.application.geoportal.common.model.legacy.*;
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.service.engine.WorkspaceManager;
import org.gcube.application.cms.implementations.WorkspaceManager;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable;
import org.gcube.application.geoportal.service.model.internal.faults.SDIInteractionException;
import org.gcube.common.storagehub.client.dsl.FileContainer;
@ -29,7 +29,6 @@ import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.function.Consumer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

View File

@ -17,9 +17,9 @@ import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.common.utils.StorageUtils;
import org.gcube.application.cms.implementations.ImplementationProvider;
import org.gcube.application.geoportal.service.engine.materialization.SDIManager;
import org.gcube.application.geoportal.service.engine.WorkspaceManager;
import org.gcube.application.geoportal.service.engine.WorkspaceManager.FileOptions;
import org.gcube.application.geoportal.service.engine.WorkspaceManager.FolderOptions;
import org.gcube.application.cms.implementations.WorkspaceManager;
import org.gcube.application.cms.implementations.WorkspaceManager.FileOptions;
import org.gcube.application.cms.implementations.WorkspaceManager.FolderOptions;
import org.gcube.application.geoportal.service.engine.postgis.PostgisIndex;
import org.gcube.application.geoportal.service.model.internal.faults.*;
import org.gcube.application.cms.serialization.Serialization;

View File

@ -47,10 +47,9 @@ import org.gcube.application.geoportal.common.model.JSONPathWrapper;
import org.gcube.application.geoportal.common.utils.ContextUtils;
import org.gcube.application.geoportal.common.utils.StorageUtils;
import org.gcube.application.cms.implementations.ImplementationProvider;
import org.gcube.application.geoportal.service.engine.WorkspaceManager;
import org.gcube.application.cms.implementations.WorkspaceManager;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
import org.gcube.application.geoportal.service.engine.providers.PluginManager;
import org.gcube.application.geoportal.service.engine.providers.ucd.ProfileMap;
import org.gcube.application.geoportal.service.model.internal.faults.*;
import org.gcube.application.cms.serialization.Serialization;
import org.gcube.application.geoportal.service.utils.UserUtils;

View File

@ -10,7 +10,7 @@ import org.gcube.application.cms.serialization.Serialization;
import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFile;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
import org.gcube.application.geoportal.common.model.useCaseDescriptor.UseCaseDescriptor;
import org.gcube.application.geoportal.service.engine.WorkspaceManager;
import org.gcube.application.cms.implementations.WorkspaceManager;
import org.gcube.common.resources.gcore.GenericResource;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
@ -24,7 +24,6 @@ import java.io.StringReader;
import java.io.StringWriter;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;

View File

@ -1,7 +1,6 @@
package org.gcube.application.geoportal.service.ws;
import org.gcube.application.cms.tests.TokenSetter;
import org.gcube.application.geoportal.service.engine.WorkspaceManager;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;

View File

@ -1,7 +1,7 @@
package org.gcube.application.geoportal.service.ws;
import org.gcube.application.cms.tests.TokenSetter;
import org.gcube.application.geoportal.service.engine.WorkspaceManager;
import org.gcube.application.cms.implementations.WorkspaceManager;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;

View File

@ -1,7 +1,7 @@
package org.gcube.application.geoportal.service.ws;
import org.gcube.application.cms.tests.TokenSetter;
import org.gcube.application.geoportal.service.engine.WorkspaceManager;
import org.gcube.application.cms.implementations.WorkspaceManager;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;

View File

@ -76,25 +76,25 @@ public class SDIManager {
}
protected static final String getToUseBaseLayerName(RegisteredFileSet fileset){
// ******** IDENTIFY LAYER NAME correct layer name
// Must be unique under same WS
// equal to shp base name
String baseName= "";
// Chose layer name first identifying filename
for(Object p:fileset.getPayloads()){
RegisteredFile f = Serialization.convert(p,RegisteredFile.class);
String name=f.getName();
if(name.endsWith(".shp")) {
log.debug("SHP is {}",name);
baseName= Files.fixFilename(name.substring(0,name.lastIndexOf('.')));
break;
}
}
return baseName;
}
// protected static final String getToUseBaseLayerName(RegisteredFileSet fileset){
// // ******** IDENTIFY LAYER NAME correct layer name
// // Must be unique under same WS
// // equal to shp base name
//
// String baseName= "";
//
// // Chose layer name first identifying filename
// for(Object p:fileset.getPayloads()){
// RegisteredFile f = Serialization.convert(p,RegisteredFile.class);
// String name=f.getName();
// if(name.endsWith(".shp")||name.endsWith(".tif")) {
// log.debug("File for layer is {}",name);
// baseName= Files.fixFilename(name.substring(0,name.lastIndexOf('.')));
// break;
// }
// }
// return baseName;
// }
protected String createStore(GSPostGISDatastoreEncoder encoder, String workspace) throws SDIInteractionException {
String storeName=encoder.getName();

View File

@ -5,12 +5,16 @@ import it.geosolutions.geoserver.rest.GeoServerRESTReader;
import it.geosolutions.geoserver.rest.decoder.RESTFeatureType;
import it.geosolutions.geoserver.rest.decoder.RESTLayer;
import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder;
import it.geosolutions.geoserver.rest.encoder.coverage.GSCoverageEncoder;
import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder;
import lombok.extern.slf4j.Slf4j;
import org.bson.Document;
import org.gcube.application.cms.implementations.ImplementationProvider;
import org.gcube.application.cms.implementations.WorkspaceManager;
import org.gcube.application.cms.plugins.requests.BaseExecutionRequest;
import org.gcube.application.cms.sdi.faults.SDIInteractionException;
import org.gcube.application.cms.sdi.model.GCubeSDILayerBuilder;
import org.gcube.application.cms.sdi.model.SupportedFormat;
import org.gcube.application.cms.serialization.Serialization;
import org.gcube.application.geoportal.common.model.document.filesets.sdi.GCubeSDILayer;
import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFile;
@ -19,16 +23,21 @@ import org.gcube.application.geoportal.common.model.document.filesets.sdi.GeoSer
import org.gcube.application.geoportal.common.model.document.filesets.sdi.PlatformInfo;
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.common.utils.StorageUtils;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import org.gcube.data.transfer.library.TransferResult;
import org.gcube.data.transfer.library.faults.*;
import org.gcube.data.transfer.model.Destination;
import org.gcube.data.transfer.model.DestinationClashPolicy;
import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import static it.geosolutions.geoserver.rest.encoder.GSResourceEncoder.ProjectionPolicy.REPROJECT_TO_DECLARED;
import static org.gcube.application.cms.sdi.model.GCubeSDILayerBuilder.EPSG_4326;
import static org.gcube.application.cms.sdi.model.GCubeSDILayerBuilder.WGS84_FULL;
@ -70,11 +79,37 @@ public class SDIManagerWrapper extends SDIManager{
layerBuilder.setHost(getGeoserverHostName());
// Evaluate layer data in filesets
// TODO optimize cycles
List<SupportedFormat> toCheckFormats=SupportedFormat.getByExtension(".tif",".shp");
SupportedFormat selectedFormat = null;
String baseName=null;
for(SupportedFormat format: toCheckFormats){
log.debug("Checking for {}",format);
for (Object o : fileSet.getPayloads()) {
RegisteredFile file = Serialization.convert(o, RegisteredFile.class);
format.consider(file);
}
if(format.getIsProposedFilesetValid()){
log.debug("Selected format is {}. Fileset is {}",format,format.getToUseFileSet());
selectedFormat = format;
// get basename for layer
baseName = format.getToUseFileSet().get(0).getName();
baseName= Files.fixFilename(baseName.substring(0,baseName.lastIndexOf('.')));
break;
}
}
if(selectedFormat==null)
throw new SDIInteractionException("Unable to identify layer format. Configured formats are "+toCheckFormats);
// Evaluate Layer Name
String baseName = getToUseBaseLayerName(fileSet);
log.debug("Base layer name is {}, checking conflicts.. ",baseName);
String toSetLayerName = baseName;
//Check if layer already exists
//Checking if layer already exists
int count = 0;
GeoServerRESTReader gsReader = getCurrentGeoserver().getReader();
while (gsReader.getLayer(workspace, toSetLayerName) != null) {
@ -86,41 +121,7 @@ public class SDIManagerWrapper extends SDIManager{
layerBuilder.setLayerName(toSetLayerName);
String folderRelativePath = basePersistencePAth + "/" + documentID + "/" + fileSet.getUUID() + "/" + toSetLayerName;
log.debug("GS Relative destination path is {}", folderRelativePath);
layerBuilder.setPersistencePath(folderRelativePath);
List<String> filenames = new ArrayList<>();
String absolutePath = null;
for (Object o : fileSet.getPayloads()) {
RegisteredFile file = Serialization.convert(o, RegisteredFile.class);
log.info("Sending {} to GS {} at {} ", file, getGeoserverHostName(), folderRelativePath);
String completeFilename = Files.fixFilename(file.getName());
completeFilename = completeFilename.replaceAll(baseName, toSetLayerName);
Destination destination = new Destination(completeFilename);
destination.setCreateSubfolders(true);
destination.setOnExistingFileName(DestinationClashPolicy.REWRITE);
destination.setOnExistingSubFolder(DestinationClashPolicy.APPEND);
destination.setPersistenceId("geoserver");
destination.setSubFolder(folderRelativePath);
log.debug("Sending {} to {}", file, destination);
TransferResult result = getDtGeoServer().httpSource(new URL(file.getLink()), destination);
log.debug("Transferred " + result);
filenames.add(completeFilename);
// NB Clash con subfolder is APPEND, thus FOLDER is expected to be the one specified by caller
//geoserverInfo.put(""result.getRemotePath().substring(0, result.getRemotePath().lastIndexOf("/")));
absolutePath = result.getRemotePath().substring(0, result.getRemotePath().lastIndexOf("/"));
}
layerBuilder.setFiles(filenames);
// Publishing layer in GS
String storeName = toSetLayerName + "_store";
@ -131,31 +132,51 @@ public class SDIManagerWrapper extends SDIManager{
createWorkspace(workspace);
log.debug("Publishing remote folder " + absolutePath);
//
URL directoryPath = new URL("file:" + absolutePath + "/" + toSetLayerName + ".tif");
// Actually publishing files into GS
//TODO Evaluate SRS
Boolean published = null;
boolean published = publisher.publishShp(
workspace,
storeName,
null,
toSetLayerName,
// UploadMethod.FILE, // neeeds zip
GeoServerRESTPublisher.UploadMethod.EXTERNAL, // needs shp
directoryPath.toURI(),
EPSG_4326, //SRS
""); // default style
switch(selectedFormat.getFileExtension()){
case ".tif" : {
RegisteredFile f = selectedFormat.getToUseFileSet().get(0);
File temp= null;
try {
temp = Files.downloadFromUrl(f.getName(),f.getLink());
published = publisher.publishGeoTIFF(workspace, storeName, toSetLayerName,temp,EPSG_4326,REPROJECT_TO_DECLARED,"raster");
}finally{ if (temp!=null) java.nio.file.Files.deleteIfExists(temp.toPath());}
break;
}
case ".shp" :{
//TODO Evaluate SRS
String absolutePath = transferFilesetToGS(basePersistencePAth,documentID,selectedFormat.getToUseFileSet(),
fileSet.getUUID(),toSetLayerName,baseName,layerBuilder);
URL directoryPath = new URL("file:" + absolutePath + "/" + toSetLayerName + ".shp");
published = publisher.publishShp(
workspace,
storeName,
null,
toSetLayerName,
// UploadMethod.FILE, // needs zip
GeoServerRESTPublisher.UploadMethod.EXTERNAL, // needs shp
directoryPath.toURI(),
EPSG_4326, //SRS
"");
break;
}
default: {
throw new SDIInteractionException("Unsupported data format");
}
}
// check if success
if (!published) {
throw new SDIInteractionException("Unable to publish layer " + toSetLayerName + " under " + workspace + ". Unknown Geoserver fault.");
}
RESTLayer l = gsReader.getLayer(workspace, toSetLayerName);
RESTFeatureType f = gsReader.getFeatureType(l);
// RESTLayer l = gsReader.getLayer(workspace, toSetLayerName);
// RESTFeatureType f = gsReader.getFeatureType(l);
GCubeSDILayer materialization = layerBuilder.getLayer();
@ -175,6 +196,46 @@ public class SDIManagerWrapper extends SDIManager{
}
}
private String transferFilesetToGS(String basePersistencePAth,String documentID,List<RegisteredFile> fileSet,
String fileSetUUID,
String toSetLayerName, String baseName, GCubeSDILayerBuilder layerBuilder)
throws MalformedURLException, InvalidSourceException, SourceNotSetException, InvalidDestinationException,
DestinationNotSetException, InitializationException, FailedTransferException {
String folderRelativePath = basePersistencePAth + "/" + documentID + "/" + fileSetUUID + "/" + toSetLayerName;
log.debug("GS Relative destination path is {}", folderRelativePath);
layerBuilder.setPersistencePath(folderRelativePath);
List<String> filenames = new ArrayList<>();
String absolutePath = null;
for (RegisteredFile file : fileSet) {
log.info("Sending {} to GS {} at {} ", file, getGeoserverHostName(), folderRelativePath);
String completeFilename = Files.fixFilename(file.getName());
completeFilename = completeFilename.replaceAll(baseName, toSetLayerName);
Destination destination = new Destination(completeFilename);
destination.setCreateSubfolders(true);
destination.setOnExistingFileName(DestinationClashPolicy.REWRITE);
destination.setOnExistingSubFolder(DestinationClashPolicy.APPEND);
destination.setPersistenceId("geoserver");
destination.setSubFolder(folderRelativePath);
log.debug("Sending {} to {}", file, destination);
TransferResult result = getDtGeoServer().httpSource(new URL(file.getLink()), destination);
log.debug("Transferred " + result);
filenames.add(completeFilename);
// NB Clash policy for existing subfolders is APPEND, thus FOLDER is expected to be the one specified by the caller
//geoserverInfo.put(""result.getRemotePath().substring(0, result.getRemotePath().lastIndexOf("/")));
absolutePath = result.getRemotePath().substring(0, result.getRemotePath().lastIndexOf("/"));
}
layerBuilder.setFiles(filenames);
return absolutePath;
}
public void deleteLayer(GCubeSDILayer toDelete) throws SDIInteractionException {
log.trace("Deleting {}",toDelete);

View File

@ -0,0 +1,62 @@
package org.gcube.application.cms.sdi.model;
import lombok.*;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFile;
import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFileSet;
import java.util.ArrayList;
@Getter
@RequiredArgsConstructor
public abstract class SupportedFormat {
public static ArrayList<SupportedFormat> getByExtension(String... extensions){
ArrayList<SupportedFormat> toReturn = new ArrayList<>();
for (String extension : extensions) {
extension=extension.toLowerCase();
switch (extension){
case ".tif" : {toReturn.add(new SupportedFormat(".tif") {
@Override
public void consider(RegisteredFile f) {
if (f.getName().endsWith(getFileExtension())){
getToUseFileSet().add(f);
isProposedFilesetValid=true;
}
}
});
break;
}
case ".shp":{toReturn.add(new SupportedFormat(".shp") {
@Override
public void consider(RegisteredFile f) {
getToUseFileSet().add(f);
if (f.getName().endsWith(getFileExtension())){
isProposedFilesetValid=true;
}
}
});
break;
}
}
}
return toReturn;
}
@NonNull
private String fileExtension;
protected Boolean isProposedFilesetValid=false;
private ArrayList<RegisteredFile> toUseFileSet = new ArrayList<>();
public abstract void consider(RegisteredFile f);
@Override
public String toString() {
return fileExtension ;
}
}
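
A short sketch of the selection contract this class is meant for, mirroring the loop added in SDIManagerWrapper above (payloads stands for the fileset payload list):

for (SupportedFormat format : SupportedFormat.getByExtension(".tif", ".shp")) {
for (Object o : payloads) {
format.consider(Serialization.convert(o, RegisteredFile.class));
}
if (format.getIsProposedFilesetValid()) {
// format.getToUseFileSet() now holds the payloads to materialize for this format
break;
}
}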

View File

@ -2,6 +2,7 @@ package org.gcube.application.cms.sdi.plugins;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.bson.Document;
import org.gcube.application.cms.implementations.ImplementationProvider;
import org.gcube.application.cms.plugins.faults.InvalidPluginRequestException;
import org.gcube.application.cms.plugins.faults.MaterializationException;
import org.gcube.application.cms.plugins.faults.PluginExecutionException;
@ -12,10 +13,20 @@ import org.gcube.application.cms.serialization.Serialization;
import org.gcube.application.cms.tests.TestDocuments;
import org.gcube.application.cms.tests.TestProfiles;
import org.gcube.application.cms.tests.plugins.BasicPluginTest;
import org.gcube.application.geoportal.common.faults.StorageException;
import org.gcube.application.geoportal.common.model.JSONPathWrapper;
import org.gcube.application.geoportal.common.model.document.Project;
import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFile;
import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFileSet;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.common.utils.StorageUtils;
import org.gcube.application.geoportal.common.utils.tests.GCubeTest;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import static junit.framework.TestCase.assertTrue;
import static org.junit.Assume.assumeTrue;
@ -24,14 +35,30 @@ public class MaterializerTests extends BasicPluginTest {
@Test
public void testShape() throws JsonProcessingException, PluginExecutionException {
assumeTrue(GCubeTest.isTestInfrastructureEnabled());
Project doc= TestDocuments.documentMap.get("registeredFile.json");
Project doc= TestDocuments.documentMap.get("registeredGeoTif.json");
// doc.setProfileID("sdi-tests");
materialize(doc);
}
@Test
public void download() throws ConfigurationException, IOException, StorageException {
assumeTrue(GCubeTest.isTestInfrastructureEnabled());
Project doc= TestDocuments.documentMap.get("registeredGeoTif.json");
for(Object o : new JSONPathWrapper(doc.getTheDocument().toJson()).getByPath("$..fileset")){
RegisteredFileSet fs = Serialization.convert(o,RegisteredFileSet.class);
for(Object oo: fs.getPayloads()){
RegisteredFile f = Serialization.convert(oo,RegisteredFile.class);
File file = Files.downloadFromUrl(f.getName(),f.getLink());
// Files.deleteIfExists(file.toPath());
}
}
}

View File

@ -36,6 +36,14 @@
{"STEP" : "PUBLISH", "roles" :[]}
]
}
}, {
"_id" : "SDI-Default-Materializer",
"_type" : "Materializer",
"_configuration" : {
"registeredFileSetPaths" : [
{"schemaField" : "section._children[?(@.fileset)]","documentPath" : "section.fileset"}
]
}
},
{
"_id" : "DEFAULT-SINGLE-STEP",

View File

@ -0,0 +1,103 @@
{
"_id": "62ea45d8270ce23808c7fe4c",
"_version": "1.0.1",
"_info":
{
"_creationInfo":
{
"_user":
{
"_username": "fabio.sinibaldi"
},
"_context":
{
"_id": "/gcube/devsec/devVRE",
"_name": "/devVRE"
},
"_instant": "2022-08-03T11:54:26.546"
},
"_lastEditInfo":
{
"_user":
{
"_username": "fabio.sinibaldi"
},
"_context":
{
"_id": "/gcube/devsec/devVRE",
"_name": "/devVRE"
},
"_instant": "2022-08-03T11:56:00.987"
},
"_access":
{
"_policy": "OPEN",
"_license": ""
}
},
"_profileID": "basic",
"_profileVersion": "1.0.0",
"_lifecycleInformation":
{
"_phase": "DRAFT",
"_lastInvokedStep": null,
"_lastOperationStatus": "OK",
"_errorMessages":
[],
"_warningMessages":
[],
"_triggeredEvents":
[
{
"event": "INIT_DOCUMENT",
"lastOperationStatus": "OK",
"errorMessages": null,
"warningMessages": null
}
],
"_notes": null
},
"_relationships": null,
"_identificationReferences": null,
"_theDocument":
{
"section":
{
"title": "My Title",
"fileset":
{
"customField": "customFieldValue",
"_uuid": "12fac10b-fada-4f43-b491-76a77ab02209",
"_creationInfo":
{
"_user":
{
"_username": "fabio.sinibaldi"
},
"_context":
{
"_id": "/gcube/devsec/devVRE",
"_name": "/devVRE"
},
"_instant": "2022-08-03T11:55:18.888"
},
"_access":
{
"_policy": "OPEN",
"_license": ""
},
"_folderID": "b751a1b6-4cf0-43ff-82b2-e2f301edf245",
"_payloads":
[
{
"_mimetype": "image/tiff",
"_storageID": "06ff5d37-5015-46f8-91a2-527ffab9a30f",
"_link": "https://data-d.d4science.org/shub/E_NGw3SXNwRlA5cHc0T1JQN2RZeTFUTEt3TmtRckxxZ1pIZXFtaWJld1RybzlSem9oREplWXRxdnFyQTVtejVUaA==",
"_name": "sample.tif"
}
]
}
}
},
"_lock": null
}