gcube-cms-suite/geoportal-service/src/main/java/org/gcube/application/geoportal/service/engine/mongo/ProfiledMongoManager.java

579 lines
29 KiB
Java
Raw Blame History

This file contains invisible Unicode characters

This file contains invisible Unicode characters that are indistinguishable to humans but may be processed differently by a computer. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

package org.gcube.application.geoportal.service.engine.mongo;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.vdurmont.semver4j.Semver;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.ArrayStack;
import org.apache.commons.io.IOUtils;
import org.bson.Document;
import org.bson.types.ObjectId;
import org.gcube.application.cms.plugins.LifecycleManager;
import org.gcube.application.cms.plugins.faults.EventException;
import org.gcube.application.cms.plugins.faults.StepException;
import org.gcube.application.cms.plugins.model.PluginDescriptor;
import org.gcube.application.cms.plugins.reports.DocumentHandlingReport;
import org.gcube.application.cms.plugins.reports.StepExecutionReport;
import org.gcube.application.cms.plugins.requests.BaseRequest;
import org.gcube.application.cms.plugins.requests.EventExecutionRequest;
import org.gcube.application.cms.plugins.requests.StepExecutionRequest;
import org.gcube.application.geoportal.common.faults.StorageException;
import org.gcube.application.geoportal.common.model.configuration.Archive;
import org.gcube.application.geoportal.common.model.configuration.Index;
import org.gcube.application.geoportal.common.model.document.*;
import org.gcube.application.geoportal.common.model.document.access.Access;
import org.gcube.application.geoportal.common.model.document.access.AccessPolicy;
import org.gcube.application.geoportal.common.model.document.accounting.AccountingInfo;
import org.gcube.application.geoportal.common.model.document.accounting.PublicationInfo;
import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFile;
import org.gcube.application.geoportal.common.model.document.filesets.RegisteredFileSet;
import org.gcube.application.geoportal.common.model.document.lifecycle.LifecycleInformation;
import org.gcube.application.geoportal.common.model.useCaseDescriptor.Field;
import org.gcube.application.geoportal.common.model.useCaseDescriptor.HandlerDeclaration;
import org.gcube.application.geoportal.common.model.useCaseDescriptor.UseCaseDescriptor;
import org.gcube.application.geoportal.common.model.configuration.Configuration;
import org.gcube.application.geoportal.common.model.rest.QueryRequest;
import org.gcube.application.geoportal.common.model.rest.RegisterFileSetRequest;
import org.gcube.application.geoportal.common.model.rest.TempFile;
import org.gcube.application.geoportal.common.model.JSONPathWrapper;
import org.gcube.application.geoportal.common.utils.ContextUtils;
import org.gcube.application.geoportal.common.utils.StorageUtils;
import org.gcube.application.cms.implementations.ImplementationProvider;
import org.gcube.application.geoportal.service.engine.WorkspaceManager;
import org.gcube.application.geoportal.common.model.rest.ConfigurationException;
import org.gcube.application.geoportal.service.engine.providers.PluginManager;
import org.gcube.application.geoportal.service.engine.providers.ProfileMapCache;
import org.gcube.application.geoportal.service.model.internal.faults.DeletionException;
import org.gcube.application.cms.serialization.Serialization;
import org.gcube.application.geoportal.service.model.internal.faults.RegistrationException;
import org.gcube.application.geoportal.service.utils.UserUtils;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.security.InvalidParameterException;
import java.time.LocalDateTime;
import java.util.*;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.function.Consumer;
import static org.gcube.application.cms.serialization.Serialization.*;
@Slf4j
@Slf4j
public class ProfiledMongoManager extends MongoManager implements MongoManagerI<Project> {

    /** Use Case Descriptor (UCD) driving this manager: schema, lifecycle handlers and collection. */
    @Getter
    UseCaseDescriptor useCaseDescriptor;

    @Override
    protected String mongoIDFieldName() {
        return ID;
    }

    /**
     * Creates a manager bound to the given UCD and connects to its backing mongo collection.
     *
     * @param profileId the UseCaseDescriptor ID, must not be null
     * @throws WebApplicationException (404) when no UCD is registered under {@code profileId}
     */
    public ProfiledMongoManager(String profileId) throws ConfigurationException, RegistrationException {
        // Check UseCaseDescriptor ID
        log.info("Loading useCaseDescriptor ID {} ", profileId);
        if (profileId == null) throw new InvalidParameterException("UseCaseDescriptor ID cannot be null");
        useCaseDescriptor = ImplementationProvider.get().getEngineByManagedClass(UCDManagerI.class).getById(profileId);
        if (useCaseDescriptor == null)
            throw new WebApplicationException("UseCaseDescriptor " + profileId + " not registered", Response.Status.NOT_FOUND);
        // Connect to DB
        init(getToUseCollectionName());
    }

    /** Collection name currently defaults to the UCD id. */
    private String getToUseCollectionName() {
        // TODO collection name in UCD
        return useCaseDescriptor.getId();
    }

    // Lazily resolved Lifecycle Manager plugin declared by the UCD (null when resolution failed).
    @Getter(lazy = true)
    private final LifecycleManager manager = getLCManager();

    /**
     * Resolves the single LIFECYCLE_MANAGER handler declared in the UCD.
     *
     * @return the plugin instance, or null when missing/ambiguous/unloadable (failure is logged)
     */
    private LifecycleManager getLCManager() {
        try {
            LifecycleManager toReturn = null;
            // Getting Lifecycle Manager declaration from UseCaseDescriptor
            List<HandlerDeclaration> handlerDeclarations = useCaseDescriptor.getHandlersMapByType().get(PluginDescriptor.BaseTypes.LIFECYCLE_MANAGER);
            if (handlerDeclarations == null || handlerDeclarations.isEmpty())
                throw new ConfigurationException("No Lifecycle Handler defined for useCaseDescriptor ID " + useCaseDescriptor.getId());
            if (handlerDeclarations.size() > 1)
                throw new ConfigurationException("Too many Lifecycle Handlers defined (" + handlerDeclarations + ") in useCaseDescriptor ID " + useCaseDescriptor.getId());
            HandlerDeclaration lcHandlerDeclaration = handlerDeclarations.get(0);
            // Loading Lifecycle Manager
            log.debug("Looking for handler {} ", lcHandlerDeclaration);
            toReturn = (LifecycleManager) ImplementationProvider.get().
                    getEngineByManagedClass(PluginManager.PluginMap.class).get(lcHandlerDeclaration.getId());
            if (toReturn == null)
                throw new ConfigurationException("Unable to find Lifecycle Manager Plugin. ID " + lcHandlerDeclaration.getId());
            return toReturn;
        } catch (Throwable t) {
            log.warn("Unable to load LC Manager ", t);
            return null;
        }
    }

    /**
     * Registers a new Project wrapping the given document: sets creation info, default
     * OPEN access, profile coordinates, version 1.0.0 and a clean DRAFT lifecycle state,
     * triggers ON_INIT_DOCUMENT, then inserts and re-reads the stored object.
     */
    @Override
    public Project registerNew(Document toRegisterDoc) throws IOException, StepException, EventException {
        log.info("Registering new document in {} ", useCaseDescriptor.getId());
        log.debug("Going to register {}", toRegisterDoc.toJson());
        Project toRegister = new Project();
        toRegister.setTheDocument(toRegisterDoc);
        PublicationInfo pubInfo = new PublicationInfo();
        pubInfo.setCreationInfo(UserUtils.getCurrent().asInfo());
        // TODO Set Access From UseCaseDescriptor
        Access access = new Access();
        access.setLicense("");
        access.setPolicy(AccessPolicy.OPEN);
        pubInfo.setAccess(access);
        toRegister.setInfo(pubInfo);
        toRegister.setProfileID(useCaseDescriptor.getId());
        toRegister.setProfileVersion(useCaseDescriptor.getVersion());
        toRegister.setVersion(new Semver("1.0.0"));
        LifecycleInformation draftInfo = new LifecycleInformation().cleanState();
        draftInfo.setPhase(LifecycleInformation.DRAFT_PHASE);
        draftInfo.setLastOperationStatus(LifecycleInformation.Status.OK);
        toRegister.setLifecycleInformation(draftInfo);
        // Apply Lifecycle
        toRegister = triggerEvent(toRegister, EventExecutionRequest.Events.ON_INIT_DOCUMENT, null);
        log.debug("Going to register {} ", toRegister);
        // Insert object
        ObjectId id = insertDoc(asDocumentWithId(toRegister));
        log.info("Obtained id {} ", id);
        return getByID(id.toHexString());
    }

    /** Replaces the inner document of project {@code id}, bumping patch version and edit info. */
    @Override
    public Project update(String id, Document toSet) throws IOException, EventException {
        log.trace("Replacing {} ", toSet);
        Project toUpdate = getByID(id);
        toUpdate.setTheDocument(toSet);
        toUpdate.getLifecycleInformation().cleanState();
        toUpdate = onUpdate(toUpdate);
        Project toReturn = convert(replaceDoc(asDocumentWithId(toUpdate), new ObjectId(id)), Project.class);
        log.debug("Updated Project is {}", toReturn);
        return toReturn;
    }

    /** Common update bookkeeping: last-edit accounting, patch-version bump, lifecycle event. */
    private Project onUpdate(Project toUpdate) throws EventException {
        UserUtils.AuthenticatedUser u = UserUtils.getCurrent();
        toUpdate.getInfo().setLastEditInfo(u.asInfo());
        toUpdate.setVersion(toUpdate.getVersion().withIncPatch());
        // NOTE(review): fires ON_INIT_DOCUMENT on every update — looks like it should be a
        // dedicated ON_UPDATE_DOCUMENT event if one exists; confirm against Events constants.
        return triggerEvent(toUpdate, EventExecutionRequest.Events.ON_INIT_DOCUMENT, null);
    }

    /**
     * Deletion is not implemented yet: always fails with DeletionException.
     * The intended flow (deindex, dematerialize, delete content, delete entry) is sketched below.
     */
    @Override
    public void delete(String id, boolean force) throws DeletionException {
        log.debug("Deleting by ID {}, force {}", id, force);
        try {
            Project doc = getByID(id);
            // TODO INVOKE LIFECYCLE
            //if(!force&&isPublished(id)) throw new Exception("Cannot delete published documents. Unpublish it or use force = true");
            try {
                // TODO CHECK PHASE AND STATUS
                // DEINDEX
                // DEMATERIALIZE
                // DELETE CONTENT
                // DELETE ENTRY
                throw new DeletionException("IMPLEMENT THIS");
                // delete(asId(id), getCollectionName());
            } catch (DeletionException e) {
                //storing updated - partially deleted
                // concessione=onUpdate(concessione);
                // replace(asDocumentWithId(concessione), collectionName);
                throw e;
            }
        } catch (Throwable t) {
            throw new DeletionException("Unable to delete " + id, t);
        }
    }

    /**
     * Loads a project by its hex ID.
     *
     * @throws WebApplicationException (404) when no document matches
     */
    @Override
    public Project getByID(String id) throws WebApplicationException {
        Document doc = getDocById(asId(id));
        if (doc == null)
            // FIX: report 404 instead of the generic 500 produced by the no-status constructor
            throw new WebApplicationException("No document with ID " + id, Response.Status.NOT_FOUND);
        return convert(doc, Project.class);
    }

    /** Runs the raw query and collects resulting mongo Documents. */
    @Override
    public Iterable<Document> query(QueryRequest queryRequest) {
        log.info("Querying {} ", queryRequest);
        // FIX: properly typed queue (was raw LinkedBlockingQueue)
        LinkedBlockingQueue<Document> queue = new LinkedBlockingQueue<>();
        queryDoc(queryRequest).forEach(
                (Consumer<? super Document>) (Document d) -> {
                    try {
                        queue.put(d);
                    } catch (Throwable t) {
                        log.warn("Unable to translate " + d);
                    }
                });
        log.info("Returned {} elements ", queue.size());
        return queue;
    }

    /** Runs the query and translates each result into a Project. */
    @Override
    public Iterable<Project> filter(QueryRequest queryRequest) {
        log.info("Searching concessione for filter {} ", queryRequest);
        LinkedBlockingQueue<Project> queue = new LinkedBlockingQueue<>();
        queryDoc(queryRequest).forEach(
                (Consumer<? super Document>) (Document d) -> {
                    try {
                        // FIX: previously raw Documents were enqueued into an Iterable<Project>
                        // (heap pollution); convert as the "Unable to translate" handler implies.
                        queue.put(convert(d, Project.class));
                    } catch (Throwable t) {
                        log.warn("Unable to translate " + d);
                    }
                });
        log.info("Returned {} elements ", queue.size());
        return queue;
    }

    /**
     * Invokes lifecycle step {@code step} on document {@code id}; on failure the document's
     * lifecycle information is replaced with an ERROR record. The (possibly error-marked)
     * document is always persisted and returned.
     */
    @Override
    public Project performStep(String id, String step, Document options) throws StepException, JsonProcessingException {
        Project document = getByID(id);
        try {
            document.getLifecycleInformation().cleanState();
            document = step(document, step, options);
        } catch (Throwable t) {
            log.error("[UseCaseDescriptor {} ] ERROR Invoking Step {} on document {}", useCaseDescriptor.getId(), step, id, t);
            LifecycleInformation info = new LifecycleInformation();
            info.setPhase(document.getLifecycleInformation().getPhase());
            info.setLastOperationStatus(LifecycleInformation.Status.ERROR);
            info.addErrorMessage(t.getMessage());
            info.setLastInvokedStep(step);
            document.setLifecycleInformation(info);
        }
        // FIX: store-and-return moved out of finally — a return inside finally silently
        // swallows any exception thrown by the persistence call itself.
        log.info("Storing {} [UseCaseDescriptor {}] After Step {}, Status : {} ", id, useCaseDescriptor.getId(),
                step, document.getLifecycleInformation().getLastOperationStatus());
        log.debug("LifecycleInformation is {} ", document.getLifecycleInformation());
        if (log.isTraceEnabled()) log.trace("Document is {} ", Serialization.write(document));
        return convert(replaceDoc(asDocumentWithId(document), new ObjectId(id)), Project.class);
    }

    /**
     * Registers a fileset at the requested path of document {@code id}.
     *
     * NB Put at path :
     *
     * Path Examples
     * artifact
     * images
     * images[1]
     * layers[?(@.name = 'myName')].fileset
     */
    @Override
    public Project registerFileSet(String id, RegisterFileSetRequest request) throws ConfigurationException, StorageHubException, StorageException, StepException, JsonProcessingException, DeletionException, EventException {
        log.info("Registering Fileset for {} [useCaseDescriptor ID {}], Request is {} ", id, useCaseDescriptor.getId(), request);
        List<TempFile> files = request.getStreams();
        Document attributes = request.getAttributes();
        Project doc = getByID(id);
        doc.getLifecycleInformation().cleanState();
        doc.getLifecycleInformation().setLastOperationStatus(LifecycleInformation.Status.OK);
        WorkspaceManager ws = new WorkspaceManager();
        StorageUtils storage = ImplementationProvider.get().getEngineByManagedClass(StorageUtils.class);
        log.debug("Checking field {} definition in {}", request.getFieldDefinitionPath(), useCaseDescriptor.getId());
        Field fieldDefinition = getFieldDefinition(useCaseDescriptor, request.getFieldDefinitionPath());
        JSONPathWrapper docWrapper = new JSONPathWrapper(doc.getTheDocument().toJson());
        List<String> matchingPaths = docWrapper.getMatchingPaths(request.getParentPath());
        if (matchingPaths.size() > 1)
            throw new WebApplicationException("Multiple Destination matching parent path " + request.getParentPath(), Response.Status.BAD_REQUEST);
        if (matchingPaths.isEmpty())
            throw new WebApplicationException("Parent path not found at " + request.getParentPath(), Response.Status.BAD_REQUEST);
        String parentMatchingPath = matchingPaths.get(0);
        List<Object> foundElementsByMatchingPaths = docWrapper.getByPath(parentMatchingPath);
        if (foundElementsByMatchingPaths == null || foundElementsByMatchingPaths.isEmpty())
            throw new WebApplicationException("No element found at " + parentMatchingPath, Response.Status.BAD_REQUEST);
        Document parent = Serialization.asDocument(foundElementsByMatchingPaths.get(0));
        // PREPARE REGISTERED FS
        // MANAGE CLASH
        switch (request.getClashOption()) {
            case REPLACE_EXISTING: {
                if (fieldDefinition.isCollection())
                    throw new WebApplicationException("Cannot replace repeatable field " + request.getFieldDefinitionPath() + ".", Response.Status.BAD_REQUEST);
                // DELETE EXISTING AND PUT
                RegisteredFileSet toDelete = Serialization.convert(parent.get(request.getFieldName()), RegisteredFileSet.class);
                if (!(toDelete == null) && !(toDelete.isEmpty()))
                    deleteFileSetRoutine(toDelete, false, ws);
                RegisteredFileSet fs = prepareRegisteredFileSet(doc.getInfo(), doc.getId(), useCaseDescriptor.getId(), request.getAttributes(), files, storage, ws);
                // FIX: log arguments reordered to match placeholders (were fs,id,profile)
                log.debug("Registered Fileset for [ID {} useCaseDescriptor {}] is {} ", doc.getId(), doc.getProfileID(), fs);
                docWrapper.putElement(parentMatchingPath, request.getFieldName(), fs);
                break;
            }
            case MERGE_EXISTING: {
                if (fieldDefinition.isCollection())
                    throw new WebApplicationException("Cannot merge repeatable field " + request.getFieldDefinitionPath() + ".", Response.Status.BAD_REQUEST);
                RegisteredFileSet original = Serialization.convert(parent.get(request.getFieldName()), RegisteredFileSet.class);
                // MERGE ATTRIBUTES AND PUT
                // NB existing values win: preserves FOLDER_ID/PAYLOADS so new files land in the same folder
                Document toUseAttributes = request.getAttributes();
                if (original != null) toUseAttributes.putAll(original);
                RegisteredFileSet fs = prepareRegisteredFileSet(doc.getInfo(), doc.getId(), useCaseDescriptor.getId(), toUseAttributes, files, storage, ws);
                log.debug("Registered Fileset for [ID {} useCaseDescriptor {}] is {} ", doc.getId(), doc.getProfileID(), fs);
                docWrapper.putElement(parentMatchingPath, request.getFieldName(), fs);
                break;
            }
            case APPEND: {
                if (!fieldDefinition.isCollection())
                    throw new WebApplicationException("Cannot add to single field " + request.getFieldDefinitionPath() + ".", Response.Status.BAD_REQUEST);
                RegisteredFileSet fs = prepareRegisteredFileSet(doc.getInfo(), doc.getId(), useCaseDescriptor.getId(), request.getAttributes(), files, storage, ws);
                log.debug("Registered Fileset for [ID {} useCaseDescriptor {}] is {} ", doc.getId(), doc.getProfileID(), fs);
                // FIX: "%1ds" was a broken format (width-1 decimal on a String ->
                // IllegalFormatConversionException); positional string is "%1$s"
                docWrapper.addElementToArray(String.format("%1$s['%2$s']", parentMatchingPath, request.getFieldName()), fs);
                break;
            }
            default: {
                throw new WebApplicationException("Unexpected clash policy " + request.getClashOption(), Response.Status.BAD_REQUEST);
            }
        }
        log.debug("Setting result on profiled document");
        doc.setTheDocument(Document.parse(docWrapper.getValueCTX().jsonString()));
        doc = onUpdate(doc);
        return convert(replaceDoc(asDocumentWithId(doc), new ObjectId(id)), Project.class);
    }

    /**
     * Deletes the single fileset matching {@code path} from document {@code id},
     * removing its workspace content and clearing the field in the stored document.
     */
    @Override
    public Project deleteFileSet(String id, String path, Boolean force) throws ConfigurationException, StorageHubException, JsonProcessingException, DeletionException, EventException {
        log.info("Deleting Fileset for {} [useCaseDescriptor ID {}], at {} [force {} ]", id, useCaseDescriptor.getId(), path, force);
        Project doc = getByID(id);
        // single clean+status pass (was invoked twice)
        doc.getLifecycleInformation().cleanState().setLastOperationStatus(LifecycleInformation.Status.OK);
        JSONPathWrapper wrapper = new JSONPathWrapper(doc.getTheDocument().toJson());
        List<String> matchingPaths = wrapper.getMatchingPaths(path);
        if (matchingPaths.isEmpty())
            throw new WebApplicationException("No Registered FileSet found at " + path, Response.Status.BAD_REQUEST);
        if (matchingPaths.size() > 1)
            throw new WebApplicationException("Multiple Fileset (" + matchingPaths.size() + ") matching " + path, Response.Status.BAD_REQUEST);
        // FIX: getByPath returns a List — convert its (single, guaranteed above) element,
        // not the list itself
        RegisteredFileSet fs = Serialization.convert(wrapper.getByPath(path).get(0), RegisteredFileSet.class);
        log.debug("Going to delete {}", fs);
        deleteFileSetRoutine(fs, force, new WorkspaceManager());
        log.debug("Removing FS from document [ID : {}] by path {}", id, path);
        wrapper.setElement(path, null);
        // FIX: persist the wrapper result — previously the removal was never written back
        doc.setTheDocument(Document.parse(wrapper.getValueCTX().jsonString()));
        doc = onUpdate(doc);
        return convert(replaceDoc(asDocumentWithId(doc), new ObjectId(id)), Project.class);
    }

    /**
     * Builds the current Configuration: mongo collection stats, workspace archive info,
     * and lifecycle-manager archives/indexes. Partial failures are reported as error
     * messages on the returned configuration rather than thrown.
     */
    @Override
    public Configuration getConfiguration() throws ConfigurationException {
        log.debug("Asking configuration for {} in {} ", useCaseDescriptor.getId(), UserUtils.getCurrent().getContext());
        Configuration toReturn = new Configuration();
        List<Archive> archives = new ArrayList<>();
        toReturn.setArchives(archives);
        List<Index> indexes = new ArrayList<>();
        toReturn.setIndexes(indexes);
        // Set Basic Info
        toReturn.setProfileId(this.getUseCaseDescriptor().getId());
        toReturn.setContext(ContextUtils.getCurrentScope());
        toReturn.setLastUpdatedTime(LocalDateTime.now());
        // Add Mongo Info
        Archive mongoArchive = new Archive("DOCUMENT-STORE-COLLECTION");
        MongoCollection coll = getCollection();
        mongoArchive.put("count", coll.count());
        mongoArchive.put("collection_name", getToUseCollectionName());
        archives.add(mongoArchive);
        // TODO ADD TEXT INDEXES
        // Set WS Info
        try {
            archives.add(new WorkspaceManager().getConfiguration());
        } catch (Exception e) {
            toReturn.addErrorMessage("Unable to get WS info " + e.getMessage());
            log.error("Unable to get WS Configuration", e);
        }
        // ADD LC Infos
        AccountingInfo user = UserUtils.getCurrent().asInfo();
        try {
            // FIX: guard against a null manager (getLCManager returns null on failure),
            // which previously escaped this block as an uncaught NPE
            LifecycleManager lc = getManager();
            if (lc == null)
                throw new ConfigurationException("Lifecycle Manager unavailable for " + useCaseDescriptor.getId());
            Configuration lcConfig = lc.getCurrentConfiguration(new BaseRequest(useCaseDescriptor, user.getUser(), user.getContext()));
            log.info("Configuration is {} ", lcConfig);
            if (lcConfig.getArchives() != null)
                archives.addAll(lcConfig.getArchives());
            if (lcConfig.getIndexes() != null)
                indexes.addAll(lcConfig.getIndexes());
        } catch (ConfigurationException e) {
            toReturn.addErrorMessage("Unable to get Lifecycle info " + e.getMessage());
            log.error("Unable to get Lifecycle info ", e);
        }
        log.debug("Returning current configuration {}", toReturn);
        return toReturn;
    }

    /**
     * Executes one lifecycle step, then any events and cascade steps requested by the
     * report, stopping the cascade as soon as the document enters ERROR status.
     * Failures are folded into the document's lifecycle information, never thrown.
     */
    private Project step(Project theDocument, String step, Document callParameters) {
        try {
            log.info("[UseCaseDescriptor {}] Invoking Step {} on {}", useCaseDescriptor.getId(), step, getManager().getDescriptor());
            AccountingInfo user = UserUtils.getCurrent().asInfo();
            StepExecutionRequest request = new StepExecutionRequest(useCaseDescriptor, user.getUser(), user.getContext(), theDocument, step);
            log.debug("Requesting Step Execution {}", request);
            StepExecutionReport report = getManager().performStep(request);
            Project toReturn = report.prepareResult();
            // EVENTS
            if (report.getToTriggerEvents() != null) {
                Iterator<EventExecutionRequest> eventIT = report.getToTriggerEvents().listIterator();
                while (!toReturn.getLifecycleInformation().getLastOperationStatus().equals(LifecycleInformation.Status.ERROR)
                        && eventIT.hasNext()) {
                    EventExecutionRequest triggerRequest = eventIT.next();
                    log.info("Triggering {} ", triggerRequest);
                    toReturn = triggerEvent(toReturn, triggerRequest.getEvent(), triggerRequest.getCallParameters());
                }
            }
            // STEPS
            if (report.getCascadeSteps() != null) {
                Iterator<StepExecutionRequest> stepIT = report.getCascadeSteps().listIterator();
                while (!toReturn.getLifecycleInformation().getLastOperationStatus().equals(LifecycleInformation.Status.ERROR)
                        && stepIT.hasNext()) {
                    StepExecutionRequest triggerRequest = stepIT.next();
                    log.info("Triggering {} ", triggerRequest);
                    toReturn = step(toReturn, triggerRequest.getStep(), triggerRequest.getCallParameters());
                }
            }
            // FIX: return the cascaded result; returning report.prepareResult() again
            // discarded every event/step applied in the loops above
            return toReturn;
        } catch (Throwable t) {
            log.error("Unable to perform step " + step, t);
            theDocument.getLifecycleInformation().addErrorMessage("Unable to perform step " + step + " cause : " + t.getMessage());
            theDocument.getLifecycleInformation().setLastOperationStatus(LifecycleInformation.Status.ERROR);
            return theDocument;
        }
    }

    /**
     * Triggers a lifecycle event on the document. Failures are folded into the
     * document's lifecycle information, never thrown.
     */
    private Project triggerEvent(Project theDocument, String event, Document parameters) {
        try {
            log.info("[UseCaseDescriptor {}] triggering event {} on {}", useCaseDescriptor.getId(), event, getManager().getDescriptor());
            AccountingInfo user = UserUtils.getCurrent().asInfo();
            EventExecutionRequest request = new EventExecutionRequest(useCaseDescriptor, user.getUser(), user.getContext(), theDocument, event);
            log.debug("Triggering {}", request);
            DocumentHandlingReport report = getManager().onEvent(request);
            return report.prepareResult();
        } catch (Throwable t) {
            log.error("Unable to trigger event " + event, t);
            theDocument.getLifecycleInformation().addErrorMessage("Unable to trigger " + event + " cause : " + t.getMessage());
            theDocument.getLifecycleInformation().setLastOperationStatus(LifecycleInformation.Status.ERROR);
            return theDocument;
        }
    }

    /**
     * Builds a RegisteredFileSet from the request attributes and uploads the temp files
     * into a (new or pre-existing) workspace folder, appending to existing payloads.
     *
     * @throws StorageException when a temp file cannot be fetched or stored
     */
    private static RegisteredFileSet prepareRegisteredFileSet(PublicationInfo defaultPublicationInfo, String docID, String profileID,
                                                              Document attributes, List<TempFile> files, StorageUtils storage, WorkspaceManager ws) throws StorageHubException, StorageException {
        log.debug("Preparing Registered FileSet..");
        RegisteredFileSet toReturn = new RegisteredFileSet();
        if (attributes != null) toReturn.putAll(attributes);
        String uuid = UUID.randomUUID().toString();
        toReturn.putIfAbsent(RegisteredFileSet.UUID, uuid);
        toReturn.putIfAbsent(RegisteredFileSet.CREATION_INFO, UserUtils.getCurrent().asInfo());
        toReturn.putIfAbsent(RegisteredFileSet.ACCESS, defaultPublicationInfo.getAccess());
        // FOLDER: reuse the folder referenced by the attributes, or create a fresh one
        String folderID = toReturn.getFolderId();
        log.trace("Folder ID is {} ", folderID);
        FolderContainer sectionFolder = null;
        if (folderID == null || folderID.isEmpty()) {
            FolderContainer base = ws.createFolder(new WorkspaceManager.FolderOptions(
                    docID, "Base Folder for profiled document. UseCaseDescriptor " + profileID, null));
            sectionFolder = ws.createFolder(new WorkspaceManager.FolderOptions(
                    docID + "_" + uuid, "Registered Fileset uuid " + uuid, base));
            toReturn.put(RegisteredFileSet.FOLDER_ID, sectionFolder.getId());
        } else {
            sectionFolder = ws.getFolderById(folderID);
        }
        ArrayList<RegisteredFile> registeredFiles = new ArrayList<>();
        if (toReturn.containsKey(RegisteredFileSet.PAYLOADS))
            registeredFiles.addAll(toReturn.getPayloads());
        for (TempFile f : files) {
            InputStream is = null;
            try {
                log.debug("Opening temp file {}", f);
                String fileUrl = storage.getURL(f.getId());
                log.debug("Got URL {} from ID {}", fileUrl, f.getId());
                is = new URL(fileUrl).openStream();
                RegisteredFile registered = ws.registerFile(new WorkspaceManager.FileOptions(f.getFilename(), is,
                        "Imported via gcube CMS service ", sectionFolder));
                log.debug("Registered " + registered);
                registeredFiles.add(registered);
            } catch (StorageHubException | IOException e) {
                throw new StorageException("Unable to store " + f, e);
            } finally {
                if (is != null)
                    IOUtils.closeQuietly(is);
            }
        }
        toReturn.put(RegisteredFileSet.PAYLOADS, registeredFiles);
        // TODO MERGE
        //toReturn.remove(RegisteredFileSet.MATERIALIZATIONS);
        return toReturn;
    }

    /**
     * Deletes the workspace folder backing a fileset. Materialized filesets are refused
     * unless force=true (force deletion itself is not yet implemented).
     */
    private static void deleteFileSetRoutine(RegisteredFileSet fs, Boolean force, WorkspaceManager ws) throws DeletionException, StorageHubException {
        // FIX: placeholder previously had no argument
        log.debug("Deleting Registered FS {}", fs);
        if (fs.getMaterializations() != null && !fs.getMaterializations().isEmpty()) {
            if (!force)
                throw new DeletionException("Fileset (uuid " + fs.getUUID() + ") already materialized. Use force = true");
            else throw new RuntimeException("Implement this");
            // TODO manager force deletion
            // NB handlers for materialization types
        }
        log.trace("FileSet ID {} : deleting ws folder {}", fs.getUUID(), fs.getFolderId());
        if (fs.getPayloads() != null)
            ws.deleteItem(fs.getFolderId());
    }

    /**
     * Resolves exactly one Field definition at {@code fieldPath} in the UCD schema.
     *
     * @throws WebApplicationException (400) when zero, multiple, or null definitions match
     */
    private static Field getFieldDefinition(UseCaseDescriptor useCaseDescriptor, String fieldPath) throws WebApplicationException {
        JSONPathWrapper schemaWrapper = new JSONPathWrapper(useCaseDescriptor.getSchema().toJson());
        List<Field> fieldDefinitions = schemaWrapper.getByPath(fieldPath, Field.class);
        if (fieldDefinitions == null || fieldDefinitions.isEmpty())
            throw new WebApplicationException("No Field found in schema " + useCaseDescriptor.getId() + " at " + fieldPath, Response.Status.BAD_REQUEST);
        if (fieldDefinitions.size() > 1)
            throw new WebApplicationException("Multiple field definitions (" + fieldDefinitions.size() + ") found in " + useCaseDescriptor.getId() + " for " + fieldPath, Response.Status.BAD_REQUEST);
        Field fieldDefinition = Serialization.convert(fieldDefinitions.get(0), Field.class);
        if (fieldDefinition == null)
            throw new WebApplicationException("Found field is null [" + useCaseDescriptor.getId() + " for " + fieldPath + "]", Response.Status.BAD_REQUEST);
        log.trace("Field definition is {}", fieldDefinition);
        return fieldDefinition;
    }
}