MinIO integration

multipleStorageBackends
Lucio Lelii 2 years ago
parent e11bb536a7
commit 4f87677674

@@ -296,7 +296,7 @@
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.reflections</groupId>
<artifactId>reflections</artifactId>
@@ -406,7 +406,7 @@
<artifactId>jersey-test-framework-provider-grizzly2</artifactId>
<scope>test</scope>
</dependency>
<!-- Storage dependencies -->
<dependency>
<groupId>org.gcube.contentmanagement</groupId>
@@ -419,13 +419,15 @@
<artifactId>storage-manager-wrapper</artifactId>
<version>[3.0.0-SNAPSHOT,4.0.0-SNAPSHOT)</version>
</dependency>
<!-- https://mvnrepository.com/artifact/io.minio/minio -->
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk</artifactId>
<version>1.11.163</version>
<groupId>io.minio</groupId>
<artifactId>minio</artifactId>
<version>8.3.3</version>
</dependency>
</dependencies>
<build>
<finalName>${project.artifactId}</finalName>
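The storage dependency above moves from the AWS SDK to the native MinIO Java client (io.minio:minio 8.3.3), which the new S3Backend further down builds on. A minimal sketch of the client bootstrap that library expects; the endpoint and credentials are placeholders, not values from this commit:

import io.minio.MinioClient;

public class MinioClientSketch {
    public static void main(String[] args) {
        // Hypothetical endpoint and credentials, for illustration only.
        MinioClient client = MinioClient.builder()
                .endpoint("https://minio.example.org")
                .credentials("ACCESS_KEY", "SECRET_KEY")
                .build();
        System.out.println("MinIO client created: " + client);
    }
}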

@@ -36,4 +36,5 @@ public class Constants {
public static final List<String> WRITE_PROTECTED_FOLDER = Arrays.asList(Constants.OLD_VRE_FOLDER_PARENT_NAME, Constants.TRASH_ROOT_FOLDER_NAME);
public static final List<String> PROTECTED_FOLDER = Arrays.asList(Constants.WORKSPACE_ROOT_FOLDER_NAME, Constants.OLD_VRE_FOLDER_PARENT_NAME, Constants.TRASH_ROOT_FOLDER_NAME);
}

@@ -349,11 +349,10 @@ public class Utils {
}
public static void setContentFromMetaInfo(AbstractFileItem item, MetaInfo contentInfo) {
item.getContent().setManagedBy(contentInfo.getManagedBy());
item.getContent().setSize(contentInfo.getSize());
item.getContent().setRemotePath(contentInfo.getRemotePath());
item.getContent().setSize(contentInfo.getSize());
item.getContent().setStorageId(contentInfo.getStorageId());
item.getContent().setPayloadBackend(contentInfo.getPayloadBackend());
}
}

@@ -77,9 +77,9 @@ public class CompressHandler {
try {
AbstractFileItem fileItem = (AbstractFileItem)item;
StorageBackendFactory sbf = storageBackendHandler.get(fileItem.getContent().getManagedBy());
StorageBackendFactory sbf = storageBackendHandler.get(fileItem.getContent().getPayloadBackend());
StorageBackend sb = sbf.create(null);
StorageBackend sb = sbf.create(fileItem.getContent().getPayloadBackend());
InputStream streamToWrite = sb.download(fileItem.getContent());
if (streamToWrite == null){

@@ -111,9 +111,9 @@ public class TrashHandler {
try {
StorageBackendFactory sbf = storageBackendHandler.get(item.getContent().getManagedBy());
StorageBackendFactory sbf = storageBackendHandler.get(item.getContent().getPayloadBackend());
StorageBackend sb = sbf.create(null);
StorageBackend sb = sbf.create(item.getContent().getPayloadBackend());
contentSet.add(new ContentPair(item.getContent(), sb));

@@ -82,7 +82,6 @@ public class Item2NodeConverter {
logger.debug("error setting value",e);
}
}
}
return newNode;
@@ -94,7 +93,7 @@ public class Item2NodeConverter {
}
private void iterateItemNodeAttributeFields(Object object, Node parentNode, String nodeName) throws Exception{
AttributeRootNode attributeRootNode = object.getClass().getAnnotation(AttributeRootNode.class);
Node newNode;
@@ -145,6 +144,21 @@ public class Item2NodeConverter {
iterateItemNodeAttributeFields(obj,newNode, field.getName()+(i++));
}
} else if (field.isAnnotationPresent(NodeAttribute.class)){
NodeAttribute nodeAttribute = field.getAnnotation(NodeAttribute.class);
if (nodeAttribute.isReadOnly()) continue;
String subNodeName = nodeAttribute.value();
logger.trace("retrieving field node "+field.getName());
field.setAccessible(true);
try{
Object obj = field.get(object);
if (obj!=null)
iterateItemNodeAttributeFields(obj, newNode, subNodeName);
} catch (Exception e ) {
logger.debug("error setting value",e);
}
}
}
}
@@ -197,22 +211,9 @@ public class Item2NodeConverter {
node.setProperty(NodeProperty.LAST_MODIFIED_BY.toString(), item.getLastModifiedBy());
node.setProperty(NodeProperty.LAST_ACTION.toString(), action.name());
for (Field field : retrieveAllFields(item.getContent().getClass())){
if (field.isAnnotationPresent(Attribute.class)){
Attribute attribute = field.getAnnotation(Attribute.class);
if (attribute.isReadOnly()) continue;
field.setAccessible(true);
try{
//Class<?> returnType = field.getType();
Values values = getObjectValue(field.getType(), field.get(item.getContent()));
if (values.isMulti()) contentNode.setProperty(attribute.value(), values.getValues() );
else contentNode.setProperty(attribute.value(), values.getValue());
} catch (Exception e ) {
logger.debug("error setting value for attribute "+attribute.value(),e);
}
}
}
replaceContentFieldIterator(contentNode, item.getContent().getClass(),item.getContent());
} catch (RepositoryException e) {
logger.error("error writing repository",e);
@@ -220,7 +221,41 @@ public class Item2NodeConverter {
}
}
//VALID ONLY FOR CONTENT
private void replaceContentFieldIterator(Node node, Class<?> clazz, Object instance) {
for (Field field : retrieveAllFields(clazz)){
if (field.isAnnotationPresent(Attribute.class)){
Attribute attribute = field.getAnnotation(Attribute.class);
if (attribute.isReadOnly()) continue;
field.setAccessible(true);
try{
//Class<?> returnType = field.getType();
Values values = getObjectValue(field.getType(), field.get(instance));
if (values.isMulti()) node.setProperty(attribute.value(), values.getValues() );
else node.setProperty(attribute.value(), values.getValue());
} catch (Exception e ) {
logger.debug("error setting value for attribute "+attribute.value(),e);
}
} else if (field.isAnnotationPresent(NodeAttribute.class)){
NodeAttribute nodeAttribute = field.getAnnotation(NodeAttribute.class);
if (nodeAttribute.isReadOnly()) continue;
String subNodeName = nodeAttribute.value();
logger.trace("retrieving field node "+field.getName());
field.setAccessible(true);
try{
Object obj = field.get(instance);
if (obj!=null)
iterateItemNodeAttributeFields(obj, node, subNodeName);
} catch (Exception e ) {
logger.debug("error setting value",e);
}
}
}
}
public void updateHidden(Node node, Boolean hidden,String login) throws RepositoryException {
Utils.setPropertyOnChangeNode(node, login, ItemAction.UPDATED);
node.setProperty(NodeProperty.HIDDEN.toString(), hidden);
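The replaceContentFieldIterator extracted above is needed because the content node now carries a nested PayloadBackend, persisted as a sub-node rather than a flat property: replacing content must recurse into @NodeAttribute fields in addition to writing plain @Attribute values. A hypothetical class shape showing the two cases the iterator distinguishes (field names and annotation values are invented; the real ones live in storagehub-model):

public class ContentSketch {

    @Attribute("hl:size")               // hypothetical property name
    private long size;                  // flat value, written with node.setProperty

    @NodeAttribute("hl:payloadBackend") // hypothetical sub-node name
    private PayloadBackend backend;     // delegated to iterateItemNodeAttributeFields
}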

@@ -56,6 +56,7 @@ import org.gcube.data.access.storagehub.handlers.items.builders.FolderCreationParameters
import org.gcube.data.access.storagehub.handlers.items.builders.GCubeItemCreationParameters;
import org.gcube.data.access.storagehub.handlers.items.builders.URLCreationParameters;
import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -141,7 +142,7 @@ public class ItemHandler {
}
private Node create(FileCreationParameters params, Node destination) throws Exception{
Node newNode = createFileItemInternally(params.getSession(), destination, params.getStream(), params.getName(), params.getDescription(), params.getUser(), true);
Node newNode = createFileItemInternally(params.getSession(), destination, params.getStream(), params.getName(), params.getDescription(), params.getFileDetails(), params.getUser(), true);
params.getSession().save();
versionHandler.checkinContentNode(newNode);
log.info("file with id {} correctly created",newNode.getIdentifier());
@@ -190,13 +191,13 @@ public class ItemHandler {
log.debug("creating file with entire path {}, name {}, parentPath {} ", entirePath, name, parentPath);
Node fileNode = null;
if (parentPath.isEmpty())
fileNode = createFileItemInternally(params.getSession(), parentDirectoryNode, input, name, "", params.getUser(), false);
fileNode = createFileItemInternally(params.getSession(), parentDirectoryNode, input, name, "", params.getFileDetails(), params.getUser(), false);
else {
Node parentNode = directoryNodeMap.get(parentPath);
if (parentNode ==null)
parentNode = createPath(parentPath, directoryNodeMap, parentDirectoryNode, params.getSession(), params.getUser());
fileNode = createFileItemInternally(params.getSession(), parentNode, input, name, "", params.getUser(), false);
fileNode = createFileItemInternally(params.getSession(), parentNode, input, name, "", params.getFileDetails(), params.getUser(), false);
}
fileNodes.add(fileNode);
}catch(Exception e) {
@@ -246,17 +247,15 @@ public class ItemHandler {
return newNode;
}
private Node createFileItemInternally(Session ses, Node destinationNode, InputStream stream, String name, String description, String login, boolean withLock) throws RepositoryException, StorageHubException{
private Node createFileItemInternally(Session ses, Node destinationNode, InputStream stream, String name, String description, FormDataContentDisposition fileDetails, String login, boolean withLock) throws RepositoryException, StorageHubException{
log.debug("UPLOAD: starting preparing file");
log.trace("UPLOAD: starting preparing file");
Node newNode;
FolderItem destinationItem = node2Item.getItem(destinationNode, Excludes.ALL);
StorageBackendFactory sbf = storageBackendHandler.get(destinationItem.getBackend().getStorageName());
//TODO: add metadata taken from content node
StorageBackend sb = sbf.create(destinationItem.getBackend().getParameters());
StorageBackendFactory sbf = storageBackendHandler.get(destinationItem.getBackend());
StorageBackend sb = sbf.create(destinationItem.getBackend());
String relativePath = destinationNode.getPath();
@@ -268,7 +267,7 @@ public class ItemHandler {
newNode = ses.getNode(newNodePath);
authChecker.checkWriteAuthorizationControl(ses, login, newNode.getIdentifier(), false);
AbstractFileItem item = fillItemWithContent(stream, sb, name, description, relativePath,login);
AbstractFileItem item = fillItemWithContent(stream, sb, name, description, fileDetails, relativePath,login);
if (withLock) {
try {
@@ -289,7 +288,7 @@ public class ItemHandler {
}
else {
authChecker.checkWriteAuthorizationControl(ses, login, destinationNode.getIdentifier(), true);
AbstractFileItem item = fillItemWithContent(stream, sb, name, description, relativePath, login);
AbstractFileItem item = fillItemWithContent(stream, sb, name, description, fileDetails, relativePath, login);
if (withLock) {
try {
log.debug("trying to acquire lock");
@@ -314,16 +313,16 @@ public class ItemHandler {
private AbstractFileItem fillItemWithContent(InputStream stream, StorageBackend storageBackend, String name, String description, String relPath, String login) throws BackendGenericError{
log.debug("UPLOAD: filling content");
ContentHandler handler = getContentHandler(stream, storageBackend, name, relPath, login);
private AbstractFileItem fillItemWithContent(InputStream stream, StorageBackend storageBackend, String name, String description, FormDataContentDisposition fileDetails, String relPath, String login) throws BackendGenericError{
log.trace("UPLOAD: filling content");
ContentHandler handler = getContentHandler(stream, storageBackend, name, fileDetails, relPath, login);
AbstractFileItem item =handler.buildItem(name, description, login);
return item ;
}
private ContentHandler getContentHandler(InputStream stream, StorageBackend storageBackend, String name, String relPath, String login) throws BackendGenericError {
private ContentHandler getContentHandler(InputStream stream, StorageBackend storageBackend, String name, FormDataContentDisposition fileDetails, String relPath, String login) throws BackendGenericError {
log.debug("UPLOAD: handling content");
log.trace("UPLOAD: handling content");
final MultipleOutputStream mos;
try{
@@ -354,7 +353,7 @@ public class ItemHandler {
is1.reset();
handler.initiliseSpecificContent(is1, name, mimeType);
log.debug("UPLOAD: reading the mimetype - finished in {}",System.currentTimeMillis()-start);
log.trace("UPLOAD: reading the mimetype - finished in {}",System.currentTimeMillis()-start);
} catch (Throwable e) {
log.error("error retrieving mimeType",e);
throw new RuntimeException(e);
@@ -370,7 +369,11 @@ public class ItemHandler {
public MetaInfo call() throws Exception {
try(InputStream is1 = mos.get()){
log.debug("UPLOAD: upload on - start");
MetaInfo info = storageBackend.upload(is1, relPath, name);
MetaInfo info;
if (fileDetails !=null && fileDetails.getSize()>0)
info = storageBackend.upload(is1, relPath, name, fileDetails.getSize());
else
info = storageBackend.upload(is1, relPath, name);
log.debug("UPLOAD: upload on storage - stop");
return info;
}catch (Throwable e) {
@@ -385,18 +388,20 @@ public class ItemHandler {
Future<MetaInfo> uploaderF = executor.submit(AuthorizedTasks.bind(uploader));
long start = System.currentTimeMillis();
log.debug("UPLOAD: writing the stream - start");
log.trace("UPLOAD: writing the stream - start");
try {
mos.startWriting();
ContentHandler handler = detectorF.get();
MetaInfo info = uploaderF.get();
log.debug("UPLOAD: writing the stream - finished in {}",System.currentTimeMillis()-start);
log.trace("UPLOAD: writing the stream - finished in {}",System.currentTimeMillis()-start);
handler.getContent().setData(NodeConstants.CONTENT_NAME);
handler.getContent().setStorageId(info.getStorageId());
handler.getContent().setSize(info.getSize());
handler.getContent().setRemotePath(info.getRemotePath());
handler.getContent().setManagedBy(info.getManagedBy());
handler.getContent().setRemotePath(info.getRemotePath());
handler.getContent().setPayloadBackend(info.getPayloadBackend());
log.trace("UPLOAD: content payload seta as {} ", handler.getContent().getPayloadBackend());
return handler;
}catch (Exception e) {
throw new BackendGenericError(e);
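The net effect of the ItemHandler changes is that the size declared in the multipart upload, when present, is handed down to the storage layer, so S3-style backends can stream with a known object length instead of buffering or stat-ing the object afterwards. The dispatch reduces to roughly the following sketch, assuming the StorageBackend overloads introduced in this commit:

// Sketch only; StorageBackend, MetaInfo and BackendGenericError are the gCube types in this diff.
static MetaInfo uploadWithDeclaredSize(StorageBackend sb, InputStream is, String relPath,
        String name, FormDataContentDisposition fileDetails) throws BackendGenericError {
    // A positive declared size selects the new sized overload.
    if (fileDetails != null && fileDetails.getSize() > 0)
        return sb.upload(is, relPath, name, fileDetails.getSize());
    // Otherwise fall back to the unsized overload and let the backend size the object.
    return sb.upload(is, relPath, name);
}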

@@ -83,7 +83,7 @@ public class Node2ItemConverter {
setGenericFields(node.getFrozenNode(), Content.class, null, content);
return content;
}
public Message getMessageItem(Node node) throws RepositoryException{
if (!(node.getPrimaryNodeType().getName().equals("nthl:itemSentRequest")
|| node.getPrimaryNodeType().getName().equals("nthl:itemSentRequestSH")))
@@ -95,7 +95,7 @@ public class Node2ItemConverter {
}catch (Throwable e) {
msg.setWithAttachments(false);
}
setRootItemCommonFields(node, Collections.emptyList(), Message.class, msg);
return msg;
}
@@ -129,7 +129,7 @@ public class Node2ItemConverter {
item.setExternalManaged(false);
}
item.setLocked(node.isLocked());
setRootItemCommonFields(node, excludes, classToHandle, item);
@@ -137,7 +137,7 @@ public class Node2ItemConverter {
return item;
}
private <T extends Item> boolean hasTypedParent(Node node, Class<T> parentType) throws BackendGenericError, RepositoryException{
if(node==null) return false;
@@ -146,16 +146,16 @@ public class Node2ItemConverter {
private <T extends RootItem> void setRootItemCommonFields(Node node, List<String> excludes, Class<T> classToHandle, T instance) throws RepositoryException{
try{
instance.setParentId(node.getParent().getIdentifier());
instance.setParentPath(node.getParent().getPath());
}catch (Throwable e) {
logger.trace("Root node doesn't have a parent");
}
instance.setRelatedNode(node);
instance.setId(node.getIdentifier());
instance.setName(Text.unescapeIllegalJcrChars(node.getName()));
@@ -166,7 +166,7 @@ public class Node2ItemConverter {
setGenericFields(node, classToHandle, excludes, instance);
}
private <T> void setGenericFields(Node node, Class<T> classToHandle,List<String> excludes, T instance){
for (Field field : retrieveAllFields(classToHandle)){
if (field.isAnnotationPresent(Attribute.class)){
@@ -192,7 +192,7 @@ public class Node2ItemConverter {
}
}
}
private <T> T iterateNodeAttributeFields(Class<T> clazz, Node node) throws Exception{
T obj = clazz.newInstance();
for (Field field : retrieveAllFields(clazz)){
@@ -200,84 +200,113 @@ public class Node2ItemConverter {
setAttributeFieldCheckingDefault(field, obj, node);
} else if (field.isAnnotationPresent(MapAttribute.class)){
logger.trace("found field {} of type annotated as MapAttribute in class {} and node name {}", field.getName(), clazz.getName(), node.getName());
field.setAccessible(true);
String exclude = field.getAnnotation(MapAttribute.class).excludeStartWith();
Map<String, Object> mapToset = new HashMap<String, Object>();
PropertyIterator iterator = node.getProperties();
if (iterator!=null) {
while (iterator.hasNext()){
Property prop = iterator.nextProperty();
if (!exclude.isEmpty() && prop.getName().startsWith(exclude)) continue;
try{
logger.trace("adding {} in the map",prop.getName());
mapToset.put(prop.getName(), getPropertyValue(prop));
}catch(PathNotFoundException e){
logger.warn("the property {} is not mapped",prop.getName());
} catch (Exception e ) {
logger.debug("error setting value {}",e.getMessage());
}
}
}
field.set(obj, mapToset);
setMapAttribute(field, obj, node);
} else if (field.isAnnotationPresent(ListNodes.class)){
logger.trace("found field {} of type annotated as ListNodes in class {} on node {}", field.getName(), clazz.getName(), node.getName());
setListNode(field, obj, node);
} else if (field.isAnnotationPresent(NodeAttribute.class)){
logger.debug("found field {} of type annotated as NodeAttribute in class {} on node {}", field.getName(), clazz.getName(), node.getName());
String fieldNodeName = field.getAnnotation(NodeAttribute.class).value();
//for now it excludes only first level node
//if (excludes!=null && excludes.contains(fieldNodeName)) continue;
//for now it excludes only first level node
logger.info("retrieving field node {} on field {}", fieldNodeName, field.getName());
field.setAccessible(true);
String exclude = field.getAnnotation(ListNodes.class).excludeTypeStartWith();
String include = field.getAnnotation(ListNodes.class).includeTypeStartWith();
try{
Node fieldNode = node.getNode(fieldNodeName);
logger.info("looking in node {} searched with {}",fieldNode.getName(),fieldNodeName);
field.set(obj, iterateNodeAttributeFields(field.getType(), fieldNode));
}catch(PathNotFoundException e){
logger.warn("the current node dosn't contain {} node",fieldNodeName);
} catch (Exception e ) {
logger.warn("error setting value",e);
}
Class listType = field.getAnnotation(ListNodes.class).listClass();
Map<String, Class> subTypesMap = Collections.emptyMap();
}
}
return obj;
}
if (!typeToSubtypeMap.containsKey(listType)) {
private <T> void setMapAttribute(Field field, T instance, Node node) throws Exception{
field.setAccessible(true);
String exclude = field.getAnnotation(MapAttribute.class).excludeStartWith();
Map<String, Object> mapToset = new HashMap<String, Object>();
PropertyIterator iterator = node.getProperties();
if (iterator!=null) {
while (iterator.hasNext()){
Property prop = iterator.nextProperty();
if (!exclude.isEmpty() && prop.getName().startsWith(exclude)) continue;
try{
logger.trace("adding {} in the map",prop.getName());
Configuration config = new ConfigurationBuilder().forPackages(listType.getPackage().getName());
Reflections reflections = new Reflections(config);
Set<Class> subTypes = reflections.getSubTypesOf(listType);
mapToset.put(prop.getName(), getPropertyValue(prop));
}catch(PathNotFoundException e){
logger.warn("the property {} is not mapped",prop.getName());
} catch (Exception e ) {
logger.debug("error setting value {}",e.getMessage());
}
}
}
field.set(instance, mapToset);
}
if (subTypes.size()>0) {
subTypesMap = new HashMap<>();
for (Class subtype: subTypes)
if (subtype.isAnnotationPresent(AttributeRootNode.class)) {
AttributeRootNode attributeRootNode = (AttributeRootNode)subtype.getAnnotation(AttributeRootNode.class);
subTypesMap.put(attributeRootNode.value(), subtype);
}
} else logger.trace("no subtypes found for {}",listType.getName());
private <T> void setListNode(Field field, T instance, Node node) throws Exception{
field.setAccessible(true);
String exclude = field.getAnnotation(ListNodes.class).excludeTypeStartWith();
String include = field.getAnnotation(ListNodes.class).includeTypeStartWith();
typeToSubtypeMap.put(listType, subTypesMap);
Class listType = field.getAnnotation(ListNodes.class).listClass();
} else {
logger.info("subtypes already found in cache");
subTypesMap = typeToSubtypeMap.get(listType);
}
Map<String, Class> subTypesMap = Collections.emptyMap();
List<Object> toSetList = new ArrayList<>();
if (!typeToSubtypeMap.containsKey(listType)) {
NodeIterator iterator = node.getNodes();
Configuration config = new ConfigurationBuilder().forPackages(listType.getPackage().getName());
Reflections reflections = new Reflections(config);
Set<Class> subTypes = reflections.getSubTypesOf(listType);
while (iterator.hasNext()){
Node currentNode = iterator.nextNode();
String primaryType = currentNode.getPrimaryNodeType().getName();
if (subTypes.size()>0) {
subTypesMap = new HashMap<>();
for (Class subtype: subTypes)
if (subtype.isAnnotationPresent(AttributeRootNode.class)) {
AttributeRootNode attributeRootNode = (AttributeRootNode)subtype.getAnnotation(AttributeRootNode.class);
subTypesMap.put(attributeRootNode.value(), subtype);
}
} else logger.trace("no subtypes found for {}",listType.getName());
logger.trace("the current node {} has a list",currentNode.getName());
typeToSubtypeMap.put(listType, subTypesMap);
if (!include.isEmpty() && !primaryType.startsWith(include))
continue;
if (!exclude.isEmpty() && primaryType.startsWith(exclude))
continue;
if (subTypesMap.containsKey(primaryType))
toSetList.add(iterateNodeAttributeFields(subTypesMap.get(primaryType), currentNode));
else toSetList.add(iterateNodeAttributeFields(listType, currentNode));
}
if (toSetList.size()!=0) field.set(obj, toSetList);
}
} else {
logger.info("subtypes already found in cache");
subTypesMap = typeToSubtypeMap.get(listType);
}
return obj;
List<Object> toSetList = new ArrayList<>();
NodeIterator iterator = node.getNodes();
while (iterator.hasNext()){
Node currentNode = iterator.nextNode();
String primaryType = currentNode.getPrimaryNodeType().getName();
logger.trace("the current node {} has a list",currentNode.getName());
if (!include.isEmpty() && !primaryType.startsWith(include))
continue;
if (!exclude.isEmpty() && primaryType.startsWith(exclude))
continue;
if (subTypesMap.containsKey(primaryType))
toSetList.add(iterateNodeAttributeFields(subTypesMap.get(primaryType), currentNode));
else toSetList.add(iterateNodeAttributeFields(listType, currentNode));
}
if (toSetList.size()!=0) field.set(instance, toSetList);
}
private <T> void setAttributeFieldCheckingDefault(Field field, T instance, Node node) {
Attribute attribute = field.getAnnotation(Attribute.class);
field.setAccessible(true);
@@ -301,8 +330,8 @@ public class Node2ItemConverter {
logger.debug("error setting value for property {} ",attribute.value());
}
}
@SuppressWarnings({ "unchecked" })
private Object getPropertyValue(Class returnType, Property prop) throws Exception{
if (returnType.equals(String.class)) return prop.getString();

@@ -1,10 +0,0 @@
package org.gcube.data.access.storagehub.handlers.plugins;
import javax.inject.Singleton;
@Singleton
public class OperationMediator {
}

@@ -9,6 +9,7 @@ import javax.inject.Inject;
import javax.inject.Singleton;
import org.gcube.common.storagehub.model.exceptions.PluginNotFoundException;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -20,7 +21,7 @@ public class StorageBackendHandler {
@Inject
Instance<StorageBackendFactory> factories;
Map<String, StorageBackendFactory> storagebackendMap= new HashMap<String, StorageBackendFactory>();
@PostConstruct
@@ -38,10 +39,10 @@ public class StorageBackendHandler {
throw new RuntimeException("storage backend implementation not found");
}
public StorageBackendFactory get(String storageName) throws PluginNotFoundException {
if (!storagebackendMap.containsKey(storageName))
throw new PluginNotFoundException(String.format("implementation for storage %s not found", storageName));
return storagebackendMap.get(storageName);
public StorageBackendFactory get(PayloadBackend payload) throws PluginNotFoundException {
if (payload == null || !storagebackendMap.containsKey(payload.getStorageName()))
throw new PluginNotFoundException(String.format("implementation for storage %s not found", payload.getStorageName()));
return storagebackendMap.get(payload.getStorageName());
}
}
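With this change the handler is keyed by the PayloadBackend carried in the content itself, and the same object is then passed to the factory, replacing the old get(String)/create(null) pairs seen at the call sites throughout this diff. A sketch of the resulting resolution flow, assuming the signatures shown here (it is not compilable outside the service):

InputStream open(AbstractFileItem fileItem, StorageBackendHandler handler)
        throws PluginNotFoundException, StorageHubException {
    PayloadBackend backend = fileItem.getContent().getPayloadBackend();
    StorageBackendFactory factory = handler.get(backend); // was get(String storageName)
    StorageBackend sb = factory.create(backend);          // was create(null) / create(Metadata)
    return sb.download(fileItem.getContent());
}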

@@ -0,0 +1,50 @@
package org.gcube.data.access.storagehub.handlers.plugins;
import java.io.InputStream;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class StorageOperationMediator {
Logger log = LoggerFactory.getLogger(StorageOperationMediator.class);
@Inject
StorageBackendHandler storageBackendHandler;
public MetaInfo copy(Content source, PayloadBackend destination, String newName, String newParentPath) throws StorageHubException{
log.info("creating Storages for source {} and destination {}", source.getPayloadBackend(), destination.getStorageName());
StorageBackendFactory sourceSBF = storageBackendHandler.get(source.getPayloadBackend());
//TODO: add metadata taken from content node
StorageBackend sourceSB = sourceSBF.create(source.getPayloadBackend());
StorageBackendFactory destSBF = storageBackendHandler.get(destination);
StorageBackend destSB = destSBF.create(destination);
if (sourceSB.equals(destSB)) {
log.info("source and destintiona are the same storage");
return sourceSB.onCopy(source, newParentPath, newName);
}else {
log.info("source and destintiona are different storage");
InputStream stream = sourceSB.download(source);
MetaInfo info = destSB.upload(stream, newParentPath, newName, source.getSize());
return info;
}
}
public boolean move(){
return true;
}
}
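The mediator centralizes the copy decision: when source and destination resolve to the same backend it delegates to the server-side onCopy, otherwise it pipes a download into an upload using the known source size. Its call site in ItemsManager, further down in this diff, reduces to roughly:

// Usage sketch matching the ItemsManager hunk below.
MetaInfo contentInfo = opMediator.copy(contentToCopy,
        destinationItem.getBackend(), // PayloadBackend of the target folder
        destination.getPath(),        // new parent path
        uniqueName);                  // new file name
Utils.setContentFromMetaInfo((AbstractFileItem) item, contentInfo);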

@@ -194,7 +194,6 @@ public class ItemSharing extends Impersonable{
@PUT
@Path("{id}/share")
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Deprecated
public String share(@FormDataParam("users") Set<String> users, @FormDataParam("defaultAccessType") AccessType accessType){
InnerMethodName.instance.set("shareFolder");
Session ses = null;

@@ -9,13 +9,11 @@ import javax.inject.Inject;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.FormParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
@@ -224,6 +222,7 @@ public class ItemsCreator extends Impersonable{
}
/*
@POST
@Consumes(MediaType.APPLICATION_OCTET_STREAM)
@Path("/{id}/create/FILE")
@@ -259,7 +258,7 @@ public class ItemsCreator extends Impersonable{
}
return toReturn;
}
}*/
@POST
@Consumes(MediaType.MULTIPART_FORM_DATA)
@@ -274,7 +273,9 @@ public class ItemsCreator extends Impersonable{
String toReturn = null;
try{
log.debug("UPLOAD: call started");
long size = fileDetail.getSize();
log.info("UPLOAD: call started with file size {}",size);
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
ItemsParameterBuilder<FileCreationParameters> builder = FileCreationParameters.builder().name(name).description(description).stream(stream).fileDetails(fileDetail)
.on(id).with(ses).author(currentUser);
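The fileDetail parameter is Jersey's FormDataContentDisposition for the uploaded part; its getSize() reports the size the client declared, or -1 when absent, which is exactly what the sized-upload branch in ItemHandler checks. A minimal, hypothetical resource showing how the stream and the disposition are bound from the same multipart part:

import java.io.InputStream;

import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.MediaType;

import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.glassfish.jersey.media.multipart.FormDataParam;

@Path("/uploadSketch") // hypothetical path, for illustration only
public class UploadSketchResource {

    @POST
    @Consumes(MediaType.MULTIPART_FORM_DATA)
    public String upload(@FormDataParam("file") InputStream stream,
                         @FormDataParam("file") FormDataContentDisposition fileDetail) {
        // getSize() is -1 when the client did not declare a size for the part.
        return "declared size: " + fileDetail.getSize();
    }
}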

@@ -90,7 +90,7 @@ import org.gcube.data.access.storagehub.handlers.TrashHandler
import org.gcube.data.access.storagehub.handlers.VersionHandler;
import org.gcube.data.access.storagehub.handlers.items.Item2NodeConverter;
import org.gcube.data.access.storagehub.handlers.items.Node2ItemConverter;
import org.gcube.data.access.storagehub.handlers.plugins.OperationMediator;
import org.gcube.data.access.storagehub.handlers.plugins.StorageOperationMediator;
import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler;
import org.gcube.smartgears.annotations.ManagedBy;
import org.gcube.smartgears.utils.InnerMethodName;
@@ -135,7 +135,7 @@ public class ItemsManager extends Impersonable{
CompressHandler compressHandler;
@Inject
OperationMediator opMediator;
StorageOperationMediator opMediator;
@Inject
StorageBackendHandler storageBackendHandler;
@@ -710,14 +710,12 @@ public class ItemsManager extends Impersonable{
if (version.getName().equals(versionName)) {
Content content = node2Item.getContentFromVersion(version);
StorageBackendFactory sbf = storageBackendHandler.get(content.getManagedBy());
//TODO: add metadata taken from content node
StorageBackend sb = sbf.create(null);
StorageBackendFactory sbf = storageBackendHandler.get(content.getPayloadBackend());
StorageBackend sb = sbf.create(content.getPayloadBackend());
final InputStream streamToWrite = sb.download(content);
log.debug("retrieved storage id is {} with storageBackend {} (stream is null? {})",content.getStorageId(), sb.getName(), streamToWrite==null );
log.debug("retrieved storage id is {} with storageBackend {} (stream is null? {})",content.getStorageId(), sbf.getName(), streamToWrite==null );
String oldfilename = FilenameUtils.getBaseName(currentItem.getTitle());
String ext = FilenameUtils.getExtension(currentItem.getTitle());
@@ -863,9 +861,9 @@ public class ItemsManager extends Impersonable{
Content content = fileItem.getContent();
StorageBackendFactory sbf = storageBackendHandler.get(content.getManagedBy());
StorageBackendFactory sbf = storageBackendHandler.get(content.getPayloadBackend());
StorageBackend sb = sbf.create(null);
StorageBackend sb = sbf.create(content.getPayloadBackend());
final InputStream streamToWrite = sb.download(content);
@@ -978,7 +976,7 @@ public class ItemsManager extends Impersonable{
final Node nodeToCopy = ses.getNodeByIdentifier(id);
final Node destination = ses.getNodeByIdentifier(destinationId);
//Item destinationItem = node2Item.getItem(destination,null);
FolderItem destinationItem = (FolderItem)node2Item.getItem(destination,null);
final Item item = node2Item.getItem(nodeToCopy, Arrays.asList(NodeConstants.ACCOUNTING_NAME, NodeConstants.METADATA_NAME));
@@ -1001,14 +999,8 @@ public class ItemsManager extends Impersonable{
Content contentToCopy = ((AbstractFileItem) item).getContent();
//TODO : understand if it is needed
//opMediator.copy(((AbstractFileItem) item).getContent(),destinationItem, uniqueName);
StorageBackendFactory sbf = storageBackendHandler.get(contentToCopy.getManagedBy());
MetaInfo contentInfo = opMediator.copy(contentToCopy, destinationItem.getBackend(), destination.getPath(), uniqueName);
//TODO: add metadata taken from content node
StorageBackend sb = sbf.create(null);
MetaInfo contentInfo = sb.onCopy(contentToCopy, destination.getPath(), uniqueName);
Utils.setContentFromMetaInfo((AbstractFileItem) item, contentInfo);
item2Node.replaceContent(newNode, (AbstractFileItem) item, ItemAction.CLONED);

@@ -297,6 +297,9 @@ public class MessageManager extends Impersonable{
message.setBody(body);
message.setName(UUID.randomUUID().toString());
User user = ses.getUserManager().getAuthorizable(currentUser, User.class);
if (user ==null)
throw new InvalidCallParameters("invalid storagehub user: "+currentUser);
Owner owner = new Owner();
owner.setUserId(user.getID());
owner.setUserName(user.getPrincipal().getName());
@@ -413,10 +416,8 @@ public class MessageManager extends Impersonable{
Content contentToCopy = newNodeItem.getContent();
StorageBackendFactory sbf = storageBackendHandler.get(contentToCopy.getManagedBy());
//TODO: add metadata taken from content node
StorageBackend sb = sbf.create(null);
StorageBackendFactory sbf = storageBackendHandler.get(contentToCopy.getPayloadBackend());
StorageBackend sb = sbf.create(contentToCopy.getPayloadBackend());
MetaInfo contentInfo = sb.onCopy(contentToCopy, destination.getPath(), uniqueName);

@@ -0,0 +1,45 @@
package org.gcube.data.access.storagehub.storage.backend.impl;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import javax.annotation.PostConstruct;
import javax.inject.Singleton;
import org.gcube.common.storagehub.model.Constants;
import org.gcube.common.storagehub.model.Metadata;
import org.gcube.common.storagehub.model.exceptions.InvalidCallParameters;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
@Singleton
public class DefaultGcubeMinIoStorageBackendFactory implements StorageBackendFactory {
private StorageBackend singleton;
@PostConstruct
public void init(){
Map<String, Object> params = new HashMap<String, Object>();
params.put("bucketName", "storagehub-default" );
params.put("key", "minio_admin" );
params.put("secret", "8334e0e6acb173a8f915026b1bc4c7" );
params.put("url", "https://minio.dev.d4science.org/" );
Metadata metadata = new Metadata(params);
this.singleton = new S3Backend(new PayloadBackend(getName(), metadata), v -> UUID.randomUUID().toString());
}
@Override
public String getName() {
return Constants.DEFAULT_MINIO_STORAGE;
}
@Override
public StorageBackend create(PayloadBackend payloadConfiguration) throws InvalidCallParameters {
if (payloadConfiguration == null || payloadConfiguration.getParameters().isEmpty())
return singleton;
else return new S3Backend(payloadConfiguration, v -> UUID.randomUUID().toString());
}
}

@@ -2,11 +2,13 @@ package org.gcube.data.access.storagehub.storage.backend.impl;
import java.io.InputStream;
import java.nio.file.Paths;
import java.util.Map;
import java.util.UUID;
import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.storagehub.model.Constants;
import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.contentmanagement.blobstorage.service.IClient;
@@ -16,23 +18,24 @@ import org.gcube.contentmanager.storageclient.wrapper.StorageClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class GCubeMongoStorageBackend implements StorageBackend {
public class GCubeMongoStorageBackend extends StorageBackend {
private static final Logger log = LoggerFactory.getLogger(GCubeMongoStorageBackend.class);
private final static String SERVICE_NAME = "home-library";
private final static String SERVICE_CLASS = "org.gcube.portlets.user";
@Override
public InputStream download(Content content) {
return getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient().get().RFileAsInputStream(content.getStorageId());
public GCubeMongoStorageBackend(PayloadBackend payloadConf) {
super(payloadConf);
}
@Override
public String getName() {
return Constants.mongoStorageConstant;
public InputStream download(Content content) {
return getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient().get().RFileAsInputStream(content.getStorageId());
}
protected StorageClient getStorageClient(String login){
return new StorageClient(SERVICE_CLASS, SERVICE_NAME, login, AccessType.SHARED, MemoryType.PERSISTENT);
@@ -45,13 +48,13 @@ public class GCubeMongoStorageBackend implements StorageBackend {
String newRemotePath = Paths.get(newParentPath, newName).toString();
String newStorageID = getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient().copyFile(true).from(content.getStorageId()).to(newRemotePath);
log.info("The id returned by storage is {}", newStorageID);
return new MetaInfo(content.getSize(),newStorageID, newRemotePath, getName());
return new MetaInfo(content.getSize(),newStorageID, newRemotePath, getPayloadConfiguration());
}
@Override
public MetaInfo onMove(Content content, String newParentPath) {
//new contentPath can be set as remotePath to the storage backend ?
return new MetaInfo(content.getSize(),content.getStorageId(), content.getRemotePath(), getName());
return new MetaInfo(content.getSize(),content.getStorageId(), content.getRemotePath(), getPayloadConfiguration());
}
@Override
@@ -62,10 +65,15 @@ public class GCubeMongoStorageBackend implements StorageBackend {
String remotePath= String.format("%s/%s-%s",relPath,uid,name);
String storageId =storageClient.put(true).LFile(stream).RFile(remotePath);
long size = storageClient.getSize().RFileById(storageId);
MetaInfo info = new MetaInfo(size, storageId, remotePath, getName());
MetaInfo info = new MetaInfo(size, storageId, remotePath, getPayloadConfiguration());
return info;
}
@Override
public MetaInfo upload(InputStream stream, String relPath, String name, Long size) {
return this.upload(stream, relPath, name);
}
@Override
public void onDelete(Content content) {
log.debug("deleting");

@@ -1,23 +1,25 @@
package org.gcube.data.access.storagehub.storage.backend.impl;
import javax.inject.Singleton;
import org.gcube.common.storagehub.model.Constants;
import org.gcube.common.storagehub.model.Metadata;
import org.gcube.common.storagehub.model.exceptions.InvalidCallParameters;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
@Singleton
public class GCubeMongoStorageBackendFactory implements StorageBackendFactory {
@Override
public String getName() {
return Constants.mongoStorageConstant;
return Constants.MONGO_STORAGE;
}
@Override
public StorageBackend create(Metadata parameter) throws InvalidCallParameters {
return new GCubeMongoStorageBackend();
public StorageBackend create(PayloadBackend payloadConfiguration) throws InvalidCallParameters {
return new GCubeMongoStorageBackend(payloadConfiguration);
}
}

@@ -1,126 +1,129 @@
package org.gcube.data.access.storagehub.storage.backend.impl;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.CopyObjectRequest;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectResult;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectInputStream;
public class S3Backend implements StorageBackend{
Map<String, Object> parameters;
import io.minio.CopyObjectArgs;
import io.minio.CopySource;
import io.minio.GetObjectArgs;
import io.minio.MinioClient;
import io.minio.PutObjectArgs;
import io.minio.RemoveObjectArgs;
import io.minio.StatObjectArgs;
import io.minio.StatObjectResponse;
public class S3Backend extends StorageBackend{
Function<Void, String> keyGenerator;
AWSCredentials credentials = new BasicAWSCredentials("user", "password");
final static Regions clientRegion = Regions.DEFAULT_REGION;
String bucketName;
MinioClient client;
private static final long PART_SIZE = 100000000;
public S3Backend(Map<String, Object> parameters, Function<Void, String> keyGenerator) {
super();
this.parameters = parameters;
public S3Backend(PayloadBackend payloadConfiguration, Function<Void, String> keyGenerator) {
super(payloadConfiguration);
this.keyGenerator = keyGenerator;
Map<String, Object> parameters = payloadConfiguration.getParameters();
this.bucketName = (String)parameters.get("bucketName");
}
@Override
public String getName() {
return "s3";
String accessKey = (String)parameters.get("key");
String secret = (String)parameters.get("secret");
String url = (String)parameters.get("url");
client =
MinioClient.builder()
.endpoint(url)
.credentials(accessKey, secret)
.build();
}
@Override
public MetaInfo onCopy(Content content, String newParentPath, String newName) {
String sourceKey = content.getStorageId();
String destinationKey = keyGenerator.apply(null);
try {
AmazonS3 s3Client = AmazonS3ClientBuilder.standard()
.withCredentials(new AWSStaticCredentialsProvider(credentials))
.withRegion(clientRegion)
.build();
CopySource source = CopySource.builder().bucket(bucketName).object(sourceKey).build();
// Copy the object into a new object in the same bucket.
CopyObjectRequest copyObjRequest = new CopyObjectRequest(bucketName, sourceKey, bucketName, destinationKey);
s3Client.copyObject(copyObjRequest);
CopyObjectArgs copyObjRequest = CopyObjectArgs.builder().source(source).bucket(bucketName).object(destinationKey).build();
client.copyObject(copyObjRequest);
} catch (Exception e) {
throw new RuntimeException("error copying file on s3");
throw new RuntimeException("error copying file on s3", e);
}
return new MetaInfo(content.getSize(), destinationKey, null, getName());
return new MetaInfo(content.getSize(), destinationKey, null, getPayloadConfiguration());
}
@Override
public MetaInfo onMove(Content content, String newParentPath) {
//new contentPath can be set as remotePath to the storage backend ?
return new MetaInfo(content.getSize(),content.getStorageId(), content.getRemotePath(), getName());
return new MetaInfo(content.getSize(),content.getStorageId(), content.getRemotePath(), getPayloadConfiguration());
}
@Override
public void onDelete(Content content) {
// TODO Auto-generated method stub
try {
String storageId = content.getStorageId();
client.removeObject(RemoveObjectArgs.builder().bucket(bucketName).object(storageId).build());
} catch (Exception e) {
throw new RuntimeException("error deleting file on s3", e);
}
}
@Override
public MetaInfo upload(InputStream stream, String relativePath, String name) {
return this.upload(stream, relativePath, name, null);
}
@Override
public MetaInfo upload(InputStream stream, String relativePath, String name, Long size) {
try {
AmazonS3 s3Client = AmazonS3ClientBuilder.standard()
.withCredentials(new AWSStaticCredentialsProvider(credentials))
.withRegion(clientRegion)
.build();
String storageId = keyGenerator.apply(null);
ObjectMetadata metadata = new ObjectMetadata();
metadata.addUserMetadata("user", AuthorizationProvider.instance.get().getClient().getId());
PutObjectResult result = s3Client.putObject(
bucketName,
keyGenerator.apply(null),
stream,
metadata
);
long size = result.getMetadata().getContentLength();
s3Client.getObjectMetadata(bucketName, );
Map<String, String> headers = new HashMap<>();
headers.put("X-Amz-Storage-Class", "REDUCED_REDUNDANCY");
Map<String, String> userMetadata = new HashMap<>();
userMetadata.put("user", AuthorizationProvider.instance.get().getClient().getId());
client.putObject(
PutObjectArgs.builder().bucket(bucketName).object(storageId).stream(
stream, size == null || size<=0?-1:size, size == null || size<=0?PART_SIZE:-1)
.headers(headers).tags(userMetadata)
.build());
long fileSize;
if (size != null && size>0)
fileSize = size;
else {
StatObjectResponse resp = client.statObject(StatObjectArgs.builder().bucket(bucketName).object(storageId).build());
fileSize = resp.size();
}
return new MetaInfo(content.getSize(),storageId, null, getName());
return new MetaInfo(fileSize,storageId, null, getPayloadConfiguration());
} catch (Exception e) {
throw new RuntimeException("error copying file on s3");
throw new RuntimeException("error uploading file on s3", e);
}
}
@Override
public InputStream download(Content item) {
public InputStream download(Content content) {
try {
AmazonS3 s3Client = AmazonS3ClientBuilder.standard()
.withCredentials(new AWSStaticCredentialsProvider(credentials))
.withRegion(clientRegion)
.build();
S3Object s3object = s3Client.getObject(bucketName, item.getStorageId());
S3ObjectInputStream inputStream = s3object.getObjectContent();
String storageId = content.getStorageId();
InputStream inputStream = client.getObject(GetObjectArgs.builder().bucket(bucketName).object(storageId).build());
return inputStream;
} catch (Exception e) {
throw new RuntimeException("error copying file on s3");
throw new RuntimeException("error downloading file from s3",e);
}
}
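In the MinIO client, PutObjectArgs.stream(stream, objectSize, partSize) accepts exactly one sizing strategy: a known objectSize with partSize -1, or objectSize -1 plus an explicit partSize (at least 5 MiB) so the client can multipart-upload a stream of unknown length, which is the role the 100 MB PART_SIZE constant plays above. A self-contained sketch with hypothetical endpoint, credentials and bucket:

import java.io.ByteArrayInputStream;

import io.minio.MinioClient;
import io.minio.PutObjectArgs;

public class MinioUploadSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical connection values, for illustration only.
        MinioClient client = MinioClient.builder()
                .endpoint("https://minio.example.org")
                .credentials("ACCESS_KEY", "SECRET_KEY")
                .build();

        byte[] payload = "hello".getBytes();

        // Known size: pass it and set partSize to -1.
        client.putObject(PutObjectArgs.builder()
                .bucket("example-bucket").object("known-size")
                .stream(new ByteArrayInputStream(payload), payload.length, -1)
                .build());

        // Unknown size: objectSize -1 and an explicit partSize (at least 5 MiB).
        client.putObject(PutObjectArgs.builder()
                .bucket("example-bucket").object("unknown-size")
                .stream(new ByteArrayInputStream(payload), -1, 10 * 1024 * 1024)
                .build());
    }
}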

@@ -25,7 +25,7 @@ The projects leading to this software have received funding from a series of
Version
--------------------------------------------------
1.5.0-SNAPSHOT (2021-11-26)
1.5.0-SNAPSHOT (2021-12-02)
Please see the file named "changelog.xml" in this directory for the release notes.

@@ -0,0 +1,45 @@
package org.gcube.data.access.fs;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Collections;
import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.authorization.library.provider.UserInfo;
import org.gcube.common.authorization.library.utils.Caller;
import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.data.access.storagehub.storage.backend.impl.DefaultGcubeMinIoStorageBackendFactory;
import org.junit.Assert;
import org.junit.Test;
public class StorageTest {
@Test
public void testS3() throws Exception{
try {
AuthorizationProvider.instance.set(new Caller(new UserInfo("testUser", Collections.emptyList()), ""));
DefaultGcubeMinIoStorageBackendFactory factory = new DefaultGcubeMinIoStorageBackendFactory();
factory.init();
File file = new File("/home/lucio/Downloads/lombok.jar");
InputStream stream = new FileInputStream(file);
Assert.assertNotNull(stream);
StorageBackend sb = factory.create(null);
Assert.assertNotNull(sb);
MetaInfo info = sb.upload(stream, "/path", "uploadTest", file.length());
Content content = new Content();
content.setStorageId(info.getStorageId());
sb.onDelete(content);
}catch (Exception e) {
e.printStackTrace();
throw e;
}
}
}