Minio integration

This commit is contained in:
Lucio Lelii 2021-12-03 16:55:54 +01:00
parent e11bb536a7
commit 4f87677674
21 changed files with 461 additions and 253 deletions

14
pom.xml
View File

@ -296,7 +296,7 @@
<scope>runtime</scope> <scope>runtime</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.reflections</groupId> <groupId>org.reflections</groupId>
<artifactId>reflections</artifactId> <artifactId>reflections</artifactId>
@ -406,7 +406,7 @@
<artifactId>jersey-test-framework-provider-grizzly2</artifactId> <artifactId>jersey-test-framework-provider-grizzly2</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<!-- Storage dependencies --> <!-- Storage dependencies -->
<dependency> <dependency>
<groupId>org.gcube.contentmanagement</groupId> <groupId>org.gcube.contentmanagement</groupId>
@ -419,13 +419,15 @@
<artifactId>storage-manager-wrapper</artifactId> <artifactId>storage-manager-wrapper</artifactId>
<version>[3.0.0-SNAPSHOT,4.0.0-SNAPSHOT)</version> <version>[3.0.0-SNAPSHOT,4.0.0-SNAPSHOT)</version>
</dependency> </dependency>
<!-- https://mvnrepository.com/artifact/io.minio/minio -->
<dependency> <dependency>
<groupId>com.amazonaws</groupId> <groupId>io.minio</groupId>
<artifactId>aws-java-sdk</artifactId> <artifactId>minio</artifactId>
<version>1.11.163</version> <version>8.3.3</version>
</dependency> </dependency>
</dependencies> </dependencies>
<build> <build>
<finalName>${project.artifactId}</finalName> <finalName>${project.artifactId}</finalName>

View File

@ -36,4 +36,5 @@ public class Constants {
public static final List<String> WRITE_PROTECTED_FOLDER = Arrays.asList(Constants.OLD_VRE_FOLDER_PARENT_NAME, Constants.TRASH_ROOT_FOLDER_NAME); public static final List<String> WRITE_PROTECTED_FOLDER = Arrays.asList(Constants.OLD_VRE_FOLDER_PARENT_NAME, Constants.TRASH_ROOT_FOLDER_NAME);
public static final List<String> PROTECTED_FOLDER = Arrays.asList(Constants.WORKSPACE_ROOT_FOLDER_NAME, Constants.OLD_VRE_FOLDER_PARENT_NAME, Constants.TRASH_ROOT_FOLDER_NAME); public static final List<String> PROTECTED_FOLDER = Arrays.asList(Constants.WORKSPACE_ROOT_FOLDER_NAME, Constants.OLD_VRE_FOLDER_PARENT_NAME, Constants.TRASH_ROOT_FOLDER_NAME);
} }

View File

@ -349,11 +349,10 @@ public class Utils {
} }
public static void setContentFromMetaInfo(AbstractFileItem item, MetaInfo contentInfo) { public static void setContentFromMetaInfo(AbstractFileItem item, MetaInfo contentInfo) {
item.getContent().setManagedBy(contentInfo.getManagedBy());
item.getContent().setSize(contentInfo.getSize()); item.getContent().setSize(contentInfo.getSize());
item.getContent().setRemotePath(contentInfo.getRemotePath()); item.getContent().setRemotePath(contentInfo.getRemotePath());
item.getContent().setSize(contentInfo.getSize()); item.getContent().setSize(contentInfo.getSize());
item.getContent().setStorageId(contentInfo.getStorageId()); item.getContent().setPayloadBackend(contentInfo.getPayloadBackend());
} }
} }

View File

@ -77,9 +77,9 @@ public class CompressHandler {
try { try {
AbstractFileItem fileItem = (AbstractFileItem)item; AbstractFileItem fileItem = (AbstractFileItem)item;
StorageBackendFactory sbf = storageBackendHandler.get(fileItem.getContent().getManagedBy()); StorageBackendFactory sbf = storageBackendHandler.get(fileItem.getContent().getPayloadBackend());
StorageBackend sb = sbf.create(null); StorageBackend sb = sbf.create(fileItem.getContent().getPayloadBackend());
InputStream streamToWrite = sb.download(fileItem.getContent()); InputStream streamToWrite = sb.download(fileItem.getContent());
if (streamToWrite == null){ if (streamToWrite == null){

View File

@ -111,9 +111,9 @@ public class TrashHandler {
try { try {
StorageBackendFactory sbf = storageBackendHandler.get(item.getContent().getManagedBy()); StorageBackendFactory sbf = storageBackendHandler.get(item.getContent().getPayloadBackend());
StorageBackend sb = sbf.create(null); StorageBackend sb = sbf.create(item.getContent().getPayloadBackend());
contentSet.add(new ContentPair(item.getContent(), sb)); contentSet.add(new ContentPair(item.getContent(), sb));

View File

@ -82,7 +82,6 @@ public class Item2NodeConverter {
logger.debug("error setting value",e); logger.debug("error setting value",e);
} }
} }
} }
return newNode; return newNode;
@ -94,7 +93,7 @@ public class Item2NodeConverter {
} }
private void iterateItemNodeAttributeFields(Object object, Node parentNode, String nodeName) throws Exception{ private void iterateItemNodeAttributeFields(Object object, Node parentNode, String nodeName) throws Exception{
AttributeRootNode attributeRootNode = object.getClass().getAnnotation(AttributeRootNode.class); AttributeRootNode attributeRootNode = object.getClass().getAnnotation(AttributeRootNode.class);
Node newNode; Node newNode;
@ -145,6 +144,21 @@ public class Item2NodeConverter {
iterateItemNodeAttributeFields(obj,newNode, field.getName()+(i++)); iterateItemNodeAttributeFields(obj,newNode, field.getName()+(i++));
} }
} else if (field.isAnnotationPresent(NodeAttribute.class)){
NodeAttribute nodeAttribute = field.getAnnotation(NodeAttribute.class);
if (nodeAttribute.isReadOnly()) continue;
String subNodeName = nodeAttribute.value();
logger.trace("retrieving field node "+field.getName());
field.setAccessible(true);
try{
Object obj = field.get(object);
if (obj!=null)
iterateItemNodeAttributeFields(obj, newNode, subNodeName);
} catch (Exception e ) {
logger.debug("error setting value",e);
}
} }
} }
} }
@ -197,22 +211,9 @@ public class Item2NodeConverter {
node.setProperty(NodeProperty.LAST_MODIFIED_BY.toString(), item.getLastModifiedBy()); node.setProperty(NodeProperty.LAST_MODIFIED_BY.toString(), item.getLastModifiedBy());
node.setProperty(NodeProperty.LAST_ACTION.toString(), action.name()); node.setProperty(NodeProperty.LAST_ACTION.toString(), action.name());
for (Field field : retrieveAllFields(item.getContent().getClass())){
if (field.isAnnotationPresent(Attribute.class)){ replaceContentFieldIterator(contentNode, item.getContent().getClass(),item.getContent());
Attribute attribute = field.getAnnotation(Attribute.class);
if (attribute.isReadOnly()) continue;
field.setAccessible(true);
try{
//Class<?> returnType = field.getType();
Values values = getObjectValue(field.getType(), field.get(item.getContent()));
if (values.isMulti()) contentNode.setProperty(attribute.value(), values.getValues() );
else contentNode.setProperty(attribute.value(), values.getValue());
} catch (Exception e ) {
logger.debug("error setting value for attribute "+attribute.value(),e);
}
}
}
} catch (RepositoryException e) { } catch (RepositoryException e) {
logger.error("error writing repository",e); logger.error("error writing repository",e);
@ -220,7 +221,41 @@ public class Item2NodeConverter {
} }
} }
//VALID ONLY FOR CONTENT
private void replaceContentFieldIterator(Node node, Class<?> clazz, Object instance) {
for (Field field : retrieveAllFields(clazz)){
if (field.isAnnotationPresent(Attribute.class)){
Attribute attribute = field.getAnnotation(Attribute.class);
if (attribute.isReadOnly()) continue;
field.setAccessible(true);
try{
//Class<?> returnType = field.getType();
Values values = getObjectValue(field.getType(), field.get(instance));
if (values.isMulti()) node.setProperty(attribute.value(), values.getValues() );
else node.setProperty(attribute.value(), values.getValue());
} catch (Exception e ) {
logger.debug("error setting value for attribute "+attribute.value(),e);
}
} else if (field.isAnnotationPresent(NodeAttribute.class)){
NodeAttribute nodeAttribute = field.getAnnotation(NodeAttribute.class);
if (nodeAttribute.isReadOnly()) continue;
String subNodeName = nodeAttribute.value();
logger.trace("retrieving field node "+field.getName());
field.setAccessible(true);
try{
Object obj = field.get(instance);
if (obj!=null)
iterateItemNodeAttributeFields(obj, node, subNodeName);
} catch (Exception e ) {
logger.debug("error setting value",e);
}
}
}
}
public void updateHidden(Node node, Boolean hidden,String login) throws RepositoryException { public void updateHidden(Node node, Boolean hidden,String login) throws RepositoryException {
Utils.setPropertyOnChangeNode(node, login, ItemAction.UPDATED); Utils.setPropertyOnChangeNode(node, login, ItemAction.UPDATED);
node.setProperty(NodeProperty.HIDDEN.toString(), hidden); node.setProperty(NodeProperty.HIDDEN.toString(), hidden);

View File

@ -56,6 +56,7 @@ import org.gcube.data.access.storagehub.handlers.items.builders.FolderCreationPa
import org.gcube.data.access.storagehub.handlers.items.builders.GCubeItemCreationParameters; import org.gcube.data.access.storagehub.handlers.items.builders.GCubeItemCreationParameters;
import org.gcube.data.access.storagehub.handlers.items.builders.URLCreationParameters; import org.gcube.data.access.storagehub.handlers.items.builders.URLCreationParameters;
import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler; import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@ -141,7 +142,7 @@ public class ItemHandler {
} }
private Node create(FileCreationParameters params, Node destination) throws Exception{ private Node create(FileCreationParameters params, Node destination) throws Exception{
Node newNode = createFileItemInternally(params.getSession(), destination, params.getStream(), params.getName(), params.getDescription(), params.getUser(), true); Node newNode = createFileItemInternally(params.getSession(), destination, params.getStream(), params.getName(), params.getDescription(), params.getFileDetails(), params.getUser(), true);
params.getSession().save(); params.getSession().save();
versionHandler.checkinContentNode(newNode); versionHandler.checkinContentNode(newNode);
log.info("file with id {} correctly created",newNode.getIdentifier()); log.info("file with id {} correctly created",newNode.getIdentifier());
@ -190,13 +191,13 @@ public class ItemHandler {
log.debug("creating file with entire path {}, name {}, parentPath {} ", entirePath, name, parentPath); log.debug("creating file with entire path {}, name {}, parentPath {} ", entirePath, name, parentPath);
Node fileNode = null; Node fileNode = null;
if (parentPath.isEmpty()) if (parentPath.isEmpty())
fileNode = createFileItemInternally(params.getSession(), parentDirectoryNode, input, name, "", params.getUser(), false); fileNode = createFileItemInternally(params.getSession(), parentDirectoryNode, input, name, "", params.getFileDetails(), params.getUser(), false);
else { else {
Node parentNode = directoryNodeMap.get(parentPath); Node parentNode = directoryNodeMap.get(parentPath);
if (parentNode ==null) if (parentNode ==null)
parentNode = createPath(parentPath, directoryNodeMap, parentDirectoryNode, params.getSession(), params.getUser()); parentNode = createPath(parentPath, directoryNodeMap, parentDirectoryNode, params.getSession(), params.getUser());
fileNode = createFileItemInternally(params.getSession(), parentNode, input, name, "", params.getUser(), false); fileNode = createFileItemInternally(params.getSession(), parentNode, input, name, "",params.getFileDetails(), params.getUser(), false);
} }
fileNodes.add(fileNode); fileNodes.add(fileNode);
}catch(Exception e) { }catch(Exception e) {
@ -246,17 +247,15 @@ public class ItemHandler {
return newNode; return newNode;
} }
private Node createFileItemInternally(Session ses, Node destinationNode, InputStream stream, String name, String description, String login, boolean withLock) throws RepositoryException, StorageHubException{ private Node createFileItemInternally(Session ses, Node destinationNode, InputStream stream, String name, String description, FormDataContentDisposition fileDetails, String login, boolean withLock) throws RepositoryException, StorageHubException{
log.debug("UPLOAD: starting preparing file"); log.trace("UPLOAD: starting preparing file");
Node newNode; Node newNode;
FolderItem destinationItem = node2Item.getItem(destinationNode, Excludes.ALL); FolderItem destinationItem = node2Item.getItem(destinationNode, Excludes.ALL);
StorageBackendFactory sbf = storageBackendHandler.get(destinationItem.getBackend().getStorageName()); StorageBackendFactory sbf = storageBackendHandler.get(destinationItem.getBackend());
StorageBackend sb = sbf.create(destinationItem.getBackend());
//TODO: add metadata taken from content node
StorageBackend sb = sbf.create(destinationItem.getBackend().getParameters());
String relativePath = destinationNode.getPath(); String relativePath = destinationNode.getPath();
@ -268,7 +267,7 @@ public class ItemHandler {
newNode = ses.getNode(newNodePath); newNode = ses.getNode(newNodePath);
authChecker.checkWriteAuthorizationControl(ses, login, newNode.getIdentifier(), false); authChecker.checkWriteAuthorizationControl(ses, login, newNode.getIdentifier(), false);
AbstractFileItem item = fillItemWithContent(stream, sb, name, description, relativePath,login); AbstractFileItem item = fillItemWithContent(stream, sb, name, description, fileDetails, relativePath,login);
if (withLock) { if (withLock) {
try { try {
@ -289,7 +288,7 @@ public class ItemHandler {
} }
else { else {
authChecker.checkWriteAuthorizationControl(ses, login, destinationNode.getIdentifier(), true); authChecker.checkWriteAuthorizationControl(ses, login, destinationNode.getIdentifier(), true);
AbstractFileItem item = fillItemWithContent(stream, sb, name, description, relativePath, login); AbstractFileItem item = fillItemWithContent(stream, sb, name, description, fileDetails, relativePath, login);
if (withLock) { if (withLock) {
try { try {
log.debug("trying to acquire lock"); log.debug("trying to acquire lock");
@ -314,16 +313,16 @@ public class ItemHandler {
private AbstractFileItem fillItemWithContent(InputStream stream, StorageBackend storageBackend, String name, String description, String relPath, String login) throws BackendGenericError{ private AbstractFileItem fillItemWithContent(InputStream stream, StorageBackend storageBackend, String name, String description, FormDataContentDisposition fileDetails, String relPath, String login) throws BackendGenericError{
log.debug("UPLOAD: filling content"); log.trace("UPLOAD: filling content");
ContentHandler handler = getContentHandler(stream, storageBackend, name, relPath, login); ContentHandler handler = getContentHandler(stream, storageBackend, name, fileDetails, relPath, login);
AbstractFileItem item =handler.buildItem(name, description, login); AbstractFileItem item =handler.buildItem(name, description, login);
return item ; return item ;
} }
private ContentHandler getContentHandler(InputStream stream, StorageBackend storageBackend, String name, String relPath, String login) throws BackendGenericError { private ContentHandler getContentHandler(InputStream stream, StorageBackend storageBackend, String name, FormDataContentDisposition fileDetails, String relPath, String login) throws BackendGenericError {
log.debug("UPLOAD: handling content"); log.trace("UPLOAD: handling content");
final MultipleOutputStream mos; final MultipleOutputStream mos;
try{ try{
@ -354,7 +353,7 @@ public class ItemHandler {
is1.reset(); is1.reset();
handler.initiliseSpecificContent(is1, name, mimeType); handler.initiliseSpecificContent(is1, name, mimeType);
log.debug("UPLOAD: reading the mimetype - finished in {}",System.currentTimeMillis()-start); log.trace("UPLOAD: reading the mimetype - finished in {}",System.currentTimeMillis()-start);
} catch (Throwable e) { } catch (Throwable e) {
log.error("error retrieving mimeType",e); log.error("error retrieving mimeType",e);
throw new RuntimeException(e); throw new RuntimeException(e);
@ -370,7 +369,11 @@ public class ItemHandler {
public MetaInfo call() throws Exception { public MetaInfo call() throws Exception {
try(InputStream is1 = mos.get()){ try(InputStream is1 = mos.get()){
log.debug("UPLOAD: upload on - start"); log.debug("UPLOAD: upload on - start");
MetaInfo info = storageBackend.upload(is1, relPath, name); MetaInfo info;
if (fileDetails !=null && fileDetails.getSize()>0)
info = storageBackend.upload(is1, relPath, name, fileDetails.getSize());
else
info = storageBackend.upload(is1, relPath, name);
log.debug("UPLOAD: upload on storage - stop"); log.debug("UPLOAD: upload on storage - stop");
return info; return info;
}catch (Throwable e) { }catch (Throwable e) {
@ -385,18 +388,20 @@ public class ItemHandler {
Future<MetaInfo> uploaderF = executor.submit(AuthorizedTasks.bind(uploader)); Future<MetaInfo> uploaderF = executor.submit(AuthorizedTasks.bind(uploader));
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
log.debug("UPLOAD: writing the stream - start"); log.trace("UPLOAD: writing the stream - start");
try { try {
mos.startWriting(); mos.startWriting();
ContentHandler handler = detectorF.get(); ContentHandler handler = detectorF.get();
MetaInfo info = uploaderF.get(); MetaInfo info = uploaderF.get();
log.debug("UPLOAD: writing the stream - finished in {}",System.currentTimeMillis()-start); log.trace("UPLOAD: writing the stream - finished in {}",System.currentTimeMillis()-start);
handler.getContent().setData(NodeConstants.CONTENT_NAME); handler.getContent().setData(NodeConstants.CONTENT_NAME);
handler.getContent().setStorageId(info.getStorageId()); handler.getContent().setStorageId(info.getStorageId());
handler.getContent().setSize(info.getSize()); handler.getContent().setSize(info.getSize());
handler.getContent().setRemotePath(info.getRemotePath()); handler.getContent().setRemotePath(info.getRemotePath());
handler.getContent().setManagedBy(info.getManagedBy());
handler.getContent().setPayloadBackend(info.getPayloadBackend());
log.trace("UPLOAD: content payload seta as {} ", handler.getContent().getPayloadBackend());
return handler; return handler;
}catch (Exception e) { }catch (Exception e) {
throw new BackendGenericError(e); throw new BackendGenericError(e);

View File

@ -83,7 +83,7 @@ public class Node2ItemConverter {
setGenericFields(node.getFrozenNode(), Content.class, null, content); setGenericFields(node.getFrozenNode(), Content.class, null, content);
return content; return content;
} }
public Message getMessageItem(Node node) throws RepositoryException{ public Message getMessageItem(Node node) throws RepositoryException{
if (!(node.getPrimaryNodeType().getName().equals("nthl:itemSentRequest") if (!(node.getPrimaryNodeType().getName().equals("nthl:itemSentRequest")
|| node.getPrimaryNodeType().getName().equals("nthl:itemSentRequestSH"))) || node.getPrimaryNodeType().getName().equals("nthl:itemSentRequestSH")))
@ -95,7 +95,7 @@ public class Node2ItemConverter {
}catch (Throwable e) { }catch (Throwable e) {
msg.setWithAttachments(false); msg.setWithAttachments(false);
} }
setRootItemCommonFields(node, Collections.emptyList(), Message.class, msg); setRootItemCommonFields(node, Collections.emptyList(), Message.class, msg);
return msg; return msg;
} }
@ -129,7 +129,7 @@ public class Node2ItemConverter {
item.setExternalManaged(false); item.setExternalManaged(false);
} }
item.setLocked(node.isLocked()); item.setLocked(node.isLocked());
setRootItemCommonFields(node, excludes, classToHandle, item); setRootItemCommonFields(node, excludes, classToHandle, item);
@ -137,7 +137,7 @@ public class Node2ItemConverter {
return item; return item;
} }
private <T extends Item> boolean hasTypedParent(Node node, Class<T> parentType) throws BackendGenericError, RepositoryException{ private <T extends Item> boolean hasTypedParent(Node node, Class<T> parentType) throws BackendGenericError, RepositoryException{
if(node==null) return false; if(node==null) return false;
@ -146,16 +146,16 @@ public class Node2ItemConverter {
private <T extends RootItem> void setRootItemCommonFields(Node node, List<String> excludes, Class<T> classToHandle, T instance) throws RepositoryException{ private <T extends RootItem> void setRootItemCommonFields(Node node, List<String> excludes, Class<T> classToHandle, T instance) throws RepositoryException{
try{ try{
instance.setParentId(node.getParent().getIdentifier()); instance.setParentId(node.getParent().getIdentifier());
instance.setParentPath(node.getParent().getPath()); instance.setParentPath(node.getParent().getPath());
}catch (Throwable e) { }catch (Throwable e) {
logger.trace("Root node doesn't have a parent"); logger.trace("Root node doesn't have a parent");
} }
instance.setRelatedNode(node); instance.setRelatedNode(node);
instance.setId(node.getIdentifier()); instance.setId(node.getIdentifier());
instance.setName(Text.unescapeIllegalJcrChars(node.getName())); instance.setName(Text.unescapeIllegalJcrChars(node.getName()));
@ -166,7 +166,7 @@ public class Node2ItemConverter {
setGenericFields(node, classToHandle, excludes, instance); setGenericFields(node, classToHandle, excludes, instance);
} }
private <T> void setGenericFields(Node node, Class<T> classToHandle,List<String> excludes, T instance){ private <T> void setGenericFields(Node node, Class<T> classToHandle,List<String> excludes, T instance){
for (Field field : retrieveAllFields(classToHandle)){ for (Field field : retrieveAllFields(classToHandle)){
if (field.isAnnotationPresent(Attribute.class)){ if (field.isAnnotationPresent(Attribute.class)){
@ -192,7 +192,7 @@ public class Node2ItemConverter {
} }
} }
} }
private <T> T iterateNodeAttributeFields(Class<T> clazz, Node node) throws Exception{ private <T> T iterateNodeAttributeFields(Class<T> clazz, Node node) throws Exception{
T obj = clazz.newInstance(); T obj = clazz.newInstance();
for (Field field : retrieveAllFields(clazz)){ for (Field field : retrieveAllFields(clazz)){
@ -200,84 +200,113 @@ public class Node2ItemConverter {
setAttributeFieldCheckingDefault(field, obj, node); setAttributeFieldCheckingDefault(field, obj, node);
} else if (field.isAnnotationPresent(MapAttribute.class)){ } else if (field.isAnnotationPresent(MapAttribute.class)){
logger.trace("found field {} of type annotated as MapAttribute in class {} and node name {}", field.getName(), clazz.getName(), node.getName()); logger.trace("found field {} of type annotated as MapAttribute in class {} and node name {}", field.getName(), clazz.getName(), node.getName());
field.setAccessible(true); setMapAttribute(field, obj, node);
String exclude = field.getAnnotation(MapAttribute.class).excludeStartWith();
Map<String, Object> mapToset = new HashMap<String, Object>();
PropertyIterator iterator = node.getProperties();
if (iterator!=null) {
while (iterator.hasNext()){
Property prop = iterator.nextProperty();
if (!exclude.isEmpty() && prop.getName().startsWith(exclude)) continue;
try{
logger.trace("adding {} in the map",prop.getName());
mapToset.put(prop.getName(), getPropertyValue(prop));
}catch(PathNotFoundException e){
logger.warn("the property {} is not mapped",prop.getName());
} catch (Exception e ) {
logger.debug("error setting value {}",e.getMessage());
}
}
}
field.set(obj, mapToset);
} else if (field.isAnnotationPresent(ListNodes.class)){ } else if (field.isAnnotationPresent(ListNodes.class)){
logger.trace("found field {} of type annotated as ListNodes in class {} on node {}", field.getName(), clazz.getName(), node.getName()); logger.trace("found field {} of type annotated as ListNodes in class {} on node {}", field.getName(), clazz.getName(), node.getName());
setListNode(field, obj, node);
} else if (field.isAnnotationPresent(NodeAttribute.class)){
logger.debug("found field {} of type annotated as NodeAttribute in class {} on node {}", field.getName(), clazz.getName(), node.getName());
String fieldNodeName = field.getAnnotation(NodeAttribute.class).value();
//for now it excludes only first level node
//if (excludes!=null && excludes.contains(fieldNodeName)) continue;
//for now it excludes only first level node
logger.info("retrieving field node {} on field {}", fieldNodeName, field.getName());
field.setAccessible(true); field.setAccessible(true);
String exclude = field.getAnnotation(ListNodes.class).excludeTypeStartWith(); try{
String include = field.getAnnotation(ListNodes.class).includeTypeStartWith(); Node fieldNode = node.getNode(fieldNodeName);
logger.info("looking in node {} searched with {}",fieldNode.getName(),fieldNodeName);
Class listType = field.getAnnotation(ListNodes.class).listClass(); field.set(obj, iterateNodeAttributeFields(field.getType(), fieldNode));
}catch(PathNotFoundException e){
Map<String, Class> subTypesMap = Collections.emptyMap(); logger.warn("the current node dosn't contain {} node",fieldNodeName);
} catch (Exception e ) {
if (!typeToSubtypeMap.containsKey(listType)) { logger.warn("error setting value",e);
Configuration config = new ConfigurationBuilder().forPackages(listType.getPackage().getName());
Reflections reflections = new Reflections(config);
Set<Class> subTypes = reflections.getSubTypesOf(listType);
if (subTypes.size()>0) {
subTypesMap = new HashMap<>();
for (Class subtype: subTypes)
if (subtype.isAnnotationPresent(AttributeRootNode.class)) {
AttributeRootNode attributeRootNode = (AttributeRootNode)subtype.getAnnotation(AttributeRootNode.class);
subTypesMap.put(attributeRootNode.value(), subtype);
}
} else logger.trace("no subtypes found for {}",listType.getName());
typeToSubtypeMap.put(listType, subTypesMap);
} else {
logger.info("subtypes already found in cache");
subTypesMap = typeToSubtypeMap.get(listType);
} }
List<Object> toSetList = new ArrayList<>();
NodeIterator iterator = node.getNodes();
while (iterator.hasNext()){
Node currentNode = iterator.nextNode();
String primaryType = currentNode.getPrimaryNodeType().getName();
logger.trace("the current node {} has a list",currentNode.getName());
if (!include.isEmpty() && !primaryType.startsWith(include))
continue;
if (!exclude.isEmpty() && primaryType.startsWith(exclude))
continue;
if (subTypesMap.containsKey(primaryType))
toSetList.add(iterateNodeAttributeFields(subTypesMap.get(primaryType), currentNode));
else toSetList.add(iterateNodeAttributeFields(listType, currentNode));
}
if (toSetList.size()!=0) field.set(obj, toSetList);
} }
} }
return obj; return obj;
} }
private <T> void setMapAttribute(Field field, T instance, Node node) throws Exception{
field.setAccessible(true);
String exclude = field.getAnnotation(MapAttribute.class).excludeStartWith();
Map<String, Object> mapToset = new HashMap<String, Object>();
PropertyIterator iterator = node.getProperties();
if (iterator!=null) {
while (iterator.hasNext()){
Property prop = iterator.nextProperty();
if (!exclude.isEmpty() && prop.getName().startsWith(exclude)) continue;
try{
logger.trace("adding {} in the map",prop.getName());
mapToset.put(prop.getName(), getPropertyValue(prop));
}catch(PathNotFoundException e){
logger.warn("the property {} is not mapped",prop.getName());
} catch (Exception e ) {
logger.debug("error setting value {}",e.getMessage());
}
}
}
field.set(instance, mapToset);
}
private <T> void setListNode(Field field, T instance, Node node) throws Exception{
field.setAccessible(true);
String exclude = field.getAnnotation(ListNodes.class).excludeTypeStartWith();
String include = field.getAnnotation(ListNodes.class).includeTypeStartWith();
Class listType = field.getAnnotation(ListNodes.class).listClass();
Map<String, Class> subTypesMap = Collections.emptyMap();
if (!typeToSubtypeMap.containsKey(listType)) {
Configuration config = new ConfigurationBuilder().forPackages(listType.getPackage().getName());
Reflections reflections = new Reflections(config);
Set<Class> subTypes = reflections.getSubTypesOf(listType);
if (subTypes.size()>0) {
subTypesMap = new HashMap<>();
for (Class subtype: subTypes)
if (subtype.isAnnotationPresent(AttributeRootNode.class)) {
AttributeRootNode attributeRootNode = (AttributeRootNode)subtype.getAnnotation(AttributeRootNode.class);
subTypesMap.put(attributeRootNode.value(), subtype);
}
} else logger.trace("no subtypes found for {}",listType.getName());
typeToSubtypeMap.put(listType, subTypesMap);
} else {
logger.info("subtypes already found in cache");
subTypesMap = typeToSubtypeMap.get(listType);
}
List<Object> toSetList = new ArrayList<>();
NodeIterator iterator = node.getNodes();
while (iterator.hasNext()){
Node currentNode = iterator.nextNode();
String primaryType = currentNode.getPrimaryNodeType().getName();
logger.trace("the current node {} has a list",currentNode.getName());
if (!include.isEmpty() && !primaryType.startsWith(include))
continue;
if (!exclude.isEmpty() && primaryType.startsWith(exclude))
continue;
if (subTypesMap.containsKey(primaryType))
toSetList.add(iterateNodeAttributeFields(subTypesMap.get(primaryType), currentNode));
else toSetList.add(iterateNodeAttributeFields(listType, currentNode));
}
if (toSetList.size()!=0) field.set(instance, toSetList);
}
private <T> void setAttributeFieldCheckingDefault(Field field, T instance, Node node) { private <T> void setAttributeFieldCheckingDefault(Field field, T instance, Node node) {
Attribute attribute = field.getAnnotation(Attribute.class); Attribute attribute = field.getAnnotation(Attribute.class);
field.setAccessible(true); field.setAccessible(true);
@ -301,8 +330,8 @@ public class Node2ItemConverter {
logger.debug("error setting value for property {} ",attribute.value()); logger.debug("error setting value for property {} ",attribute.value());
} }
} }
@SuppressWarnings({ "unchecked" }) @SuppressWarnings({ "unchecked" })
private Object getPropertyValue(Class returnType, Property prop) throws Exception{ private Object getPropertyValue(Class returnType, Property prop) throws Exception{
if (returnType.equals(String.class)) return prop.getString(); if (returnType.equals(String.class)) return prop.getString();

View File

@ -1,10 +0,0 @@
package org.gcube.data.access.storagehub.handlers.plugins;
import javax.inject.Singleton;
@Singleton
public class OperationMediator {
}

View File

@ -9,6 +9,7 @@ import javax.inject.Inject;
import javax.inject.Singleton; import javax.inject.Singleton;
import org.gcube.common.storagehub.model.exceptions.PluginNotFoundException; import org.gcube.common.storagehub.model.exceptions.PluginNotFoundException;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory; import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@ -20,7 +21,7 @@ public class StorageBackendHandler {
@Inject @Inject
Instance<StorageBackendFactory> factories; Instance<StorageBackendFactory> factories;
Map<String, StorageBackendFactory> storagebackendMap= new HashMap<String, StorageBackendFactory>(); Map<String, StorageBackendFactory> storagebackendMap= new HashMap<String, StorageBackendFactory>();
@PostConstruct @PostConstruct
@ -38,10 +39,10 @@ public class StorageBackendHandler {
throw new RuntimeException("storage backend implementation not found"); throw new RuntimeException("storage backend implementation not found");
} }
public StorageBackendFactory get(String storageName) throws PluginNotFoundException { public StorageBackendFactory get(PayloadBackend payload) throws PluginNotFoundException {
if (!storagebackendMap.containsKey(storageName)) if (payload == null || !storagebackendMap.containsKey(payload.getStorageName()))
throw new PluginNotFoundException(String.format("implementation for storage %s not found", storageName)); throw new PluginNotFoundException(String.format("implementation for storage %s not found", payload.getStorageName()));
return storagebackendMap.get(storageName); return storagebackendMap.get(payload.getStorageName());
} }
} }

View File

@ -0,0 +1,50 @@
package org.gcube.data.access.storagehub.handlers.plugins;
import java.io.InputStream;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class StorageOperationMediator {

	Logger log = LoggerFactory.getLogger(StorageOperationMediator.class);

	@Inject
	StorageBackendHandler storageBackendHandler;

	/**
	 * Copies the payload of {@code source} into the storage described by
	 * {@code destination}.
	 *
	 * <p>When source and destination resolve to the same backend instance the
	 * copy is delegated to the backend ({@link StorageBackend#onCopy}), which
	 * can typically perform it server-side; otherwise the bytes are streamed
	 * from the source backend and re-uploaded to the destination one.
	 *
	 * @param source        content node whose payload is copied
	 * @param destination   target storage configuration
	 * @param newName       file name to assign in the destination
	 * @param newParentPath parent path in the destination
	 * @return the storage metadata of the newly created payload
	 * @throws StorageHubException if either backend plugin cannot be resolved
	 *                             or the backend operation fails
	 */
	public MetaInfo copy(Content source, PayloadBackend destination, String newName, String newParentPath) throws StorageHubException{
		log.info("creating Storages for source {} and destination {}", source.getPayloadBackend(), destination.getStorageName());

		StorageBackendFactory sourceSBF = storageBackendHandler.get(source.getPayloadBackend());
		//TODO: add metadata taken from content node
		StorageBackend sourceSB = sourceSBF.create(source.getPayloadBackend());

		StorageBackendFactory destSBF = storageBackendHandler.get(destination);
		StorageBackend destSB = destSBF.create(destination);

		if (sourceSB.equals(destSB)) {
			log.info("source and destination are the same storage");
			return sourceSB.onCopy(source, newParentPath, newName);
		} else {
			log.info("source and destination are different storages");
			// Cross-backend copy: download from the source and re-upload.
			// The original implementation leaked this stream; close it
			// explicitly once the upload has consumed it.
			InputStream stream = sourceSB.download(source);
			try {
				return destSB.upload(stream, newParentPath, newName, source.getSize());
			} finally {
				try {
					stream.close();
				} catch (Exception ignored) {
					// best-effort close: the upload outcome already decided success/failure
				}
			}
		}
	}

	// TODO(review): stub — move semantics not implemented yet; always reports success.
	public boolean move(){
		return true;
	}
}

View File

@ -194,7 +194,6 @@ public class ItemSharing extends Impersonable{
@PUT @PUT
@Path("{id}/share") @Path("{id}/share")
@Consumes(MediaType.MULTIPART_FORM_DATA) @Consumes(MediaType.MULTIPART_FORM_DATA)
@Deprecated
public String share(@FormDataParam("users") Set<String> users, @FormDataParam("defaultAccessType") AccessType accessType){ public String share(@FormDataParam("users") Set<String> users, @FormDataParam("defaultAccessType") AccessType accessType){
InnerMethodName.instance.set("shareFolder"); InnerMethodName.instance.set("shareFolder");
Session ses = null; Session ses = null;

View File

@ -9,13 +9,11 @@ import javax.inject.Inject;
import javax.jcr.RepositoryException; import javax.jcr.RepositoryException;
import javax.jcr.Session; import javax.jcr.Session;
import javax.servlet.ServletContext; import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes; import javax.ws.rs.Consumes;
import javax.ws.rs.FormParam; import javax.ws.rs.FormParam;
import javax.ws.rs.POST; import javax.ws.rs.POST;
import javax.ws.rs.Path; import javax.ws.rs.Path;
import javax.ws.rs.PathParam; import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context; import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
@ -224,6 +222,7 @@ public class ItemsCreator extends Impersonable{
} }
/*
@POST @POST
@Consumes(MediaType.APPLICATION_OCTET_STREAM) @Consumes(MediaType.APPLICATION_OCTET_STREAM)
@Path("/{id}/create/FILE") @Path("/{id}/create/FILE")
@ -259,7 +258,7 @@ public class ItemsCreator extends Impersonable{
} }
return toReturn; return toReturn;
} }*/
@POST @POST
@Consumes(MediaType.MULTIPART_FORM_DATA) @Consumes(MediaType.MULTIPART_FORM_DATA)
@ -274,7 +273,9 @@ public class ItemsCreator extends Impersonable{
String toReturn = null; String toReturn = null;
try{ try{
log.debug("UPLOAD: call started"); long size = fileDetail.getSize();
log.info("UPLOAD: call started with file size {}",size);
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context)); ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
ItemsParameterBuilder<FileCreationParameters> builder = FileCreationParameters.builder().name(name).description(description).stream(stream).fileDetails(fileDetail) ItemsParameterBuilder<FileCreationParameters> builder = FileCreationParameters.builder().name(name).description(description).stream(stream).fileDetails(fileDetail)
.on(id).with(ses).author(currentUser); .on(id).with(ses).author(currentUser);

View File

@ -90,7 +90,7 @@ import org.gcube.data.access.storagehub.handlers.TrashHandler;
import org.gcube.data.access.storagehub.handlers.VersionHandler; import org.gcube.data.access.storagehub.handlers.VersionHandler;
import org.gcube.data.access.storagehub.handlers.items.Item2NodeConverter; import org.gcube.data.access.storagehub.handlers.items.Item2NodeConverter;
import org.gcube.data.access.storagehub.handlers.items.Node2ItemConverter; import org.gcube.data.access.storagehub.handlers.items.Node2ItemConverter;
import org.gcube.data.access.storagehub.handlers.plugins.OperationMediator; import org.gcube.data.access.storagehub.handlers.plugins.StorageOperationMediator;
import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler; import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler;
import org.gcube.smartgears.annotations.ManagedBy; import org.gcube.smartgears.annotations.ManagedBy;
import org.gcube.smartgears.utils.InnerMethodName; import org.gcube.smartgears.utils.InnerMethodName;
@ -135,7 +135,7 @@ public class ItemsManager extends Impersonable{
CompressHandler compressHandler; CompressHandler compressHandler;
@Inject @Inject
OperationMediator opMediator; StorageOperationMediator opMediator;
@Inject @Inject
StorageBackendHandler storageBackendHandler; StorageBackendHandler storageBackendHandler;
@ -710,14 +710,12 @@ public class ItemsManager extends Impersonable{
if (version.getName().equals(versionName)) { if (version.getName().equals(versionName)) {
Content content = node2Item.getContentFromVersion(version); Content content = node2Item.getContentFromVersion(version);
StorageBackendFactory sbf = storageBackendHandler.get(content.getManagedBy()); StorageBackendFactory sbf = storageBackendHandler.get(content.getPayloadBackend());
StorageBackend sb = sbf.create(content.getPayloadBackend());
//TODO: add metadata taken from content node
StorageBackend sb = sbf.create(null);
final InputStream streamToWrite = sb.download(content); final InputStream streamToWrite = sb.download(content);
log.debug("retrieved storage id is {} with storageBackend {} (stream is null? {})",content.getStorageId(), sb.getName(), streamToWrite==null ); log.debug("retrieved storage id is {} with storageBackend {} (stream is null? {})",content.getStorageId(), sbf.getName(), streamToWrite==null );
String oldfilename = FilenameUtils.getBaseName(currentItem.getTitle()); String oldfilename = FilenameUtils.getBaseName(currentItem.getTitle());
String ext = FilenameUtils.getExtension(currentItem.getTitle()); String ext = FilenameUtils.getExtension(currentItem.getTitle());
@ -863,9 +861,9 @@ public class ItemsManager extends Impersonable{
Content content = fileItem.getContent(); Content content = fileItem.getContent();
StorageBackendFactory sbf = storageBackendHandler.get(content.getManagedBy()); StorageBackendFactory sbf = storageBackendHandler.get(content.getPayloadBackend());
StorageBackend sb = sbf.create(null); StorageBackend sb = sbf.create(content.getPayloadBackend());
final InputStream streamToWrite = sb.download(content); final InputStream streamToWrite = sb.download(content);
@ -978,7 +976,7 @@ public class ItemsManager extends Impersonable{
final Node nodeToCopy = ses.getNodeByIdentifier(id); final Node nodeToCopy = ses.getNodeByIdentifier(id);
final Node destination = ses.getNodeByIdentifier(destinationId); final Node destination = ses.getNodeByIdentifier(destinationId);
//Item destinationItem = node2Item.getItem(destination,null); FolderItem destinationItem = (FolderItem)node2Item.getItem(destination,null);
final Item item = node2Item.getItem(nodeToCopy, Arrays.asList(NodeConstants.ACCOUNTING_NAME, NodeConstants.METADATA_NAME)); final Item item = node2Item.getItem(nodeToCopy, Arrays.asList(NodeConstants.ACCOUNTING_NAME, NodeConstants.METADATA_NAME));
@ -1001,14 +999,8 @@ public class ItemsManager extends Impersonable{
Content contentToCopy = ((AbstractFileItem) item).getContent(); Content contentToCopy = ((AbstractFileItem) item).getContent();
//TODO : understand if it is needed MetaInfo contentInfo = opMediator.copy(contentToCopy, destinationItem.getBackend(), destination.getPath(), uniqueName);
//opMediator.copy(((AbstractFileItem) item).getContent(),destinationItem, uniqueName);
StorageBackendFactory sbf = storageBackendHandler.get(contentToCopy.getManagedBy());
//TODO: add metadata taken from content node
StorageBackend sb = sbf.create(null);
MetaInfo contentInfo = sb.onCopy(contentToCopy, destination.getPath(), uniqueName);
Utils.setContentFromMetaInfo((AbstractFileItem) item, contentInfo); Utils.setContentFromMetaInfo((AbstractFileItem) item, contentInfo);
item2Node.replaceContent(newNode, (AbstractFileItem) item, ItemAction.CLONED); item2Node.replaceContent(newNode, (AbstractFileItem) item, ItemAction.CLONED);

View File

@ -297,6 +297,9 @@ public class MessageManager extends Impersonable{
message.setBody(body); message.setBody(body);
message.setName(UUID.randomUUID().toString()); message.setName(UUID.randomUUID().toString());
User user = ses.getUserManager().getAuthorizable(currentUser, User.class); User user = ses.getUserManager().getAuthorizable(currentUser, User.class);
if (user ==null)
throw new InvalidCallParameters("invalid storagehub user: "+currentUser);
Owner owner = new Owner(); Owner owner = new Owner();
owner.setUserId(user.getID()); owner.setUserId(user.getID());
owner.setUserName(user.getPrincipal().getName()); owner.setUserName(user.getPrincipal().getName());
@ -413,10 +416,8 @@ public class MessageManager extends Impersonable{
Content contentToCopy = newNodeItem.getContent(); Content contentToCopy = newNodeItem.getContent();
StorageBackendFactory sbf = storageBackendHandler.get(contentToCopy.getManagedBy()); StorageBackendFactory sbf = storageBackendHandler.get(contentToCopy.getPayloadBackend());
StorageBackend sb = sbf.create(contentToCopy.getPayloadBackend());
//TODO: add metadata taken from content node
StorageBackend sb = sbf.create(null);
MetaInfo contentInfo = sb.onCopy(contentToCopy, destination.getPath(), uniqueName); MetaInfo contentInfo = sb.onCopy(contentToCopy, destination.getPath(), uniqueName);

View File

@ -0,0 +1,45 @@
package org.gcube.data.access.storagehub.storage.backend.impl;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import javax.annotation.PostConstruct;
import javax.inject.Singleton;
import org.gcube.common.storagehub.model.Constants;
import org.gcube.common.storagehub.model.Metadata;
import org.gcube.common.storagehub.model.exceptions.InvalidCallParameters;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
@Singleton
public class DefaultGcubeMinIoStorageBackendFactory implements StorageBackendFactory {

	// Backend bound to the default MinIO bucket; shared by every caller
	// that does not supply its own payload configuration.
	private StorageBackend singleton;

	/**
	 * Builds the default S3/MinIO backend from the built-in configuration.
	 */
	@PostConstruct
	public void init(){
		Map<String, Object> params = new HashMap<String, Object>();
		params.put("bucketName", "storagehub-default" );
		// SECURITY(review): hard-coded credentials for the dev MinIO endpoint.
		// These must be moved to external configuration / secret storage
		// before this ships beyond the dev infrastructure.
		params.put("key", "minio_admin" );
		params.put("secret", "8334e0e6acb173a8f915026b1bc4c7" );
		params.put("url", "https://minio.dev.d4science.org/" );

		Metadata metadata = new Metadata(params);

		this.singleton = new S3Backend(new PayloadBackend(getName(), metadata), unused -> UUID.randomUUID().toString());
	}

	@Override
	public String getName() {
		return Constants.DEFAULT_MINIO_STORAGE;
	}

	/**
	 * Returns a backend for the given configuration, or the default
	 * singleton backend when no usable configuration is supplied.
	 *
	 * @param payloadConfiguration may be {@code null} (callers such as the
	 *                             storage tests pass null to request the default)
	 */
	@Override
	public StorageBackend create(PayloadBackend payloadConfiguration) throws InvalidCallParameters {
		// Fix: the original dereferenced payloadConfiguration unconditionally
		// and threw NPE on create(null).
		if (payloadConfiguration == null
				|| payloadConfiguration.getParameters() == null
				|| payloadConfiguration.getParameters().isEmpty())
			return singleton;
		return new S3Backend(payloadConfiguration, unused -> UUID.randomUUID().toString());
	}
}

View File

@ -2,11 +2,13 @@ package org.gcube.data.access.storagehub.storage.backend.impl;
import java.io.InputStream; import java.io.InputStream;
import java.nio.file.Paths; import java.nio.file.Paths;
import java.util.Map;
import java.util.UUID; import java.util.UUID;
import org.gcube.common.authorization.library.provider.AuthorizationProvider; import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.storagehub.model.Constants; import org.gcube.common.storagehub.model.Constants;
import org.gcube.common.storagehub.model.items.nodes.Content; import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.MetaInfo; import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.storages.StorageBackend; import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.contentmanagement.blobstorage.service.IClient; import org.gcube.contentmanagement.blobstorage.service.IClient;
@ -16,23 +18,24 @@ import org.gcube.contentmanager.storageclient.wrapper.StorageClient;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
public class GCubeMongoStorageBackend implements StorageBackend { public class GCubeMongoStorageBackend extends StorageBackend {
private static final Logger log = LoggerFactory.getLogger(GCubeMongoStorageBackend.class); private static final Logger log = LoggerFactory.getLogger(GCubeMongoStorageBackend.class);
private final static String SERVICE_NAME = "home-library"; private final static String SERVICE_NAME = "home-library";
private final static String SERVICE_CLASS = "org.gcube.portlets.user"; private final static String SERVICE_CLASS = "org.gcube.portlets.user";
public GCubeMongoStorageBackend(PayloadBackend payloadConf) {
super(payloadConf);
}
@Override @Override
public InputStream download(Content content) { public InputStream download(Content content) {
return getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient().get().RFileAsInputStream(content.getStorageId()); return getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient().get().RFileAsInputStream(content.getStorageId());
} }
@Override
public String getName() {
return Constants.mongoStorageConstant;
}
protected StorageClient getStorageClient(String login){ protected StorageClient getStorageClient(String login){
return new StorageClient(SERVICE_CLASS, SERVICE_NAME, login, AccessType.SHARED, MemoryType.PERSISTENT); return new StorageClient(SERVICE_CLASS, SERVICE_NAME, login, AccessType.SHARED, MemoryType.PERSISTENT);
@ -45,13 +48,13 @@ public class GCubeMongoStorageBackend implements StorageBackend {
String newRemotePath = Paths.get(newParentPath, newName).toString(); String newRemotePath = Paths.get(newParentPath, newName).toString();
String newStorageID = getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient().copyFile(true).from(content.getStorageId()).to(newRemotePath); String newStorageID = getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient().copyFile(true).from(content.getStorageId()).to(newRemotePath);
log.info("The id returned by storage is {}", newStorageID); log.info("The id returned by storage is {}", newStorageID);
return new MetaInfo(content.getSize(),newStorageID, newRemotePath, getName()); return new MetaInfo(content.getSize(),newStorageID, newRemotePath, getPayloadConfiguration());
} }
@Override @Override
public MetaInfo onMove(Content content, String newParentPath) { public MetaInfo onMove(Content content, String newParentPath) {
//new contentPath can be set as remotePath to the storage backend ? //new contentPath can be set as remotePath to the storage backend ?
return new MetaInfo(content.getSize(),content.getStorageId(), content.getRemotePath(), getName()); return new MetaInfo(content.getSize(),content.getStorageId(), content.getRemotePath(), getPayloadConfiguration());
} }
@Override @Override
@ -62,10 +65,15 @@ public class GCubeMongoStorageBackend implements StorageBackend {
String remotePath= String.format("%s/%s-%s",relPath,uid,name); String remotePath= String.format("%s/%s-%s",relPath,uid,name);
String storageId =storageClient.put(true).LFile(stream).RFile(remotePath); String storageId =storageClient.put(true).LFile(stream).RFile(remotePath);
long size = storageClient.getSize().RFileById(storageId); long size = storageClient.getSize().RFileById(storageId);
MetaInfo info = new MetaInfo(size, storageId, remotePath, getName()); MetaInfo info = new MetaInfo(size, storageId, remotePath, getPayloadConfiguration());
return info; return info;
} }
@Override
public MetaInfo upload(InputStream stream, String relPath, String name, Long size) {
return this.upload(stream, relPath, name);
}
@Override @Override
public void onDelete(Content content) { public void onDelete(Content content) {
log.debug("deleting"); log.debug("deleting");

View File

@ -1,23 +1,25 @@
package org.gcube.data.access.storagehub.storage.backend.impl; package org.gcube.data.access.storagehub.storage.backend.impl;
import javax.inject.Singleton;
import org.gcube.common.storagehub.model.Constants; import org.gcube.common.storagehub.model.Constants;
import org.gcube.common.storagehub.model.Metadata; import org.gcube.common.storagehub.model.Metadata;
import org.gcube.common.storagehub.model.exceptions.InvalidCallParameters; import org.gcube.common.storagehub.model.exceptions.InvalidCallParameters;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.StorageBackend; import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory; import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
@Singleton
public class GCubeMongoStorageBackendFactory implements StorageBackendFactory { public class GCubeMongoStorageBackendFactory implements StorageBackendFactory {
@Override @Override
public String getName() { public String getName() {
return Constants.mongoStorageConstant; return Constants.MONGO_STORAGE;
} }
@Override @Override
public StorageBackend create(Metadata parameter) throws InvalidCallParameters { public StorageBackend create(PayloadBackend payloadConfiguration) throws InvalidCallParameters {
return new GCubeMongoStorageBackend(); return new GCubeMongoStorageBackend(payloadConfiguration);
} }
} }

View File

@ -1,126 +1,129 @@
package org.gcube.data.access.storagehub.storage.backend.impl; package org.gcube.data.access.storagehub.storage.backend.impl;
import java.io.InputStream; import java.io.InputStream;
import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.function.Function; import java.util.function.Function;
import org.gcube.common.authorization.library.provider.AuthorizationProvider; import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.storagehub.model.items.nodes.Content; import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.MetaInfo; import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.storages.StorageBackend; import org.gcube.common.storagehub.model.storages.StorageBackend;
import com.amazonaws.auth.AWSCredentials; import io.minio.CopyObjectArgs;
import com.amazonaws.auth.AWSStaticCredentialsProvider; import io.minio.CopySource;
import com.amazonaws.auth.BasicAWSCredentials; import io.minio.GetObjectArgs;
import com.amazonaws.regions.Regions; import io.minio.MinioClient;
import com.amazonaws.services.s3.AmazonS3; import io.minio.PutObjectArgs;
import com.amazonaws.services.s3.AmazonS3ClientBuilder; import io.minio.RemoveObjectArgs;
import com.amazonaws.services.s3.model.CopyObjectRequest; import io.minio.StatObjectArgs;
import com.amazonaws.services.s3.model.ObjectMetadata; import io.minio.StatObjectResponse;
import com.amazonaws.services.s3.model.PutObjectResult;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectInputStream;
public class S3Backend implements StorageBackend{ public class S3Backend extends StorageBackend{
Map<String, Object> parameters;
Function<Void, String> keyGenerator; Function<Void, String> keyGenerator;
AWSCredentials credentials = new BasicAWSCredentials("user", "password");
final static Regions clientRegion = Regions.DEFAULT_REGION;
String bucketName; String bucketName;
MinioClient client;
private static final long PART_SIZE = 100000000;
public S3Backend(PayloadBackend payloadConfiguration, Function<Void, String> keyGenerator) {
public S3Backend(Map<String, Object> parameters, Function<Void, String> keyGenerator) { super(payloadConfiguration);
super();
this.parameters = parameters;
this.keyGenerator = keyGenerator; this.keyGenerator = keyGenerator;
Map<String, Object> parameters = payloadConfiguration.getParameters();
this.bucketName = (String)parameters.get("bucketName"); this.bucketName = (String)parameters.get("bucketName");
} String accessKey = (String)parameters.get("key");
String secret = (String)parameters.get("secret");
@Override String url = (String)parameters.get("url");
public String getName() { client =
return "s3"; MinioClient.builder()
.endpoint(url)
.credentials(accessKey, secret)
.build();
} }
@Override @Override
public MetaInfo onCopy(Content content, String newParentPath, String newName) { public MetaInfo onCopy(Content content, String newParentPath, String newName) {
String sourceKey = content.getStorageId(); String sourceKey = content.getStorageId();
String destinationKey = keyGenerator.apply(null); String destinationKey = keyGenerator.apply(null);
try { try {
AmazonS3 s3Client = AmazonS3ClientBuilder.standard() CopySource source = CopySource.builder().bucket(bucketName).object(sourceKey).build();
.withCredentials(new AWSStaticCredentialsProvider(credentials))
.withRegion(clientRegion)
.build();
// Copy the object into a new object in the same bucket. // Copy the object into a new object in the same bucket.
CopyObjectRequest copyObjRequest = new CopyObjectRequest(bucketName, sourceKey, bucketName, destinationKey); CopyObjectArgs copyObjRequest = CopyObjectArgs.builder().source(source).bucket(bucketName).object(destinationKey).build();
s3Client.copyObject(copyObjRequest); client.copyObject(copyObjRequest);
} catch (Exception e) { } catch (Exception e) {
throw new RuntimeException("error copying file on s3"); throw new RuntimeException("error copying file on s3", e);
} }
return new MetaInfo(content.getSize(), destinationKey, null, getName()); return new MetaInfo(content.getSize(), destinationKey, null, getPayloadConfiguration());
} }
@Override @Override
public MetaInfo onMove(Content content, String newParentPath) { public MetaInfo onMove(Content content, String newParentPath) {
//new contentPath can be set as remotePath to the storage backend ? //new contentPath can be set as remotePath to the storage backend ?
return new MetaInfo(content.getSize(),content.getStorageId(), content.getRemotePath(), getName()); return new MetaInfo(content.getSize(),content.getStorageId(), content.getRemotePath(), getPayloadConfiguration());
} }
@Override @Override
public void onDelete(Content content) { public void onDelete(Content content) {
// TODO Auto-generated method stub try {
String storageId = content.getStorageId();
client.removeObject(RemoveObjectArgs.builder().bucket(bucketName).object(storageId).build());
} catch (Exception e) {
throw new RuntimeException("error deleting file on s3", e);
}
} }
@Override @Override
public MetaInfo upload(InputStream stream, String relativePath, String name) { public MetaInfo upload(InputStream stream, String relativePath, String name) {
return this.upload(stream, relativePath, name, null);
}
@Override
public MetaInfo upload(InputStream stream, String relativePath, String name, Long size) {
try { try {
AmazonS3 s3Client = AmazonS3ClientBuilder.standard()
.withCredentials(new AWSStaticCredentialsProvider(credentials))
.withRegion(clientRegion)
.build();
String storageId = keyGenerator.apply(null); String storageId = keyGenerator.apply(null);
ObjectMetadata metadata = new ObjectMetadata(); Map<String, String> headers = new HashMap<>();
metadata.addUserMetadata("user", AuthorizationProvider.instance.get().getClient().getId()); headers.put("X-Amz-Storage-Class", "REDUCED_REDUNDANCY");
Map<String, String> userMetadata = new HashMap<>();
userMetadata.put("user", AuthorizationProvider.instance.get().getClient().getId());
client.putObject(
PutObjectArgs.builder().bucket(bucketName).object(storageId).stream(
stream, size == null || size<=0?-1:size, size == null || size<=0?PART_SIZE:-1)
.headers(headers).tags(userMetadata)
.build());
PutObjectResult result = s3Client.putObject( long fileSize;
bucketName, if (size != null && size>0)
keyGenerator.apply(null), fileSize = size;
stream, else {
metadata StatObjectResponse resp = client.statObject(StatObjectArgs.builder().bucket(bucketName).object(storageId).build());
); fileSize = resp.size();
}
long size = result.getMetadata().getContentLength(); return new MetaInfo(fileSize,storageId, null, getPayloadConfiguration());
s3Client.getObjectMetadata(bucketName, );
return new MetaInfo(content.getSize(),storageId, null, getName());
} catch (Exception e) { } catch (Exception e) {
throw new RuntimeException("error copying file on s3"); throw new RuntimeException("error uploading file on s3", e);
} }
} }
@Override @Override
public InputStream download(Content item) { public InputStream download(Content content) {
try { try {
AmazonS3 s3Client = AmazonS3ClientBuilder.standard() String storageId = content.getStorageId();
.withCredentials(new AWSStaticCredentialsProvider(credentials)) InputStream inputStream = client.getObject(GetObjectArgs.builder().bucket(bucketName).object(storageId).build());
.withRegion(clientRegion)
.build();
S3Object s3object = s3Client.getObject(bucketName, item.getStorageId());
S3ObjectInputStream inputStream = s3object.getObjectContent();
return inputStream; return inputStream;
} catch (Exception e) { } catch (Exception e) {
throw new RuntimeException("error copying file on s3"); throw new RuntimeException("error downloading file from s3",e);
} }
} }

View File

@ -25,7 +25,7 @@ The projects leading to this software have received funding from a series of
Version Version
-------------------------------------------------- --------------------------------------------------
1.5.0-SNAPSHOT (2021-11-26) 1.5.0-SNAPSHOT (2021-12-02)
Please see the file named "changelog.xml" in this directory for the release notes. Please see the file named "changelog.xml" in this directory for the release notes.

View File

@ -0,0 +1,45 @@
package org.gcube.data.access.fs;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Collections;
import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.authorization.library.provider.UserInfo;
import org.gcube.common.authorization.library.utils.Caller;
import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.data.access.storagehub.storage.backend.impl.DefaultGcubeMinIoStorageBackendFactory;
import org.junit.Assert;
import org.junit.Test;
public class StorageTest {
@Test
public void testS3() throws Exception{
try {
AuthorizationProvider.instance.set(new Caller(new UserInfo("testUser", Collections.emptyList()), ""));
DefaultGcubeMinIoStorageBackendFactory factory = new DefaultGcubeMinIoStorageBackendFactory();
factory.init();
File file = new File("/home/lucio/Downloads/lombok.jar");
InputStream stream = new FileInputStream(file);
Assert.assertNotNull(stream);
StorageBackend sb = factory.create(null);
Assert.assertNotNull(sb);
MetaInfo inof = sb.upload(stream, "/path", "uploadTest", file.length());
Content content = new Content();
content.setStorageId(inof.getStorageId());
sb.onDelete(content);
}catch (Exception e) {
e.printStackTrace();
throw e;
}
}
}