Lucio Lelii 5 years ago
parent 62d8c068cc
commit 1c1e067a19

@@ -1,36 +0,0 @@
package org.gcube.data.access.storagehub;
public class MetaInfo {
private long size;
private String storageId;
private String remotePath;
public long getSize() {
return size;
}
public void setSize(long size) {
this.size = size;
}
public String getStorageId() {
return storageId;
}
public void setStorageId(String storageId) {
this.storageId = storageId;
}
public String getRemotePath() {
return remotePath;
}
public void setRemotePath(String remotePath) {
this.remotePath = remotePath;
}
}

@@ -43,7 +43,9 @@ import org.gcube.contentmanager.storageclient.wrapper.StorageClient;
import org.gcube.data.access.storagehub.accounting.AccountingHandler;
import org.gcube.data.access.storagehub.handlers.Item2NodeConverter;
import org.gcube.data.access.storagehub.handlers.Node2ItemConverter;
import org.gcube.data.access.storagehub.handlers.StorageBackendHandler;
import org.gcube.data.access.storagehub.handlers.VersionHandler;
import org.gcube.data.access.storagehub.storage.backend.impl.GCubeStorageBackend;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -155,11 +157,6 @@ public class Utils {
return Paths.getPath(String.format("/Home/%s",login));
}
public static StorageClient getStorageClient(String login){
return new StorageClient(SERVICE_CLASS, SERVICE_NAME, login, AccessType.SHARED, MemoryType.PERSISTENT);
}
public static Deque<Item> getAllNodesForZip(FolderItem directory, Session session, AccountingHandler accountingHandler, List<String> excludes) throws RepositoryException, BackendGenericError{
Deque<Item> queue = new LinkedList<Item>();
Node currentNode = session.getNodeByIdentifier(directory.getId());
@@ -182,7 +179,7 @@ public class Utils {
}
public static void zipNode(ZipOutputStream zos, Deque<Item> queue, String login, org.gcube.common.storagehub.model.Path originalPath) throws Exception{
public static void zipNode(ZipOutputStream zos, Deque<Item> queue, String login, org.gcube.common.storagehub.model.Path originalPath, StorageBackendHandler storageHandler) throws Exception{
logger.trace("originalPath is {}",originalPath.toPath());
org.gcube.common.storagehub.model.Path actualPath = Paths.getPath("");
while (!queue.isEmpty()) {
@@ -200,7 +197,7 @@ public class Utils {
}
} else if (item instanceof AbstractFileItem){
try {
InputStream streamToWrite = Utils.getStorageClient(login).getClient().get().RFileAsInputStream(((AbstractFileItem)item).getContent().getStorageId());
InputStream streamToWrite = storageHandler.download(((AbstractFileItem)item).getContent().getStorageId());
if (streamToWrite == null){
logger.warn("discarding item {} ",item.getName());
continue;

@@ -0,0 +1,43 @@
package org.gcube.data.access.storagehub.handlers;
import java.io.InputStream;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.gcube.common.storagehub.model.items.AbstractFileItem;
import org.gcube.common.storagehub.model.items.FolderItem;
import org.gcube.common.storagehub.model.items.Item;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.data.access.storagehub.storage.backend.impl.GCubeStorageBackend;
@Singleton
public class StorageBackendHandler {
@Inject
private GCubeStorageBackend defaultBackend;
public String move(Item item, FolderItem destination) {
//if item is a folder we have to move everything
return defaultBackend.move(((AbstractFileItem) item).getContent().getStorageId());
}
public String copy(AbstractFileItem item) {
return defaultBackend.copy(item.getContent().getStorageId(), item.getContent().getRemotePath());
}
public MetaInfo upload(InputStream stream, String itemPath) {
return defaultBackend.upload(stream, itemPath);
}
public InputStream download(String id) {
return defaultBackend.getContent(id);
}
public void delete(String id) {
defaultBackend.delete(id);
}
}
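
The handler above is a thin delegation layer: every call is forwarded to the injected GCubeStorageBackend singleton, so resource classes no longer build a StorageClient themselves. As a rough illustration only (not part of this commit; the class name below is hypothetical), a consumer is expected to obtain the handler through injection and use it as follows, which is the same pattern applied to ItemsCreator, ItemsManager and TrashHandler further down in this diff:

import java.io.InputStream;

import javax.inject.Inject;
import javax.inject.Singleton;

import org.gcube.common.storagehub.model.items.AbstractFileItem;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.data.access.storagehub.handlers.StorageBackendHandler;

@Singleton
public class ExampleStorageConsumer {

    // the container injects the shared handler; no direct StorageClient usage anymore
    @Inject StorageBackendHandler storageBackend;

    public MetaInfo store(InputStream stream, String remotePath) {
        // upload returns size, storageId and remotePath wrapped in a single MetaInfo object
        return storageBackend.upload(stream, remotePath);
    }

    public InputStream load(AbstractFileItem item) {
        return storageBackend.download(item.getContent().getStorageId());
    }
}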

@@ -50,6 +50,8 @@ public class TrashHandler {
@Inject
Item2NodeConverter item2Node;
@Inject
StorageBackendHandler storageHandler;
public void removeNodes(Session ses, List<Item> itemsToDelete) throws RepositoryException, StorageHubException{
log.debug("defnitively removing nodes with ids {}",itemsToDelete);
@@ -83,8 +85,7 @@ public class TrashHandler {
public void run() {
for (String id: contentIdsToDelete) {
try {
IClient client = Utils.getStorageClient(user).getClient();
client.remove().RFileById(id);
storageHandler.delete(id);
log.debug("file with id {} correctly removed on storage",id);
}catch(Throwable t) {
log.warn("error removing file on storage with id {}",id, t);

@@ -51,16 +51,16 @@ import org.gcube.common.storagehub.model.exceptions.UserNotAuthorizedException;
import org.gcube.common.storagehub.model.items.AbstractFileItem;
import org.gcube.common.storagehub.model.items.FolderItem;
import org.gcube.common.storagehub.model.items.GCubeItem;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.types.ItemAction;
import org.gcube.contentmanagement.blobstorage.service.IClient;
import org.gcube.data.access.storagehub.AuthorizationChecker;
import org.gcube.data.access.storagehub.MetaInfo;
import org.gcube.data.access.storagehub.MultipleOutputStream;
import org.gcube.data.access.storagehub.Utils;
import org.gcube.data.access.storagehub.accounting.AccountingHandler;
import org.gcube.data.access.storagehub.handlers.CredentialHandler;
import org.gcube.data.access.storagehub.handlers.Item2NodeConverter;
import org.gcube.data.access.storagehub.handlers.Node2ItemConverter;
import org.gcube.data.access.storagehub.handlers.StorageBackendHandler;
import org.gcube.data.access.storagehub.handlers.VersionHandler;
import org.gcube.data.access.storagehub.handlers.content.ContentHandler;
import org.gcube.data.access.storagehub.handlers.content.ContentHandlerFactory;
@@ -98,6 +98,7 @@ public class ItemsCreator {
@Inject Node2ItemConverter node2Item;
@Inject Item2NodeConverter item2Node;
@Inject StorageBackendHandler storageBackend;
//@Path("/{id}/create/{type:(?!FILE)[^/?$]*}")
@POST
@@ -470,22 +471,10 @@ public class ItemsCreator {
try(InputStream is1 = mos.get()){
String uid = UUID.randomUUID().toString();
String remotePath= String.format("%s/%s-%s",path,uid,name);
long start = System.currentTimeMillis();
log.debug("TIMING: sending the content to Storage - start");
IClient storageClient = Utils.getStorageClient(login).getClient();
log.debug("TIMING: getting the client took {} ",System.currentTimeMillis()-start);
String storageId =storageClient.put(true).LFile(is1).RFile(remotePath);
log.debug("returned storage Id is {} for remotepath {}",storageId, remotePath);
log.debug("TIMING: sending the file took {} ",System.currentTimeMillis()-start);
long size = storageClient.getSize().RFileById(storageId);
log.debug("TIMING: sending the content to Storage - finished in {}",System.currentTimeMillis()-start);
MetaInfo info = new MetaInfo();
info.setSize(size);
info.setStorageId(storageId);
info.setRemotePath(remotePath);
MetaInfo info = storageBackend.upload(is1, remotePath);
return info;
}catch (Throwable e) {
log.error("error writing content");
log.error("error writing content",e );
throw e;
}

@@ -44,6 +44,7 @@ import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.StreamingOutput;
import org.apache.commons.io.FilenameUtils;
import org.gcube.accounting.datamodel.usagerecords.StorageUsageRecord;
import org.gcube.common.authorization.control.annotations.AuthorizationControl;
import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.encryption.StringEncrypter;
@@ -82,8 +83,10 @@ import org.gcube.data.access.storagehub.handlers.ClassHandler;
import org.gcube.data.access.storagehub.handlers.CredentialHandler;
import org.gcube.data.access.storagehub.handlers.Item2NodeConverter;
import org.gcube.data.access.storagehub.handlers.Node2ItemConverter;
import org.gcube.data.access.storagehub.handlers.StorageBackendHandler;
import org.gcube.data.access.storagehub.handlers.TrashHandler;
import org.gcube.data.access.storagehub.handlers.VersionHandler;
import org.gcube.data.access.storagehub.storage.backend.impl.GCubeStorageBackend;
import org.gcube.smartgears.utils.InnerMethodName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -119,6 +122,8 @@ public class ItemsManager {
@Inject Node2ItemConverter node2Item;
@Inject Item2NodeConverter item2Node;
@Inject StorageBackendHandler storageBackend;
@GET
@Path("{id}")
@Produces(MediaType.APPLICATION_JSON)
@@ -548,7 +553,7 @@ public class ItemsManager {
String mimeType = version.getFrozenNode().getProperty(NodeProperty.MIME_TYPE.toString()).getString();
String storageId = version.getFrozenNode().getProperty(NodeProperty.STORAGE_ID.toString()).getString();
final InputStream streamToWrite = Utils.getStorageClient(login).getClient().get().RFileAsInputStream(storageId);
final InputStream streamToWrite = storageBackend.download(storageId);
String oldfilename = FilenameUtils.getBaseName(currentItem.getTitle());
String ext = FilenameUtils.getExtension(currentItem.getTitle());
@@ -653,7 +658,7 @@ public class ItemsManager {
long start = System.currentTimeMillis();
zos.setLevel(Deflater.BEST_COMPRESSION);
log.debug("writing StreamOutput");
Utils.zipNode(zos, allNodes, login, originalPath);
Utils.zipNode(zos, allNodes, login, originalPath, storageBackend);
log.debug("StreamOutput written in {}",(System.currentTimeMillis()-start));
} catch (Exception e) {
log.error("error writing stream",e);
@@ -690,7 +695,7 @@ public class ItemsManager {
private Response downloadFileInternal(Session ses, AbstractFileItem fileItem, String login, boolean withAccounting) throws RepositoryException {
final InputStream streamToWrite = Utils.getStorageClient(login).getClient().get().RFileAsInputStream(fileItem.getContent().getStorageId());
final InputStream streamToWrite = storageBackend.download(fileItem.getContent().getStorageId());
if (withAccounting)
accountingHandler.createReadObj(fileItem.getTitle(), ses, ses.getNodeByIdentifier(fileItem.getId()), true);
@@ -820,11 +825,9 @@ public class ItemsManager {
newFileIdentifier = newNode.getIdentifier();
if (item instanceof AbstractFileItem) {
String oldStorageId = ((AbstractFileItem)item).getContent().getStorageId();
String newStorageID = Utils.getStorageClient(login).getClient().copyFile(true).from(oldStorageId).to(newPath);
log.info("copying storage Id {} to newPath {} and the id returned by storage is {}", oldStorageId, newPath, newStorageID);
((AbstractFileItem) item).getContent().setStorageId(newStorageID);
((AbstractFileItem) item).getContent().setRemotePath(newPath);
String newStorageID = storageBackend.copy((AbstractFileItem) item);
((AbstractFileItem) item).getContent().setStorageId(newStorageID);
item2Node.replaceContent(newNode, (AbstractFileItem) item, ItemAction.CLONED);
}

@@ -0,0 +1,76 @@
package org.gcube.data.access.storagehub.storage.backend.impl;
import java.io.InputStream;
import javax.inject.Singleton;
import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.contentmanagement.blobstorage.service.IClient;
import org.gcube.contentmanager.storageclient.wrapper.AccessType;
import org.gcube.contentmanager.storageclient.wrapper.MemoryType;
import org.gcube.contentmanager.storageclient.wrapper.StorageClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class GCubeStorageBackend implements StorageBackend {
private static final Logger log = LoggerFactory.getLogger(GCubeStorageBackend.class);
private final static String SERVICE_NAME = "home-library";
private final static String SERVICE_CLASS = "org.gcube.portlets.user";
@Override
public InputStream getContent(String id) {
return getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient().get().RFileAsInputStream(id);
}
@Override
public String getName() {
return GCubeStorageBackend.class.getName();
}
@Override
public String copy(String idToCopy, String path) {
String newStorageID = getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient().copyFile(true).from(idToCopy).to(path);
log.info("copying storage Id {} to newPath {} and the id returned by storage is {}", idToCopy, path, newStorageID);
return newStorageID;
}
@Override
public String move(String idToMove) {
return idToMove;
}
@Override
public MetaInfo upload(InputStream stream, String itemPath) {
log.debug("uploading file");
IClient storageClient = getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient();
String storageId =storageClient.put(true).LFile(stream).RFile(itemPath);
long size = storageClient.getSize().RFileById(storageId);
MetaInfo info = new MetaInfo();
info.setSize(size);
info.setStorageId(storageId);
info.setRemotePath(itemPath);
return info;
}
@Override
public void delete(String id) {
log.debug("deleting");
IClient storageClient = getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient();
storageClient.remove().RFileById(id);
}
private static StorageClient getStorageClient(String login){
return new StorageClient(SERVICE_CLASS, SERVICE_NAME, login, AccessType.SHARED, MemoryType.PERSISTENT);
}
}
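
GCubeStorageBackend is the first concrete implementation of the new org.gcube.common.storagehub.model.storages.StorageBackend contract. The interface itself is not part of this diff; judging from the overridden methods it presumably declares something along these lines (a sketch only, the real definition in the storagehub model module may differ):

package org.gcube.common.storagehub.model.storages;

import java.io.InputStream;

// inferred from the methods GCubeStorageBackend overrides above; not the actual source
public interface StorageBackend {

    String getName();

    InputStream getContent(String id);

    String copy(String idToCopy, String path);

    String move(String idToMove);

    MetaInfo upload(InputStream stream, String itemPath);

    void delete(String id);
}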

@@ -25,7 +25,7 @@ no. 654119), SoBigData (grant no. 654024), AGINFRA PLUS (grant no. 731001).
Version
--------------------------------------------------
1.0.5-SNAPSHOT (2019-06-27)
1.0.7-SNAPSHOT (2019-07-03)
Please see the file named "changelog.xml" in this directory for the release notes.

@@ -1,7 +1,7 @@
<application mode='online'>
<name>StorageHub</name>
<group>DataAccess</group>
<version>1.0.5-SNAPSHOT</version>
<version>1.0.7-SNAPSHOT</version>
<description>Storage Hub webapp</description>
<!-- <proxy protocol="https">

@@ -10,11 +10,11 @@
<Packages>
<Software>
<Name>storagehub</Name>
<Version>1.0.5-SNAPSHOT</Version>
<Version>1.0.7-SNAPSHOT</Version>
<MavenCoordinates>
<groupId>org.gcube.data.access</groupId>
<artifactId>storagehub</artifactId>
<version>1.0.5-SNAPSHOT</version>
<version>1.0.7-SNAPSHOT</version>
</MavenCoordinates>
<Files>
<File>storagehub.jar</File>
