package org.gcube.data.access.storagehub.handlers;

import java.io.BufferedInputStream;
import java.io.InputStream;
import java.util.Deque;
import java.util.LinkedList;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

import javax.inject.Inject;
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.jcr.Session;

import org.gcube.common.storagehub.model.Excludes;
import org.gcube.common.storagehub.model.Paths;
import org.gcube.common.storagehub.model.exceptions.BackendGenericError;
import org.gcube.common.storagehub.model.items.AbstractFileItem;
import org.gcube.common.storagehub.model.items.FolderItem;
import org.gcube.common.storagehub.model.items.Item;
import org.gcube.common.storagehub.model.plugins.FolderManager;
import org.gcube.data.access.storagehub.Utils;
import org.gcube.data.access.storagehub.accounting.AccountingHandler;
import org.gcube.data.access.storagehub.handlers.plugins.FolderPluginHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class CompressHandler {

	private Logger logger = LoggerFactory.getLogger(CompressHandler.class);

	@Inject
	FolderPluginHandler pluginHandler;

	/**
	 * Collects, depth first, all the items reachable from the given folder that have to be zipped.
	 * The folder itself is pushed at the head of the queue, its files are appended, and the content
	 * of sub-folders follows. Items whose id is listed in excludes are skipped, and a read
	 * accounting entry is created for every file that is collected.
	 */
	public Deque<Item> getAllNodesForZip(FolderItem directory, Session session, String login,
			AccountingHandler accountingHandler, List<String> excludes) throws RepositoryException, BackendGenericError {
		Deque<Item> queue = new LinkedList<>();
		Node currentNode = session.getNodeByIdentifier(directory.getId());
		queue.push(directory);
		Deque<Item> tempQueue = new LinkedList<>();
		logger.trace("adding directory {}", currentNode.getPath());
		for (Item item : Utils.getItemList(currentNode, Excludes.GET_ONLY_CONTENT, null, false, null)) {
			if (excludes.contains(item.getId()))
				continue;
			if (item instanceof FolderItem)
				tempQueue.addAll(getAllNodesForZip((FolderItem) item, session, login, accountingHandler, excludes));
			else if (item instanceof AbstractFileItem) {
				logger.trace("adding file {}", item.getPath());
				AbstractFileItem fileItem = (AbstractFileItem) item;
				accountingHandler.createReadObj(fileItem.getTitle(), session,
						session.getNodeByIdentifier(item.getId()), login, false);
				queue.addLast(item);
			}
		}
		queue.addAll(tempQueue);
		return queue;
	}

	/**
	 * Writes the queued items to the given ZipOutputStream. Folder items produce directory entries,
	 * file items are downloaded from their storage backend and copied into the archive; entry names
	 * are made relative to originalPath. Items whose content cannot be read are logged and skipped.
	 * The stream is closed when the queue has been consumed.
	 */
	public void zipNode(ZipOutputStream zos, Deque<Item> queue, String login,
			org.gcube.common.storagehub.model.Path originalPath) throws Exception {
		logger.trace("originalPath is {}", originalPath.toPath());
		org.gcube.common.storagehub.model.Path actualPath = Paths.getPath("");
		while (!queue.isEmpty()) {
			Item item = queue.pop();
			if (item instanceof FolderItem) {
				actualPath = Paths.getPath(item.getPath());
				logger.trace("actualPath is {}", actualPath.toPath());
				String name = Paths.remove(actualPath, originalPath).toPath().replaceFirst("/", "");
				logger.trace("writing dir {}", name);
				if (name.isEmpty())
					continue;
				try {
					zos.putNextEntry(new ZipEntry(name));
				} finally {
					zos.closeEntry();
				}
			} else if (item instanceof AbstractFileItem) {
				try {
					AbstractFileItem fileItem = (AbstractFileItem) item;
					FolderManager manager = pluginHandler.getFolderManager(fileItem);
					InputStream streamToWrite = manager.getStorageBackend().download(fileItem.getContent());
					if (streamToWrite == null) {
						logger.warn("discarding item {}", item.getName());
						continue;
					}
					try (BufferedInputStream is = new BufferedInputStream(streamToWrite)) {
						String name = (Paths.remove(actualPath, originalPath).toPath() + item.getName()).replaceFirst("/", "");
						logger.trace("writing file {}", name);
						zos.putNextEntry(new ZipEntry(name));
						Utils.copyStream(is, zos);
					} catch (Exception e) {
						logger.warn("error writing item {}", item.getName(), e);
					} finally {
						zos.closeEntry();
					}
					zos.flush();
				} catch (Throwable e) {
					logger.warn("error reading content for item {}", item.getPath(), e);
				}
			}
		}
		zos.close();
	}
}