commit f50cc7582dcfb562879ecc91f4d8e167bd80ef9f Author: Lucio Lelii Date: Thu May 17 10:51:56 2018 +0000 merge for relese 4.11.1 git-svn-id: https://svn.d4science-ii.research-infrastructures.eu/gcube/branches/data-access/storagehub-webapp/1.0@167555 82a268e6-3cf1-43bd-a215-b396298e98cf diff --git a/.classpath b/.classpath new file mode 100644 index 0000000..ace8266 --- /dev/null +++ b/.classpath @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/.project b/.project new file mode 100644 index 0000000..cf3d13f --- /dev/null +++ b/.project @@ -0,0 +1,23 @@ + + + storagehub-webapp + + + + + + org.eclipse.jdt.core.javabuilder + + + + + org.eclipse.m2e.core.maven2Builder + + + + + + org.eclipse.jdt.core.javanature + org.eclipse.m2e.core.maven2Nature + + diff --git a/.settings/org.eclipse.core.resources.prefs b/.settings/org.eclipse.core.resources.prefs new file mode 100644 index 0000000..29abf99 --- /dev/null +++ b/.settings/org.eclipse.core.resources.prefs @@ -0,0 +1,6 @@ +eclipse.preferences.version=1 +encoding//src/main/java=UTF-8 +encoding//src/main/resources=UTF-8 +encoding//src/test/java=UTF-8 +encoding//src/test/resources=UTF-8 +encoding/=UTF-8 diff --git a/.settings/org.eclipse.jdt.core.prefs b/.settings/org.eclipse.jdt.core.prefs new file mode 100644 index 0000000..714351a --- /dev/null +++ b/.settings/org.eclipse.jdt.core.prefs @@ -0,0 +1,5 @@ +eclipse.preferences.version=1 +org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8 +org.eclipse.jdt.core.compiler.compliance=1.8 +org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning +org.eclipse.jdt.core.compiler.source=1.8 diff --git a/.settings/org.eclipse.m2e.core.prefs b/.settings/org.eclipse.m2e.core.prefs new file mode 100644 index 0000000..f897a7f --- /dev/null +++ b/.settings/org.eclipse.m2e.core.prefs @@ -0,0 +1,4 @@ +activeProfiles= +eclipse.preferences.version=1 +resolveWorkspaceProjects=true +version=1 diff --git a/distro/LICENSE b/distro/LICENSE new file mode 100644 index 0000000..3695e26 --- /dev/null +++ b/distro/LICENSE @@ -0,0 +1 @@ +${gcube.license} diff --git a/distro/README b/distro/README new file mode 100644 index 0000000..fc997a3 --- /dev/null +++ b/distro/README @@ -0,0 +1,66 @@ +The gCube System - ${name} +-------------------------------------------------- + +${description} + + +${gcube.description} + +${gcube.funding} + + +Version +-------------------------------------------------- + +${version} (${buildDate}) + +Please see the file named "changelog.xml" in this directory for the release notes. + + +Authors +-------------------------------------------------- + + +* Lucio Lelii (lucio.lelii-AT-isti.cnr.it), CNR Pisa, + Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo". + +Maintainers +----------- + + +* Lucio Lelii (lucio.lelii-AT-isti.cnr.it), CNR Pisa, + Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo". 
+ +Download information +-------------------------------------------------- + +Source code is available from SVN: + ${scm.url} + +Binaries can be downloaded from the gCube website: + ${gcube.website} + + +Installation +-------------------------------------------------- + +Installation documentation is available on-line in the gCube Wiki: + ${gcube.wikiRoot}/Home_Library_2.0_API_Framework_Specification + +Documentation +-------------------------------------------------- + +Documentation is available on-line in the gCube Wiki: + ${gcube.wikiRoot}/StorageHub_API_Framework_Specification + +Support +-------------------------------------------------- + +Bugs and support requests can be reported in the gCube issue tracking tool: + ${gcube.issueTracking} + + +Licensing +-------------------------------------------------- + +This software is licensed under the terms you may find in the file named "LICENSE" in this directory. \ No newline at end of file diff --git a/distro/changelog.xml b/distro/changelog.xml new file mode 100644 index 0000000..fa005fa --- /dev/null +++ b/distro/changelog.xml @@ -0,0 +1,6 @@ + + + First commit + + \ No newline at end of file diff --git a/distro/descriptor.xml b/distro/descriptor.xml new file mode 100644 index 0000000..7440649 --- /dev/null +++ b/distro/descriptor.xml @@ -0,0 +1,32 @@ + + servicearchive + + tar.gz + + / + + + ${distroDirectory} + / + true + + README + LICENSE + changelog.xml + profile.xml + + 755 + true + + + + + target/${build.finalName}.war + /${artifactId} + + + + \ No newline at end of file diff --git a/distro/profile.xml b/distro/profile.xml new file mode 100644 index 0000000..a163484 --- /dev/null +++ b/distro/profile.xml @@ -0,0 +1,25 @@ + + + + Service + + Storage Hub Webapp + DataAccess + ${artifactId} + 1.0.0 + + + ${artifactId} + ${version} + + ${groupId} + ${artifactId} + ${version} + + + ${build.finalName}.jar + + + + + \ No newline at end of file diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..a71acf9 --- /dev/null +++ b/pom.xml @@ -0,0 +1,332 @@ + + + + maven-parent + org.gcube.tools + 1.0.0 + + + + 4.0.0 + org.gcube.data.access + storagehub + 1.0.0-SNAPSHOT + storagehub + + war + + + 2.14.0 + 7.0.40 + 6.1.26 + 1.7 + 1.6.6 + 1.7.4 + 1.0.12 + + ${project.basedir}/distro + REST web service for Jackrabbit + storagehub + UTF-8 + UTF-8 + + + + + + + org.gcube.distribution + maven-smartgears-bom + LATEST + pom + import + + + + + + + + org.gcube.common + common-authorization + + + + org.gcube.core + common-encryption + + + + org.gcube.core + common-scope-maps + + + + org.gcube.core + common-scope + + + + org.gcube.common + storagehub-model + 1.0.0-SNAPSHOT + + + + com.itextpdf + itextpdf + 5.5.6 + + + + gov.nih.imagej + imagej + 1.47 + + + + org.slf4j + slf4j-api + + + + + + javax.jcr + jcr + 2.0 + + + org.apache.jackrabbit + jackrabbit-api + ${jackrabbit.version} + + + org.apache.jackrabbit + jackrabbit-core + ${jackrabbit.version} + + + org.apache.jackrabbit + jackrabbit-jcr-server + ${jackrabbit.version} + + + + + javax.ws.rs + javax.ws.rs-api + 2.0 + + + org.glassfish.jersey.containers + jersey-container-servlet + 2.13 + + + org.glassfish.jersey.containers.glassfish + jersey-gf-cdi + 2.13 + + + + javax.transaction + javax.transaction-api + 1.2 + + + javax.servlet + javax.servlet-api + 3.0.1 + provided + + + + + javax.enterprise + cdi-api + 1.1 + + + org.jboss.weld.servlet + weld-servlet + 2.2.10.Final + + + + org.jboss + jandex + 1.2.2.Final + + + + com.fasterxml.jackson.jaxrs + jackson-jaxrs-json-provider + 2.3.0 + + + 
+ org.glassfish.jersey.media + jersey-media-json-jackson + 2.13 + + + + org.glassfish.jersey.media + jersey-media-multipart + 2.13 + + + + postgresql + postgresql + 9.1-901.jdbc4 + runtime + + + + + org.gcube.contentmanagement + storage-manager-core + [2.0.0-SNAPSHOT,3.0.0-SNAPSHOT) + + + + org.gcube.contentmanagement + storage-manager-wrapper + [2.0.0-SNAPSHOT,3.0.0-SNAPSHOT) + + + + org.reflections + reflections + 0.9.10 + + + + com.google.guava + guava + 16.0 + + + + + org.glassfish.jersey.test-framework.providers + jersey-test-framework-provider-simple + 2.13 + test + + + + org.apache.derby + derby + 10.8.2.2 + test + + + org.apache.derby + derbyclient + 10.8.2.2 + test + + + + com.googlecode.jeeunit + jeeunit + 1.0.0 + test + + + + com.google.code.gson + gson + 2.7 + + + + ch.qos.logback + logback-classic + 1.0.13 + test + + + + junit + junit + 4.10 + test + + + + + org.mockito + mockito-all + 1.9.5 + test + + + + + org.slf4j + jul-to-slf4j + ${slf4j.version} + + + + org.jboss.weld.se + weld-se + 2.2.10.Final + test + + + junit + junit + 4.11 + test + + + + + + + ${artifactId} + + + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-war-plugin + 2.4 + + false + + + + org.apache.maven.plugins + maven-assembly-plugin + + + ${distroDirectory}/descriptor.xml + + + + + servicearchive + install + + single + + + + + + + + \ No newline at end of file diff --git a/src/main/java/org/gcube/data/access/storagehub/AuthorizationChecker.java b/src/main/java/org/gcube/data/access/storagehub/AuthorizationChecker.java new file mode 100644 index 0000000..49d978f --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/AuthorizationChecker.java @@ -0,0 +1,38 @@ +package org.gcube.data.access.storagehub; + +import java.util.Arrays; + +import javax.inject.Singleton; +import javax.jcr.Node; +import javax.jcr.Session; + +import org.gcube.common.authorization.library.provider.AuthorizationProvider; +import org.gcube.common.storagehub.model.items.Item; +import org.gcube.common.storagehub.model.items.SharedFolder; +import org.gcube.data.access.storagehub.handlers.ItemHandler; + +@Singleton +public class AuthorizationChecker { + + public void checkReadAuthorizationControl(Session session, String id) throws Exception{ + Node node = session.getNodeByIdentifier(id); + + Item item = ItemHandler.getItem(node, Arrays.asList("hl:accounting","jcr:content")); + + if (item.isShared()) { + SharedFolder parentShared = retrieveSharedFolderParent(item, session); + if (!parentShared.getUsers().getValue().containsKey(AuthorizationProvider.instance.get().getClient().getId())) + throw new IllegalAccessException("Insufficent Provileges to read node with id "+id); + } else if (!node.getProperty("hl:portalLogin").getString().equals(AuthorizationProvider.instance.get().getClient().getId())) + throw new IllegalAccessException("Insufficent Provileges to read node with id "+id); + + } + + private SharedFolder retrieveSharedFolderParent(Item item, Session session) throws Exception{ + if (item instanceof SharedFolder) return (SharedFolder)item; + else + return retrieveSharedFolderParent(ItemHandler.getItem(session.getNodeByIdentifier(item.getParentId()), Arrays.asList("hl:accounting","jcr:content")), session); + + } + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/Constants.java b/src/main/java/org/gcube/data/access/storagehub/Constants.java new file mode 100644 index 0000000..e67f334 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/Constants.java @@ -0,0 +1,15 
@@ +package org.gcube.data.access.storagehub; + +public class Constants { + + public static final String VRE_FOLDER_PARENT_NAME = "MySpecialFolders"; + + public static final String TRASH_ROOT_FOLDER_NAME ="Trash"; + + public static final String QUERY_LANGUAGE ="JCR-SQL2"; + + public static final String ADMIN_PARAM_NAME ="admin-username"; + + public static final String ADMIN_PARAM_PWD ="admin-pwd"; + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/MetaInfo.java b/src/main/java/org/gcube/data/access/storagehub/MetaInfo.java new file mode 100644 index 0000000..9efa332 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/MetaInfo.java @@ -0,0 +1,13 @@ +package org.gcube.data.access.storagehub; + +import lombok.Data; + +@Data +public class MetaInfo { + + long size; + + String storageId; + + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/MultipleOutputStream.java b/src/main/java/org/gcube/data/access/storagehub/MultipleOutputStream.java new file mode 100644 index 0000000..de60e49 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/MultipleOutputStream.java @@ -0,0 +1,141 @@ +package org.gcube.data.access.storagehub; + +import java.io.BufferedInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.PipedInputStream; +import java.io.PipedOutputStream; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class MultipleOutputStream { + + private Logger logger = LoggerFactory.getLogger(MultipleOutputStream.class); + + private MyPipedInputStream[] pipedInStreams; + + private InputStream is; + + private MyPipedOututStream[] pipedOutStreams; + + private int index=0; + + public MultipleOutputStream(InputStream is, int number) throws IOException{ + this.is = is; + + + logger.debug("requested {} piped streams ",number); + + pipedInStreams = new MyPipedInputStream[number]; + pipedOutStreams = new MyPipedOututStream[number]; + + for (int i =0; i=pipedInStreams.length) return null; + return pipedInStreams[index++]; + } + + + public class MyPipedOututStream extends PipedOutputStream{ + + boolean close = false; + + @Override + public void close() throws IOException { + this.close = true; + super.close(); + } + + /** + * @return the close + */ + public boolean isClosed() { + return close; + } + + @Override + public void write(byte[] b, int off, int len) throws IOException { + try{ + super.write(b, off, len); + }catch(IOException io){ + this.close = true; + } + } + + + + + } + + public class MyPipedInputStream extends PipedInputStream{ + + boolean close = false; + + public MyPipedInputStream(PipedOutputStream src) throws IOException { + super(src); + } + + @Override + public void close() throws IOException { + this.close = true; + logger.debug(Thread.currentThread().getName()+" close MyPipedInputStream"); + super.close(); + } + + /** + * @return the close + */ + public boolean isClosed() { + return close; + } + + } +} diff --git a/src/main/java/org/gcube/data/access/storagehub/Range.java b/src/main/java/org/gcube/data/access/storagehub/Range.java new file mode 100644 index 0000000..ec6099e --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/Range.java @@ -0,0 +1,29 @@ +package org.gcube.data.access.storagehub; + +public class Range { + + private int start; + private int limit; + + public Range(int start, int limit) { + super(); + this.start = start; + this.limit = limit; + } + + public int getStart() { + return start; + } + + public int getLimit() { + return limit; + } + + @Override + public 
String toString() { + return "Range [start=" + start + ", limit=" + limit + "]"; + } + + + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/RepositoryInitializerImpl.java b/src/main/java/org/gcube/data/access/storagehub/RepositoryInitializerImpl.java new file mode 100644 index 0000000..49b988a --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/RepositoryInitializerImpl.java @@ -0,0 +1,31 @@ +package org.gcube.data.access.storagehub; + +import javax.inject.Singleton; +import javax.jcr.Repository; +import javax.naming.Context; +import javax.naming.InitialContext; + +import org.gcube.data.access.storagehub.services.RepositoryInitializer; + +@Singleton +public class RepositoryInitializerImpl implements RepositoryInitializer{ + + private Repository repository; + + @Override + public Repository getRepository(){ + return repository; + } + + public RepositoryInitializerImpl() throws Exception{ + InitialContext context = new InitialContext(); + Context environment = (Context) context.lookup("java:comp/env"); + repository = (Repository) environment.lookup("jcr/repository"); + } + + + + + + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/SingleFileStreamingOutput.java b/src/main/java/org/gcube/data/access/storagehub/SingleFileStreamingOutput.java new file mode 100644 index 0000000..25bcf31 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/SingleFileStreamingOutput.java @@ -0,0 +1,46 @@ +package org.gcube.data.access.storagehub; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.StreamingOutput; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class SingleFileStreamingOutput implements StreamingOutput { + + private static final Logger log = LoggerFactory.getLogger(SingleFileStreamingOutput.class); + + + InputStream streamToWrite; + + public SingleFileStreamingOutput(InputStream streamToWrite) { + super(); + this.streamToWrite = streamToWrite; + } + + /** + * Overriding the write method to write request data directly to Jersey outputStream . 
+ * @param outputStream + * @throws IOException + * @throws WebApplicationException + */ + @Override + public void write(OutputStream outputStream) throws IOException, WebApplicationException { + log.debug("writing StreamOutput"); + copy(streamToWrite, outputStream); + log.debug("StreamOutput written"); + } + + private void copy(InputStream in, OutputStream out) throws IOException { + byte[] buffer = new byte[2048]; + int readcount = 0; + while ((readcount=in.read(buffer))!=-1) { + out.write(buffer, 0, readcount); + } + } + +} \ No newline at end of file diff --git a/src/main/java/org/gcube/data/access/storagehub/StorageFactory.java b/src/main/java/org/gcube/data/access/storagehub/StorageFactory.java new file mode 100644 index 0000000..fe2f022 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/StorageFactory.java @@ -0,0 +1,36 @@ +package org.gcube.data.access.storagehub; + +import java.util.Map; +import java.util.WeakHashMap; + +import org.gcube.common.authorization.library.provider.AuthorizationProvider; +import org.gcube.contentmanagement.blobstorage.service.IClient; +import org.gcube.contentmanager.storageclient.model.protocol.smp.Handler; +import org.gcube.contentmanager.storageclient.wrapper.AccessType; +import org.gcube.contentmanager.storageclient.wrapper.MemoryType; +import org.gcube.contentmanager.storageclient.wrapper.StorageClient; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class StorageFactory { + + public final static String SERVICE_NAME = "home-library"; + public final static String SERVICE_CLASS = "org.gcube.portlets.user"; + + private static Map clientUserMap = new WeakHashMap(); + + private static Logger log = LoggerFactory.getLogger(StorageFactory.class); + + public static IClient getGcubeStorage(){ + String login = AuthorizationProvider.instance.get().getClient().getId(); + if (!clientUserMap.containsKey(login)){ + IClient storage = new StorageClient(SERVICE_CLASS, SERVICE_NAME, + login, AccessType.SHARED, MemoryType.PERSISTENT).getClient(); + log.info("******* Storage activateProtocol for Storage **********"); + Handler.activateProtocol(); + clientUserMap.put(login, storage); + return storage; + } else return clientUserMap.get(login); + } + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/StorageHub.java b/src/main/java/org/gcube/data/access/storagehub/StorageHub.java new file mode 100644 index 0000000..dad2906 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/StorageHub.java @@ -0,0 +1,28 @@ +package org.gcube.data.access.storagehub; + +import java.util.HashSet; +import java.util.Set; + +import javax.ws.rs.Path; +import javax.ws.rs.core.Application; + +import org.gcube.data.access.storagehub.services.ItemsCreator; +import org.gcube.data.access.storagehub.services.ItemsManager; +import org.gcube.data.access.storagehub.services.WorkspaceManager; +import org.glassfish.jersey.media.multipart.MultiPartFeature; + +@Path("workspace") +public class StorageHub extends Application { + + @Override + public Set> getClasses() { + final Set> classes = new HashSet>(); + // register resources and features + classes.add(MultiPartFeature.class); + classes.add(WorkspaceManager.class); + classes.add(ItemsManager.class); + classes.add(ItemsCreator.class); + return classes; + } + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/Utils.java b/src/main/java/org/gcube/data/access/storagehub/Utils.java new file mode 100644 index 0000000..0ffa312 --- /dev/null +++ 
b/src/main/java/org/gcube/data/access/storagehub/Utils.java @@ -0,0 +1,183 @@ +package org.gcube.data.access.storagehub; + +import java.io.BufferedInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.security.MessageDigest; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Deque; +import java.util.LinkedList; +import java.util.List; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; + +import javax.jcr.Node; +import javax.jcr.NodeIterator; +import javax.jcr.Session; + +import org.gcube.common.authorization.library.provider.AuthorizationProvider; +import org.gcube.common.storagehub.model.Paths; +import org.gcube.common.storagehub.model.items.AbstractFileItem; +import org.gcube.common.storagehub.model.items.FolderItem; +import org.gcube.common.storagehub.model.items.Item; +import org.gcube.common.storagehub.model.types.NodeProperty; +import org.gcube.contentmanager.storageclient.wrapper.AccessType; +import org.gcube.contentmanager.storageclient.wrapper.MemoryType; +import org.gcube.contentmanager.storageclient.wrapper.StorageClient; +import org.gcube.data.access.storagehub.accounting.AccountingHandler; +import org.gcube.data.access.storagehub.handlers.ItemHandler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Utils { + + public final static String SERVICE_NAME = "home-library"; + public final static String SERVICE_CLASS = "org.gcube.portlets.user"; + private static final String FOLDERS_TYPE = "nthl:workspaceItem"; + + + private static final List FOLDERS_TO_EXLUDE = Arrays.asList(Constants.VRE_FOLDER_PARENT_NAME, Constants.TRASH_ROOT_FOLDER_NAME); + + private static final Logger logger = LoggerFactory.getLogger(Utils.class); + + public static String getSecurePassword(String user) throws Exception { + String digest = null; + try { + MessageDigest md = MessageDigest.getInstance("MD5"); + byte[] hash = md.digest(user.getBytes("UTF-8")); + + //converting byte array to Hexadecimal String + StringBuilder sb = new StringBuilder(2*hash.length); + for(byte b : hash){ + sb.append(String.format("%02x", b&0xff)); + } + digest = sb.toString(); + + } catch (Exception e) { + logger.error("error getting secure password",e); + } + return digest; + } + + public static long getItemCount(Node parent, boolean showHidden) throws Exception{ + NodeIterator iterator = parent.getNodes(); + long count=0; + while (iterator.hasNext()){ + Node current = iterator.nextNode(); + + if (isToExclude(current, showHidden)) + continue; + + count++; + } + return count; + } + + + + + + public static List getItemList(Node parent, List excludes, Range range, boolean showHidden) throws Exception{ + + List returnList = new ArrayList(); + long start = System.currentTimeMillis(); + NodeIterator iterator = parent.getNodes(); + logger.trace("time to get iterator {}",(System.currentTimeMillis()-start)); + int count =0; + logger.trace("selected range is {}", range); + while (iterator.hasNext()){ + Node current = iterator.nextNode(); + + if (isToExclude(current, showHidden)) + continue; + + if (range==null || (count>=range.getStart() && returnList.size() getAllNodesForZip(FolderItem directory, Session session, AccountingHandler accountingHandler) throws Exception{ + Deque queue = new LinkedList(); + Node currentNode = session.getNodeByIdentifier(directory.getId()); + queue.push(directory); + Deque tempQueue = new LinkedList(); + logger.debug("adding directory {}",directory.getPath()); + for (Item item : 
Utils.getItemList(currentNode,null, null, false)){ + if (item instanceof FolderItem) + tempQueue.addAll(getAllNodesForZip((FolderItem) item, session, accountingHandler)); + else if (item instanceof AbstractFileItem){ + logger.debug("adding file {}",item.getPath()); + AbstractFileItem fileItem = (AbstractFileItem) item; + accountingHandler.createReadObj(fileItem.getTitle(), session, session.getNodeByIdentifier(item.getId()), false); + queue.addLast(item); + } + } + queue.addAll(tempQueue); + return queue; + } + + + public static void zipNode(ZipOutputStream zos, Deque queue, String login, org.gcube.common.storagehub.model.Path originalPath) throws Exception{ + logger.trace("originalPath is {}",originalPath.toPath()); + org.gcube.common.storagehub.model.Path actualPath = Paths.getPath(""); + while (!queue.isEmpty()) { + Item item = queue.pop(); + if (item instanceof FolderItem) { + actualPath = Paths.getPath(item.getPath()); + logger.trace("actualPath is {}",actualPath.toPath()); + String name = Paths.remove(actualPath, originalPath).toPath().replaceFirst("/", ""); + logger.trace("writing dir {}",name); + zos.putNextEntry(new ZipEntry(name)); + zos.closeEntry(); + } else if (item instanceof AbstractFileItem){ + InputStream streamToWrite = Utils.getStorageClient(login).getClient().get().RFileAsInputStream(((AbstractFileItem)item).getContent().getStorageId()); + if (streamToWrite == null){ + logger.warn("discarding item {} ",item.getName()); + continue; + } + try(BufferedInputStream is = new BufferedInputStream(streamToWrite)){ + String name = Paths.remove(actualPath, originalPath).toPath()+item.getName().replaceFirst("/", ""); + logger.trace("writing file {}",name); + zos.putNextEntry(new ZipEntry(name)); + copyStream(is, zos); + + }catch (Exception e) { + logger.warn("error writing item {}", item.getName(),e); + } finally{ + zos.closeEntry(); + } + } + } + + } + + private static void copyStream(InputStream in, OutputStream out) throws IOException { + byte[] buffer = new byte[2048]; + int readcount = 0; + while ((readcount=in.read(buffer))!=-1) { + out.write(buffer, 0, readcount); + } + } + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/accounting/AccountingFields.java b/src/main/java/org/gcube/data/access/storagehub/accounting/AccountingFields.java new file mode 100644 index 0000000..5c63ab0 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/accounting/AccountingFields.java @@ -0,0 +1,71 @@ +package org.gcube.data.access.storagehub.accounting; + +public enum AccountingFields { + + USER{ + @Override + public String toString() { + return "hl:user"; + } + }, + DATE{ + @Override + public String toString() { + return "hl:date"; + } + }, + ITEM_NAME{ + @Override + public String toString() { + return "hl:itemName"; + } + }, + FROM_PATH{ + @Override + public String toString() { + return "hl:fromPath"; + } + }, + OLD_ITEM_NAME{ + @Override + public String toString() { + return "hl:oldItemName"; + } + }, + NEW_ITEM_NAME{ + @Override + public String toString() { + return "hl:newItemName"; + } + }, + MEMBERS{ + @Override + public String toString() { + return "hl:members"; + } + }, + FOLDER_ITEM_TYPE{ + @Override + public String toString() { + return "hl:folderItemType"; + } + }, + MIME_TYPE{ + @Override + public String toString() { + return "hl:mimeType"; + } + }, + ITEM_TYPE{ + @Override + public String toString() { + return "hl:itemType"; + } + }, + VERSION{ + @Override + public String toString() { + return "hl:version"; + } + }, +} \ No newline at end of file diff 
--git a/src/main/java/org/gcube/data/access/storagehub/accounting/AccountingHandler.java b/src/main/java/org/gcube/data/access/storagehub/accounting/AccountingHandler.java new file mode 100644 index 0000000..521e7ab --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/accounting/AccountingHandler.java @@ -0,0 +1,60 @@ +package org.gcube.data.access.storagehub.accounting; + +import java.util.Calendar; +import java.util.UUID; + +import javax.inject.Singleton; +import javax.jcr.Node; +import javax.jcr.RepositoryException; +import javax.jcr.Session; +import javax.jcr.UnsupportedRepositoryOperationException; +import javax.jcr.version.Version; +import javax.jcr.version.VersionManager; + +import org.gcube.common.authorization.library.provider.AuthorizationProvider; +import org.gcube.common.storagehub.model.items.nodes.accounting.AccountingEntryType; +import org.gcube.common.storagehub.model.types.NodeProperty; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Singleton +public class AccountingHandler { + + + /*@Attribute("hl:user") + @Attribute("hl:date") + @Attribute("hl:version")*/ + + private static final Logger logger = LoggerFactory.getLogger(AccountingHandler.class); + + public void createReadObj(String title, Session ses, Node node, boolean saveHistory ) { + try { + Node directoryNode = node.getParent(); + + if (!directoryNode.hasNode(NodeProperty.ACCOUNTING.toString())){ + directoryNode.addNode(NodeProperty.ACCOUNTING.toString(), NodeProperty.NT_ACCOUNTING.toString()); + + } + + Node accountingNodeParent = directoryNode.getNode(NodeProperty.ACCOUNTING.toString()); + Node accountingNode = accountingNodeParent.addNode(UUID.randomUUID().toString(),AccountingEntryType.READ.getNodeTypeDefinition()); + accountingNode.setProperty("hl:user", AuthorizationProvider.instance.get().getClient().getId()); + accountingNode.setProperty("hl:date", Calendar.getInstance()); + accountingNode.setProperty("hl:itemName", title); + + try { + VersionManager vManager = ses.getWorkspace().getVersionManager(); + //VersionManager versionManager = session.getWorkspace().getVersionManager(); + //Version version = versionManager.checkin(node.getPath()); + Version version = vManager.getBaseVersion(node.getNode("jcr:content").getPath()); + accountingNode.setProperty("hl:version", version.getName()); + }catch(UnsupportedRepositoryOperationException uropex) { + logger.warn("version cannot be retrieved", uropex); + } + if (saveHistory) ses.save(); + } catch (RepositoryException e) { + logger.warn("error trying to retrieve accountign node",e); + } + } + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/handlers/ClassHandler.java b/src/main/java/org/gcube/data/access/storagehub/handlers/ClassHandler.java new file mode 100644 index 0000000..06264e8 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/handlers/ClassHandler.java @@ -0,0 +1,47 @@ +package org.gcube.data.access.storagehub.handlers; + +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import org.gcube.common.storagehub.model.annotations.RootNode; +import org.gcube.common.storagehub.model.items.Item; +import org.reflections.Reflections; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ClassHandler { + + private static Logger log = LoggerFactory.getLogger(ClassHandler.class); + + private Reflections reflection = new Reflections(); + + private Map> classMap = new HashMap>(); + private Map, String> typeMap = new HashMap, String>(); + + + public ClassHandler() { + + 
Set> classesAnnotated = reflection.getTypesAnnotatedWith(RootNode.class); + for (Class clazz: classesAnnotated ){ + if (Item.class.isAssignableFrom(clazz)) + for (String value: clazz.getAnnotation(RootNode.class).value()){ + log.debug("loading class {} with value {} ", clazz, value ); + classMap.put(value, (Class) clazz); + typeMap.put((Class) clazz, value); + } + } + } + + public Class get(String nodeType){ + if (classMap.containsKey(nodeType)) return classMap.get(nodeType); + throw new RuntimeException("mapping not found for nodetype "+ nodeType); + } + + public String getNodeType(Class clazz){ + if (typeMap.containsKey(clazz)) return typeMap.get(clazz); + throw new RuntimeException("mapping not found for nodetype "+ clazz.getSimpleName()); + } + + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/handlers/ItemHandler.java b/src/main/java/org/gcube/data/access/storagehub/handlers/ItemHandler.java new file mode 100644 index 0000000..79d15ab --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/handlers/ItemHandler.java @@ -0,0 +1,398 @@ +package org.gcube.data.access.storagehub.handlers; + +import java.io.InputStream; +import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Calendar; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import javax.jcr.Node; +import javax.jcr.NodeIterator; +import javax.jcr.PathNotFoundException; +import javax.jcr.Property; +import javax.jcr.PropertyIterator; +import javax.jcr.PropertyType; +import javax.jcr.RepositoryException; +import javax.jcr.Session; +import javax.jcr.Value; + +import org.apache.jackrabbit.util.Text; +import org.apache.jackrabbit.value.BinaryValue; +import org.apache.jackrabbit.value.BooleanValue; +import org.apache.jackrabbit.value.DateValue; +import org.apache.jackrabbit.value.LongValue; +import org.apache.jackrabbit.value.StringValue; +import org.gcube.common.storagehub.model.annotations.Attribute; +import org.gcube.common.storagehub.model.annotations.AttributeRootNode; +import org.gcube.common.storagehub.model.annotations.ListNodes; +import org.gcube.common.storagehub.model.annotations.MapAttribute; +import org.gcube.common.storagehub.model.annotations.NodeAttribute; +import org.gcube.common.storagehub.model.items.Item; +import org.gcube.common.storagehub.model.items.SharedFolder; +import org.gcube.common.storagehub.model.items.TrashItem; +import org.reflections.Configuration; +import org.reflections.Reflections; +import org.reflections.util.ConfigurationBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ItemHandler { + + private static final Logger logger = LoggerFactory.getLogger(ItemHandler.class); + + private static ClassHandler classHandler = new ClassHandler(); + + private static HashMap> typeToSubtypeMap = new HashMap<>(); + + public static T getItem(Node node, List excludes) throws Exception { + + @SuppressWarnings("unchecked") + Class classToHandle = (Class)classHandler.get(node.getPrimaryNodeType().getName()); + + T item = classToHandle.newInstance(); + item.setId(node.getIdentifier()); + item.setName(Text.unescapeIllegalJcrChars(node.getName())); + item.setPath(Text.unescapeIllegalJcrChars(node.getPath())); + item.setLocked(node.isLocked()); + item.setPrimaryType(node.getPrimaryNodeType().getName()); + Item parent = null ; + if (item instanceof SharedFolder) { + logger.debug("I'm 
in a Shared Folder"); + item.setShared(true); + }else { + try { + parent = ItemHandler.getItem(node.getParent(), Arrays.asList("hl:accounting","jcr:content")); + item.setShared(parent.isShared()); + } catch(Exception e) { + item.setShared(false); + } + } + + if (item instanceof TrashItem) + item.setTrashed(true); + else { + try { + if (parent==null) + parent = ItemHandler.getItem(node.getParent(), Arrays.asList("hl:accounting","jcr:content")); + item.setTrashed(parent.isTrashed()); + } catch(Exception e) { + item.setTrashed(false); + } + } + + try{ + item.setParentId(node.getParent().getIdentifier()); + item.setParentPath(node.getParent().getPath()); + }catch (Throwable e) { + logger.info("Root node doesn't have a parent"); + } + + for (Field field : retrieveAllFields(classToHandle)){ + if (field.isAnnotationPresent(Attribute.class)){ + Attribute attribute = field.getAnnotation(Attribute.class); + field.setAccessible(true); + try{ + Class returnType = field.getType(); + field.set(item, getPropertyValue(returnType, node.getProperty(attribute.value()))); + + }catch(PathNotFoundException e){ + logger.debug("the current node dosn't contain {} property",attribute.value()); + } catch (Exception e ) { + logger.warn("error setting value",e); + } + } else if (field.isAnnotationPresent(NodeAttribute.class)){ + String fieldNodeName = field.getAnnotation(NodeAttribute.class).value(); + //for now it excludes only first level node + if (excludes!=null && excludes.contains(fieldNodeName)) continue; + logger.debug("retrieving field node "+field.getName()); + field.setAccessible(true); + try{ + Node fieldNode = node.getNode(fieldNodeName); + logger.debug("looking in node {} searched with {}",fieldNode.getName(),fieldNodeName); + field.set(item, iterateNodeAttributeFields(field.getType(), fieldNode)); + }catch(PathNotFoundException e){ + logger.debug("the current node dosn't contain {} node",fieldNodeName); + } catch (Exception e ) { + logger.warn("error setting value",e); + } + + + } + } + return item; + } + + private static T iterateNodeAttributeFields(Class clazz, Node node) throws Exception{ + T obj = clazz.newInstance(); + for (Field field : retrieveAllFields(clazz)){ + if (field.isAnnotationPresent(Attribute.class)){ + Attribute attribute = field.getAnnotation(Attribute.class); + field.setAccessible(true); + try{ + @SuppressWarnings("rawtypes") + Class returnType = field.getType(); + field.set(obj, getPropertyValue(returnType, node.getProperty(attribute.value()))); + logger.debug("found field {} of type annotated as ListNodes in class {} on node {}", field.getName(), clazz.getName(), node.getName()); + }catch(PathNotFoundException e){ + logger.debug("the current node dosn't contain {} property",attribute.value()); + } catch (Exception e ) { + logger.warn("error setting value",e); + } + } else if (field.isAnnotationPresent(MapAttribute.class)){ + //logger.debug("found field {} of type annotated as MapAttribute in class {}", field.getName(), clazz.getName()); + field.setAccessible(true); + String exclude = field.getAnnotation(MapAttribute.class).excludeStartWith(); + Map mapToset = new HashMap(); + PropertyIterator iterator = node.getProperties(); + if (iterator!=null) { + while (iterator.hasNext()){ + Property prop = iterator.nextProperty(); + if (!exclude.isEmpty() && prop.getName().startsWith(exclude)) continue; + try{ + mapToset.put(prop.getName(), getPropertyValue(prop)); + }catch(PathNotFoundException e){ + logger.debug("the property [] is not mapped",prop.getName()); + } catch (Exception e ) { + 
logger.warn("error setting value",e); + } + } + } + field.set(obj, mapToset); + } else if (field.isAnnotationPresent(ListNodes.class)){ + logger.debug("found field {} of type annotated as ListNodes in class {} on node {}", field.getName(), clazz.getName(), node.getName()); + field.setAccessible(true); + String exclude = field.getAnnotation(ListNodes.class).excludeTypeStartWith(); + String include = field.getAnnotation(ListNodes.class).includeTypeStartWith(); + + Class listType = field.getAnnotation(ListNodes.class).listClass(); + + Map subTypesMap = Collections.emptyMap(); + + if (!typeToSubtypeMap.containsKey(listType)) { + + + Configuration config = new ConfigurationBuilder().forPackages(listType.getPackage().getName()); + Reflections reflections = new Reflections(config); + Set subTypes = reflections.getSubTypesOf(listType); + + + if (subTypes.size()>0) { + subTypesMap = new HashMap<>(); + for (Class subtype: subTypes) + if (subtype.isAnnotationPresent(AttributeRootNode.class)) { + AttributeRootNode attributeRootNode = (AttributeRootNode)subtype.getAnnotation(AttributeRootNode.class); + subTypesMap.put(attributeRootNode.value(), subtype); + } + } else logger.debug("no subtypes found for {}",listType.getName()); + + typeToSubtypeMap.put(listType, subTypesMap); + + } else { + logger.info("subtypes already found in cache"); + subTypesMap = typeToSubtypeMap.get(listType); + } + + List toSetList = new ArrayList<>(); + + NodeIterator iterator = node.getNodes(); + + while (iterator.hasNext()){ + Node currentNode = iterator.nextNode(); + + String primaryType = currentNode.getPrimaryNodeType().getName(); + + logger.debug("the current node {} has a list",currentNode.getName()); + + if (!include.isEmpty() && !primaryType.startsWith(include)) + continue; + if (!exclude.isEmpty() && primaryType.startsWith(exclude)) + continue; + if (subTypesMap.containsKey(primaryType)) + toSetList.add(iterateNodeAttributeFields(subTypesMap.get(primaryType), currentNode)); + else toSetList.add(iterateNodeAttributeFields(listType, currentNode)); + } + if (toSetList.size()!=0) field.set(obj, toSetList); + } + } + return obj; + } + + @SuppressWarnings({ "rawtypes", "unchecked" }) + private static Object getPropertyValue(Class returnType, Property prop) throws Exception{ + if (returnType.equals(String.class)) return prop.getString(); + if (returnType.isEnum()) return Enum.valueOf(returnType, prop.getString()); + if (returnType.equals(Calendar.class)) return prop.getDate(); + if (returnType.equals(Boolean.class) || returnType.equals(boolean.class)) return prop.getBoolean(); + if (returnType.equals(Long.class) || returnType.equals(long.class)) return prop.getLong(); + if (returnType.equals(Integer.class) || returnType.equals(int.class)) return prop.getLong(); + if (returnType.isArray()) { + if (prop.getType()==PropertyType.BINARY) { + byte[] bytes = new byte[32000]; + + try (InputStream stream = prop.getBinary().getStream()){ + stream.read(bytes); + } + return bytes; + } else { + Object[] ret= getArrayValue(prop); + return Arrays.copyOf(ret, ret.length, returnType); + } + } + throw new Exception(String.format("class %s not recognized",returnType.getName())); + } + + + + @SuppressWarnings({ "rawtypes" }) + private static Value getObjectValue(Class returnType, Object value) throws Exception{ + if (returnType.equals(String.class)) return new StringValue((String) value); + if (returnType.isEnum()) return new StringValue(((Enum) value).toString()); + if (returnType.equals(Calendar.class)) return new DateValue((Calendar) 
value); + if (returnType.equals(Boolean.class) || returnType.equals(boolean.class)) return new BooleanValue((Boolean) value); + if (returnType.equals(Long.class) || returnType.equals(long.class)) return new LongValue((Long) value); + if (returnType.equals(Integer.class) || returnType.equals(int.class)) return new LongValue((Long) value); + if (returnType.isArray()) { + if (returnType.getComponentType().equals(Byte.class) + || returnType.getComponentType().equals(byte.class)) + return new BinaryValue((byte[]) value); + } + throw new Exception(String.format("class %s not recognized",returnType.getName())); + } + + + private static Object[] getArrayValue(Property prop) throws Exception{ + Object[] values = new Object[prop.getValues().length]; + int i = 0; + for (Value value : prop.getValues()) + values[i++] = getSingleValue(value); + return values; + } + + + private static Object getPropertyValue(Property prop) throws Exception{ + if (prop.isMultiple()){ + Object[] values = new Object[prop.getValues().length]; + int i = 0; + for (Value value : prop.getValues()) + values[i++] = getSingleValue(value); + return values; + } else + return getSingleValue(prop.getValue()); + + } + + private static Object getSingleValue(Value value) throws Exception{ + switch (value.getType()) { + case PropertyType.DATE: + return value.getDate(); + case PropertyType.BOOLEAN: + return value.getBoolean(); + case PropertyType.LONG: + return value.getDate(); + default: + return value.getString(); + } + } + + private static Set retrieveAllFields(Class clazz){ + + Set fields = new HashSet(); + Class currentClass = clazz; + do{ + List fieldsFound = Arrays.asList(currentClass.getDeclaredFields()); + fields.addAll(fieldsFound); + }while ((currentClass =currentClass.getSuperclass())!=null); + return fields; + } + + public static Node createNodeFromItem(Session session, Node parentNode, T item){ + try { + + //TODO: must understand this place is for name or title + String primaryType= classHandler.getNodeType(item.getClass()); + Node newNode = parentNode.addNode(item.getTitle(), primaryType); + //newNode.setPrimaryType(primaryType); + for (Field field : retrieveAllFields(item.getClass())){ + if (field.isAnnotationPresent(Attribute.class)){ + Attribute attribute = field.getAnnotation(Attribute.class); + field.setAccessible(true); + try{ + //Class returnType = field.getType(); + newNode.setProperty(attribute.value(), getObjectValue(field.getType(), field.get(item))); + + } catch (Exception e ) { + logger.warn("error setting value for attribute "+attribute.value(),e); + } + } else if (field.isAnnotationPresent(NodeAttribute.class)){ + NodeAttribute nodeAttribute = field.getAnnotation(NodeAttribute.class); + if (nodeAttribute.isReadOnly()) continue; + String nodeName = nodeAttribute.value(); + logger.debug("retrieving field node "+field.getName()); + field.setAccessible(true); + try{ + iterateItemNodeAttributeFields(field.get(item), newNode, nodeName); + } catch (Exception e ) { + logger.warn("error setting value",e); + } + + + } + } + return newNode; + } catch (RepositoryException e) { + logger.error("error writing repository",e); + throw new RuntimeException(e); + } + + } + + private static void iterateItemNodeAttributeFields(Object object, Node parentNode, String nodeName) throws Exception{ + + AttributeRootNode attributeRootNode = object.getClass().getAnnotation(AttributeRootNode.class); + Node newNode = parentNode.addNode(nodeName, attributeRootNode.value()); + //newNode.setPrimaryType(attributeRootNode.value()); + for (Field 
field : retrieveAllFields(object.getClass())){ + if (field.isAnnotationPresent(Attribute.class)){ + Attribute attribute = field.getAnnotation(Attribute.class); + field.setAccessible(true); + try{ + @SuppressWarnings("rawtypes") + Class returnType = field.getType(); + newNode.setProperty(attribute.value(), getObjectValue(returnType, field.get(object))); + } catch (Exception e ) { + logger.warn("error setting value",e); + } + } else if (field.isAnnotationPresent(MapAttribute.class)){ + //logger.debug("found field {} of type annotated as MapAttribute in class {}", field.getName(), clazz.getName()); + field.setAccessible(true); + Map mapToset = (Map)field.get(object); + for (Entry entry : mapToset.entrySet()) + try{ + newNode.setProperty(entry.getKey(), getObjectValue(entry.getValue().getClass(), entry.getValue())); + } catch (Exception e ) { + logger.warn("error setting value",e); + } + + } else if (field.isAnnotationPresent(ListNodes.class)){ + logger.debug("found field {} of type annotated as ListNodes in class {} on node {}", field.getName(), object.getClass().getName(), newNode.getName()); + field.setAccessible(true); + List toSetList = (List) field.get(object); + + int i = 0; + for (Object obj: toSetList){ + + logger.debug("the current node {} has a list",newNode.getName()); + + iterateItemNodeAttributeFields(obj,newNode, field.getName()+(i++)); + } + } + } + } +} diff --git a/src/main/java/org/gcube/data/access/storagehub/handlers/VersionHandler.java b/src/main/java/org/gcube/data/access/storagehub/handlers/VersionHandler.java new file mode 100644 index 0000000..6783e49 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/handlers/VersionHandler.java @@ -0,0 +1,37 @@ +package org.gcube.data.access.storagehub.handlers; + +import javax.inject.Singleton; +import javax.jcr.Node; +import javax.jcr.Session; +import javax.jcr.version.Version; +import javax.jcr.version.VersionManager; + +import org.apache.jackrabbit.JcrConstants; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Singleton +public class VersionHandler { + + private static final Logger logger = LoggerFactory.getLogger(VersionHandler.class); + + public void makeVersionableContent(Node node, Session session){ + try { + Node contentNode = node.getNode("jcr:content"); + contentNode.addMixin(JcrConstants.MIX_VERSIONABLE); + }catch(Exception e ) { + logger.warn("cannot create versioned content node",e); + } + } + + public void checkinContentNode(Node node, Session session){ + try { + Node contentNode = node.getNode("jcr:content"); + VersionManager versionManager = session.getWorkspace().getVersionManager(); + Version version = versionManager.checkin(contentNode.getPath()); + }catch(Exception e ) { + logger.warn("cannotcheckinNode content node",e); + } + } + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/handlers/content/ContentHandler.java b/src/main/java/org/gcube/data/access/storagehub/handlers/content/ContentHandler.java new file mode 100644 index 0000000..f446d47 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/handlers/content/ContentHandler.java @@ -0,0 +1,18 @@ +package org.gcube.data.access.storagehub.handlers.content; + +import java.io.InputStream; +import java.util.List; + +import org.gcube.common.storagehub.model.items.AbstractFileItem; +import org.gcube.common.storagehub.model.items.nodes.Content; + +public interface ContentHandler { + + void initiliseSpecificContent(InputStream is) throws Exception; + + Content getContent(); + + AbstractFileItem buildItem(String 
name, String description, String login); + + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/handlers/content/ContentHandlerFactory.java b/src/main/java/org/gcube/data/access/storagehub/handlers/content/ContentHandlerFactory.java new file mode 100644 index 0000000..d66b2ce --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/handlers/content/ContentHandlerFactory.java @@ -0,0 +1,49 @@ +package org.gcube.data.access.storagehub.handlers.content; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.Set; +import javax.inject.Singleton; + +import org.gcube.common.storagehub.model.annotations.MimeTypeHandler; +import org.reflections.Reflections; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Singleton +public class ContentHandlerFactory { + + private static Logger logger = LoggerFactory.getLogger(ContentHandlerFactory.class); + + private Reflections reflection = new Reflections(); + + private static HashMap> handlerMap = new HashMap>(); + + private Class defaultHandler = GenericFileHandler.class; + + @SuppressWarnings("unchecked") + public ContentHandlerFactory() { + Set> classesAnnotated = reflection.getTypesAnnotatedWith(MimeTypeHandler.class); + for (Class clazz: classesAnnotated ){ + if (ContentHandler.class.isAssignableFrom(clazz)) { + logger.debug("searching for mimetypes {} with values {}",clazz.getName(), Arrays.toString(clazz.getAnnotation(MimeTypeHandler.class).value())); + for (String value: clazz.getAnnotation(MimeTypeHandler.class).value()){ + logger.debug("value for class {} is {}",clazz.getName(), value); + handlerMap.put(value, (Class) clazz); + } + + + } + + } + } + + public ContentHandler create(String mimetype) throws Exception{ + Class handlerClass = handlerMap.get(mimetype); + if (handlerClass!=null) + return handlerClass.newInstance(); + else + return defaultHandler.newInstance(); + } + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/handlers/content/GenericFileHandler.java b/src/main/java/org/gcube/data/access/storagehub/handlers/content/GenericFileHandler.java new file mode 100644 index 0000000..cbdb6a4 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/handlers/content/GenericFileHandler.java @@ -0,0 +1,43 @@ +package org.gcube.data.access.storagehub.handlers.content; + +import java.io.InputStream; +import java.util.Calendar; + +import org.gcube.common.storagehub.model.items.GenericFileItem; +import org.gcube.common.storagehub.model.items.nodes.Content; +import org.gcube.common.storagehub.model.types.ItemAction; + +public class GenericFileHandler implements ContentHandler{ + + Content content = new Content(); + + @Override + public void initiliseSpecificContent(InputStream is) throws Exception {} + + @Override + public Content getContent() { + return content; + } + + @Override + public GenericFileItem buildItem(String name, String description, String login) { + GenericFileItem item = new GenericFileItem(); + Calendar now = Calendar.getInstance(); + item.setName(name); + item.setTitle(name); + item.setDescription(description); + //item.setCreationTime(now); + item.setHidden(false); + item.setLastAction(ItemAction.CREATED); + item.setLastModificationTime(now); + item.setLastModifiedBy(login); + item.setOwner(login); + item.setContent(this.content); + return item; + } + + + + + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/handlers/content/ImageHandler.java b/src/main/java/org/gcube/data/access/storagehub/handlers/content/ImageHandler.java new file mode 100644 
index 0000000..657301e --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/handlers/content/ImageHandler.java @@ -0,0 +1,136 @@ +package org.gcube.data.access.storagehub.handlers.content; + +import java.awt.Image; +import java.awt.image.BufferedImage; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Base64; +import java.util.Calendar; + +import javax.imageio.ImageIO; + +import org.gcube.common.storagehub.model.annotations.MimeTypeHandler; +import org.gcube.common.storagehub.model.items.ImageFile; +import org.gcube.common.storagehub.model.items.nodes.ImageContent; +import org.gcube.common.storagehub.model.types.ItemAction; + +import ij.ImagePlus; +import ij.io.FileSaver; +import ij.process.ImageProcessor; + +@MimeTypeHandler({"image/gif", "image/jpeg","image/png","image/svg+xml"}) +public class ImageHandler implements ContentHandler{ + + private static final int THUMB_MAX_DIM = 50; + + private ImageContent content = new ImageContent(); + + + @Override + public void initiliseSpecificContent(InputStream is) throws Exception { + BufferedImage buf = ImageIO.read(is); + content.setWidth(Long.valueOf(buf.getWidth())); + content.setHeight(Long.valueOf(buf.getHeight())); + + ImagePlus image = new ImagePlus("thumbnail", buf); + + int thumbSize[] = getThumbnailDimension(buf.getWidth(), buf.getHeight()); + + try(ByteArrayOutputStream baos = new ByteArrayOutputStream(); InputStream thumbstream = getThumbnailAsPng(image, thumbSize[0], thumbSize[1])){ + + byte[] imgbuf = new byte[1024]; + int read = -1; + while ((read=thumbstream.read(imgbuf))!=-1) + baos.write(imgbuf, 0, read); + + content.setThumbnailHeight(Long.valueOf(thumbSize[1])); + content.setThumbnailWidth(Long.valueOf(thumbSize[0])); + content.setThumbnailData(Base64.getEncoder().encode(baos.toByteArray())); + } catch (Exception e) { + // TODO: handle exception + } + + } + + @Override + public ImageContent getContent() { + return content; + } + + private int[] getThumbnailDimension(int original_width, int original_height) { + int new_width = 0; + int new_height = 0; + + if ((original_width < THUMB_MAX_DIM) && (original_height< THUMB_MAX_DIM)){ + new_width = original_width; + new_height = original_height; + } + if (original_width > THUMB_MAX_DIM) { + new_width = THUMB_MAX_DIM; + new_height = (new_width * original_height) / original_width; + } + + if (original_width < THUMB_MAX_DIM) { + new_width = THUMB_MAX_DIM; + new_height = (new_width * original_height) / original_width; + } + + if (new_height > THUMB_MAX_DIM) { + new_height = THUMB_MAX_DIM; + new_width = (new_height * original_width) / original_height; + } + + if (new_width > THUMB_MAX_DIM) { + new_width = THUMB_MAX_DIM; + new_height = (new_width * original_height) / original_width; + } + + int[] dimension = {new_width, new_height}; + + return dimension; + } + + + private InputStream getThumbnailAsPng(ImagePlus img, int thumbWidth, + int thumbHeight) throws IOException { + + InputStream stream = null; + ImageProcessor processor = img.getProcessor(); + try{ + Image thumb = processor.resize(thumbWidth, thumbHeight).createImage(); + thumb = thumb.getScaledInstance(thumbWidth,thumbHeight,Image.SCALE_SMOOTH); + + FileSaver fs = new FileSaver(new ImagePlus("",thumb)); + File tmpThumbFile = File.createTempFile("THUMB", "TMP"); + tmpThumbFile.deleteOnExit(); + + fs.saveAsPng(tmpThumbFile.getAbsolutePath()); + stream = new FileInputStream(tmpThumbFile); + + }catch 
(Exception e) { + throw new RuntimeException(e); + } + return stream; + } + + @Override + public ImageFile buildItem(String name, String description, String login) { + ImageFile item = new ImageFile(); + Calendar now = Calendar.getInstance(); + item.setName(name); + item.setTitle(name); + item.setDescription(description); + //item.setCreationTime(now); + item.setHidden(false); + item.setLastAction(ItemAction.CREATED); + item.setLastModificationTime(now); + item.setLastModifiedBy(login); + item.setOwner(login); + item.setContent(this.content); + return item; + } + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/handlers/content/PdfHandler.java b/src/main/java/org/gcube/data/access/storagehub/handlers/content/PdfHandler.java new file mode 100644 index 0000000..c6c9c69 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/handlers/content/PdfHandler.java @@ -0,0 +1,59 @@ +package org.gcube.data.access.storagehub.handlers.content; + +import java.io.InputStream; +import java.util.Calendar; +import java.util.HashMap; + +import org.gcube.common.storagehub.model.annotations.MimeTypeHandler; +import org.gcube.common.storagehub.model.items.PDFFileItem; +import org.gcube.common.storagehub.model.items.nodes.PDFContent; +import org.gcube.common.storagehub.model.types.ItemAction; + +import com.itextpdf.text.pdf.PdfReader; + +@MimeTypeHandler("application/pdf") +public class PdfHandler implements ContentHandler { + + public static final String NUMBER_OF_PAGES = "xmpTPg:NPages"; + public static final String PRODUCER = "producer"; + public static final String VERSION = "version"; + public static final String AUTHOR = "Author"; + public static final String TITLE = "dc:title"; + + PDFContent content = new PDFContent(); + + + @Override + public void initiliseSpecificContent(InputStream is) throws Exception { + PdfReader reader = new PdfReader(is); + content.setNumberOfPages(Long.valueOf(reader.getNumberOfPages())); + content.setVersion(String.valueOf(reader.getPdfVersion())); + HashMap fileInfo = reader.getInfo(); + content.setAuthor(fileInfo.containsKey(AUTHOR)?fileInfo.get(AUTHOR):"n/a"); + content.setProducer(fileInfo.containsKey(PRODUCER)?fileInfo.get(PRODUCER):"n/a"); + content.setTitle(fileInfo.containsKey(TITLE)?fileInfo.get(TITLE):"n/a"); + } + + @Override + public PDFContent getContent() { + return content; + } + + public PDFFileItem buildItem(String name, String description, String login) { + PDFFileItem item = new PDFFileItem(); + Calendar now = Calendar.getInstance(); + item.setName(name); + item.setTitle(name); + item.setDescription(description); + //item.setCreationTime(now); + item.setHidden(false); + item.setLastAction(ItemAction.CREATED); + item.setLastModificationTime(now); + item.setLastModifiedBy(login); + item.setOwner(login); + item.setContent(this.content); + return item; + } + + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/Evaluator.java b/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/Evaluator.java new file mode 100644 index 0000000..9d21e3d --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/Evaluator.java @@ -0,0 +1,15 @@ +package org.gcube.data.access.storagehub.query.sql2.evaluators; + +import javax.inject.Singleton; + +import org.gcube.common.storagehub.model.expressions.Expression; + +@Singleton +public abstract class Evaluator> { + + + public abstract Class getType(); + + public abstract String evaluate(T expr, Iterable>> evaluators); + +} diff --git 
a/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/Evaluators.java b/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/Evaluators.java new file mode 100644 index 0000000..aea2e19 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/Evaluators.java @@ -0,0 +1,36 @@ +package org.gcube.data.access.storagehub.query.sql2.evaluators; + +import javax.enterprise.inject.Instance; +import javax.inject.Inject; +import javax.inject.Singleton; + +import org.gcube.common.storagehub.model.expressions.Expression; + +@Singleton +public class Evaluators { + + public static Evaluator> getEvaluator(Class type, Iterable>> evaluators){ + for (Evaluator> eval: evaluators) { + if (eval.getType().equals(type)) return eval; + } + throw new IllegalStateException("evaluator not found for class "+type.getName()); + } + + @Inject + Instance> evaluators; + + public String evaluate(Expression expression) { + for (Evaluator eval: evaluators) { + if (eval.getType().equals(expression.getClass())) + return eval.evaluate(expression, evaluators); + } + throw new IllegalStateException("Evaluator not found for expression type "+expression.getClass()); + } + + public Instance> getEvaluators() { + return evaluators; + } + + + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/date/BeforeEvaluator.java b/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/date/BeforeEvaluator.java new file mode 100644 index 0000000..d14fca0 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/date/BeforeEvaluator.java @@ -0,0 +1,37 @@ +package org.gcube.data.access.storagehub.query.sql2.evaluators.date; + +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.List; +import java.util.TimeZone; + +import javax.inject.Singleton; + +import org.gcube.common.storagehub.model.expressions.Expression; +import org.gcube.common.storagehub.model.expressions.date.Before; +import org.gcube.data.access.storagehub.query.sql2.evaluators.Evaluator; + +@Singleton +public class BeforeEvaluator extends Evaluator { + + @Override + public String evaluate(Before expr, Iterable>> evaluators) { + TimeZone tz = TimeZone.getTimeZone("UTC"); + DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); + df.setTimeZone(tz); + return String.format("node.[%s] < CAST('%s' AS DATE)", expr.getSearchableField().getName(), df.format(expr.getValue().getTime())); + } + + public Evaluator> getEvaluator(Class type, List>> evaluators){ + for (Evaluator> eval: evaluators) { + if (eval.getType().equals(type)) return eval; + } + throw new IllegalStateException("evaluator not found for class "+type.getName()); + } + + @Override + public Class getType() { + return Before.class; + } + +} \ No newline at end of file diff --git a/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/logical/AndEvaluator.java b/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/logical/AndEvaluator.java new file mode 100644 index 0000000..f56d279 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/logical/AndEvaluator.java @@ -0,0 +1,33 @@ +package org.gcube.data.access.storagehub.query.sql2.evaluators.logical; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +import javax.inject.Singleton; + +import org.gcube.common.storagehub.model.expressions.Expression; +import 
org.gcube.common.storagehub.model.expressions.logical.And; +import org.gcube.data.access.storagehub.query.sql2.evaluators.Evaluator; +import org.gcube.data.access.storagehub.query.sql2.evaluators.Evaluators; + +@Singleton +public class AndEvaluator extends Evaluator { + + @Override + public String evaluate(And expr, Iterable>> evaluators) { + List evaluated = new ArrayList<>(); + for (Expression subExpression :expr.getExpressions()) { + Evaluator> eval = Evaluators.getEvaluator(subExpression.getClass(), evaluators); + evaluated.add(eval.evaluate(subExpression, evaluators)); + } + + return "("+evaluated.stream().map(Object::toString).collect(Collectors.joining(" and ")).toString()+")"; + } + + @Override + public Class getType() { + return And.class; + } + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/logical/ISDescendantEvaluator.java b/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/logical/ISDescendantEvaluator.java new file mode 100644 index 0000000..610fb6d --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/logical/ISDescendantEvaluator.java @@ -0,0 +1,24 @@ +package org.gcube.data.access.storagehub.query.sql2.evaluators.logical; + +import javax.inject.Singleton; + +import org.gcube.common.storagehub.model.expressions.Expression; +import org.gcube.common.storagehub.model.expressions.logical.ISDescendant; +import org.gcube.data.access.storagehub.query.sql2.evaluators.Evaluator; + +@Singleton +public class ISDescendantEvaluator extends Evaluator { + + @Override + public String evaluate(ISDescendant expr, Iterable>> evaluators) { + + return String.format("ISDESCENDANTNODE( node , [%s])", expr.getPath().toPath()); + } + + @Override + public Class getType() { + return ISDescendant.class; + } + + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/logical/OrEvaluator.java b/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/logical/OrEvaluator.java new file mode 100644 index 0000000..1811ab0 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/logical/OrEvaluator.java @@ -0,0 +1,35 @@ +package org.gcube.data.access.storagehub.query.sql2.evaluators.logical; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +import javax.inject.Singleton; + +import org.gcube.common.storagehub.model.expressions.Expression; +import org.gcube.common.storagehub.model.expressions.logical.Or; +import org.gcube.data.access.storagehub.query.sql2.evaluators.Evaluator; +import org.gcube.data.access.storagehub.query.sql2.evaluators.Evaluators; + +@Singleton +public class OrEvaluator extends Evaluator { + + + + @Override + public String evaluate(Or expr, Iterable>> evaluators) { + List evaluated = new ArrayList<>(); + for (Expression subExpression :expr.getExpressions()) { + Evaluator> eval = Evaluators.getEvaluator(subExpression.getClass(), evaluators); + evaluated.add(eval.evaluate(subExpression, evaluators)); + } + + return "("+evaluated.stream().map(Object::toString).collect(Collectors.joining(" or ")).toString()+")"; + } + + @Override + public Class getType() { + return Or.class; + } + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/text/ContainsEvaluator.java b/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/text/ContainsEvaluator.java new file mode 100644 index 0000000..957bab2 --- /dev/null +++ 
b/src/main/java/org/gcube/data/access/storagehub/query/sql2/evaluators/text/ContainsEvaluator.java @@ -0,0 +1,31 @@ +package org.gcube.data.access.storagehub.query.sql2.evaluators.text; + +import javax.inject.Singleton; + +import org.gcube.common.storagehub.model.expressions.Expression; +import org.gcube.common.storagehub.model.expressions.text.Contains; +import org.gcube.data.access.storagehub.query.sql2.evaluators.Evaluator; + +@Singleton +public class ContainsEvaluator extends Evaluator { + + + + @Override + public String evaluate(Contains expr, Iterable>> evaluators) { + return String.format("node.[%s] LIKE '%%%s%%'", expr.getSearchableField().getName(), expr.getValue()); + } + + public Evaluator> getEvaluator(Class type, Iterable>> evaluators){ + for (Evaluator> eval: evaluators) { + if (eval.getType().equals(type)) return eval; + } + throw new IllegalStateException("evaluator not found for class "+type.getName()); + } + + @Override + public Class getType() { + return Contains.class; + } + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/services/ItemsCreator.java b/src/main/java/org/gcube/data/access/storagehub/services/ItemsCreator.java new file mode 100644 index 0000000..894953b --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/services/ItemsCreator.java @@ -0,0 +1,267 @@ +package org.gcube.data.access.storagehub.services; + +import java.io.BufferedInputStream; +import java.io.InputStream; +import java.util.Arrays; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; + +import javax.inject.Inject; +import javax.jcr.Node; +import javax.jcr.Session; +import javax.jcr.SimpleCredentials; +import javax.servlet.ServletContext; +import javax.ws.rs.Consumes; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.UriInfo; + +import org.apache.tika.config.TikaConfig; +import org.apache.tika.detect.Detector; +import org.apache.tika.io.TikaInputStream; +import org.apache.tika.metadata.Metadata; +import org.gcube.common.authorization.library.provider.AuthorizationProvider; +import org.gcube.common.authorization.library.provider.CalledMethodProvider; +import org.gcube.common.storagehub.model.items.AbstractFileItem; +import org.gcube.common.storagehub.model.items.FolderItem; +import org.gcube.common.storagehub.model.items.Item; +import org.gcube.common.storagehub.model.service.ItemWrapper; +import org.gcube.data.access.storagehub.Constants; +import org.gcube.data.access.storagehub.MetaInfo; +import org.gcube.data.access.storagehub.MultipleOutputStream; +import org.gcube.data.access.storagehub.Utils; +import org.gcube.data.access.storagehub.handlers.ItemHandler; +import org.gcube.data.access.storagehub.handlers.VersionHandler; +import org.gcube.data.access.storagehub.handlers.content.ContentHandler; +import org.gcube.data.access.storagehub.handlers.content.ContentHandlerFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + + +@Path("item") +public class ItemsCreator { + + private static final Logger log = LoggerFactory.getLogger(ItemsCreator.class); + + private static ExecutorService executor = Executors.newFixedThreadPool(100); + + @Context ServletContext context; + + @Inject + RepositoryInitializer repository; + + @Inject + 
ContentHandlerFactory contenthandlerFactory; + + @Inject + VersionHandler versionHandler; + + @POST + @Path("/{id}/create/{type:(?!FILE)[^/?$]*}") + public Response createItem(@Context UriInfo uriInfo, @PathParam("id") String id, @PathParam("type") String type){ + log.info("create generic item called"); + Session ses = null; + Item destinationItem = null; + try{ + final String login = AuthorizationProvider.instance.get().getClient().getId(); + long start = System.currentTimeMillis(); + + //TODO check if it is possible to change all the ACL on a workspace + ses = repository.getRepository().login(new SimpleCredentials(context.getInitParameter(Constants.ADMIN_PARAM_NAME),context.getInitParameter(Constants.ADMIN_PARAM_PWD).toCharArray())); + + //validate input parameters for Item Type + + log.info("time to connect to repo {}",(System.currentTimeMillis()-start)); + Node destination = ses.getNodeByIdentifier(id); + destinationItem = ItemHandler.getItem(destination,Arrays.asList("hl:accounting","jcr:content")); + + if (!(destinationItem instanceof FolderItem)) throw new Exception("an Item must be copyed to another directory"); + + //TODO: write control + if (!destinationItem.getOwner().equals(login)){ + /*AccessControlManager accessControlManager = ses.getAccessControlManager(); + boolean canWrite = accessControlManager.hasPrivileges(path, new Privilege[] { + accessControlManager.privilegeFromName(Privilege.JCR_ADD_CHILD_NODES)});*/ + //if (!canWrite) + + throw new IllegalAccessException("Insufficent Provileges to write in "+destinationItem.getPath()); + } + + //ses.getWorkspace().getLockManager().lock(destinationItem.getPath(), true, true, 0,login); + + //TODO for now only owner of the destination folder can move file + + ses.save(); + }catch(Exception e){ + log.error("error creating item", e); + return Response.serverError().build(); + } finally{ + if (ses!=null){ + if (destinationItem!=null) + try { + ses.getWorkspace().getLockManager().unlock(destinationItem.getPath()); + } catch (Throwable t){ + log.warn("error unlocking {}", destinationItem.getPath(), t); + } + ses.logout(); + } + } + return Response.ok().build(); + } + + + + @POST + @Consumes(MediaType.APPLICATION_OCTET_STREAM) + @Produces(MediaType.APPLICATION_JSON) + @Path("/{id}/create/FILE") + public Response createFileItem(InputStream stream , @PathParam("id") String id, + @QueryParam("name") String name, @QueryParam("description") String description){ + CalledMethodProvider.instance.set("createFileItem"); + log.info("create file called"); + Session ses = null; + Item destinationItem = null; + try{ + final String login = AuthorizationProvider.instance.get().getClient().getId(); + long start = System.currentTimeMillis(); + + //TODO check if it is possible to change all the ACL on a workspace + ses = repository.getRepository().login(new SimpleCredentials(context.getInitParameter(Constants.ADMIN_PARAM_NAME),context.getInitParameter(Constants.ADMIN_PARAM_PWD).toCharArray())); + + //validate input parameters for Item Type + + log.info("time to connect to repo {}",(System.currentTimeMillis()-start)); + Node destination = ses.getNodeByIdentifier(id); + destinationItem = ItemHandler.getItem(destination,Arrays.asList("hl:accounting","jcr:content")); + log.debug("destination item path is {}",destinationItem.getPath()); + if (!(destinationItem instanceof FolderItem)) throw new Exception("an Item must be copyed to another directory"); + + //TODO: write control + if (!destinationItem.getOwner().equals(login)){ + /*AccessControlManager 
accessControlManager = ses.getAccessControlManager(); + boolean canWrite = accessControlManager.hasPrivileges(path, new Privilege[] { + accessControlManager.privilegeFromName(Privilege.JCR_ADD_CHILD_NODES)});*/ + //if (!canWrite) + + throw new IllegalAccessException("Insufficent Provileges to write in "+destinationItem.getPath()); + } + + + ses.getWorkspace().getLockManager().lock(destinationItem.getPath(), true, true, 0,login); + + + ContentHandler handler = getContentHandler(stream , name, destinationItem.getPath()); + + AbstractFileItem item =handler.buildItem(name, description, login); + + log.debug("item prepared, fulfilling content"); + + log.debug("content prepared"); + Node newNode = ItemHandler.createNodeFromItem(ses, destination, item); + versionHandler.makeVersionableContent(newNode, ses); + ses.save(); + versionHandler.checkinContentNode(newNode, ses);; + log.info("item correctly created"); + return Response.ok(new ItemWrapper<>(item)).build(); + }catch(Throwable e){ + log.error("error creating item", e); + return Response.serverError().build(); + } finally{ + if (ses!=null){ + if (destinationItem!=null) + try { + ses.getWorkspace().getLockManager().unlock(destinationItem.getPath()); + } catch (Throwable t){ + log.warn("error unlocking {}", destinationItem.getPath(), t); + } + ses.logout(); + } + } + + } + + + private ContentHandler getContentHandler(InputStream stream , String name, String path) throws Exception { + + final MultipleOutputStream mos = new MultipleOutputStream(stream, 2); + + Callable mimeTypeDector = new Callable() { + + @Override + public ContentHandler call() throws Exception { + ContentHandler handler =null; + try(BufferedInputStream is1 = new BufferedInputStream(mos.get(), 2048)){ + org.apache.tika.mime.MediaType mediaType = null; + TikaConfig config = TikaConfig.getDefaultConfig(); + Detector detector = config.getDetector(); + TikaInputStream stream = TikaInputStream.get(is1); + Metadata metadata = new Metadata(); + metadata.add(Metadata.RESOURCE_NAME_KEY, name); + mediaType = detector.detect(stream, metadata); + String mimeType = mediaType.getBaseType().toString(); + + handler = contenthandlerFactory.create(mimeType); + + is1.reset(); + handler.initiliseSpecificContent(is1); + handler.getContent().setMimeType(mimeType); + + } catch (Throwable e) { + log.error("error retreiving content",e); + throw new RuntimeException(e); + } + return handler; + } + + }; + + Callable uploader = new Callable() { + + @Override + public MetaInfo call() throws Exception { + String remotePath= path+"/"+name; + String storageId = Utils.getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient().put(true).LFile(mos.get()).RFile(remotePath); + long size = Utils.getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient().getSize().RFileById(storageId); + MetaInfo info = new MetaInfo(); + info.setSize(size); + info.setStorageId(storageId); + return info; + } + }; + + Future detectorF = executor.submit(mimeTypeDector); + Future uploaderF = executor.submit(uploader); + + mos.startWriting(); + + ContentHandler handler = detectorF.get(); + handler.getContent().setData("jcr:content"); + handler.getContent().setStorageId(uploaderF.get().getStorageId()); + handler.getContent().setSize(uploaderF.get().getSize()); + + return handler; + + } + + + + /* + private boolean hasSharedChildren(FolderItem folder, Session session) throws Exception{ + Node currentNode = session.getNodeByIdentifier(folder.getId()); + for (Item item : 
Utils.getItemList(currentNode,Arrays.asList("hl:accounting","jcr:content"), null)){ + if (item instanceof FolderItem) + return (item instanceof SharedFolder) || hasSharedChildren((FolderItem)item, session); + } + return false; + + }*/ + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/services/ItemsManager.java b/src/main/java/org/gcube/data/access/storagehub/services/ItemsManager.java new file mode 100644 index 0000000..8497453 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/services/ItemsManager.java @@ -0,0 +1,321 @@ +package org.gcube.data.access.storagehub.services; + +import java.io.InputStream; +import java.io.OutputStream; +import java.net.URL; +import java.util.Arrays; +import java.util.Deque; +import java.util.List; +import java.util.zip.Deflater; +import java.util.zip.ZipOutputStream; + +import javax.enterprise.context.RequestScoped; +import javax.inject.Inject; +import javax.jcr.Node; +import javax.jcr.Session; +import javax.jcr.SimpleCredentials; +import javax.servlet.ServletContext; +import javax.ws.rs.GET; +import javax.ws.rs.PUT; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.StreamingOutput; + +import org.gcube.common.authorization.library.provider.AuthorizationProvider; +import org.gcube.common.storagehub.model.Paths; +import org.gcube.common.storagehub.model.items.AbstractFileItem; +import org.gcube.common.storagehub.model.items.FolderItem; +import org.gcube.common.storagehub.model.items.Item; +import org.gcube.common.storagehub.model.items.SharedFolder; +import org.gcube.common.storagehub.model.service.ItemList; +import org.gcube.common.storagehub.model.service.ItemWrapper; +import org.gcube.data.access.storagehub.AuthorizationChecker; +import org.gcube.data.access.storagehub.Constants; +import org.gcube.data.access.storagehub.Range; +import org.gcube.data.access.storagehub.SingleFileStreamingOutput; +import org.gcube.data.access.storagehub.Utils; +import org.gcube.data.access.storagehub.accounting.AccountingHandler; +import org.gcube.data.access.storagehub.handlers.ItemHandler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Path("item") +public class ItemsManager { + + private static final Logger log = LoggerFactory.getLogger(ItemsManager.class); + + @Inject + RepositoryInitializer repository; + + @Inject + AccountingHandler accountingHandler; + + @RequestScoped + @PathParam("id") + String id; + + @Context + ServletContext context; + + @Inject + AuthorizationChecker authChecker; + + + + + @GET() + @Path("{id}") + @Produces(MediaType.APPLICATION_JSON) + public ItemWrapper getById(@QueryParam("exclude") List excludes){ + Session ses = null; + Item toReturn = null; + try{ + String login = AuthorizationProvider.instance.get().getClient().getId(); + long start = System.currentTimeMillis(); + ses = repository.getRepository().login(new SimpleCredentials(login,Utils.getSecurePassword(login).toCharArray())); + authChecker.checkReadAuthorizationControl(ses, id); + log.info("time to connect to repo {}",(System.currentTimeMillis()-start)); + log.info("excludes is {}",excludes); + toReturn = ItemHandler.getItem(ses.getNodeByIdentifier(id), excludes); + }catch(Throwable e){ + log.error("error reading the node children of {}",id,e); + throw new WebApplicationException(e); + }finally{ + if 
(ses!=null) + ses.logout(); + } + + return new ItemWrapper(toReturn); + } + + + @GET + @Path("{id}/children/count") + @Produces(MediaType.APPLICATION_JSON) + public Long countById(@QueryParam("showHidden") Boolean showHidden, @QueryParam("exclude") List excludes){ + Session ses = null; + Long toReturn = null; + try{ + String login = AuthorizationProvider.instance.get().getClient().getId(); + long start = System.currentTimeMillis(); + ses = repository.getRepository().login(new SimpleCredentials(login,Utils.getSecurePassword(login).toCharArray())); + authChecker.checkReadAuthorizationControl(ses, id); + log.info("time to connect to repo {}",(System.currentTimeMillis()-start)); + log.info("excludes is {}",excludes); + toReturn = Utils.getItemCount(ses.getNodeByIdentifier(id), showHidden==null?false:showHidden); + }catch(Throwable e){ + log.error("error reading the node children of {}",id,e); + throw new WebApplicationException(e); + }finally{ + if (ses!=null) + ses.logout(); + } + return toReturn ; + } + + @GET + @Path("{id}/children") + @Produces(MediaType.APPLICATION_JSON) + public ItemList listById(@QueryParam("showHidden") Boolean showHidden, @QueryParam("exclude") List excludes){ + Session ses = null; + List toReturn = null; + try{ + String login = AuthorizationProvider.instance.get().getClient().getId(); + long start = System.currentTimeMillis(); + ses = repository.getRepository().login(new SimpleCredentials(login,Utils.getSecurePassword(login).toCharArray())); + authChecker.checkReadAuthorizationControl(ses, id); + log.info("time to connect to repo {}",(System.currentTimeMillis()-start)); + log.info("excludes is {}",excludes); + toReturn = Utils.getItemList(ses.getNodeByIdentifier(id), excludes, null, showHidden==null?false:showHidden); + }catch(Throwable e){ + log.error("error reading the node children of {}",id,e); + throw new WebApplicationException(e); + }finally{ + if (ses!=null) + ses.logout(); + } + + return new ItemList(toReturn); + } + + @GET + @Path("{id}/children/paged") + @Produces(MediaType.APPLICATION_JSON) + public ItemList listByIdPaged(@QueryParam("showHidden") Boolean showHidden, @QueryParam("start") Integer start, @QueryParam("limit") Integer limit, @QueryParam("exclude") List excludes){ + Session ses = null; + List toReturn = null; + try{ + String login = AuthorizationProvider.instance.get().getClient().getId(); + ses = repository.getRepository().login(new SimpleCredentials(login,Utils.getSecurePassword(login).toCharArray())); + authChecker.checkReadAuthorizationControl(ses, id); + log.info("time to connect to repo {}",(System.currentTimeMillis()-start)); + log.info("excludes is {}",excludes); + toReturn = Utils.getItemList(ses.getNodeByIdentifier(id), excludes, new Range(start, limit),showHidden==null?false:showHidden); + }catch(Throwable e){ + log.error("error reading the node children of {}",id,e); + throw new WebApplicationException(e); + }finally{ + if (ses!=null) + ses.logout(); + } + + return new ItemList(toReturn); + } + + @GET + @Path("{id}/publiclink") + public URL getPubliclink() { + //TODO: check who can call this method + Session ses = null; + try{ + String login = AuthorizationProvider.instance.get().getClient().getId(); + ses = repository.getRepository().login(new SimpleCredentials(login,Utils.getSecurePassword(login).toCharArray())); + authChecker.checkReadAuthorizationControl(ses, id); + String url = Utils.getStorageClient(login).getClient().getHttpsUrl().RFileById(id); + return new URL(url); + }catch(Throwable e){ + log.error("error reading the 
node children of {}",id,e); + throw new WebApplicationException(e); + }finally{ + if (ses!=null) + ses.logout(); + } + + } + + + @GET + @Path("{id}/download") + public Response download(){ + + Session ses = null; + try{ + final String login = AuthorizationProvider.instance.get().getClient().getId(); + long start = System.currentTimeMillis(); + ses = repository.getRepository().login(new SimpleCredentials(context.getInitParameter(Constants.ADMIN_PARAM_NAME),context.getInitParameter(Constants.ADMIN_PARAM_PWD).toCharArray())); + log.info("time to connect to repo {}",(System.currentTimeMillis()-start)); + + final Node node = ses.getNodeByIdentifier(id); + + authChecker.checkReadAuthorizationControl(ses, id); + + final Item item = ItemHandler.getItem(node, null); + if (item instanceof AbstractFileItem){ + AbstractFileItem fileItem =(AbstractFileItem) item; + + final InputStream streamToWrite = Utils.getStorageClient(login).getClient().get().RFileAsInputStream(fileItem.getContent().getStorageId()); + + accountingHandler.createReadObj(fileItem.getTitle(), ses, node, true); + + StreamingOutput so = new SingleFileStreamingOutput(streamToWrite); + + return Response + .ok(so) + .header("content-disposition","attachment; filename = "+fileItem.getName()) + .header("Content-Length", fileItem.getContent().getSize()) + .build(); + + } else if (item instanceof FolderItem){ + + try { + final Deque allNodes = Utils.getAllNodesForZip((FolderItem)item, ses, accountingHandler); + final org.gcube.common.storagehub.model.Path originalPath = Paths.getPath(item.getPath()); + StreamingOutput so = new StreamingOutput() { + + @Override + public void write(OutputStream os) { + + try(ZipOutputStream zos = new ZipOutputStream(os)){ + long start = System.currentTimeMillis(); + zos.setLevel(Deflater.BEST_COMPRESSION); + log.debug("writing StreamOutput"); + Utils.zipNode(zos, allNodes, login, originalPath); + log.debug("StreamOutput written in {}",(System.currentTimeMillis()-start)); + } catch (Exception e) { + log.error("error writing stream",e); + } + + } + }; + + return Response + .ok(so) + .header("content-disposition","attachment; filename = directory.zip") + .header("Content-Length", -1l) + .build(); + }finally { + if (ses!=null) ses.save(); + } + } else throw new Exception("item type not supported for download: "+item.getClass()); + + }catch(Exception e ){ + log.error("error downloading item content",e); + throw new WebApplicationException(e); + } finally{ + if (ses!=null) ses.logout(); + } + } + + @PUT + @Path("{id}/move") + public Response move(@QueryParam("newpath") String path, @PathParam("id") String identifier){ + Session ses = null; + try{ + final String login = AuthorizationProvider.instance.get().getClient().getId(); + long start = System.currentTimeMillis(); + //ses = RepositoryInitializer.getRepository().login(new SimpleCredentials(login,Utils.getSecurePassword(login).toCharArray())); + //TODO check if it is possible to change all the ACL on a workspace + ses = repository.getRepository().login(new SimpleCredentials(context.getInitParameter(Constants.ADMIN_PARAM_NAME),context.getInitParameter(Constants.ADMIN_PARAM_PWD).toCharArray())); + authChecker.checkReadAuthorizationControl(ses, id); + log.info("time to connect to repo {}",(System.currentTimeMillis()-start)); + final Node nodeToMove = ses.getNodeByIdentifier(identifier); + final Node destination = ses.getNode(path); + Item destinationItem = ItemHandler.getItem(destination,null); + //TODO for now only owner of the destination folder can move file + if 
(!destinationItem.getOwner().equals(login)){ + /*AccessControlManager accessControlManager = ses.getAccessControlManager(); + boolean canWrite = accessControlManager.hasPrivileges(path, new Privilege[] { + accessControlManager.privilegeFromName(Privilege.JCR_ADD_CHILD_NODES)});*/ + //if (!canWrite) + + throw new IllegalAccessException("Insufficent Provileges to write in "+path); + } + final Item item = ItemHandler.getItem(nodeToMove, null); + if (item instanceof SharedFolder){ + throw new Exception("shared folder cannot be moved"); + }else if (item instanceof FolderItem){ + if (hasSharedChildren((FolderItem) item, ses)) throw new Exception("folder item with shared children cannot be moved"); + ses.getWorkspace().move(nodeToMove.getPath(), destination.getPath()+"/"+nodeToMove.getName()); + }else { + item.setParentId(destinationItem.getId()); + ses.getWorkspace().move(nodeToMove.getPath(), destination.getPath()+"/"+nodeToMove.getName()); + } + ses.save(); + }catch(Exception e){ + log.error("error moving item with id {} in path {}",identifier, path,e); + throw new WebApplicationException(e); + } finally{ + if (ses!=null) ses.logout(); + } + return Response.ok().build(); + } + + + + private boolean hasSharedChildren(FolderItem item, Session session) throws Exception{ + Node currentNode = session.getNodeByIdentifier(item.getId()); + for (Item children : Utils.getItemList(currentNode,Arrays.asList("hl:accounting","jcr:content"), null, false)){ + if (children instanceof FolderItem) + return (children instanceof SharedFolder) || hasSharedChildren((FolderItem)item, session); + } + return false; + + } + +} \ No newline at end of file diff --git a/src/main/java/org/gcube/data/access/storagehub/services/RepositoryInitializer.java b/src/main/java/org/gcube/data/access/storagehub/services/RepositoryInitializer.java new file mode 100644 index 0000000..e2b58af --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/services/RepositoryInitializer.java @@ -0,0 +1,8 @@ +package org.gcube.data.access.storagehub.services; + +import javax.jcr.Repository; + +public interface RepositoryInitializer { + + Repository getRepository(); +} diff --git a/src/main/java/org/gcube/data/access/storagehub/services/WorkspaceManager.java b/src/main/java/org/gcube/data/access/storagehub/services/WorkspaceManager.java new file mode 100644 index 0000000..e868078 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/services/WorkspaceManager.java @@ -0,0 +1,379 @@ +package org.gcube.data.access.storagehub.services; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; + +import javax.enterprise.context.RequestScoped; +import javax.inject.Inject; +import javax.jcr.Node; +import javax.jcr.NodeIterator; +import javax.jcr.Session; +import javax.jcr.SimpleCredentials; +import javax.jcr.query.Query; +import javax.jcr.query.QueryResult; +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.MediaType; +import javax.xml.ws.WebServiceException; + +import org.gcube.common.authorization.library.provider.AuthorizationProvider; +import org.gcube.common.scope.api.ScopeProvider; +import org.gcube.common.scope.impl.ScopeBean; +import org.gcube.common.scope.impl.ScopeBean.Type; +import org.gcube.common.storagehub.model.Paths; +import org.gcube.common.storagehub.model.expressions.Expression; +import 
org.gcube.common.storagehub.model.expressions.logical.And; +import org.gcube.common.storagehub.model.expressions.logical.ISDescendant; +import org.gcube.common.storagehub.model.items.Item; +import org.gcube.common.storagehub.model.service.ItemList; +import org.gcube.common.storagehub.model.service.ItemWrapper; +import org.gcube.data.access.storagehub.Constants; +import org.gcube.data.access.storagehub.Range; +import org.gcube.data.access.storagehub.Utils; +import org.gcube.data.access.storagehub.handlers.ItemHandler; +import org.gcube.data.access.storagehub.query.sql2.evaluators.Evaluators; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.fasterxml.jackson.databind.ObjectMapper; + + +@Path("") +public class WorkspaceManager { + + private static final Logger log = LoggerFactory.getLogger(WorkspaceManager.class); + + @Inject + RepositoryInitializer repository; + + @Inject + Evaluators evaluator; + + @RequestScoped + @QueryParam("exclude") + private List excludes = Collections.emptyList(); + + @Path("") + @GET + @Produces(MediaType.APPLICATION_JSON) + public ItemWrapper getWorkspace(){ + Session ses = null; + org.gcube.common.storagehub.model.Path absolutePath = Utils.getHomePath(); + Item toReturn = null; + try{ + String login = AuthorizationProvider.instance.get().getClient().getId(); + long start = System.currentTimeMillis(); + ses = repository.getRepository().login(new SimpleCredentials(login,Utils.getSecurePassword(login).toCharArray())); + log.info("time to connect to repo {}",(System.currentTimeMillis()-start)); + toReturn = ItemHandler.getItem(ses.getNode(absolutePath.toPath()), excludes); + }catch(Throwable e){ + log.error("error reading the node children of {}",absolutePath,e); + }finally{ + if (ses!=null) + ses.logout(); + } + + return new ItemWrapper(toReturn); + } + + + @Path("vrefolder") + @GET + @Produces(MediaType.APPLICATION_JSON) + public ItemWrapper getVreRootFolder(){ + Session ses = null; + + org.gcube.common.storagehub.model.Path vrePath = Paths.append(Utils.getHomePath(), Constants.VRE_FOLDER_PARENT_NAME); + + try{ + String login = AuthorizationProvider.instance.get().getClient().getId(); + ScopeBean bean = new ScopeBean(ScopeProvider.instance.get()); + if (!bean.is(Type.VRE)) throw new Exception("the current scope is not a VRE"); + long start = System.currentTimeMillis(); + ses = repository.getRepository().login(new SimpleCredentials(login,Utils.getSecurePassword(login).toCharArray())); + log.info("time to connect to repo {}",(System.currentTimeMillis()-start)); + + String entireScopename= bean.toString().replaceAll("^/(.*)/?$", "$1").replaceAll("/", "-"); + + String query = String.format("SELECT * FROM [nthl:workspaceItem] As node WHERE node.[jcr:title] like '%s'", entireScopename); + Query jcrQuery = ses.getWorkspace().getQueryManager().createQuery(query, Constants.QUERY_LANGUAGE); + NodeIterator it = jcrQuery.execute().getNodes(); + + if (!it.hasNext()) throw new Exception("vre folder not found for context "+bean.toString()); + + Node folder = it.nextNode(); + Item item = ItemHandler.getItem(folder, excludes); + + return new ItemWrapper(item); + }catch(Throwable e){ + log.error("error reading node {}",vrePath,e); + throw new WebApplicationException("error retrieving vre folder",e); + }finally{ + if (ses!=null) + ses.logout(); + } + + } + + @Path("trash") + @GET + @Produces(MediaType.APPLICATION_JSON) + public ItemWrapper getTrashRootFolder(){ + Session ses = null; + + org.gcube.common.storagehub.model.Path trashPath = 
Paths.append(Utils.getHomePath(), Constants.TRASH_ROOT_FOLDER_NAME); + + try{ + String login = AuthorizationProvider.instance.get().getClient().getId(); + long start = System.currentTimeMillis(); + ses = repository.getRepository().login(new SimpleCredentials(login,Utils.getSecurePassword(login).toCharArray())); + log.info("time to connect to repo {}",(System.currentTimeMillis()-start)); + + + + Node folder = ses.getNode(trashPath.toPath()); + Item item = ItemHandler.getItem(folder, excludes); + + return new ItemWrapper(item); + }catch(Throwable e){ + log.error("error reading the node {}",trashPath,e); + throw new WebApplicationException("error retrieving trash folder",e); + }finally{ + if (ses!=null) + ses.logout(); + } + + } + + @Path("vrefolders") + @GET + @Produces(MediaType.APPLICATION_JSON) + public ItemList getVreFolders(){ + Session ses = null; + + org.gcube.common.storagehub.model.Path vrePath = Paths.append(Utils.getHomePath(), Constants.VRE_FOLDER_PARENT_NAME); + List toReturn = null; + try{ + String login = AuthorizationProvider.instance.get().getClient().getId(); + ses = repository.getRepository().login(new SimpleCredentials(login,Utils.getSecurePassword(login).toCharArray())); + toReturn = Utils.getItemList(ses.getNode(vrePath.toPath()) , excludes, null, false); + }catch(Throwable e){ + log.error("error reading the node children of {}",vrePath,e); + }finally{ + if (ses!=null) + ses.logout(); + } + + return new ItemList(toReturn); + } + + @Path("vrefolders/paged") + @GET + @Produces(MediaType.APPLICATION_JSON) + public ItemList getVreFoldersPaged(@QueryParam("start") Integer start, @QueryParam("limit") Integer limit){ + Session ses = null; + + org.gcube.common.storagehub.model.Path vrePath = Paths.append(Utils.getHomePath(), Constants.VRE_FOLDER_PARENT_NAME); + List toReturn = null; + try{ + String login = AuthorizationProvider.instance.get().getClient().getId(); + ses = repository.getRepository().login(new SimpleCredentials(login,Utils.getSecurePassword(login).toCharArray())); + toReturn = Utils.getItemList(ses.getNode(vrePath.toPath()) , excludes, new Range(start, limit), false); + }catch(Throwable e){ + log.error("error reading the node children of {}",vrePath,e); + }finally{ + if (ses!=null) + ses.logout(); + } + + return new ItemList(toReturn); + } + + @Path("query") + @GET + @Produces(MediaType.APPLICATION_JSON) + public ItemList searchItems(@QueryParam("n") String node, @QueryParam("e") String jsonExpr, @QueryParam("o") List orderField, @QueryParam("l") Integer limit, @QueryParam("f") Integer offset){ + Session ses = null; + List toReturn = new ArrayList<>(); + + try{ + + ObjectMapper mapper = new ObjectMapper(); + Expression expression = mapper.readValue(jsonExpr, Expression.class); + String stringExpression = evaluator.evaluate(new And(new ISDescendant(Utils.getHomePath()), expression)); + //ADD ALSO LIMIT AND OFFSET + + String orderBy = ""; + if (orderField!=null && orderField.size()>0) + orderBy= String.format("ORDER BY %s", orderField.stream().collect(Collectors.joining(",")).toString()); + + + String sql2Query = String.format("SELECT * FROM [%s] AS node WHERE %s %s ",node, stringExpression,orderBy); + + log.info("query sent is {}",sql2Query); + + + String login = AuthorizationProvider.instance.get().getClient().getId(); + ses = repository.getRepository().login(new SimpleCredentials(login,Utils.getSecurePassword(login).toCharArray())); + Query jcrQuery = ses.getWorkspace().getQueryManager().createQuery(sql2Query, Constants.QUERY_LANGUAGE); + + if (limit!=null && 
limit!=-1 ) + jcrQuery.setLimit(limit); + + if (offset!=null && offset!=-1 ) + jcrQuery.setOffset(offset); + + QueryResult result = jcrQuery.execute(); + + NodeIterator it = result.getNodes(); + + while (it.hasNext()) + toReturn.add(ItemHandler.getItem(it.nextNode(), excludes)); + + }catch(Throwable e){ + log.error("error executing the query",e); + throw new WebServiceException("error executing the query", e); + }finally{ + if (ses!=null) + ses.logout(); + } + + return new ItemList(toReturn); + } + + +/* + @POST + @Path("create") + @Consumes({MediaType.MULTIPART_FORM_DATA}) + public Response create(@FormDataParam("item") ItemWrapper itemWrapper, @FormDataParam("file") InputStream stream , @FormDataParam("file") FormDataBodyPart fileDetail, @QueryParam("path") String path){ + Session ses = null; + log.debug("method create called"); + org.gcube.common.storagehub.model.Path absolutePath = Paths.append(Utils.getHomePath(), Paths.getPath(path)); + try{ + final String login = AuthorizationProvider.instance.get().getClient().getId(); + //long start = System.currentTimeMillis(); + ses = repository.getRepository().login(new SimpleCredentials("workspacerep.imarine","gcube2010*onan".toCharArray())); + log.debug("session retrieved"); + ItemHandler handler = new ItemHandler(); + Node parentNode = ses.getNode(absolutePath.toPath()); + Item item = itemWrapper.getItem(); + + if (item instanceof AbstractFileItem){ + if (stream==null) + throw new Exception("invalid item: file without an input stream is not accepted"); + fulfillContent((AbstractFileItem)item, stream, fileDetail, absolutePath.toPath()); + } + + + Calendar now = Calendar.getInstance(); + item.setCreationTime(now); + item.setHidden(false); + item.setLastAction(ItemAction.CREATED); + item.setLastModificationTime(now); + item.setLastModifiedBy(login); + item.setOwner(login); + + handler.createNodeFromItem(ses, parentNode, item, stream); + ses.save(); + }catch(Throwable e){ + log.error("error creating file",e); + return Response.serverError().build(); + } finally{ + if (ses!=null) ses.logout(); + } + return Response.ok().build(); + } + + private void fulfillContent(AbstractFileItem item, InputStream stream , FormDataBodyPart fileDetail, String path) { + if (item instanceof GenericFileItem){ + Content content = new Content(); + String remotePath= path+"/"+fileDetail.getContentDisposition().getFileName(); + content.setData("jcr:content"); + content.setRemotePath(remotePath); + content.setSize(fileDetail.getContentDisposition().getSize()); + content.setMimeType(fileDetail.getMediaType().toString()); + String storageId = Utils.getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient().put(true).LFile(stream).RFile(remotePath); + content.setStorageId(storageId); + ((GenericFileItem) item).setContent(content); + } else throw new RuntimeException("type file error"); + } + + @PUT + @Path("{id}/move") + public Response move(@QueryParam("path") String path, @PathParam("id") String identifier){ + Session ses = null; + try{ + final String login = AuthorizationProvider.instance.get().getClient().getId(); + long start = System.currentTimeMillis(); + //ses = RepositoryInitializer.getRepository().login(new SimpleCredentials(login,Utils.getSecurePassword(login).toCharArray())); + //TODO check if it is possible to change all the ACL on a workspace + ses = repository.getRepository().login(new SimpleCredentials("workspacerep.imarine","gcube2010*onan".toCharArray())); + + log.info("time to connect to repo 
{}",(System.currentTimeMillis()-start)); + final Node nodeToMove = ses.getNodeByIdentifier(identifier); + final Node destination = ses.getNode(path); + Item destinationItem = ItemHandler.getItem(destination,null); + //TODO for now only owner of the destination folder can move file + if (!destinationItem.getOwner().equals(login)){ + /*AccessControlManager accessControlManager = ses.getAccessControlManager(); + boolean canWrite = accessControlManager.hasPrivileges(path, new Privilege[] { + accessControlManager.privilegeFromName(Privilege.JCR_ADD_CHILD_NODES)});*/ + //if (!canWrite) + /* + throw new IllegalAccessException("Insufficent Provileges to write in "+path); + } + final Item item = ItemHandler.getItem(nodeToMove, null); + if (item instanceof SharedFolder){ + throw new Exception("shared item cannot be moved"); + }else if (item instanceof FolderItem){ + if (hasSharedChildren((FolderItem) item, ses)) throw new Exception("folder item with shared children cannot be moved"); + ses.getWorkspace().move(nodeToMove.getPath(), destination.getPath()+"/"+nodeToMove.getName()); + }else { + item.setParentId(destinationItem.getId()); + ses.getWorkspace().move(nodeToMove.getPath(), destination.getPath()+"/"+nodeToMove.getName()); + } + ses.save(); + }catch(Exception e){ + log.error("error moving item with id {} in path {}",identifier, path,e); + return Response.serverError().build(); + } finally{ + if (ses!=null) ses.logout(); + } + return Response.ok().build(); + } + + private boolean hasSharedChildren(FolderItem folder, Session session) throws Exception{ + Node currentNode = session.getNodeByIdentifier(folder.getId()); + for (Item item : Utils.getItemList(currentNode,null)){ + if (item instanceof FolderItem) + return (item instanceof SharedFolder) || hasSharedChildren((FolderItem)item, session); + } + return false; + + } + + @PUT + @Path("{id}/rename") + public Response rename(@QueryParam("newname") String newName, @PathParam("id") String identifier){ + Session ses = null; + try{ + final String login = AuthorizationProvider.instance.get().getClient().getId(); + long start = System.currentTimeMillis(); + ses = repository.getRepository().login(new SimpleCredentials(login,Utils.getSecurePassword(login).toCharArray())); + log.info("time to connect to repo {}",(System.currentTimeMillis()-start)); + }catch(Exception e){ + + } + return Response.ok().build(); + } +*/ + + + +} diff --git a/src/main/java/org/gcube/data/access/storagehub/services/WriteOperation.java b/src/main/java/org/gcube/data/access/storagehub/services/WriteOperation.java new file mode 100644 index 0000000..71c3737 --- /dev/null +++ b/src/main/java/org/gcube/data/access/storagehub/services/WriteOperation.java @@ -0,0 +1,118 @@ +package org.gcube.data.access.storagehub.services; + +import java.util.List; + +import javax.inject.Inject; +import javax.jcr.Node; +import javax.jcr.Session; +import javax.jcr.SimpleCredentials; +import javax.ws.rs.PUT; +import javax.ws.rs.Path; +import javax.ws.rs.QueryParam; + +import org.gcube.common.authorization.library.provider.AuthorizationProvider; +import org.gcube.common.storagehub.model.Paths; +import org.gcube.common.storagehub.model.items.AbstractFileItem; +import org.gcube.common.storagehub.model.items.FolderItem; +import org.gcube.common.storagehub.model.items.Item; +import org.gcube.common.storagehub.model.items.SharedFolder; +import org.gcube.common.storagehub.model.types.PrimaryNodeType; +import org.gcube.data.access.storagehub.StorageFactory; +import org.gcube.data.access.storagehub.Utils; 
+import org.gcube.data.access.storagehub.handlers.ItemHandler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Path("modify") +public class WriteOperation { + + private static final Logger log = LoggerFactory.getLogger(WriteOperation.class); + + @Inject + RepositoryInitializer repository; +/* + @PUT + @Path("copy") + public String copy(@QueryParam("destinationPath") String destinationPath, @QueryParam("sourcePath") String origin){ + Session ses = null; + try{ + String login = AuthorizationProvider.instance.get().getClient().getId(); + ses = repository.getRepository().login(new SimpleCredentials(login,Utils.getSecurePassword(login).toCharArray())); + Node originNode = ses.getNode(origin); + Item originItem = ItemHandler.getItem(originNode, null); + Node destinationNode = ses.getNode(destinationPath); + Item destinationItem = ItemHandler.getItem(destinationNode, null); + + if (!(destinationItem instanceof FolderItem)) throw new Exception("an Item must be copyed to another directory"); + + if (originItem instanceof SharedFolder) throw new Exception("trying to copy a sharedFolder into a normal folder"); + + ses.getWorkspace().getLockManager().lock(origin, true, true, 0,login); + ses.getWorkspace().getLockManager().lock(destinationPath, true, true, 0,login); + + + if (!destinationItem.getPrimaryType().equals(PrimaryNodeType.NT_WORKSPACE_FOLDER) || + !destinationItem.getPrimaryType().equals(PrimaryNodeType.NT_WORKSPACE_SHARED_FOLDER)) + throw new Exception("origin is not a folder"); + + ses.getWorkspace().getLockManager().lock(origin, true, true, Long.MAX_VALUE, login); + ses.getWorkspace().getLockManager().lock(destinationPath, false, true, Long.MAX_VALUE, login); + + ses.getWorkspace().copy(origin, destinationPath); + + org.gcube.common.storagehub.model.Path newNodePath = Paths.append(Paths.getPath(destinationPath), Paths.getPath(origin).getLastDirName()); + + if (originItem instanceof FolderItem ){ + //copying a folder + + StorageFactory.getGcubeStorage().copyDir().from(origin).to(destinationPath); + + + List items= Utils.getItemList(originNode,null, null); + for (Item item: items){ + if (item instanceof FolderItem){ + //TODO iterate on it recursively + } else if (item instanceof AbstractFileItem) { + String storageId = ((AbstractFileItem) item).getContent().getStorageId(); + //String newStorageId = StorageFactory.getGcubeStorage().copyById(storageId); + //TODO set the new storageId into the item + } //else nothing to do + + } + } else { + //copying item that is not a folder + + } + + ItemHandler handler = new ItemHandler(); + + //itera su i nodi e modifica solo quelli che non sono di tipo folder facendo la copy del content (con le nuovi api dello storage) + //e setta il nuovo id del content + + //copy also the content of the directory + ses.save(); + return destinationPath; + }catch(Exception e){ + log.error("error copying {} to {}", origin, destinationPath); + return null; + } finally { + if (ses!=null){ + try { + ses.getWorkspace().getLockManager().unlock(destinationPath); + } catch (Throwable t){ + log.warn("error unlocking {}", destinationPath); + } + try { + ses.getWorkspace().getLockManager().unlock(origin); + } catch (Throwable t){ + log.warn("error unlocking {}", origin); + } + ses.logout(); + } + } + } + + */ + + +} diff --git a/src/main/resources/META-INF/beans.xml b/src/main/resources/META-INF/beans.xml new file mode 100644 index 0000000..7c7e8db --- /dev/null +++ b/src/main/resources/META-INF/beans.xml @@ -0,0 +1,5 @@ + + \ No newline at end of file diff --git 
a/src/main/webapp/WEB-INF/gcube-app.xml b/src/main/webapp/WEB-INF/gcube-app.xml new file mode 100644 index 0000000..4f0d8e1 --- /dev/null +++ b/src/main/webapp/WEB-INF/gcube-app.xml @@ -0,0 +1,7 @@ + + StorageHub + DataAccess + 1.0.0-SNAPSHOT + Storage Hub webapp + + \ No newline at end of file diff --git a/src/main/webapp/WEB-INF/web.xml b/src/main/webapp/WEB-INF/web.xml new file mode 100644 index 0000000..97fb7ac --- /dev/null +++ b/src/main/webapp/WEB-INF/web.xml @@ -0,0 +1,21 @@ + + + + admin-username + workspacerep.imarine + + + admin-pwd + gcube2010*onan + + + + org.gcube.data.access.storagehub.StorageHub + + + + org.gcube.data.access.storagehub.StorageHub + /workspace/* + + + diff --git a/src/test/java/org/gcube/data/access/fs/Expressions.java b/src/test/java/org/gcube/data/access/fs/Expressions.java new file mode 100644 index 0000000..7e789d5 --- /dev/null +++ b/src/test/java/org/gcube/data/access/fs/Expressions.java @@ -0,0 +1,39 @@ +package org.gcube.data.access.fs; + +import java.util.Calendar; + +import javax.inject.Inject; + +import org.gcube.common.storagehub.model.expressions.Expression; +import org.gcube.common.storagehub.model.expressions.GenericSearchableItem; +import org.gcube.common.storagehub.model.expressions.date.Before; +import org.gcube.common.storagehub.model.expressions.logical.And; +import org.gcube.common.storagehub.model.expressions.text.Contains; +import org.gcube.data.access.storagehub.query.sql2.evaluators.Evaluators; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@RunWith(WeldJunit4Runner.class) +public class Expressions { + + private static Logger log = LoggerFactory.getLogger(Expression.class); + + @Inject + Evaluators evaluators; + + @Test + public void test() { + + evaluators.getEvaluators().forEach(s-> System.out.println(s.getType().toString())); + + Expression cont1 = new Contains(GenericSearchableItem.get().title, "Data"); + Expression before = new Before(GenericSearchableItem.get().creationTime, Calendar.getInstance()); + Expression andExpr = new And(cont1, before); + System.out.println(evaluators.evaluate(andExpr)); + + } + + +} diff --git a/src/test/java/org/gcube/data/access/fs/TestFields.java b/src/test/java/org/gcube/data/access/fs/TestFields.java new file mode 100644 index 0000000..9562e32 --- /dev/null +++ b/src/test/java/org/gcube/data/access/fs/TestFields.java @@ -0,0 +1,65 @@ +package org.gcube.data.access.fs; + + +import static org.mockito.Matchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.Arrays; + +import javax.jcr.Node; +import javax.jcr.Property; +import javax.jcr.nodetype.NodeType; + +import org.gcube.common.storagehub.model.items.Item; +import org.gcube.common.storagehub.model.types.ItemAction; +import org.gcube.data.access.storagehub.handlers.ItemHandler; +import org.junit.Assert; +import org.junit.Test; + +public class TestFields { + + @Test + public void replace(){ + System.out.println("/Home/Giancarlo".replaceAll("^/(.*)/?$", "$1").replaceAll("/", "-")); + } + + @Test + public void iterateOverFields() throws Exception{ + + Property prop = mock(Property.class); + when(prop.getString()).thenReturn(ItemAction.UPDATED.name()); + when(prop.getLong()).thenReturn(2l); + when(prop.getBoolean()).thenReturn(false); + + Node parent = mock(Node.class); + NodeType parentType = mock(NodeType.class); + when(parentType.getName()).thenReturn("nthl:workspaceSharedItem"); + 
when(parent.getPrimaryNodeType()).thenReturn(parentType); + when(parent.getName()).thenReturn("parent"); + when(parent.getPath()).thenReturn("path"); + when(parent.isLocked()).thenReturn(false); + when(parent.getParent()).thenReturn(null); + when(parent.getProperty(anyString())).thenReturn(prop); + when(parent.getNode(anyString())).thenReturn(parent); + + + Node node = mock(Node.class); + + + + NodeType type = mock(NodeType.class); + when(type.getName()).thenReturn("nthl:externalImage"); + when(node.getPrimaryNodeType()).thenReturn(type); + when(node.getName()).thenReturn("name"); + when(node.getPath()).thenReturn("path"); + when(node.isLocked()).thenReturn(false); + when(node.getParent()).thenReturn(parent); + when(node.getProperty(anyString())).thenReturn(prop); + when(node.getNode(anyString())).thenReturn(node); + Item item = ItemHandler.getItem(node, Arrays.asList("hl:accounting","jcr:content")); + + Assert.assertTrue(item.isShared()); + + } +} diff --git a/src/test/java/org/gcube/data/access/fs/TestNode.java b/src/test/java/org/gcube/data/access/fs/TestNode.java new file mode 100644 index 0000000..d3a4fab --- /dev/null +++ b/src/test/java/org/gcube/data/access/fs/TestNode.java @@ -0,0 +1,38 @@ +package org.gcube.data.access.fs; + +import java.awt.Image; +import java.awt.image.BufferedImage; +import java.awt.image.DataBufferByte; +import java.awt.image.DataBufferInt; +import java.awt.image.ImageObserver; +import java.io.File; +import java.util.Base64; + +import javax.imageio.ImageIO; + +import org.junit.Test; + +public class TestNode { + + @Test + public void testShared() throws Exception{ + + BufferedImage buf = ImageIO.read(new File("/home/lucio/Downloads/djbattle.png")); + byte[] bigImageInByte = ((DataBufferByte) buf.getData().getDataBuffer()).getData(); + + System.out.println(new String(Base64.getEncoder().encode(bigImageInByte))); + + /* + Image image = buf.getScaledInstance(64, 64, Image.SCALE_SMOOTH); + BufferedImage buffered = new BufferedImage(image.getWidth(null), image.getHeight(null), BufferedImage.TYPE_4BYTE_ABGR); + buffered.getGraphics().drawImage(image, 0, 0, null); + byte[] imageInByte = ((DataBufferByte) buffered.getData().getDataBuffer()).getData(); + */ + + + /*buffered.getGraphics().drawImage(image, 0, 0 , null); + ImageIO.write(buffered, "png", buffer ); + byte[] imageInByte = buffer.toByteArray();*/ + } + +} diff --git a/src/test/java/org/gcube/data/access/fs/WeldContext.java b/src/test/java/org/gcube/data/access/fs/WeldContext.java new file mode 100644 index 0000000..b93c0db --- /dev/null +++ b/src/test/java/org/gcube/data/access/fs/WeldContext.java @@ -0,0 +1,27 @@ +package org.gcube.data.access.fs; + +import org.jboss.weld.environment.se.Weld; +import org.jboss.weld.environment.se.WeldContainer; + +public class WeldContext { + + public static final WeldContext INSTANCE = new WeldContext(); + + private final Weld weld; + private final WeldContainer container; + + private WeldContext() { + this.weld = new Weld(); + this.container = weld.initialize(); + Runtime.getRuntime().addShutdownHook(new Thread() { + @Override + public void run() { + weld.shutdown(); + } + }); + } + + public T getBean(Class type) { + return container.instance().select(type).get(); + } +} \ No newline at end of file diff --git a/src/test/java/org/gcube/data/access/fs/WeldJunit4Runner.java b/src/test/java/org/gcube/data/access/fs/WeldJunit4Runner.java new file mode 100644 index 0000000..098abc6 --- /dev/null +++ b/src/test/java/org/gcube/data/access/fs/WeldJunit4Runner.java @@ -0,0 +1,17 @@ 
+package org.gcube.data.access.fs; + +import org.junit.runners.BlockJUnit4ClassRunner; +import org.junit.runners.model.InitializationError; + +public class WeldJunit4Runner extends BlockJUnit4ClassRunner { + + public WeldJunit4Runner(Class clazz) throws InitializationError { + super(clazz); + } + + @Override + protected Object createTest() { + final Class test = getTestClass().getJavaClass(); + return WeldContext.INSTANCE.getBean(test); + } +} \ No newline at end of file diff --git a/src/test/resources/META-INF/beans.xml b/src/test/resources/META-INF/beans.xml new file mode 100644 index 0000000..7c7e8db --- /dev/null +++ b/src/test/resources/META-INF/beans.xml @@ -0,0 +1,5 @@ + + \ No newline at end of file diff --git a/src/test/resources/logback-test.xml b/src/test/resources/logback-test.xml new file mode 100644 index 0000000..c5fbacc --- /dev/null +++ b/src/test/resources/logback-test.xml @@ -0,0 +1,12 @@ + + + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + +
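
The getThumbnailDimension chain in ImageHandler keeps images that already fit within THUMB_MAX_DIM, but the later `original_width < THUMB_MAX_DIM` branch resets new_width back to THUMB_MAX_DIM, so small images appear to get upscaled; getThumbnailAsPng also routes the scaled image through a temporary file via ImageJ's FileSaver. A minimal alternative sketch that never upscales, preserves the aspect ratio, and encodes the PNG in memory (plain java.awt/ImageIO rather than the handler's ImageJ path):

import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

import javax.imageio.ImageIO;

public class ThumbnailSketch {

    // Cap the longer side at max, preserve the aspect ratio, never enlarge.
    public static int[] scaleToFit(int width, int height, int max) {
        if (width <= max && height <= max)
            return new int[] { width, height };
        double ratio = Math.min((double) max / width, (double) max / height);
        return new int[] { (int) Math.round(width * ratio), (int) Math.round(height * ratio) };
    }

    // Scale and encode to PNG without touching the file system.
    public static InputStream thumbnailAsPng(BufferedImage source, int width, int height) throws IOException {
        BufferedImage canvas = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
        Graphics2D g = canvas.createGraphics();
        g.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BILINEAR);
        g.drawImage(source, 0, 0, width, height, null);
        g.dispose();
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        ImageIO.write(canvas, "png", out);
        return new ByteArrayInputStream(out.toByteArray());
    }
}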
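
PdfHandler reads the document info dictionary through iText 5's PdfReader; the bare `HashMap fileInfo` in the diff is presumably `HashMap<String, String>`, since that is what getInfo() returns in iText 5. A self-contained sketch of the same extraction:

import java.io.InputStream;
import java.util.Map;

import com.itextpdf.text.pdf.PdfReader;

public class PdfInfoSketch {

    // Pull page count, PDF version and a couple of info-dictionary entries,
    // defaulting to "n/a" as the handler above does.
    public static String describe(InputStream pdf) throws Exception {
        PdfReader reader = new PdfReader(pdf);
        try {
            Map<String, String> info = reader.getInfo();
            return String.format("pages=%d, pdf-version=%s, author=%s, producer=%s",
                    reader.getNumberOfPages(),
                    reader.getPdfVersion(),
                    info.getOrDefault("Author", "n/a"),
                    info.getOrDefault("producer", "n/a"));
        } finally {
            reader.close();
        }
    }
}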
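
The query evaluators read as raw types in this rendering ("Evaluator>", "Instance>") because the angle-bracketed type parameters did not survive it. A plausible reconstruction of the contract with the generics restored; the exact signatures in storagehub-model may differ:

package org.gcube.data.access.storagehub.query.sql2.evaluators;

import org.gcube.common.storagehub.model.expressions.Expression;

public abstract class Evaluator<T extends Expression> {

    // The concrete expression type this evaluator knows how to translate.
    public abstract Class<T> getType();

    // Translates the expression into a JCR-SQL2 fragment; composite evaluators
    // (And, Or) use the iterable to resolve evaluators for their sub-expressions.
    public abstract String evaluate(T expr, Iterable<Evaluator<? extends Expression>> evaluators);

    // Lookup used by the composite evaluators (placed on Evaluators, and repeated
    // in a couple of the concrete classes, in the sources above).
    public static Evaluator<? extends Expression> getEvaluator(
            Class<? extends Expression> type,
            Iterable<Evaluator<? extends Expression>> evaluators) {
        for (Evaluator<? extends Expression> eval : evaluators)
            if (eval.getType().equals(type))
                return eval;
        throw new IllegalStateException("evaluator not found for class " + type.getName());
    }
}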
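
Assuming those signatures, the Expressions test composes a Contains and a Before under an And and asks the injected Evaluators bean for the resulting predicate; given the format strings in ContainsEvaluator, BeforeEvaluator and AndEvaluator, the output has the shape `(node.[field] LIKE '%Data%' and node.[field] < CAST('...' AS DATE))`. A compact usage sketch mirroring the test:

import java.util.Calendar;

import org.gcube.common.storagehub.model.expressions.Expression;
import org.gcube.common.storagehub.model.expressions.GenericSearchableItem;
import org.gcube.common.storagehub.model.expressions.date.Before;
import org.gcube.common.storagehub.model.expressions.logical.And;
import org.gcube.common.storagehub.model.expressions.text.Contains;
import org.gcube.data.access.storagehub.query.sql2.evaluators.Evaluators;

public class ExpressionUsageSketch {

    // Flatten an And of two leaf expressions into a single JCR-SQL2 predicate.
    public static String whereClause(Evaluators evaluators) {
        Expression byTitle = new Contains(GenericSearchableItem.get().title, "Data");
        Expression byDate = new Before(GenericSearchableItem.get().creationTime, Calendar.getInstance());
        return evaluators.evaluate(new And(byTitle, byDate));
    }
}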
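
In ItemsCreator.getContentHandler the uploaded stream is duplicated through MultipleOutputStream: one branch is pushed to the gCube storage back-end while the other goes through Tika detection to pick the right ContentHandler (the bare Callable/Future declarations are presumably Callable<ContentHandler>, Callable<MetaInfo> and the matching Futures). The detection step on its own, as a self-contained sketch of the same Tika 1.x calls:

import java.io.BufferedInputStream;
import java.io.InputStream;

import org.apache.tika.config.TikaConfig;
import org.apache.tika.detect.Detector;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;

public class MimeDetectionSketch {

    // Detect the base mime type of a stream, using the file name as a hint,
    // exactly as the handler factory lookup above expects it ("image/png", ...).
    public static String detect(InputStream in, String fileName) throws Exception {
        try (BufferedInputStream buffered = new BufferedInputStream(in, 2048)) {
            TikaConfig config = TikaConfig.getDefaultConfig();
            Detector detector = config.getDetector();
            Metadata metadata = new Metadata();
            metadata.add(Metadata.RESOURCE_NAME_KEY, fileName);
            return detector.detect(TikaInputStream.get(buffered), metadata).getBaseType().toString();
        }
    }
}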
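
The private hasSharedChildren at the bottom of ItemsManager recurses with `(FolderItem) item` (the folder it was called with) rather than with the child it is inspecting, and returns after the first folder child it meets, so a shared folder in a later sibling would be missed. A sketch of what the check presumably intends, using the same Utils.getItemList call and imports as the class above:

private boolean hasSharedChildren(FolderItem folder, Session session) throws Exception {
    Node currentNode = session.getNodeByIdentifier(folder.getId());
    for (Item child : Utils.getItemList(currentNode, Arrays.asList("hl:accounting", "jcr:content"), null, false)) {
        if (child instanceof SharedFolder)
            return true;
        if (child instanceof FolderItem && hasSharedChildren((FolderItem) child, session))
            return true;
    }
    return false;
}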
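
download() streams a whole folder as a zip by handing JAX-RS a StreamingOutput that walks the collected nodes (a Deque<Item> once the type parameter is restored) and writes them through Utils.zipNode. The generic shape of that response, reduced to a self-contained example with a hypothetical entry name, since the archive size is unknown up front:

import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.zip.Deflater;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;

public class ZipDownloadSketch {

    // Stream a zip directly to the client, compressing entries as they are written.
    public static Response zipResponse() {
        StreamingOutput body = (OutputStream output) -> {
            try (ZipOutputStream zos = new ZipOutputStream(output)) {
                zos.setLevel(Deflater.BEST_COMPRESSION);
                zos.putNextEntry(new ZipEntry("example.txt"));   // hypothetical entry
                zos.write("payload".getBytes(StandardCharsets.UTF_8));
                zos.closeEntry();
            }
        };
        return Response.ok(body)
                .header("Content-Disposition", "attachment; filename = directory.zip")
                .build();
    }
}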
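
WorkspaceManager.searchItems glues the evaluated predicate to an ISDESCENDANTNODE restriction on the caller's home and runs it as a JCR-SQL2 query with optional paging. A reduced sketch of that execution path, assuming Constants.QUERY_LANGUAGE resolves to Query.JCR_SQL2:

import javax.jcr.NodeIterator;
import javax.jcr.Session;
import javax.jcr.query.Query;

public class Sql2QuerySketch {

    // Build and run the query; where is the already-evaluated predicate and
    // orderBy is either empty or an "ORDER BY ..." clause.
    public static NodeIterator run(Session session, String nodeType, String where,
                                   String orderBy, Integer limit, Integer offset) throws Exception {
        String sql2 = String.format("SELECT * FROM [%s] AS node WHERE %s %s", nodeType, where, orderBy);
        Query query = session.getWorkspace().getQueryManager().createQuery(sql2, Query.JCR_SQL2);
        if (limit != null && limit != -1) query.setLimit(limit);
        if (offset != null && offset != -1) query.setOffset(offset);
        return query.execute().getNodes();
    }
}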
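
The Weld test bootstrap also loses its type parameter in this rendering: WeldContext.getBean is presumably `public <T> T getBean(Class<T> type)`, which is what lets WeldJunit4Runner build test instances with their @Inject fields resolved (the Evaluators field in the Expressions test, for instance). Restored, the class reads roughly as:

package org.gcube.data.access.fs;

import org.jboss.weld.environment.se.Weld;
import org.jboss.weld.environment.se.WeldContainer;

// A lazily shared Weld SE container used by the JUnit runner to create test
// instances so that CDI injection works inside tests.
public class WeldContext {

    public static final WeldContext INSTANCE = new WeldContext();

    private final Weld weld;
    private final WeldContainer container;

    private WeldContext() {
        this.weld = new Weld();
        this.container = weld.initialize();
        Runtime.getRuntime().addShutdownHook(new Thread(weld::shutdown));
    }

    public <T> T getBean(Class<T> type) {
        return container.instance().select(type).get();
    }
}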