Compare commits

...

86 Commits

Author SHA1 Message Date
lucio 0becded125 checks updated 2 months ago
lucio c4d5cffe02 moved to new ceph storage 2 months ago
lucio f75f7d86d9 jdbc driver updated 4 months ago
lucio 2452a25349 changelog updated 10 months ago
lucio 87cc8a3ff9 - managing of vre folder specific backend 10 months ago
lucio b34ad84baf updated 10 months ago
lucio 7e875a5dfb added Docs to root application 10 months ago
lucio 10982ea64d added correct exclude 10 months ago
lucio a0cd2e8ccf enunciate improved 10 months ago
lucio e5dda6bb8b enunciate docs added 10 months ago
lucio d1d45a8056 script utils version updated 10 months ago
lucio 56f8ffb838 updated 10 months ago
Lucio Lelii d216459747 acl control added 11 months ago
lucio 4d118372f3 solved bug on archive upload 11 months ago
lucio 0cc0949698 updated 11 months ago
lucio 884f40b759 app configuration updated 11 months ago
lucio a1b69aee6a update 11 months ago
lucio 09879535d2 added application.yaml 1 year ago
lucio 7d96327512 updated for changes on smartgears 1 year ago
Lucio Lelii 443d9cabd4 version of jackrabbit moved to the latest stable 1 year ago
Lucio Lelii 2629c5c387 updated 1 year ago
Lucio Lelii 3a7aa8b8e3 test on container added 1 year ago
Lucio Lelii d0a7197c5c solved issue on duration 1 year ago
Lucio Lelii 0420e2ba3e added fields to ScriptStatus 1 year ago
Lucio Lelii 2033a4b79f added status for script execution 1 year ago
Lucio Lelii bca553aa5f changes 1 year ago
Lucio Lelii 4bd37f8963 remove specific version added 1 year ago
Lucio Lelii b3913ba9c1 added method for version removal 1 year ago
Lucio Lelii 1f6329c38e added possibility to set owner on backends 1 year ago
Lucio Lelii 4083b7c120 TODO for accounting 1 year ago
Lucio Lelii a500df61a1 excluded VREFolders from renaming 1 year ago
Lucio Lelii 7019740af7 enabled renaming of SharedFolder 1 year ago
Lucio Lelii 2012500de8 added check on id exists 1 year ago
Lucio Lelii 8b235da142 changes on StorageBackend interface 1 year ago
Lucio Lelii 0649acb8a9 solved a bug on internal file creation 1 year ago
Lucio Lelii 2e7fc876cf added method for scripts 1 year ago
Lucio Lelii 9b568a09ec Content handler modified 1 year ago
Lucio Lelii 55b6d8e09a removed unused files 1 year ago
Lucio Lelii e60a07abe9 ignore update 1 year ago
Lucio Lelii 1525afef9e removed unused class 1 year ago
Lucio Lelii fad2e7ffb9 removed old files 1 year ago
Lucio Lelii b625fafcc8 improve upload speed 1 year ago
Lucio Lelii 80d15ccef7 changes 1 year ago
Lucio Lelii 6d72896662 pom updated 1 year ago
Lucio Lelii a87d6ab3da update 1 year ago
Lucio Lelii 3b5686e705 update aspectj plugin dependency 1 year ago
Lucio Lelii d36a3314ba issue on volatile links solved 1 year ago
Lucio Lelii 6dd371070e updated pom to include latest tika 1 year ago
Lucio Lelii 6af9fce70f Merge branch 'multipleStorageBackends' of https://code-repo.d4science.org/gCubeSystem/storagehub.git into multipleStorageBackends 1 year ago
Lucio Lelii ac2ca4c360 updated tika library 1 year ago
Lucio Lelii b5b3669af5 bug on public link solved 1 year ago
Lucio Lelii 88406a3bf2 solved bug on home update 2 years ago
Lucio Lelii 6756c2890c Merge branch 'multipleStorageBackends' of https://code-repo.d4science.org/gCubeSystem/storagehub into multipleStorageBackends 2 years ago
Lucio Lelii 4d38cc6e72 update user created 2 years ago
Lucio Lelii e1db5df7c9 ScriptUtil updated 2 years ago
Lucio Lelii 25105ca041 adding enunciate 2 years ago
Lucio Lelii 5de8dee586 added notification client to AppManager 2 years ago
Lucio Lelii 3e6e203f36 update 2 years ago
Lucio Lelii bfa702bf0f download folder modified 2 years ago
Lucio Lelii 50124d8a49 volatile area with public link added 2 years ago
Lucio Lelii 9dea04e74e Merge branch 'multipleStorageBackends' of https://code-repo.d4science.org/gCubeSystem/storagehub.git into multipleStorageBackends 2 years ago
Lucio Lelii 28044da030 pom updated 2 years ago
Lucio Lelii b0141e6b6f storage manager libraries updated 2 years ago
Lucio Lelii 14a71d4aa7 docker folder updated 2 years ago
Lucio Lelii 3b0bb084b6 java melody removed 2 years ago
Lucio Lelii c4ea5bb05c solved bug on initialization 2 years ago
Lucio Lelii ce071c1f7e conf updated 2 years ago
Lucio Lelii 805b72155d Merge branch 'multipleStorageBackends' of
https://code-repo.d4science.org/gCubeSystem/storagehub.git into
multipleStorageBackends

Conflicts:
	docker-compose.yml
	src/main/webapp/WEB-INF/README
	src/test/java/org/gcube/data/access/fs/container/CreateUsers.java
	src/test/resources/compose-test.yml
2 years ago
Lucio Lelii e43faf6f92 changes 2 years ago
Lucio Lelii 492873bd7e porting to smartgears 4 2 years ago
Lucio Lelii d2b3151edc update 2 years ago
Lucio Lelii 9d3bd619bd added init script if repository is not yet initialized 2 years ago
Lucio Lelii 6d3e9394c4 docker files added 2 years ago
Lucio Lelii d672386824 added docker files for tests 2 years ago
Lucio Lelii 57e0113216 new feature for PayloadBackend added 2 years ago
Lucio Lelii 4f87677674 Minio integration 2 years ago
Lucio Lelii e11bb536a7 a part of s3StorageIntegration 2 years ago
lucio.lelii af9290cbca check for folder connector with same name 2 years ago
lucio.lelii db30621608 success set on users add to group 2 years ago
Lucio Lelii 70391906e2 update 2 years ago
Lucio Lelii ca23f94e09 ManageBy info added 2 years ago
Lucio Lelii 8d11063f6b Added some fix 2 years ago
lucio.lelii 9facccdf46 upload by url added 2 years ago
lucio.lelii da7385f62f pom update 2 years ago
lucio.lelii 29b728b057 reverted last commit 2 years ago
lucio.lelii 7ee17adeac upload archive session is not saved until finished 2 years ago

@ -1,40 +1,39 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java">
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
<classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources">
<attributes>
<attribute name="test" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<attributes>
<attribute name="test" value="true"/>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
<attribute name="org.eclipse.jst.component.dependency" value="/WEB-INF/lib"/>
</attributes>
</classpathentry>
<classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources">
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-11">
<attributes>
<attribute name="test" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<classpathentry kind="src" output="target/classes" path="src/main/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
<attribute name="org.eclipse.jst.component.dependency" value="/WEB-INF/lib"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8">
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
<attribute name="test" value="true"/>
</attributes>
</classpathentry>
<classpathentry combineaccessrules="false" kind="src" path="/storagehub-model"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>

.gitignore (vendored): 1 line changed

@ -1 +1,2 @@
target
/Storagehub-TODO

@ -1,8 +1,11 @@
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8
org.eclipse.jdt.core.compiler.compliance=1.8
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=11
org.eclipse.jdt.core.compiler.compliance=11
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=ignore
org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=warning
org.eclipse.jdt.core.compiler.release=disabled
org.eclipse.jdt.core.compiler.source=1.8
org.eclipse.jdt.core.compiler.source=11

@ -1,5 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?><project-modules id="moduleCoreId" project-version="1.5.0">
@ -23,7 +34,18 @@
<wb-module deploy-name="storagehub">
@ -47,7 +69,7 @@
<wb-resource deploy-path="/" source-path="/target/m2e-wtp/web-resources"/>
@ -70,8 +92,6 @@
<wb-resource deploy-path="/" source-path="/src/main/webapp" tag="defaultRootSource"/>
@ -83,6 +103,8 @@
<wb-resource deploy-path="/" source-path="/src/main/webapp" tag="defaultRootSource"/>
@ -94,8 +116,6 @@
<wb-resource deploy-path="/WEB-INF/classes" source-path="/src/main/java"/>
@ -119,13 +139,42 @@
<wb-resource deploy-path="/WEB-INF/classes" source-path="/src/main/resources"/>
<dependent-module archiveName="storagehub-model-1.1.0-SNAPSHOT.jar" deploy-path="/WEB-INF/lib" handle="module:/resource/storagehub-model/storagehub-model">
<wb-resource deploy-path="/WEB-INF/classes" source-path="/src/main/java"/>
<dependent-module archiveName="common-smartgears-app-3.0.0.jar" deploy-path="/WEB-INF/lib" handle="module:/resource/common-smartgears-app/common-smartgears-app">
<dependency-type>uses</dependency-type>
</dependent-module>
<dependent-module archiveName="storagehub-script-utils-1.0.0.jar" deploy-path="/WEB-INF/lib" handle="module:/resource/storagehub-scripting-util/storagehub-scripting-util">
<dependent-module archiveName="common-configuration-scanner-1.1.0.jar" deploy-path="/WEB-INF/lib" handle="module:/resource/common-configuration-scanner/common-configuration-scanner">
<dependency-type>uses</dependency-type>
</dependent-module>
<dependent-module archiveName="storagehub-model-2.0.0-SNAPSHOT.jar" deploy-path="/WEB-INF/lib" handle="module:/resource/storagehub-model/storagehub-model">
<dependency-type>uses</dependency-type>
</dependent-module>
<dependent-module archiveName="storagehub-script-utils-2.0.0-SNAPSHOT.jar" deploy-path="/WEB-INF/lib" handle="module:/resource/storagehub-scripting-util/storagehub-scripting-util">
<dependency-type>uses</dependency-type>
</dependent-module>
@ -149,7 +198,18 @@
<property name="context-root" value="storagehub"/>
@ -173,7 +233,18 @@
<property name="java-output-path" value="/storagehub-webapp_BRANCH/target/classes"/>
@ -197,7 +268,18 @@
</wb-module>

@ -1,8 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<faceted-project>
<fixed facet="wst.jsdt.web"/>
<installed facet="java" version="1.8"/>
<installed facet="jst.web" version="3.0"/>
<installed facet="jst.jaxrs" version="2.0"/>
<installed facet="wst.jsdt.web" version="1.0"/>
<installed facet="java" version="11"/>
</faceted-project>

@ -1,15 +1,22 @@
# Changelog
# Changelog for "storagehub"
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [v1.4.0] - [2021-10-07]
## [v1.5.0-SNAPSHOT] 2021-10-15
- minio as default storage
- vre folders can define specific bucket as backend
- enunciate docs
- dockerization of the service
## [v1.4.0] 2021-10-07
- slow query removed from VRE retrieving and recents
- incident #22184 solved
- incident solved [#22184]
## [v1.3.2] - [2021-09-28]
- fix 22087
- fix 22087
## [v1.3.1] - [2021-09-08]
@ -17,7 +24,7 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm
## [v1.3.0] - [2021-03-31]
- possibility to impersonate people added
possibility to impersonate people added
## [v1.2.5] - [2021-03-11]
@ -41,8 +48,6 @@ method for description update added
bug on Archive uploader solved
## [v1.2.0] - [2020-04-15]
trash items changes owner on restore
@ -51,20 +56,14 @@ restore with new destination folder added
move between shared and private or different shared folder enabled
## [v1.0.8] - [2019-09-20]
Bug on ushare owner fixed
## [v1.0.5] - [2019-04-04]
Active wait for lock in case of item creation added
## [v1.0.0] - [2015-07-01]
First commit

@ -0,0 +1,12 @@
FROM smartgears-distribution:4.0.0-java11-tomcat9
ARG REPOUSER=admin
ARG REPOPWD=admin
COPY ./target/storagehub.war /tomcat/webapps/
COPY ./docker/jackrabbit /app/jackrabbit
COPY ./docker/storagehub.xml /tomcat/conf/Catalina/localhost/
COPY ./docker/logback.xml /etc/
COPY ./docker/container.ini /etc/
RUN unzip /tomcat/webapps/storagehub.war -d /tomcat/webapps/storagehub
RUN rm /tomcat/webapps/storagehub.war
COPY ./docker/storage-settings.properties /tomcat/webapps/storagehub/WEB-INF/classes/
RUN sed -i "s/{{adminId}}/$REPOUSER/g; s/{{adminPwd}}/$REPOPWD/g" /tomcat/webapps/storagehub/WEB-INF/web.xml

@ -0,0 +1,12 @@
FROM smartgears-distribution:4.0.0-java11-tomcat9
ARG REPOUSER=admin
ARG REPOPWD=admin
COPY ./target/storagehub-test-storages.war /tomcat/webapps/storagehub.war
COPY ./docker/jackrabbit /app/jackrabbit
COPY ./docker/storagehub.xml /tomcat/conf/Catalina/localhost/
COPY ./docker/logback.xml /etc/
COPY ./docker/container.ini /etc/
RUN unzip /tomcat/webapps/storagehub.war -d /tomcat/webapps/storagehub
RUN rm /tomcat/webapps/storagehub.war
COPY ./docker/storage-settings.properties /tomcat/webapps/storagehub/WEB-INF/classes/
RUN sed -i "s/{{adminId}}/$REPOUSER/g; s/{{adminPwd}}/$REPOPWD/g" /tomcat/webapps/storagehub/WEB-INF/web.xml

@ -0,0 +1,3 @@
nodeType to remove on new import from a backup:
externalUrl

@ -1 +0,0 @@
${gcube.license}

@ -1,66 +0,0 @@
The gCube System - ${name}
--------------------------------------------------
${description}
${gcube.description}
${gcube.funding}
Version
--------------------------------------------------
${version} (${buildDate})
Please see the file named "changelog.xml" in this directory for the release notes.
Authors
--------------------------------------------------
* Lucio Lelii (lucio.lelii-AT-isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo".
Maintainers
-----------
* Lucio Lelii (lucio.lelii-AT-isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo".
Download information
--------------------------------------------------
Source code is available from SVN:
${scm.url}
Binaries can be downloaded from the gCube website:
${gcube.website}
Installation
--------------------------------------------------
Installation documentation is available on-line in the gCube Wiki:
${gcube.wikiRoot}/Home_Library_2.0_API_Framework_Specification
Documentation
--------------------------------------------------
Documentation is available on-line in the gCube Wiki:
${gcube.wikiRoot}/StorageHub_API_Framework_Specification
Support
--------------------------------------------------
Bugs and support requests can be reported in the gCube issue tracking tool:
${gcube.issueTracking}
Licensing
--------------------------------------------------
This software is licensed under the terms you may find in the file named "LICENSE" in this directory.

@ -1,7 +0,0 @@
<application mode='online'>
<name>StorageHub</name>
<group>DataAccess</group>
<version>${version}</version>
<description>Storage Hub webapp</description>
<local-persistence location='target' />
</application>

@ -0,0 +1,39 @@
version: '3.7'
services:
  postgres:
    image: postgres:10.5
    restart: always
    environment:
      - POSTGRES_DB=workspace-db
      - POSTGRES_USER=ws-db-user
      - POSTGRES_PASSWORD=dbPwd
    logging:
      options:
        max-size: 10m
        max-file: "3"
    ports:
      - '5423:5432'
    volumes:
      - ./postgres-data:/var/lib/postgresql/data
      # copy the sql script to create tables
      - ./sql/create_tables.sql:/docker-entrypoint-initdb.d/create_tables.sql
  storagehub:
    build:
      dockerfile: ./Dockerfile-test
    ports:
      - '8081:8080'
  minio:
    image: minio/minio
    ports:
      - "9000:9000"
      - "9001:9001"
    volumes:
      - minio_storage:/data
    environment:
      MINIO_ROOT_USER: SHUBTEST
      MINIO_ROOT_PASSWORD: wJalrXUtnFEMI/K7MDENG/bPxRfiCY
    command: server --console-address ":9001" /data
volumes:
  minio_storage: {}

@ -0,0 +1,38 @@
version: '3.7'
services:
  postgres:
    image: postgres:10.5
    restart: always
    environment:
      - POSTGRES_DB=workspace-db
      - POSTGRES_USER=ws-db-user
      - POSTGRES_PASSWORD=dbPwd
    logging:
      options:
        max-size: 10m
        max-file: "3"
    ports:
      - '5423:5432'
    volumes:
      - ./postgres-data:/var/lib/postgresql/data
      # copy the sql script to create tables
      - ./sql/create_tables.sql:/docker-entrypoint-initdb.d/create_tables.sql
  storagehub:
    build: .
    ports:
      - '8081:8080'
  minio:
    image: minio/minio
    ports:
      - "9000:9000"
      - "9001:9001"
    volumes:
      - minio_storage:/data
    environment:
      MINIO_ROOT_USER: SHUBTEST
      MINIO_ROOT_PASSWORD: wJalrXUtnFEMI/K7MDENG/bPxRfiCY
    command: server --console-address ":9001" /data
volumes:
  minio_storage: {}

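For local testing, the Postgres container defined above is published on host port 5423 with the credentials from its environment section. A minimal JDBC smoke check (not part of the service code; class name is illustrative) using the org.postgresql driver declared in the pom could look like this:

import java.sql.Connection;
import java.sql.DriverManager;

public class WorkspaceDbSmokeCheck {
    public static void main(String[] args) throws Exception {
        // host port 5423 and the credentials come from the docker-compose file above
        String url = "jdbc:postgresql://localhost:5423/workspace-db";
        try (Connection conn = DriverManager.getConnection(url, "ws-db-user", "dbPwd")) {
            System.out.println("workspace-db reachable: " + !conn.isClosed());
        }
    }
}
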
@ -0,0 +1,23 @@
[node]
mode = offline
hostname = dlib29.isti.cnr.it
protocol= http
port = 8080
infrastructure = gcube
authorizeChildrenContext = true
publicationFrequencyInSeconds = 60
[properties]
SmartGearsDistribution = 4.0.0-SNAPSHOT
SmartGearsDistributionBundle = UnBundled
[site]
country = it
location = pisa
[authorization]
factory = org.gcube.smartgears.security.defaults.DefaultAuthorizationProviderFactory
factory.endpoint = https://accounts.dev.d4science.org/auth/realms/d4science/protocol/openid-connect/token
credentials.class = org.gcube.smartgears.security.SimpleCredentials
credentials.clientID = node-whn-test-uno-d-d4s.d4science.org
credentials.secret = 979bd3bc-5cc4-11ec-bf63-0242ac130002

@ -0,0 +1,11 @@
#bootstrap properties for the repository startup servlet.
#Fri Jul 21 05:19:29 CEST 2017
java.naming.factory.initial=org.apache.jackrabbit.core.jndi.provider.DummyInit$
repository.home=jackrabbit
rmi.enabled=true
repository.config=jackrabbit/repository.xml
repository.name=jackrabbit.repository
rmi.host=localhost
java.naming.provider.url=http\://www.apache.org/jackrabbit
jndi.enabled=true
rmi.port=0

@ -0,0 +1,110 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor
license agreements. See the NOTICE file distributed with this work for additional
information regarding copyright ownership. The ASF licenses this file to
You under the Apache License, Version 2.0 (the "License"); you may not use
this file except in compliance with the License. You may obtain a copy of
the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required
by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
OF ANY KIND, either express or implied. See the License for the specific
language governing permissions and limitations under the License. -->
<!DOCTYPE Repository PUBLIC "-//The Apache Software Foundation//DTD Jackrabbit 2.0//EN" "http://jackrabbit.apache.org/dtd/repository-2.0.dtd">
<Repository>
<!-- virtual file system where the repository stores global state (e.g.
registered namespaces, custom node types, etc.) -->
<FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
<param name="driver" value="org.postgresql.Driver" />
<param name="schema" value="postgresql" />
<param name="url" value="jdbc:postgresql://postgres:5432/workspace-db" />
<param name="user" value="ws-db-user" />
<param name="password" value="dbPwd" />
<param name="schemaObjectPrefix" value="rep_" />
</FileSystem>
<!-- data store configuration -->
<DataStore class="org.apache.jackrabbit.core.data.db.DbDataStore">
<param name="driver" value="org.postgresql.Driver" />
<param name="databaseType" value="postgresql" />
<param name="url" value="jdbc:postgresql://postgres:5432/workspace-db" />
<param name="user" value="ws-db-user" />
<param name="password" value="dbPwd" />
<param name="minRecordLength" value="1024" />
<param name="maxConnections" value="3" />
<param name="copyWhenReading" value="true" />
<param name="tablePrefix" value="datastore_" />
<param name="schemaObjectPrefix" value="" />
</DataStore>
<!-- security configuration -->
<Security appName="Jackrabbit">
<SecurityManager class="org.apache.jackrabbit.core.DefaultSecurityManager" />
<AccessManager class="org.apache.jackrabbit.core.security.DefaultAccessManager" />
<LoginModule class="org.apache.jackrabbit.core.security.authentication.DefaultLoginModule">
<param name="adminId" value="admin" />
</LoginModule>
</Security>
<!-- location of workspaces root directory and name of default workspace -->
<Workspaces rootPath="${rep.home}/workspaces" defaultWorkspace="default" />
<Workspace name="${wsp.name}">
<FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
<param name="path" value="${wsp.home}" />
</FileSystem>
<PersistenceManager class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
<param name="driver" value="org.postgresql.Driver" />
<param name="url" value="jdbc:postgresql://postgres:5432/workspace-db" />
<param name="schema" value="postgresql" />
<param name="user" value="ws-db-user" />
<param name="password" value="dbPwd" />
<param name="schemaObjectPrefix" value="pm_${wsp.name}_" />
<param name="bundleCacheSize" value="600" />
<param name="errorHandling" value="IGNORE_MISSING_BLOBS" />
<param name="consistencyFix" value="false" />
<param name="consistencyCheck" value="false" />
</PersistenceManager>
<!-- Search index and the file system it uses. class: FQN of class implementing
the QueryHandler interface -->
<SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
<param name="path" value="${wsp.home}/index" />
<param name="supportHighlighting" value="true" />
<param name="autoRepair" value="true" />
<param name="onWorkspaceInconsistency" value="log" />
<param name="indexingConfiguration" value="${rep.home}/indexing_configuration.xml" />
<param name="resultFetchSize" value="50" />
<param name="cacheSize" value="100000" />
<param name="enableConsistencyCheck" value="false" />
<param name="forceConsistencyCheck" value="false" />
</SearchIndex>
</Workspace>
<!-- Configures the versioning -->
<Versioning rootPath="${rep.home}/version">
<!-- Configures the filesystem to use for versioning for the respective
persistence manager -->
<FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
<param name="path" value="${rep.home}/version" />
</FileSystem>
<PersistenceManager class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
<param name="driver" value="org.postgresql.Driver" />
<param name="url" value="jdbc:postgresql://postgres:5432/workspace-db" />
<param name="schema" value="postgresql" />
<param name="user" value="ws-db-user" />
<param name="password" value="dbPwd" />
<param name="schemaObjectPrefix" value="pm_version_" />
<param name="bundleCacheSize" value="600" />
<param name="consistencyFix" value="false" />
<param name="consistencyCheck" value="false" />
</PersistenceManager>
</Versioning>
<!-- Cluster configuration -->
<!-- Cluster id="storagehub1.d4science.org" syncDelay="2000">
<Journal class="org.apache.jackrabbit.core.journal.DatabaseJournal">
<param name="driver" value="org.postgresql.Driver" />
<param name="url" value="jdbc:postgresql://postgres/workspace-db" />
<param name="databaseType" value="postgresql" />
<param name="schemaObjectPrefix" value="journal_" />
<param name="user" value="ws-db-user" />
<param name="password" value="dbPwd" />
<param name="revision" value="${rep.home}/revision.log" />
<param name="janitorEnabled" value="false"/>
<set to true if you want to daily clean the journal table https://wiki.apache.org/jackrabbit/Clustering#Removing_Old_Revisions>
</Journal>
</Cluster > -->
</Repository>

@ -0,0 +1,25 @@
<configuration scan="true" debug="true">
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<logger name="org.gcube" level="DEBUG" />
<logger name="org.gcube.smartgears" level="TRACE" />
<logger name="org.gcube.smartgears.handlers" level="TRACE"/>
<logger name="org.gcube.common.events" level="WARN" />
<logger name="org.gcube.data.publishing" level="ERROR" />
<logger name="org.gcube.documentstore" level="ERROR" />
<logger name="org.gcube.common.core.publisher.is.legacy" level="TRACE" />
<logger name="org.gcube.data.access" level="TRACE" />
<logger name="org.gcube.data.access.storagehub.handlers" level="DEBUG"/>
<root level="WARN">
<appender-ref ref="STDOUT" />
</root>
</configuration>

@ -0,0 +1,6 @@
${{adminId}}=workspace
${{adminPwd}}=gcube
${{db-host}}=postgres
${{ws-db}}=workspace-db
${{dbUser}}=ws-db-user
${{dbPwd}}=dbPwd

@ -0,0 +1,19 @@
#default.bucketName=storagehub-dev
#default.key=SHUBTEST
#default.secret=wJalrXUtnFEMI/K7MDENG/bPxRfiCY
#default.url=http://minio:9000
#default.createBucket=true
#volatile.bucketName=storagehub-volatile-dev
#volatile.key=SHUBTEST
#volatile.secret=wJalrXUtnFEMI/K7MDENG/bPxRfiCY
#volatile.url=http://minio:9000
#volatile.createBucket=true
default.bucketName=storagehub-dev
default.key=18eb719ebffb4cd0ab78f9343f8aedd2
default.secret=e7b6178dd61d4e0dbbc37ff7cb941aed
default.url=https://isti-cloud.isti.cnr.it:13808/
default.createBucket=false
volatile.bucketName=shub-volatile-dev
volatile.key=18eb719ebffb4cd0ab78f9343f8aedd2
volatile.secret=e7b6178dd61d4e0dbbc37ff7cb941aed
volatile.url=https://isti-cloud.isti.cnr.it:13808/

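The default.* and volatile.* keys point the service at an S3-compatible object store (the commented block targets the MinIO container from the compose files, the active values an ISTI cloud endpoint). How storagehub consumes these properties is internal to the service; the sketch below only shows, under that assumption, how such an endpoint can be reached with the aws-java-sdk-s3 dependency added in the pom, using the commented MinIO values (class name is illustrative):

import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;

public class S3BackendSmokeCheck {
    public static void main(String[] args) {
        // values mirror the commented "default.*" keys above (MinIO from the compose files)
        String endpoint = "http://minio:9000";
        String key = "SHUBTEST";
        String secret = "wJalrXUtnFEMI/K7MDENG/bPxRfiCY";
        String bucket = "storagehub-dev";

        AmazonS3 s3 = AmazonS3ClientBuilder.standard()
                .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, "us-east-1"))
                .withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials(key, secret)))
                .withPathStyleAccessEnabled(true) // MinIO and most S3-compatible stores need path-style access
                .build();

        // roughly what "createBucket=true" is expected to imply
        if (!s3.doesBucketExistV2(bucket)) {
            s3.createBucket(bucket);
        }
        System.out.println("objects in " + bucket + ": " + s3.listObjectsV2(bucket).getKeyCount());
    }
}
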
@ -0,0 +1,10 @@
<Context path="/storagehub">
<Resource
name="jcr/repository"
auth="Container"
type="javax.jcr.Repository"
factory="org.apache.jackrabbit.core.jndi.BindableRepositoryFactory"
configFilePath="/app/jackrabbit/repository.xml"
repHomeDir="/app/jackrabbit/workspaces"
/>
</Context>

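This Context fragment publishes the Jackrabbit repository in JNDI under jcr/repository, configured by the repository.xml shown earlier. The lookup performed by RepositoryInitializerImpl further down in this compare boils down to the following sketch, which only works inside the deployed webapp (the admin credentials shown are the defaults injected at image build time):

import javax.jcr.Repository;
import javax.jcr.Session;
import javax.jcr.SimpleCredentials;
import javax.naming.Context;
import javax.naming.InitialContext;

public class RepositoryLookupSketch {
    public static boolean repositoryIsInitialized() throws Exception {
        // Tomcat exposes the <Resource name="jcr/repository"> above under java:comp/env
        InitialContext context = new InitialContext();
        Context environment = (Context) context.lookup("java:comp/env");
        Repository repository = (Repository) environment.lookup("jcr/repository");

        Session session = repository.login(new SimpleCredentials("admin", "admin".toCharArray()));
        try {
            // the same check used by initContainerAtFirstStart to decide whether to run the init script
            return session.getRootNode().hasNode("Home");
        } finally {
            session.logout();
        }
    }
}
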
@ -0,0 +1,24 @@
<?xml version="1.0" encoding="UTF-8"?>
<enunciate
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="http://enunciate.webcohesion.com/schemas/enunciate-2.14.0.xsd">
<api-classes>
<!-- Use patterns to exclude classes... e.g. for URI-Resolver <exclude
pattern="org.gcube.datatransfer.resolver.services.DocsGenerator" /> -->
</api-classes>
<modules>
<gwt-json-overlay disabled="true" />
<php-json-client disabled="true" />
<ruby-json-client disabled="true" />
<java-json-client disabled="true" />
<javascript-client disabled="true" />
<docs docsDir="${project.build.directory}" docsSubdir="api-docs" />
<docs
freemarkerTemplate="${project.basedir}/src/main/resources/META-INF/enunciate/d4science_docs.fmt">
<additional-css
file="css/d4science_enunciate_custom.css" />
</docs>
<swagger basePath="/workspace" />
</modules>
</enunciate>

@ -0,0 +1,6 @@
name: StorageHub
group: DataAccess
version: ${version}
description: ${description}
excludes:
- path: /workspace/api-docs/*

@ -2,11 +2,11 @@
<web-app>
<context-param>
<param-name>admin-username</param-name>
<param-value>workspacerep.imarine</param-value>
<param-value>{{adminId}}</param-value>
</context-param>
<context-param>
<param-name>admin-pwd</param-name>
<param-value>gcube2010*onan</param-value>
<param-value>{{adminPwd}}</param-value>
</context-param>
<context-param>
<param-name>resolver-basepath</param-name>

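The hard-coded admin account is replaced by {{adminId}}/{{adminPwd}} placeholders, which the sed step in the Dockerfiles fills with the REPOUSER/REPOPWD build arguments. The real CredentialHandler.getAdminCredentials(...) used later in this compare is not shown; presumably it does little more than read these context parameters and wrap them as JCR credentials, as in this illustrative helper:

import javax.jcr.SimpleCredentials;
import javax.servlet.ServletContext;

public class AdminCredentialSketch {
    // reads the templated context-params from web.xml and wraps them for repository.login(...)
    public static SimpleCredentials fromContext(ServletContext ctx) {
        String user = ctx.getInitParameter("admin-username");
        String pwd = ctx.getInitParameter("admin-pwd");
        return new SimpleCredentials(user, pwd.toCharArray());
    }
}
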
@ -1,52 +1,45 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>maven-parent</artifactId>
<groupId>org.gcube.tools</groupId>
<version>1.1.0</version>
<relativePath />
</parent>
<modelVersion>4.0.0</modelVersion>
<groupId>org.gcube.data.access</groupId>
<artifactId>storagehub</artifactId>
<version>1.4.0</version>
<version>1.5.0-SNAPSHOT</version>
<name>storagehub</name>
<scm>
<connection>scm:git:https://code-repo.d4science.org/gCubeSystem/storagehub.git</connection>
<developerConnection>scm:git:https://code-repo.d4science.org/gCubeSystem/storagehub.git</developerConnection>
<url>https://code-repo.d4science.org/gCubeSystem/storagehub</url>
</scm>
<packaging>war</packaging>
<properties>
<webappDirectory>${project.basedir}/src/main/webapp/WEB-INF</webappDirectory>
<jackrabbit.version>2.20.2</jackrabbit.version>
<jackrabbit.version>2.20.7</jackrabbit.version>
<jackson.version>2.8.11</jackson.version>
<slf4j.version>1.7.4</slf4j.version>
<tomcat.version>7.0.40</tomcat.version>
<jetty.version>6.1.26</jetty.version>
<tika.version>1.21</tika.version>
<tika.version>2.6.0</tika.version>
<aspectj-plugin.version>1.14.0</aspectj-plugin.version>
<distroDirectory>${project.basedir}/distro</distroDirectory>
<description>REST web service for Jackrabbit</description>
<warname>storagehub</warname>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<enunciate.version>2.14.0</enunciate.version>
<maven.compiler.source>11</maven.compiler.source>
<maven.compiler.target>11</maven.compiler.target>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.gcube.distribution</groupId>
<artifactId>gcube-smartgears-bom</artifactId>
<version>2.0.0</version>
<version>3.0.1-SNAPSHOT</version>
<type>pom</type>
<scope>import</scope>
</dependency>
@ -56,88 +49,74 @@
<version>1.8.2</version>
</dependency>
</dependencies>
<!-- https://mvnrepository.com/artifact/org.apache.tika/tika-parsers -->
</dependencyManagement>
<dependencies>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjrt</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-smartgears</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-smartgears-app</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>authorization-control-library</artifactId>
<version>[1.0.0,2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>common-authorization</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-encryption</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-scope-maps</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-scope</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>storagehub-model</artifactId>
<version>[1.0.0,2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.data.access</groupId>
<artifactId>storagehub-script-utils</artifactId>
<version>[1.0.0-SNAPSHOT,2.0.0)</version>
<version>[2.0.0-SNAPSHOT,3.0.0)</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<dependency>
<groupId>com.itextpdf</groupId>
<artifactId>itextpdf</artifactId>
<version>5.5.13.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.bouncycastle/bcprov-jdk15on -->
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
<version>1.62</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>gxJRS</artifactId>
</dependency>
<!-- JCR dependencies -->
<dependency>
<groupId>javax.jcr</groupId>
<artifactId>jcr</artifactId>
@ -153,79 +132,72 @@
<artifactId>jackrabbit-core</artifactId>
<version>${jackrabbit.version}</version>
</dependency>
<dependency>
<groupId>org.apache.jackrabbit</groupId>
<artifactId>jackrabbit-jcr-server</artifactId>
<version>${jackrabbit.version}</version>
</dependency>
<!-- <dependency> <groupId>org.apache.jackrabbit</groupId> <artifactId>jackrabbit-jcr-server</artifactId>
<version>${jackrabbit.version}</version> </dependency> -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.tika/tika-parsers -->
<!-- https://mvnrepository.com/artifact/org.apache.tika/tika-core -->
<dependency>
<groupId>org.apache.tika</groupId>
<artifactId>tika-parsers</artifactId>
<version>1.21</version>
<artifactId>tika-core</artifactId>
<version>${tika.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.tika/tika-core -->
<dependency>
<groupId>org.apache.tika</groupId>
<artifactId>tika-core</artifactId>
<version>1.21</version>
<artifactId>tika-parsers-standard-package</artifactId>
<version>${tika.version}</version>
</dependency>
<!-- needed to manage strange image types -->
<dependency>
<groupId>com.twelvemonkeys.imageio</groupId>
<artifactId>imageio-jpeg</artifactId>
<version>3.3.2</version>
</dependency>
<dependency>
<groupId>com.twelvemonkeys.imageio</groupId>
<artifactId>imageio-bmp</artifactId>
<version>3.3.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/net.bull.javamelody/javamelody-core -->
<dependency>
<groupId>net.bull.javamelody</groupId>
<artifactId>javamelody-core</artifactId>
<version>1.82.0</version>
<groupId>com.twelvemonkeys.imageio</groupId>
<artifactId>imageio-core</artifactId>
<version>3.3.2</version>
</dependency>
<!-- jersey & weld -->
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>3.0.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/javax.interceptor/javax.interceptor-api -->
<dependency>
<groupId>javax.interceptor</groupId>
<artifactId>javax.interceptor-api</artifactId>
<version>1.2.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/javax.enterprise/cdi-api -->
<dependency>
<groupId>javax.enterprise</groupId>
<artifactId>cdi-api</artifactId>
<version>2.0</version>
</dependency>
<dependency>
<groupId>javax.ws.rs</groupId>
<artifactId>javax.ws.rs-api</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet</artifactId>
@ -234,14 +206,12 @@
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet-core</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/org.glassfish.jersey.inject/jersey-hk2 -->
<dependency>
<groupId>org.glassfish.jersey.inject</groupId>
<artifactId>jersey-hk2</artifactId>
<version>2.30.1</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-server</artifactId>
@ -259,87 +229,55 @@
<artifactId>weld-servlet-core</artifactId>
<version>3.1.0.Final</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.jboss/jandex -->
<dependency>
<groupId>org.jboss</groupId>
<artifactId>jandex</artifactId>
<version>2.2.2.Final</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>3.0.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.glassfish.jersey.core/jersey-common -->
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-common</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-jackson</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-multipart</artifactId>
</dependency>
<dependency>
<groupId>postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>9.1-901.jdbc4</version>
<scope>runtime</scope>
</dependency>
<!-- Storage dependencies -->
<dependency>
<groupId>org.gcube.contentmanagement</groupId>
<artifactId>storage-manager-core</artifactId>
<version>[3.0.0-SNAPSHOT,4.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.contentmanagement</groupId>
<artifactId>storage-manager-wrapper</artifactId>
<version>[3.0.0-SNAPSHOT,4.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.reflections</groupId>
<artifactId>reflections</artifactId>
<version>0.9.10</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.postgresql/postgresql -->
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>42.7.0</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>16.0</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
<version>1.17</version>
<version>1.22</version>
</dependency>
<dependency>
<groupId>org.tukaani</groupId>
<artifactId>xz</artifactId>
<version>1.5</version>
<version>1.5</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.test-framework.providers</groupId>
<artifactId>jersey-test-framework-provider-simple</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.derby</groupId>
<artifactId>derby</artifactId>
@ -352,73 +290,88 @@
<version>10.8.2.2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.googlecode.jeeunit</groupId>
<artifactId>jeeunit</artifactId>
<version>1.0.0</version>
<scope>test</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/com.google.code.gson/gson -->
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.7</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.vlkan.rfos</groupId>
<artifactId>rotating-fos</artifactId>
<version>0.9.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.mockito/mockito-all -->
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>1.9.5</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jul-to-slf4j</artifactId>
<version>${slf4j.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-to-slf4j -->
<!-- https://mvnrepository.com/artifact/org.slf4j/log4j-over-slf4j -->
<dependency>
<groupId>org.jboss.weld.se</groupId>
<artifactId>weld-se</artifactId>
<version>2.2.10.Final</version>
<groupId>org.slf4j</groupId>
<artifactId>log4j-over-slf4j</artifactId>
<version>2.0.7</version>
</dependency>
<!-- enunciate deps -->
<dependency>
<groupId>com.webcohesion.enunciate</groupId>
<artifactId>enunciate-core-annotations</artifactId>
<version>${enunciate.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.webcohesion.enunciate</groupId>
<artifactId>enunciate-rt-util</artifactId>
<version>${enunciate.version}</version>
<scope>provided</scope>
</dependency>
<!-- Storage dependencies -->
<dependency>
<groupId>org.gcube.contentmanagement</groupId>
<artifactId>storage-manager-core</artifactId>
<version>[4.0.0-SNAPSHOT,5.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.contentmanagement</groupId>
<artifactId>storage-manager-wrapper</artifactId>
<version>[4.0.0-SNAPSHOT,5.0.0-SNAPSHOT)</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk-s3 -->
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId>
<version>1.12.512</version>
</dependency>
<!-- https://mvnrepository.com/artifact/io.minio/minio
<dependency>
<groupId>io.minio</groupId>
<artifactId>minio</artifactId>
<version>8.3.3</version>
</dependency> -->
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>storagehub-client-library</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
<groupId>org.testcontainers</groupId>
<artifactId>testcontainers</artifactId>
<version>1.16.3</version>
<scope>test</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/junit/junit -->
<dependency>
<groupId>org.glassfish.jersey.test-framework</groupId>
<artifactId>jersey-test-framework-core</artifactId>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.13.2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.test-framework.providers</groupId>
<artifactId>jersey-test-framework-provider-grizzly2</artifactId>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<finalName>${project.artifactId}</finalName>
@ -452,7 +405,7 @@
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>aspectj-maven-plugin</artifactId>
<version>1.7</version>
<version>${aspectj-plugin.version}</version>
<configuration>
<complianceLevel>1.8</complianceLevel>
<source>1.8</source>
@ -486,22 +439,38 @@
<failOnMissingWebXml>false</failOnMissingWebXml>
</configuration>
</plugin>
<!-- Enunciate Maven plugin -->
<plugin>
<groupId>com.webcohesion.enunciate</groupId>
<artifactId>enunciate-maven-plugin</artifactId>
<version>${enunciate.version}</version>
<configuration></configuration>
<executions>
<execution>
<id>assemble</id>
<goals>
<goal>assemble</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- Copy Enunciate Documentation from your-application/docs to your-application.war -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<version>2.6</version>
<executions>
<execution>
<id>copy-profile</id>
<id>copy-enunciate-docs</id>
<phase>process-resources</phase>
<goals>
<goal>copy-resources</goal>
</goals>
<phase>process-resources</phase>
<configuration>
<outputDirectory>${webappDirectory}</outputDirectory>
<outputDirectory>target</outputDirectory>
<resources>
<resource>
<directory>${distroDirectory}</directory>
<targetPath>${project.build.directory}/${project.artifactId}/api-docs</targetPath>
<directory>${project.build.directory}/api-docs</directory>
<filtering>true</filtering>
</resource>
</resources>
@ -511,5 +480,33 @@
</plugin>
</plugins>
</build>
<profiles>
<profile>
<id>integration</id>
<build>
<finalName>storagehub-test-storages</finalName>
<plugins>
<plugin>
<artifactId>maven-antrun-plugin</artifactId>
<version>1.7</version>
<executions>
<execution>
<phase>process-test-classes</phase>
<configuration>
<target>
<copy todir="${basedir}/target/classes">
<fileset dir="${basedir}/target/test-classes" includes="org/gcube/data/access/storages/**/*" />
</copy>
</target>
</configuration>
<goals>
<goal>run</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>

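The test scope now brings in Testcontainers 1.16.3 and JUnit 4.13.2, and the compare also adds compose-test.yml plus an integration profile that builds storagehub-test-storages. A minimal JUnit 4 sketch of driving that compose stack with Testcontainers, with a hypothetical class name and smoke check, might be:

import java.io.File;
import org.junit.ClassRule;
import org.junit.Test;
import org.testcontainers.containers.DockerComposeContainer;

public class StorageHubComposeIT {
    // brings up the postgres/storagehub/minio stack defined in compose-test.yml
    @ClassRule
    public static DockerComposeContainer stack =
            new DockerComposeContainer(new File("src/test/resources/compose-test.yml"))
                    .withExposedService("storagehub", 8080);

    @Test
    public void serviceIsReachable() {
        // Testcontainers remaps the container port; ask it where storagehub ended up
        String host = stack.getServiceHost("storagehub", 8080);
        Integer port = stack.getServicePort("storagehub", 8080);
        System.out.println("storagehub reachable at " + host + ":" + port);
    }
}
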
@ -49,7 +49,7 @@ public class AuthorizationChecker {
ACLManagerInterface aclManager;
public void checkReadAuthorizationControl(Session session, String userToCheck, String id) throws UserNotAuthorizedException , BackendGenericError, RepositoryException{
Node node = session.getNodeByIdentifier(id);
Item item = node2Item.getItem(node, Excludes.ALL);

@ -17,6 +17,8 @@ public class Constants {
public static final String SHARED_WITH_ME_PARENT_NAME = "SharedWithMe";
//public static final String MYSHARED_PARENT_NAME = "MyShared";
public static final String SHARED_FOLDER_PATH = "/Share";
public static final String WORKSPACE_ROOT_FOLDER_NAME ="Workspace";
@ -36,4 +38,5 @@ public class Constants {
public static final List<String> WRITE_PROTECTED_FOLDER = Arrays.asList(Constants.OLD_VRE_FOLDER_PARENT_NAME, Constants.TRASH_ROOT_FOLDER_NAME);
public static final List<String> PROTECTED_FOLDER = Arrays.asList(Constants.WORKSPACE_ROOT_FOLDER_NAME, Constants.OLD_VRE_FOLDER_PARENT_NAME, Constants.TRASH_ROOT_FOLDER_NAME);
}

@ -1,141 +0,0 @@
package org.gcube.data.access.storagehub;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class MultipleOutputStream {
private Logger logger = LoggerFactory.getLogger(MultipleOutputStream.class);
private MyPipedInputStream[] pipedInStreams;
private InputStream is;
private MyPipedOututStream[] pipedOutStreams;
private int index=0;
public MultipleOutputStream(InputStream is, int number) throws IOException{
this.is = is;
logger.debug("requested {} piped streams ",number);
pipedInStreams = new MyPipedInputStream[number];
pipedOutStreams = new MyPipedOututStream[number];
for (int i =0; i<number; i++) {
pipedOutStreams[i] = new MyPipedOututStream();
pipedInStreams[i] = new MyPipedInputStream(pipedOutStreams[i]);
}
}
public void startWriting() throws IOException{
BufferedInputStream bis = new BufferedInputStream(is);
byte[] buf = new byte[1024*64];
int read=-1;
int writeTot = 0;
while ((read =bis.read(buf))!=-1){
for (int i=0; i< pipedInStreams.length; i++) {
if (!pipedInStreams[i].isClosed()) {
pipedOutStreams[i].write(buf, 0, read);
}
}
writeTot+= read;
if (allOutStreamClosed())
break;
}
for (int i=0; i< pipedOutStreams.length; i++) {
if (!pipedOutStreams[i].isClosed()) {
logger.debug("closing outputstream {}",i);
pipedOutStreams[i].close();
}
}
logger.debug("total written {} ",writeTot);
}
private boolean allOutStreamClosed() {
for (int i=0; i<pipedOutStreams.length; i++) {
if (!pipedOutStreams[i].isClosed())
return false;
}
return true;
}
public synchronized InputStream get() {
logger.debug("requesting piped streams {}",index);
if (index>=pipedInStreams.length) return null;
return pipedInStreams[index++];
}
public class MyPipedOututStream extends PipedOutputStream{
boolean close = false;
@Override
public void close() throws IOException {
this.close = true;
super.close();
}
/**
* @return the close
*/
public boolean isClosed() {
return close;
}
@Override
public void write(byte[] b, int off, int len) throws IOException {
try{
super.write(b, off, len);
}catch(IOException io){
this.close = true;
}
}
}
public class MyPipedInputStream extends PipedInputStream{
boolean close = false;
public MyPipedInputStream(PipedOutputStream src) throws IOException {
super(src);
}
@Override
public void close() throws IOException {
this.close = true;
logger.debug(Thread.currentThread().getName()+" close MyPipedInputStream");
super.close();
}
/**
* @return the close
*/
public boolean isClosed() {
return close;
}
}
}

@ -46,8 +46,13 @@ public class PathUtil {
}
public Path getSharedWithMePath(String login){
return Paths.append(getWorkspacePath(login),Constants.SHARED_WITH_ME_PARENT_NAME);
return Paths.append(getHome(login),Constants.SHARED_WITH_ME_PARENT_NAME);
}
/*
public Path getMySharedPath(String login){
return Paths.append(getHome(login),Constants.MYSHARED_PARENT_NAME);
}*/
public Path getVREsPath(String login, Session session) throws RepositoryException {
Path home = getHome(login);

@ -2,34 +2,71 @@ package org.gcube.data.access.storagehub;
import javax.inject.Singleton;
import javax.jcr.Repository;
import javax.jcr.SimpleCredentials;
import javax.naming.Context;
import javax.naming.InitialContext;
import org.apache.jackrabbit.api.JackrabbitRepository;
import org.apache.jackrabbit.api.JackrabbitSession;
import org.gcube.data.access.storagehub.services.RepositoryInitializer;
import org.gcube.data.access.storagehub.services.admin.InitScript;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class RepositoryInitializerImpl implements RepositoryInitializer{
private static Logger log = LoggerFactory.getLogger(RepositoryInitializerImpl.class);
private static RepositoryInitializer instance = new RepositoryInitializerImpl();
public static RepositoryInitializer get(){
return instance;
}
private Repository repository;
private boolean jackrabbitInitialized = false;
@Override
public synchronized Repository getRepository(){
public Repository getRepository(){
return repository;
}
protected RepositoryInitializerImpl() throws Exception{
InitialContext context = new InitialContext();
Context environment = (Context) context.lookup("java:comp/env");
repository = (Repository) environment.lookup("jcr/repository");
protected RepositoryInitializerImpl(){
try {
InitialContext context = new InitialContext();
Context environment = (Context) context.lookup("java:comp/env");
repository = (Repository) environment.lookup("jcr/repository");
}catch (Throwable e) {
log.error("error initializing repository", e);
throw new RuntimeException("error initializing repository",e);
}
}
public void shutdown() {
((JackrabbitRepository)repository).shutdown();
}
@Override
public synchronized void initContainerAtFirstStart(SimpleCredentials credentials) {
try {
log.info("credential are {} {}",credentials.getUserID(), new String(credentials.getPassword()));
JackrabbitSession ses = (JackrabbitSession) repository.login(credentials);
try {
boolean notAlreadyDone = !jackrabbitInitialized && !ses.getRootNode().hasNode("Home");
if (notAlreadyDone)
new InitScript().init(ses);
else log.info("jackrabbit is already initialized");
}finally {
ses.logout();
}
} catch (Exception e) {
log.warn("error initialising Jackrabbit",e);
}
jackrabbitInitialized = true;
}
}

@ -1,36 +0,0 @@
package org.gcube.data.access.storagehub;
import java.util.Map;
import java.util.WeakHashMap;
import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.contentmanagement.blobstorage.service.IClient;
import org.gcube.contentmanager.storageclient.model.protocol.smp.Handler;
import org.gcube.contentmanager.storageclient.wrapper.AccessType;
import org.gcube.contentmanager.storageclient.wrapper.MemoryType;
import org.gcube.contentmanager.storageclient.wrapper.StorageClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class StorageFactory {
public final static String SERVICE_NAME = "home-library";
public final static String SERVICE_CLASS = "org.gcube.portlets.user";
private static Map<String, IClient> clientUserMap = new WeakHashMap<String, IClient>();
private static Logger log = LoggerFactory.getLogger(StorageFactory.class);
public static IClient getGcubeStorage(){
String login = AuthorizationProvider.instance.get().getClient().getId();
if (!clientUserMap.containsKey(login)){
IClient storage = new StorageClient(SERVICE_CLASS, SERVICE_NAME,
login, AccessType.SHARED, MemoryType.PERSISTENT).getClient();
log.info("******* Storage activateProtocol for Storage **********");
Handler.activateProtocol();
clientUserMap.put(login, storage);
return storage;
} else return clientUserMap.get(login);
}
}

@ -8,12 +8,14 @@ import javax.ws.rs.core.Application;
import org.gcube.common.gxrest.response.entity.SerializableErrorEntityTextWriter;
import org.gcube.data.access.storagehub.services.ACLManager;
import org.gcube.data.access.storagehub.services.DocsGenerator;
import org.gcube.data.access.storagehub.services.GroupManager;
import org.gcube.data.access.storagehub.services.Impersonable;
import org.gcube.data.access.storagehub.services.ItemSharing;
import org.gcube.data.access.storagehub.services.ItemsCreator;
import org.gcube.data.access.storagehub.services.ItemsManager;
import org.gcube.data.access.storagehub.services.MessageManager;
import org.gcube.data.access.storagehub.services.StorageManager;
import org.gcube.data.access.storagehub.services.UserManager;
import org.gcube.data.access.storagehub.services.WorkspaceManager;
import org.gcube.data.access.storagehub.services.admin.ScriptManager;
@ -36,6 +38,8 @@ public class StorageHub extends Application {
classes.add(GroupManager.class);
classes.add(ScriptManager.class);
classes.add(MessageManager.class);
classes.add(StorageManager.class);
classes.add(DocsGenerator.class);
classes.add(MultiPartFeature.class);
classes.add(SerializableErrorEntityTextWriter.class);
classes.add(MyApplicationListener.class);

@ -1,8 +1,13 @@
package org.gcube.data.access.storagehub;
import javax.jcr.SimpleCredentials;
import org.gcube.data.access.storagehub.handlers.CredentialHandler;
import org.gcube.data.access.storagehub.services.RepositoryInitializer;
import org.gcube.smartgears.ApplicationManager;
import org.gcube.smartgears.ContextProvider;
import org.gcube.smartgears.context.application.ApplicationContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -11,19 +16,43 @@ public class StorageHubAppllicationManager implements ApplicationManager {
private static Logger logger = LoggerFactory.getLogger(StorageHubAppllicationManager.class);
private boolean alreadyShutDown = false;
private boolean alreadyInit = false;
public static RepositoryInitializer repository;
private static RepositoryInitializer repository = RepositoryInitializerImpl.get();
//private static NotificationClient notificationClient;
public static RepositoryInitializer getRepository() {
return repository;
}
/*
public static NotificationClient getNotificationClient() {
return notificationClient;
}*/
@Override
public synchronized void onInit() {
logger.info("jackrabbit initialization started");
logger.info("initializing storagehub");
try {
repository = new RepositoryInitializerImpl();
} catch (Exception e) {
logger.error("ERROR INITIALIZING REPOSITORY",e);
ApplicationContext ctx = ContextProvider.get();
if (!alreadyInit) {
logger.info("jackrabbit initialization started");
SimpleCredentials credentials = CredentialHandler.getAdminCredentials(ctx.application());
repository.initContainerAtFirstStart(credentials);
//notificationClient = new NotificationClient();
alreadyInit = true;
}
} catch (Throwable e) {
logger.error("unexpected error initiliazing storagehub",e);
}
repository.getRepository();
}
@Override
@ -37,5 +66,6 @@ public class StorageHubAppllicationManager implements ApplicationManager {
logger.warn("the database was not shutdown properly",e);
}
}
}

@ -30,18 +30,21 @@ import org.gcube.common.storagehub.model.exceptions.IdNotFoundException;
import org.gcube.common.storagehub.model.exceptions.ItemLockedException;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.common.storagehub.model.exceptions.UserNotAuthorizedException;
import org.gcube.common.storagehub.model.items.AbstractFileItem;
import org.gcube.common.storagehub.model.items.ExternalLink;
import org.gcube.common.storagehub.model.items.FolderItem;
import org.gcube.common.storagehub.model.items.GCubeItem;
import org.gcube.common.storagehub.model.items.Item;
import org.gcube.common.storagehub.model.items.RootItem;
import org.gcube.common.storagehub.model.items.SharedFolder;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.types.ItemAction;
import org.gcube.common.storagehub.model.types.NodeProperty;
import org.gcube.data.access.storagehub.accounting.AccountingHandler;
import org.gcube.data.access.storagehub.handlers.items.Item2NodeConverter;
import org.gcube.data.access.storagehub.handlers.items.Node2ItemConverter;
import org.gcube.data.access.storagehub.handlers.items.builders.FolderCreationParameters;
import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -129,6 +132,7 @@ public class Utils {
}
public static <T extends Item> List<T> getItemList(Node parent, List<String> excludes, Range range, boolean showHidden, Class<? extends RootItem> nodeTypeToInclude) throws RepositoryException, BackendGenericError{
return getItemList(null, parent, excludes, range, showHidden, nodeTypeToInclude);
}
@ -188,7 +192,7 @@ public class Utils {
}
public static void copyStream(InputStream in, OutputStream out) throws IOException {
byte[] buffer = new byte[2048];
@ -211,19 +215,20 @@ public class Utils {
return false;
}
public static String checkExistanceAndGetUniqueName(Session ses, Node destination, String name) throws BackendGenericError{
String escapedName = Text.escapeIllegalJcrChars(name);
try {
destination.getNode(name);
destination.getNode(escapedName);
}catch(PathNotFoundException pnf) {
return Text.escapeIllegalJcrChars(name);
return escapedName;
} catch (Exception e) {
throw new BackendGenericError(e);
}
try {
String filename = FilenameUtils.getBaseName(name);
String ext = FilenameUtils.getExtension(name);
String filename = FilenameUtils.getBaseName(escapedName);
String ext = FilenameUtils.getExtension(escapedName);
String nameTocheck = ext.isEmpty()? String.format("%s(*)",filename): String.format("%s(*).%s",filename, ext);
@ -240,19 +245,21 @@ public class Utils {
String newName = ext.isEmpty()? String.format("%s(%d)", filename,maxval+1) : String.format("%s(%d).%s", filename,maxval+1, ext) ;
return Text.escapeIllegalJcrChars(newName);
return newName;
} catch (Exception e) {
throw new BackendGenericError(e);
}
}
public static Node createFolderInternally(FolderCreationParameters params, AccountingHandler accountingHandler) throws StorageHubException {
public static Node createFolderInternally(FolderCreationParameters params, AccountingHandler accountingHandler, boolean isInternalWSFolder) throws StorageHubException {
logger.debug("creating folder {} in {}", params.getName(), params.getParentId());
Node destinationNode;
FolderItem destinationItem;
try {
destinationNode = params.getSession().getNodeByIdentifier(params.getParentId());
destinationItem = (FolderItem) new Node2ItemConverter().getItem(destinationNode, Excludes.ALL);
}catch (RepositoryException e) {
throw new IdNotFoundException(params.getParentId());
}
@ -263,6 +270,20 @@ public class Utils {
item.setName(uniqueName);
item.setTitle(uniqueName);
item.setDescription(params.getDescription());
if (isInternalWSFolder) {
item.setBackend(StorageBackendHandler.getDefaultPayloadForFolder());
} else {
if (params.getBackend() != null)
item.setBackend(params.getBackend());
else {
if (destinationItem.getBackend() != null)
item.setBackend(destinationItem.getBackend());
else
item.setBackend(StorageBackendHandler.getDefaultPayloadForFolder());
}
}
//TODO: item.setExternalStorage();
//item.setCreationTime(now);
@ -346,5 +367,11 @@ public class Utils {
node.setProperty(NodeProperty.LAST_ACTION.toString(), action.name());
}
public static void setContentFromMetaInfo(AbstractFileItem item, MetaInfo contentInfo) {
item.getContent().setSize(contentInfo.getSize());
item.getContent().setRemotePath(contentInfo.getRemotePath());
item.getContent().setSize(contentInfo.getSize());
item.getContent().setPayloadBackend(contentInfo.getPayloadBackend());
}
}
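A note on the new backend selection in createFolderInternally: the nested conditionals above reduce to a simple precedence chain. The flattened form below is only an illustration of that order (it assumes the backend payload type is PayloadBackend, as used elsewhere in this changeset):

// precedence: internal WS folder > explicitly requested backend > parent folder backend > default
PayloadBackend backend;
if (isInternalWSFolder)
    backend = StorageBackendHandler.getDefaultPayloadForFolder();
else if (params.getBackend() != null)
    backend = params.getBackend();
else if (destinationItem.getBackend() != null)
    backend = destinationItem.getBackend();
else
    backend = StorageBackendHandler.getDefaultPayloadForFolder();
item.setBackend(backend);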

@ -32,11 +32,12 @@ public class AccountingHandler {
private static final String OLD_ITEM_NAME = "hl:oldItemName";
private static final String NEW_ITEM_NAME = "hl:newItemName";
private static final String BASE_VERSION ="1.0";
private static final Logger logger = LoggerFactory.getLogger(AccountingHandler.class);
public void createReadObj(String title, Session ses, Node node, String login, boolean saveHistory ) {
public void createReadObj(String title, String version, Session ses, Node node, String login, boolean saveHistory ) {
try {
if (!node.hasNode(NodeProperty.ACCOUNTING.toString())){
@ -49,20 +50,8 @@ public class AccountingHandler {
accountingNode.setProperty(USER, login);
accountingNode.setProperty(DATE, Calendar.getInstance());
accountingNode.setProperty(ITEM_NAME, title);
try {
VersionManager vManager = ses.getWorkspace().getVersionManager();
VersionHistory history = vManager.getVersionHistory(node.getNode("jcr:content").getPath());
VersionIterator versions = history.getAllVersions();
Version version= null;
while (versions.hasNext()) {
version = versions.nextVersion();
}
if (version!=null)
accountingNode.setProperty(VERSION_ACCOUNTING, version.getName());
}catch(UnsupportedRepositoryOperationException uropex) {
logger.warn("version cannot be retrieved", uropex);
}
accountingNode.setProperty(VERSION_ACCOUNTING, version!=null?version:BASE_VERSION);
if (saveHistory) ses.save();
} catch (RepositoryException e) {
logger.warn("error trying to retrieve accountign node",e);
@ -81,6 +70,8 @@ public class AccountingHandler {
accountingNode.setProperty(USER, login);
accountingNode.setProperty(DATE, Calendar.getInstance());
accountingNode.setProperty(ITEM_NAME, title);
accountingNode.setProperty(VERSION_ACCOUNTING, BASE_VERSION);
if (saveHistory) ses.save();
} catch (RepositoryException e) {
@ -88,7 +79,7 @@ public class AccountingHandler {
}
}
public void createFileUpdated(String title, Session ses, Node node, String login, boolean saveHistory ) {
public void createFileUpdated(String title, String version, Session ses, Node node, String login, boolean saveHistory ) {
try {
if (!node.hasNode(NodeProperty.ACCOUNTING.toString())){
@ -97,26 +88,35 @@ public class AccountingHandler {
Node accountingNodeParent = node.getNode(NodeProperty.ACCOUNTING.toString());
Node accountingNode = accountingNodeParent.addNode(UUID.randomUUID().toString(),AccountingEntryType.UPDATE.getNodeTypeDefinition());
accountingNode.setProperty(USER, login);
accountingNode.setProperty(DATE, Calendar.getInstance());
accountingNode.setProperty(ITEM_NAME, title);
accountingNode.setProperty(VERSION_ACCOUNTING, version);
try {
VersionManager vManager = ses.getWorkspace().getVersionManager();
VersionHistory history = vManager.getVersionHistory(node.getNode("jcr:content").getPath());
VersionIterator versions = history.getAllVersions();
Version version= null;
while (versions.hasNext()) {
version = versions.nextVersion();
}
if (version!=null)
accountingNode.setProperty(VERSION_ACCOUNTING, version.getName());
}catch(UnsupportedRepositoryOperationException uropex) {
logger.warn("version cannot be retrieved", uropex);
if (saveHistory) ses.save();
} catch (RepositoryException e) {
logger.warn("error trying to retrieve accountign node",e);
}
}
public void createVersionDeleted(String title, String version, Session ses, Node node, String login, boolean saveHistory ) {
try {
if (!node.hasNode(NodeProperty.ACCOUNTING.toString())){
node.addNode(NodeProperty.ACCOUNTING.toString(), NodeProperty.NT_ACCOUNTING.toString());
}
Node accountingNodeParent = node.getNode(NodeProperty.ACCOUNTING.toString());
Node accountingNode = accountingNodeParent.addNode(UUID.randomUUID().toString(),AccountingEntryType.DELETE.getNodeTypeDefinition());
accountingNode.setProperty(USER, login);
accountingNode.setProperty(DATE, Calendar.getInstance());
accountingNode.setProperty(ITEM_NAME, title);
accountingNode.setProperty(VERSION_ACCOUNTING, version);
if (saveHistory) ses.save();
} catch (RepositoryException e) {

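With this change the accounting entries no longer walk the JCR VersionHistory themselves; the caller passes the version name explicitly. A minimal sketch of the calling pattern, taken from the CompressHandler and ItemHandler hunks in this same changeset (versionHandler, node, itemId, title, session and login stand for whatever the caller already has at hand):

String versionName = null;
try {
    // base version of the jcr:content node, see VersionHandler.getCurrentVersion below
    versionName = versionHandler.getCurrentVersion(node).getName();
} catch (RepositoryException e) {
    logger.warn("current version of {} cannot be retrieved", itemId);
}
accountingHandler.createReadObj(title, versionName, session, node, login, false);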
@ -40,11 +40,12 @@ public class ClassHandler {
Set<Class<?>> classesAnnotated = reflection.getTypesAnnotatedWith(RootNode.class);
for (Class<?> clazz: classesAnnotated ){
if (RootItem.class.isAssignableFrom(clazz)) {
String value = clazz.getAnnotation(RootNode.class).value();
log.debug("loading class {} with value {} ", clazz, value );
classMap.put(value, (Class<? extends RootItem>) clazz);
typeMap.put((Class<? extends RootItem>) clazz, value);
if (RootItem.class.isAssignableFrom(clazz) && clazz.isAnnotationPresent(RootNode.class)) {
String[] values = clazz.getAnnotation(RootNode.class).value();
log.debug("loading class {} with values {} ", clazz, values );
for (String value: values)
classMap.put(value, (Class<? extends RootItem>) clazz);
typeMap.put((Class<? extends RootItem>) clazz, values[0]);
}
}
}

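The RootNode annotation value is now an array, so a single item class can be registered under several JCR node types, while the first entry remains the canonical type used when writing nodes (see Item2NodeConverter.replaceContent further down). Illustrative only; the annotated class and the node type names below are invented for the example:

@RootNode({"nthl:someFile", "nthl:someFileLegacy"})   // hypothetical node type names
public class SomeFileItem extends AbstractFileItem {
    // classMap will map both names to SomeFileItem.class,
    // typeMap will map SomeFileItem.class to "nthl:someFile"
}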
@ -12,17 +12,20 @@ import javax.inject.Inject;
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.version.Version;
import org.gcube.common.storagehub.model.Excludes;
import org.gcube.common.storagehub.model.Path;
import org.gcube.common.storagehub.model.Paths;
import org.gcube.common.storagehub.model.exceptions.BackendGenericError;
import org.gcube.common.storagehub.model.items.AbstractFileItem;
import org.gcube.common.storagehub.model.items.FolderItem;
import org.gcube.common.storagehub.model.items.Item;
import org.gcube.common.storagehub.model.plugins.FolderManager;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
import org.gcube.data.access.storagehub.Utils;
import org.gcube.data.access.storagehub.accounting.AccountingHandler;
import org.gcube.data.access.storagehub.handlers.plugins.FolderPluginHandler;
import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -31,9 +34,12 @@ public class CompressHandler {
private Logger logger = LoggerFactory.getLogger(CompressHandler.class);
@Inject
FolderPluginHandler pluginHandler;
StorageBackendHandler storageBackendHandler;
public Deque<Item> getAllNodesForZip(FolderItem directory, Session session, String login, AccountingHandler accountingHandler, List<String> excludes) throws RepositoryException, BackendGenericError{
@Inject
VersionHandler versionHandler;
public Deque<Item> getAllNodesForZip(FolderItem directory, String login, Session session, AccountingHandler accountingHandler, List<String> excludes) throws RepositoryException, BackendGenericError{
Deque<Item> queue = new LinkedList<Item>();
Node currentNode = session.getNodeByIdentifier(directory.getId());
queue.push(directory);
@ -42,11 +48,18 @@ public class CompressHandler {
for (Item item : Utils.getItemList(currentNode,Excludes.GET_ONLY_CONTENT, null, false, null)){
if (excludes.contains(item.getId())) continue;
if (item instanceof FolderItem)
tempQueue.addAll(getAllNodesForZip((FolderItem) item, session, login, accountingHandler, excludes));
tempQueue.addAll(getAllNodesForZip((FolderItem) item, login, session, accountingHandler, excludes));
else if (item instanceof AbstractFileItem){
logger.trace("adding file {}",item.getPath());
AbstractFileItem fileItem = (AbstractFileItem) item;
accountingHandler.createReadObj(fileItem.getTitle(), session, session.getNodeByIdentifier(item.getId()), login, false);
String versionName = null;
try {
Version version = versionHandler.getCurrentVersion((Node) item.getRelatedNode());
versionName = version.getName();
}catch(RepositoryException e) {
logger.warn("current version of {} cannot be retreived", item.getId());
}
accountingHandler.createReadObj(fileItem.getTitle(), versionName, session, (Node) item.getRelatedNode(), login, false);
queue.addLast(item);
}
}
@ -55,9 +68,9 @@ public class CompressHandler {
}
public void zipNode(ZipOutputStream zos, Deque<Item> queue, String login, org.gcube.common.storagehub.model.Path originalPath) throws Exception{
public void zipNode(ZipOutputStream zos, Deque<Item> queue, Path originalPath) throws Exception{
logger.trace("originalPath is {}",originalPath.toPath());
org.gcube.common.storagehub.model.Path actualPath = Paths.getPath("");
Path actualPath = Paths.getPath("");
while (!queue.isEmpty()) {
Item item = queue.pop();
if (item instanceof FolderItem) {
@ -74,8 +87,12 @@ public class CompressHandler {
} else if (item instanceof AbstractFileItem){
try {
AbstractFileItem fileItem = (AbstractFileItem)item;
FolderManager manager = pluginHandler.getFolderManager(fileItem);
InputStream streamToWrite = manager.getStorageBackend().download(fileItem.getContent());
StorageBackendFactory sbf = storageBackendHandler.get(fileItem.getContent().getPayloadBackend());
StorageBackend sb = sbf.create(fileItem.getContent().getPayloadBackend());
InputStream streamToWrite = sb.download(fileItem.getContent());
if (streamToWrite == null){
logger.warn("discarding item {} ",item.getName());
continue;

@ -0,0 +1,211 @@
package org.gcube.data.access.storagehub.handlers;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Deque;
import java.util.List;
import java.util.Map;
import java.util.zip.Deflater;
import java.util.zip.ZipOutputStream;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.version.Version;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;
import org.apache.commons.io.FilenameUtils;
import org.gcube.common.storagehub.model.Constants;
import org.gcube.common.storagehub.model.Excludes;
import org.gcube.common.storagehub.model.Paths;
import org.gcube.common.storagehub.model.exceptions.InvalidItemException;
import org.gcube.common.storagehub.model.exceptions.PluginInitializationException;
import org.gcube.common.storagehub.model.exceptions.PluginNotFoundException;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.common.storagehub.model.exceptions.StorageIdNotFoundException;
import org.gcube.common.storagehub.model.items.AbstractFileItem;
import org.gcube.common.storagehub.model.items.FolderItem;
import org.gcube.common.storagehub.model.items.Item;
import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
import org.gcube.data.access.storagehub.SingleFileStreamingOutput;
import org.gcube.data.access.storagehub.accounting.AccountingHandler;
import org.gcube.data.access.storagehub.handlers.items.Node2ItemConverter;
import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class DownloadHandler {
private static final Logger log = LoggerFactory.getLogger(DownloadHandler.class);
@Inject
private AccountingHandler accountingHandler;
@Inject
private StorageBackendHandler storageBackendHandler;
@Inject
private CompressHandler compressHandler;
@Inject
private VersionHandler versionHandler;
@Inject
private Node2ItemConverter node2Item;
public Response downloadFolderItem(Session ses, String login, FolderItem item, boolean withAccounting ) throws StorageHubException, RepositoryException {
try {
final Deque<Item> allNodes = compressHandler.getAllNodesForZip((FolderItem)item, login, ses, accountingHandler, Excludes.GET_ONLY_CONTENT);
final org.gcube.common.storagehub.model.Path originalPath = Paths.getPath(item.getParentPath());
StreamingOutput so = new StreamingOutput() {
@Override
public void write(OutputStream os) {
try(ZipOutputStream zos = new ZipOutputStream(os)){
long start = System.currentTimeMillis();
zos.setLevel(Deflater.BEST_COMPRESSION);
log.debug("writing StreamOutput");
compressHandler.zipNode(zos, allNodes, originalPath);
log.debug("StreamOutput written in {}",(System.currentTimeMillis()-start));
} catch (Exception e) {
log.error("error writing stream",e);
}
}
};
Response response = Response
.ok(so)
.header("content-disposition","attachment; filename = "+item.getTitle()+".zip")
.header("Content-Type", "application/zip")
.header("Content-Length", -1l)
.build();
if (withAccounting)
accountingHandler.createReadObj(item.getTitle(), null, ses, (Node) item.getRelatedNode(), login, false);
return response;
}finally {
if (ses!=null) ses.save();
}
}
public Response downloadFileItem(Session ses, AbstractFileItem fileItem, String login, boolean withAccounting) throws RepositoryException, PluginInitializationException, PluginNotFoundException, StorageHubException {
Content content = fileItem.getContent();
StorageBackendFactory sbf = storageBackendHandler.get(content.getPayloadBackend());
StorageBackend sb = sbf.create(content.getPayloadBackend());
InputStream streamToWrite = sb.download(content);
if (withAccounting) {
String versionName = null;
try {
Version version = versionHandler.getCurrentVersion((Node) fileItem.getRelatedNode());
versionName = version.getName();
}catch(RepositoryException e) {
log.warn("current version of {} cannot be retreived", fileItem.getId());
}
accountingHandler.createReadObj(fileItem.getTitle(), versionName, ses, (Node) fileItem.getRelatedNode(), login, true);
}
StreamingOutput so = new SingleFileStreamingOutput(streamToWrite);
return Response
.ok(so)
.header("content-disposition","attachment; filename = "+fileItem.getName())
.header("Content-Length", fileItem.getContent().getSize())
.header("Content-Type", fileItem.getContent().getMimeType())
.build();
}
public Response downloadVersionedItem(Session ses, String login, AbstractFileItem currentItem, String versionName, boolean withAccounting) throws RepositoryException, StorageHubException{
List<Version> jcrVersions = versionHandler.getContentVersionHistory((Node)currentItem.getRelatedNode());
for (Version version: jcrVersions) {
log.debug("retrieved version id {}, name {}", version.getIdentifier(), version.getName());
if (version.getName().equals(versionName)) {
Content content = node2Item.getContentFromVersion(version);
StorageBackendFactory sbf = storageBackendHandler.get(content.getPayloadBackend());
StorageBackend sb = sbf.create(content.getPayloadBackend());
InputStream streamToWrite = null;
try {
streamToWrite = sb.download(content);
}catch (StorageIdNotFoundException e) {
//TODO: temporary code, to be removed once the porting to MINIO is finished
if (sbf.getName().equals(Constants.MONGO_STORAGE)) {
sbf = storageBackendHandler.get(Constants.DEFAULT_S3_STORAGE);
sbf.create(new PayloadBackend(Constants.DEFAULT_S3_STORAGE, null));
} else
throw e;
}
log.debug("retrieved storage id is {} with storageBackend {} (stream is null? {})",content.getStorageId(), sbf.getName(), streamToWrite==null );
String oldfilename = FilenameUtils.getBaseName(currentItem.getTitle());
String ext = FilenameUtils.getExtension(currentItem.getTitle());
String fileName = String.format("%s_v%s.%s", oldfilename, version.getName(), ext);
if (withAccounting)
accountingHandler.createReadObj(currentItem.getTitle(), versionName, ses, (Node) currentItem.getRelatedNode(), login, true);
StreamingOutput so = new SingleFileStreamingOutput(streamToWrite);
return Response
.ok(so)
.header("content-disposition","attachment; filename = "+fileName)
.header("Content-Length", content.getSize())
.header("Content-Type", content.getMimeType())
.build();
}
}
throw new InvalidItemException("the version is not valid");
}
public Response downloadFileFromStorageBackend(String storageId, String storageName) throws RepositoryException, PluginInitializationException, PluginNotFoundException, StorageHubException {
StorageBackendFactory sbf = storageBackendHandler.get(storageName);
StorageBackend sb = sbf.create(new PayloadBackend(storageName, null));
InputStream streamToWrite = sb.download(storageId);
Map<String, String> userMetadata = sb.getFileMetadata(storageId);
log.info("returned metadata from storageBackend are: {}", userMetadata);
long size = Long.parseLong(userMetadata.get("size"));
String title = userMetadata.get("title");
String contentType = userMetadata.get("content-type");
StreamingOutput so = new SingleFileStreamingOutput(streamToWrite);
return Response
.ok(so)
.header("content-disposition","attachment; filename = "+title)
.header("Content-Length", size)
.header("Content-Type", contentType)
.build();
}
}
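For orientation, a hypothetical caller of the new handler (the variable names are invented; only the DownloadHandler signatures come from the code above):

@Inject DownloadHandler downloadHandler;

// plain file download, with accounting
Response fileResponse = downloadHandler.downloadFileItem(ses, fileItem, login, true);

// whole folder streamed as a zip, without accounting
Response zipResponse = downloadHandler.downloadFolderItem(ses, login, folderItem, false);

// a specific version of a file
Response versionResponse = downloadHandler.downloadVersionedItem(ses, login, fileItem, "1.2", true);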

@ -41,15 +41,20 @@ public class GroupHandler {
String folderName = group.getPrincipal().getName();
Node folder = getFolderNodeRelatedToGroup(session, folderName);
boolean found = false;
NodeIterator ni = folder.getSharedSet();
while (ni.hasNext()) {
Node node = ni.nextNode();
if (node.getPath().startsWith(pathUtil.getWorkspacePath(user.getPrincipal().getName()).toPath())) {
if (node.getPath().startsWith(pathUtil.getVREsPath(userId, session).toPath())) {
node.removeShare();
found = true;
break;
}
}
if (!found)
log.warn("sharing not removed for user {} ",userId);
return group.removeMember(user);
}

@ -0,0 +1,143 @@
package org.gcube.data.access.storagehub.handlers;
import static org.gcube.common.storagehub.model.Constants.enchriptedPrefix;
import static org.gcube.common.storagehub.model.Constants.enchriptedVolatile;
import static org.gcube.common.storagehub.model.Constants.versionPrefix;
import java.util.Base64;
import javax.inject.Singleton;
import javax.servlet.ServletContext;
import org.gcube.common.encryption.encrypter.StringEncrypter;
import org.gcube.common.security.AuthorizedTasks;
import org.gcube.common.security.secrets.Secret;
import org.gcube.common.storagehub.model.exceptions.BackendGenericError;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.data.access.storagehub.types.LinkType;
import org.gcube.data.access.storagehub.types.PublicLink;
import org.gcube.smartgears.ContextProvider;
@Singleton
public class PublicLinkHandler {
public String getForItem(String itemId, ServletContext context) throws BackendGenericError{
return getUrl(itemId, enchriptedPrefix, context);
}
public String getForVersionedItem(String itemId, String version, ServletContext context) throws BackendGenericError {
return getUrl(String.format("%s%s%s",itemId, versionPrefix, version), enchriptedPrefix, context);
}
public String getForVolatile(String fileId, String storageName, ServletContext context) throws BackendGenericError {
return getUrl(String.format("%s_%s",fileId, storageName), enchriptedVolatile, context);
}
public PublicLink resolveEnchriptedId(String enchriptedId) throws StorageHubException {
String complexId = enchriptedId;
boolean isVolatile = false;
if (enchriptedId.startsWith(enchriptedPrefix) || enchriptedId.startsWith(enchriptedVolatile) ) {
final String enchriptedValue = enchriptedId.startsWith(enchriptedPrefix) ? enchriptedPrefix : enchriptedVolatile;
isVolatile = enchriptedId.startsWith(enchriptedVolatile);
try {
String infraContext = String.format("/%s", ContextProvider.get().container().configuration().infrastructure());
Secret infraSecret = ContextProvider.get().container().authorizationProvider().getSecretForContext(infraContext);
complexId = AuthorizedTasks.executeSafely(() -> {
return StringEncrypter.getEncrypter().decrypt(
new String(Base64.getUrlDecoder().decode(enchriptedId.replace(enchriptedValue, ""))));
}, infraSecret);
}catch(Throwable e){
throw new BackendGenericError("invalid public url",e);
}
}
if (isVolatile) {
String[] volatileIdSplit = complexId.split("_");
return new VolatilePublicLink(volatileIdSplit[0], volatileIdSplit[1]);
}else {
if (complexId.contains(versionPrefix)) {
String[] split = complexId.split(versionPrefix);
String itemId = split[0];
String versionName = split[1];
return new ItemPublicLink(itemId, versionName);
} else
return new ItemPublicLink(complexId);
}
}
private String getUrl(String toEnchript, String prefix, ServletContext context) throws BackendGenericError{
String infraContext = String.format("/%s", ContextProvider.get().container().configuration().infrastructure());
Secret infraSecret = ContextProvider.get().container().authorizationProvider().getSecretForContext(infraContext);
try {
String enchriptedQueryString = AuthorizedTasks.executeSafely(
() -> {return StringEncrypter.getEncrypter().encrypt(toEnchript);},infraSecret);
String basepath = context.getInitParameter("resolver-basepath");
String filePublicUrl = String.format("%s/%s%s",basepath, prefix, Base64.getUrlEncoder().encodeToString(enchriptedQueryString.getBytes()));
return filePublicUrl;
}catch (Throwable e) {
throw new BackendGenericError("error encrypting item id",e );
}
}
public static class VolatilePublicLink implements PublicLink {
private String storageKey;
private String storageName;
protected VolatilePublicLink(String storageKey, String storageName){
this.storageKey = storageKey;
this.storageName = storageName;
}
@Override
public LinkType getType() {return LinkType.VOLATILE;}
@Override
public String getId() { return storageKey; }
@Override
public String getStorageName() { return storageName; }
}
public static class ItemPublicLink implements PublicLink {
private String itemId;
private String version;
private LinkType type;
protected ItemPublicLink(String itemId){
this.itemId = itemId;
this.type = LinkType.STANDARD;
}
protected ItemPublicLink(String itemId, String version){
this.itemId = itemId;
this.version = version;
this.type = LinkType.VERSIONED;
}
@Override
public LinkType getType() {return type;}
@Override
public String getId() { return itemId; }
@Override
public String getVersion() { return version; }
}
}
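A hypothetical round trip through the handler above (identifiers invented; the public URL is built in getUrl as basepath + prefix + Base64(encrypt(id))):

PublicLinkHandler publicLinks = ...;   // injected, the class is a @Singleton
String url = publicLinks.getForVersionedItem(itemId, "1.0", servletContext);

// later, when the resolver comes back with the encrypted part of the URL:
PublicLink link = publicLinks.resolveEnchriptedId(encryptedToken);
if (link.getType() == LinkType.VOLATILE) {
    // link.getId() is the volatile storage key, link.getStorageName() the backend name
} else if (link.getType() == LinkType.VERSIONED) {
    // link.getId() is the item id, link.getVersion() the version name
} else {
    // LinkType.STANDARD: plain item link
}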

@ -18,7 +18,7 @@ import javax.jcr.Session;
import javax.jcr.lock.LockException;
import javax.jcr.version.Version;
import org.gcube.common.authorization.library.AuthorizedTasks;
import org.gcube.common.security.AuthorizedTasks;
import org.gcube.common.storagehub.model.Excludes;
import org.gcube.common.storagehub.model.Paths;
import org.gcube.common.storagehub.model.exceptions.BackendGenericError;
@ -30,7 +30,8 @@ import org.gcube.common.storagehub.model.items.FolderItem;
import org.gcube.common.storagehub.model.items.Item;
import org.gcube.common.storagehub.model.items.TrashItem;
import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.plugins.FolderManager;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
import org.gcube.common.storagehub.model.types.ItemAction;
import org.gcube.data.access.storagehub.AuthorizationChecker;
import org.gcube.data.access.storagehub.PathUtil;
@ -38,7 +39,7 @@ import org.gcube.data.access.storagehub.Utils;
import org.gcube.data.access.storagehub.accounting.AccountingHandler;
import org.gcube.data.access.storagehub.handlers.items.Item2NodeConverter;
import org.gcube.data.access.storagehub.handlers.items.Node2ItemConverter;
import org.gcube.data.access.storagehub.handlers.plugins.FolderPluginHandler;
import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler;
import org.gcube.data.access.storagehub.types.ContentPair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -68,11 +69,9 @@ public class TrashHandler {
@Inject
PathUtil pathUtil;
@Inject FolderPluginHandler managerHandler;
@Inject
FolderPluginHandler folderHandler;
StorageBackendHandler storageBackendHandler;
public void removeNodes(Session ses, List<Item> itemsToDelete) throws RepositoryException, StorageHubException{
log.debug("defnitively removing nodes with ids {}",itemsToDelete);
for (Item item: itemsToDelete) {
@ -109,8 +108,13 @@ public class TrashHandler {
}
try {
FolderManager manager = folderHandler.getFolderManager(item);
contentSet.add(new ContentPair(item.getContent(), manager.getStorageBackend()));
StorageBackendFactory sbf = storageBackendHandler.get(item.getContent().getPayloadBackend());
StorageBackend sb = sbf.create(item.getContent().getPayloadBackend());
contentSet.add(new ContentPair(item.getContent(), sb));
List<Version> versions = versionHandler.getContentVersionHistory((Node)item.getRelatedNode());
@ -118,7 +122,7 @@ public class TrashHandler {
try {
Content content = node2Item.getContentFromVersion(version);
if (content!= null && content.getStorageId()!=null)
contentSet.add(new ContentPair(content, manager.getStorageBackend()));
contentSet.add(new ContentPair(content, sb));
else log.warn("invalid version {}",version.getName());
}catch (Throwable t) {
log.warn("error retrieving version content for {}",version.getName(),t);
@ -159,7 +163,7 @@ public class TrashHandler {
public void run() {
for (ContentPair cp: contentToDelete ) {
try {
cp.getStorageBackend().onDelete(cp.getContent());
cp.getStorageBackend().delete(cp.getContent().getStorageId());
log.debug("file with id {} correctly removed from storage {}",cp.getContent().getStorageId(),cp.getStorageBackend().getClass().getSimpleName());
}catch(Throwable t) {
log.warn("error removing file with id {} from storage {}",cp.getContent().getStorageId(), cp.getStorageBackend().getClass().getSimpleName(), t);

@ -6,6 +6,7 @@ import java.util.List;
import javax.inject.Singleton;
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.version.Version;
import javax.jcr.version.VersionHistory;
@ -52,7 +53,14 @@ public class VersionHandler {
logger.warn("cannot checkoutNode content node",e);
}
}
public Version getCurrentVersion(Node node) throws RepositoryException{
Session session = node.getSession();
Node contentNode = node.getNode(NodeConstants.CONTENT_NAME);
VersionManager versionManager = session.getWorkspace().getVersionManager();
return versionManager.getBaseVersion(contentNode.getPath());
}
public List<Version> getContentVersionHistory(Node node) {
try {
Session session = node.getSession();
@ -68,10 +76,17 @@ public class VersionHandler {
logger.debug("version name {} with nodeType {}",version.getName(),version.getPrimaryNodeType().getName());
}
return versions;
}catch(Exception e ) {
}catch(Throwable e ) {
logger.warn("cannot get version history content node",e);
return Collections.emptyList();
}
}
public void removeContentVersion(Node node, String versionName) throws RepositoryException{
Node contentNode = node.getNode(NodeConstants.CONTENT_NAME);
VersionHistory history = contentNode.getSession().getWorkspace().getVersionManager().getVersionHistory(contentNode.getPath());
history.removeVersion(versionName);
}
}
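The two new methods pair naturally with the createVersionDeleted accounting entry added above; a short sketch of how a version removal would be recorded (the surrounding REST wiring is not part of these hunks, so this is an assumption about the intended use):

versionHandler.removeContentVersion(node, versionName);   // drops the version from the jcr:content history
accountingHandler.createVersionDeleted(item.getTitle(), versionName, ses, node, login, true);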

@ -7,10 +7,18 @@ import org.gcube.common.storagehub.model.items.nodes.Content;
public interface ContentHandler {
void initiliseSpecificContent(InputStream is, String fileName, String mimeType) throws Exception;
boolean requiresInputStream();
Content getContent();
default void initiliseSpecificContent(InputStream is, String fileName, String mimeType, long size) throws Exception{
throw new UnsupportedOperationException();
}
default void initiliseSpecificContent(String fileName, String mimeType) throws Exception {
throw new UnsupportedOperationException();
}
Content getContent();
AbstractFileItem buildItem(String name, String description, String login);
}
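The interface now distinguishes handlers that need the payload stream from those that do not. The dispatch itself is not visible in these hunks, so the following is only the assumed calling pattern:

ContentHandler handler = contenthandlerFactory.create(mimeType);
if (handler.requiresInputStream())
    handler.initiliseSpecificContent(contentStream, fileName, mimeType, size);   // e.g. ImageHandler, PdfHandler
else
    handler.initiliseSpecificContent(fileName, mimeType);                        // e.g. GenericFileHandler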

@ -42,9 +42,9 @@ public class ContentHandlerFactory {
public ContentHandler create(String mimetype) throws Exception{
Class<? extends ContentHandler> handlerClass = handlerMap.get(mimetype);
if (handlerClass!=null)
return handlerClass.newInstance();
return handlerClass.getDeclaredConstructor().newInstance();
else
return defaultHandler.newInstance();
return defaultHandler.getDeclaredConstructor().newInstance();
}
}

@ -1,6 +1,5 @@
package org.gcube.data.access.storagehub.handlers.content;
import java.io.InputStream;
import java.util.Calendar;
import org.gcube.common.storagehub.model.items.GenericFileItem;
@ -11,12 +10,17 @@ public class GenericFileHandler implements ContentHandler{
Content content = new Content();
@Override
public void initiliseSpecificContent(InputStream is, String filename, String mimeType) throws Exception {
public boolean requiresInputStream() {
return false;
}
@Override
public void initiliseSpecificContent(String filename, String mimeType) throws Exception {
content.setMimeType(mimeType);
}
@Override
public Content getContent() {
return content;

@ -28,28 +28,38 @@ public class ImageHandler implements ContentHandler{
private static final Logger logger = LoggerFactory.getLogger(ImageHandler.class);
@Override
public boolean requiresInputStream() {
return true;
}
@Override
public void initiliseSpecificContent(InputStream is, String fileName, String mimeType) throws Exception {
Image image = javax.imageio.ImageIO.read(is);
public void initiliseSpecificContent(InputStream is, String fileName, String mimeType, long size) throws Exception {
if (size<5242880) {
Image image = javax.imageio.ImageIO.read(is);
int width = image.getWidth(null);
int height = image.getHeight(null);
int width = image.getWidth(null);
int height = image.getHeight(null);
content.setWidth(Long.valueOf(width));
content.setHeight(Long.valueOf(height));
content.setWidth(Long.valueOf(width));
content.setHeight(Long.valueOf(height));
try {
int[] dimension = getThumbnailDimension(width, height);
try {
int[] dimension = getThumbnailDimension(width, height);
byte[] buf = transform(image, fileName, dimension[0], dimension[1]).toByteArray();
byte[] buf = transform(image, fileName, dimension[0], dimension[1]).toByteArray();
content.setThumbnailHeight(Long.valueOf(dimension[1]));
content.setThumbnailWidth(Long.valueOf(dimension[0]));
content.setThumbnailData(buf);
}catch(Throwable t) {
logger.warn("thumbnail for file {} cannot be created ", fileName,t);
content.setThumbnailHeight(Long.valueOf(dimension[1]));
content.setThumbnailWidth(Long.valueOf(dimension[0]));
content.setThumbnailData(buf);
}catch(Throwable t) {
logger.warn("thumbnail for file {} cannot be created ", fileName,t);
}
}
content.setMimeType(mimeType);
}

@ -17,7 +17,12 @@ public class OfficeAppHandler implements ContentHandler{
Content content = new Content();
@Override
public void initiliseSpecificContent(InputStream is, String filename, String mimeType) throws Exception {
public boolean requiresInputStream() {
return true;
}
@Override
public void initiliseSpecificContent(InputStream is, String filename, String mimeType, long size) throws Exception {
//detecting the file type
BodyContentHandler handler = new BodyContentHandler();
Metadata metadata = new Metadata();

@ -27,7 +27,12 @@ public class PdfHandler implements ContentHandler {
private static final Logger logger = LoggerFactory.getLogger(PdfHandler.class);
@Override
public void initiliseSpecificContent(InputStream is, String fileName, String mimeType) throws Exception {
public boolean requiresInputStream() {
return true;
}
@Override
public void initiliseSpecificContent(InputStream is, String fileName, String mimeType, long size) throws Exception {
try {
PdfReader reader = new PdfReader(is);
content.setNumberOfPages(Long.valueOf(reader.getNumberOfPages()));

@ -82,7 +82,6 @@ public class Item2NodeConverter {
logger.debug("error setting value",e);
}
}
}
return newNode;
@ -94,7 +93,7 @@ public class Item2NodeConverter {
}
private void iterateItemNodeAttributeFields(Object object, Node parentNode, String nodeName) throws Exception{
AttributeRootNode attributeRootNode = object.getClass().getAnnotation(AttributeRootNode.class);
Node newNode;
@ -115,6 +114,7 @@ public class Item2NodeConverter {
@SuppressWarnings("rawtypes")
Class returnType = field.getType();
Values values = getObjectValue(returnType, field.get(object));
if (values == null) continue;
if (values.isMulti()) newNode.setProperty(attribute.value(), values.getValues());
else newNode.setProperty(attribute.value(), values.getValue());
} catch (Exception e ) {
@ -145,13 +145,28 @@ public class Item2NodeConverter {
iterateItemNodeAttributeFields(obj,newNode, field.getName()+(i++));
}
} else if (field.isAnnotationPresent(NodeAttribute.class)){
NodeAttribute nodeAttribute = field.getAnnotation(NodeAttribute.class);
if (nodeAttribute.isReadOnly()) continue;
String subNodeName = nodeAttribute.value();
logger.trace("retrieving field node "+field.getName());
field.setAccessible(true);
try{
Object obj = field.get(object);
if (obj!=null)
iterateItemNodeAttributeFields(obj, newNode, subNodeName);
} catch (Exception e ) {
logger.debug("error setting value",e);
}
}
}
}
@SuppressWarnings({ "rawtypes" })
public static Values getObjectValue(Class returnType, Object value) throws Exception{
if (value== null) return null;
if (returnType.equals(String.class)) return new Values(new StringValue((String) value));
if (returnType.isEnum()) return new Values(new StringValue(((Enum) value).toString()));
if (returnType.equals(Calendar.class)) return new Values(new DateValue((Calendar) value));
@ -188,8 +203,9 @@ public class Item2NodeConverter {
public <F extends AbstractFileItem> void replaceContent(Node node, F item, ItemAction action){
try {
node.setPrimaryType(item.getClass().getAnnotation(RootNode.class).value());
String primaryType = item.getClass().getAnnotation(RootNode.class).value()[0];
node.setPrimaryType(primaryType);
Node contentNode = node.getNode(NodeConstants.CONTENT_NAME);
contentNode.setPrimaryType(item.getContent().getClass().getAnnotation(AttributeRootNode.class).value());
@ -197,22 +213,9 @@ public class Item2NodeConverter {
node.setProperty(NodeProperty.LAST_MODIFIED_BY.toString(), item.getLastModifiedBy());
node.setProperty(NodeProperty.LAST_ACTION.toString(), action.name());
for (Field field : retrieveAllFields(item.getContent().getClass())){
if (field.isAnnotationPresent(Attribute.class)){
Attribute attribute = field.getAnnotation(Attribute.class);
if (attribute.isReadOnly()) continue;
field.setAccessible(true);
try{
//Class<?> returnType = field.getType();
Values values = getObjectValue(field.getType(), field.get(item.getContent()));
if (values.isMulti()) contentNode.setProperty(attribute.value(), values.getValues() );
else contentNode.setProperty(attribute.value(), values.getValue());
} catch (Exception e ) {
logger.debug("error setting value for attribute "+attribute.value(),e);
}
}
}
replaceContentNodeInternal(contentNode, item.getContent().getClass(),item.getContent());
} catch (RepositoryException e) {
logger.error("error writing repository",e);
@ -220,7 +223,41 @@ public class Item2NodeConverter {
}
}
//VALID ONLY FOR CONTENT
public void replaceContentNodeInternal(Node node, Class<?> clazz, Object instance) {
for (Field field : retrieveAllFields(clazz)){
if (field.isAnnotationPresent(Attribute.class)){
Attribute attribute = field.getAnnotation(Attribute.class);
if (attribute.isReadOnly()) continue;
field.setAccessible(true);
try{
//Class<?> returnType = field.getType();
Values values = getObjectValue(field.getType(), field.get(instance));
if (values.isMulti()) node.setProperty(attribute.value(), values.getValues() );
else node.setProperty(attribute.value(), values.getValue());
} catch (Exception e ) {
logger.debug("error setting value for attribute "+attribute.value(),e);
}
} else if (field.isAnnotationPresent(NodeAttribute.class)){
NodeAttribute nodeAttribute = field.getAnnotation(NodeAttribute.class);
if (nodeAttribute.isReadOnly()) continue;
String subNodeName = nodeAttribute.value();
logger.trace("retrieving field node "+field.getName());
field.setAccessible(true);
try{
Object obj = field.get(instance);
if (obj!=null)
iterateItemNodeAttributeFields(obj, node, subNodeName);
} catch (Exception e ) {
logger.debug("error setting value",e);
}
}
}
}
public void updateHidden(Node node, Boolean hidden,String login) throws RepositoryException {
Utils.setPropertyOnChangeNode(node, login, ItemAction.UPDATED);
node.setProperty(NodeProperty.HIDDEN.toString(), hidden);

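To make the refactoring above easier to follow: replaceContentNodeInternal now handles both @Attribute fields (written as properties of the node) and @NodeAttribute fields (written as child nodes, recursively). A hypothetical content bean, with invented names, showing the shape the converter expects:

@AttributeRootNode("nthl:sampleContent")                  // node type name is an example
public class SampleContent {
    @Attribute("hl:size")     private long size;          // becomes a property on the content node
    @Attribute("hl:mimeType") private String mimeType;
    @NodeAttribute("hl:payloadBackend")                   // becomes a child node written recursively
    private PayloadBackend payloadBackend;
}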
@ -1,16 +1,11 @@
package org.gcube.data.access.storagehub.handlers.items;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import javax.inject.Inject;
import javax.inject.Singleton;
@ -18,6 +13,7 @@ import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.lock.LockException;
import javax.jcr.version.Version;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
@ -26,7 +22,6 @@ import org.apache.tika.config.TikaConfig;
import org.apache.tika.detect.Detector;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.gcube.common.authorization.library.AuthorizedTasks;
import org.gcube.common.storagehub.model.Excludes;
import org.gcube.common.storagehub.model.NodeConstants;
import org.gcube.common.storagehub.model.Paths;
@ -38,12 +33,11 @@ import org.gcube.common.storagehub.model.exceptions.ItemLockedException;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.common.storagehub.model.items.AbstractFileItem;
import org.gcube.common.storagehub.model.items.FolderItem;
import org.gcube.common.storagehub.model.plugins.FolderManager;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
import org.gcube.common.storagehub.model.types.ItemAction;
import org.gcube.data.access.storagehub.AuthorizationChecker;
import org.gcube.data.access.storagehub.MultipleOutputStream;
import org.gcube.data.access.storagehub.Utils;
import org.gcube.data.access.storagehub.accounting.AccountingHandler;
import org.gcube.data.access.storagehub.handlers.VersionHandler;
@ -55,15 +49,15 @@ import org.gcube.data.access.storagehub.handlers.items.builders.FileCreationPara
import org.gcube.data.access.storagehub.handlers.items.builders.FolderCreationParameters;
import org.gcube.data.access.storagehub.handlers.items.builders.GCubeItemCreationParameters;
import org.gcube.data.access.storagehub.handlers.items.builders.URLCreationParameters;
import org.gcube.data.access.storagehub.handlers.plugins.FolderPluginHandler;
import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class ItemHandler {
@Inject
@Inject
AccountingHandler accountingHandler;
@Inject
@ -76,43 +70,48 @@ public class ItemHandler {
VersionHandler versionHandler;
@Inject
FolderPluginHandler pluginHandler;
StorageBackendHandler storageBackendHandler;
private static ExecutorService executor = Executors.newFixedThreadPool(100);
// private static ExecutorService executor = Executors.newFixedThreadPool(100);
@Inject Node2ItemConverter node2Item;
@Inject Item2NodeConverter item2Node;
@Inject
Node2ItemConverter node2Item;
@Inject
Item2NodeConverter item2Node;
private static Logger log = LoggerFactory.getLogger(ItemHandler.class);
public <T extends CreateParameters> String create(T parameters) throws Exception{
// TODO: accounting
// provider URI host
// resourceOwner user
// consumer write
// in case of a new version - account an update with the delta
public <T extends CreateParameters> String create(T parameters) throws Exception {
Session ses = parameters.getSession();
Node destination;
try {
destination = ses.getNodeByIdentifier(parameters.getParentId());
}catch(RepositoryException inf) {
} catch (RepositoryException inf) {
throw new IdNotFoundException(parameters.getParentId());
}
if (!node2Item.checkNodeType(destination, FolderItem.class))
if (!node2Item.checkNodeType(destination, FolderItem.class))
throw new InvalidItemException("the destination item is not a folder");
authChecker.checkWriteAuthorizationControl(ses, parameters.getUser(), destination.getIdentifier(), true);
try {
Node newNode = null;
switch (parameters.getMangedType()) {
case FILE:
newNode = create((FileCreationParameters)parameters, destination);
newNode = create((FileCreationParameters) parameters, destination);
break;
case FOLDER:
newNode = create((FolderCreationParameters)parameters, destination);
newNode = create((FolderCreationParameters) parameters, destination);
break;
case ARCHIVE:
newNode = create((ArchiveStructureCreationParameter)parameters, destination);
newNode = create((ArchiveStructureCreationParameter) parameters, destination);
break;
case URL:
newNode = create((URLCreationParameters) parameters, destination);
@ -123,7 +122,7 @@ public class ItemHandler {
default:
throw new InvalidCallParameters("Item not supported");
}
log.debug("item with id {} correctly created",newNode.getIdentifier());
log.debug("item with id {} correctly created", newNode.getIdentifier());
return newNode.getIdentifier();
} finally {
if (parameters.getSession().getWorkspace().getLockManager().isLocked(destination.getPath()))
@ -132,267 +131,290 @@ public class ItemHandler {
}
private Node create(FolderCreationParameters params, Node destination) throws Exception{
private Node create(FolderCreationParameters params, Node destination) throws Exception {
Utils.acquireLockWithWait(params.getSession(), destination.getPath(), false, params.getUser(), 10);
Node newNode = Utils.createFolderInternally(params, accountingHandler);
Node newNode = Utils.createFolderInternally(params, accountingHandler, false);
params.getSession().save();
return newNode;
}
private Node create(FileCreationParameters params, Node destination) throws Exception{
Node newNode = createFileItemInternally(params.getSession(), destination, params.getStream(), params.getName(), params.getDescription(), params.getUser(), true);
private Node create(FileCreationParameters params, Node destination) throws Exception {
Node newNode = createFileItemInternally(params.getSession(), destination, params.getStream(), params.getName(),
params.getDescription(), params.getFileDetails(), params.getUser(), true);
params.getSession().save();
versionHandler.checkinContentNode(newNode);
log.info("file with id {} correctly created",newNode.getIdentifier());
return newNode;
log.info("file with id {} correctly created", newNode.getIdentifier());
return newNode;
}
private Node create(URLCreationParameters params, Node destination) throws Exception{
private Node create(URLCreationParameters params, Node destination) throws Exception {
Utils.acquireLockWithWait(params.getSession(), destination.getPath(), false, params.getUser(), 10);
Node newNode = Utils.createURLInternally(params.getSession(), destination, params.getName(), params.getUrl(), params.getDescription(), params.getUser(), accountingHandler);
Node newNode = Utils.createURLInternally(params.getSession(), destination, params.getName(), params.getUrl(),
params.getDescription(), params.getUser(), accountingHandler);
params.getSession().save();
return newNode;
}
private Node create(ArchiveStructureCreationParameter params, Node destination) throws Exception{
private Node create(ArchiveStructureCreationParameter params, Node destination) throws Exception {
Utils.acquireLockWithWait(params.getSession(), destination.getPath(), false, params.getUser(), 10);
FolderCreationParameters folderParameters = FolderCreationParameters.builder().name(params.getParentFolderName()).author(params.getUser()).on(destination.getIdentifier()).with(params.getSession()).build();
Node parentDirectoryNode = Utils.createFolderInternally(folderParameters, accountingHandler);
FolderCreationParameters folderParameters = FolderCreationParameters.builder()
.name(params.getParentFolderName()).author(params.getUser()).on(destination.getIdentifier())
.with(params.getSession()).build();
Node parentDirectoryNode = Utils.createFolderInternally(folderParameters, accountingHandler, false);
params.getSession().save();
try {
if (params.getSession().getWorkspace().getLockManager().isLocked(destination.getPath()))
params.getSession().getWorkspace().getLockManager().unlock(destination.getPath());
} catch (Throwable t){
} catch (Throwable t) {
log.warn("error unlocking {}", destination.getPath(), t);
}
Set<Node> fileNodes = new HashSet<>();
HashMap<String, Node> directoryNodeMap = new HashMap<>();
HashMap<String, Node> directoryNodeMap = new HashMap<>();
try (ArchiveInputStream input = new ArchiveStreamFactory()
.createArchiveInputStream(new BufferedInputStream(params.getStream(), 1024*64))){
ArchiveEntry entry;
while ((entry = input.getNextEntry()) != null) {
String entirePath = entry.getName();
if (entry.isDirectory()) {
log.debug("creating directory with entire path {} ", entirePath);
createPath(entirePath, directoryNodeMap, parentDirectoryNode, params.getSession(), params.getUser());
continue;
} else {
try {
String name = entirePath.replaceAll("([^/]*/)*(.*)", "$2");
String parentPath = entirePath.replaceAll("(([^/]*/)*)(.*)", "$1");
log.debug("creating file with entire path {}, name {}, parentPath {} ", entirePath, name, parentPath);
Node fileNode = null;
if (parentPath.isEmpty())
fileNode = createFileItemInternally(params.getSession(), parentDirectoryNode, input, name, "", params.getUser(), false);
else {
Node parentNode = directoryNodeMap.get(parentPath);
if (parentNode ==null)
parentNode = createPath(parentPath, directoryNodeMap, parentDirectoryNode, params.getSession(), params.getUser());
fileNode = createFileItemInternally(params.getSession(), parentNode, input, name, "", params.getUser(), false);
}
fileNodes.add(fileNode);
}catch(Exception e) {
log.warn("error getting file {}",entry.getName(),e);
ArchiveInputStream input = new ArchiveStreamFactory()
.createArchiveInputStream(new BufferedInputStream(params.getStream()));
ArchiveEntry entry;
while ((entry = input.getNextEntry()) != null) {
String entirePath = entry.getName();
log.debug("reading new entry ------> {} ", entirePath);
if (entry.isDirectory()) {
log.debug("creating directory with entire path {} ", entirePath);
createPath(entirePath, directoryNodeMap, parentDirectoryNode, params.getSession(), params.getUser());
} else {
try {
String name = entirePath.replaceAll("([^/]*/)*(.*)", "$2");
String parentPath = entirePath.replaceAll("(([^/]*/)*)(.*)", "$1");
log.debug("creating file with entire path {}, name {}, parentPath {} ", entirePath, name,
parentPath);
Node fileNode = null;
long fileSize = entry.getSize();
FormDataContentDisposition fileDetail = FormDataContentDisposition.name(name).size(fileSize)
.build();
if (parentPath.isEmpty()) {
fileNode = createFileItemInternally(params.getSession(), parentDirectoryNode, input, name, "",
fileDetail, params.getUser(), false);
} else {
Node parentNode = directoryNodeMap.get(parentPath);
if (parentNode == null)
parentNode = createPath(parentPath, directoryNodeMap, parentDirectoryNode,
params.getSession(), params.getUser());
fileNode = createFileItemInternally(params.getSession(), parentNode, input, name, "",
fileDetail, params.getUser(), false);
}
fileNodes.add(fileNode);
} catch (Throwable e) {
log.warn("error getting file {}", entry.getName(), e);
}
}
}
log.info("archive {} uploading finished ", params.getParentFolderName());
params.getSession().save();
for (Node node : fileNodes)
versionHandler.checkinContentNode(node);
return parentDirectoryNode;
}
private Node createPath(String parentPath, Map<String, Node> directoryNodeMap, Node rootNode, Session ses, String user) throws StorageHubException, RepositoryException{
private Node createPath(String parentPath, Map<String, Node> directoryNodeMap, Node rootNode, Session ses,
String user) throws StorageHubException, RepositoryException {
String[] parentPathSplit = parentPath.split("/");
String name = parentPathSplit[parentPathSplit.length-1];
String name = parentPathSplit[parentPathSplit.length - 1];
StringBuilder relParentPath = new StringBuilder();
for (int i = 0 ; i<=parentPathSplit.length-2; i++)
for (int i = 0; i <= parentPathSplit.length - 2; i++)
relParentPath.append(parentPathSplit[i]).append("/");
if (relParentPath.toString().isEmpty()) {
FolderCreationParameters folderParameters = FolderCreationParameters.builder().name(name).author(user).on(rootNode.getIdentifier()).with(ses).build();
Node createdNode = Utils.createFolderInternally(folderParameters, accountingHandler);
directoryNodeMap.put(name+"/", createdNode);
FolderCreationParameters folderParameters = FolderCreationParameters.builder().name(name).author(user)
.on(rootNode.getIdentifier()).with(ses).build();
Node createdNode = Utils.createFolderInternally(folderParameters, accountingHandler, false);
directoryNodeMap.put(name + "/", createdNode);
return createdNode;
}else {
} else {
Node relParentNode = directoryNodeMap.get(relParentPath.toString());
if (relParentNode==null) {
if (relParentNode == null) {
relParentNode = createPath(relParentPath.toString(), directoryNodeMap, rootNode, ses, user);
}
FolderCreationParameters folderParameters = FolderCreationParameters.builder().name(name).author(user).on(relParentNode.getIdentifier()).with(ses).build();
Node createdNode = Utils.createFolderInternally(folderParameters, accountingHandler);
FolderCreationParameters folderParameters = FolderCreationParameters.builder().name(name).author(user)
.on(relParentNode.getIdentifier()).with(ses).build();
Node createdNode = Utils.createFolderInternally(folderParameters, accountingHandler, false);
directoryNodeMap.put(relParentPath.append(name).append("/").toString(), createdNode);
return createdNode;
}
}
private Node create(GCubeItemCreationParameters params, Node destination) throws Exception{
private Node create(GCubeItemCreationParameters params, Node destination) throws Exception {
Utils.acquireLockWithWait(params.getSession(), destination.getPath(), false, params.getUser(), 10);
Node newNode = Utils.createGcubeItemInternally(params.getSession(), destination, params.getItem().getName(), params.getItem().getDescription(), params.getUser(), params.getItem(), accountingHandler);
Node newNode = Utils.createGcubeItemInternally(params.getSession(), destination, params.getItem().getName(),
params.getItem().getDescription(), params.getUser(), params.getItem(), accountingHandler);
params.getSession().save();
return newNode;
}
private Node createFileItemInternally(Session ses, Node destinationNode, InputStream stream, String name, String description, String login, boolean withLock) throws RepositoryException, StorageHubException{
private Node createFileItemInternally(Session ses, Node destinationNode, InputStream stream, String name,
String description, FormDataContentDisposition fileDetails, String login, boolean withLock)
throws RepositoryException, StorageHubException {
log.trace("UPLOAD: starting preparing file");
Node newNode;
FolderItem destinationItem = node2Item.getItem(destinationNode, Excludes.ALL);
FolderManager folderManager = pluginHandler.getFolderManager(destinationItem);
StorageBackend storageBackend = folderManager.getStorageBackend();
String relativePath = destinationNode.getPath();
if (destinationItem.isExternalManaged())
relativePath = relativePath.replace(folderManager.getRootFolder().getPath(), "");
StorageBackendFactory sbf = storageBackendHandler.get(destinationItem.getBackend());
StorageBackend sb = sbf.create(destinationItem.getBackend());
String relativePath = destinationNode.getPath();
String newNodePath = Paths.append(Paths.getPath(destinationNode.getPath()), name).toPath();
log.info("new node path is {}", newNodePath);
if (ses.nodeExists(newNodePath)) {
if (!folderManager.manageVersion())
throw new InvalidCallParameters("storage for plugin "+folderManager.getClass().getName()+" doesn't support versioning");
newNode = ses.getNode(newNodePath);
authChecker.checkWriteAuthorizationControl(ses, login, newNode.getIdentifier(), false);
AbstractFileItem item = fillItemWithContent(stream, storageBackend, name, description, relativePath,login);
AbstractFileItem item = fillItemWithContent(stream, sb, name, description, fileDetails, relativePath,
login);
if (withLock) {
try {
ses.getWorkspace().getLockManager().lock(newNode.getPath(), true, true, 0,login);
}catch (LockException le) {
ses.getWorkspace().getLockManager().lock(newNode.getPath(), true, true, 0, login);
} catch (LockException le) {
throw new ItemLockedException(le);
}
}
try {
versionHandler.checkoutContentNode(newNode);
log.trace("replacing content of class {}",item.getContent().getClass());
item2Node.replaceContent(newNode,item, ItemAction.UPDATED);
accountingHandler.createFileUpdated(item.getTitle(), ses, newNode, login, false);
log.trace("replacing content of class {}", item.getContent().getClass());
item2Node.replaceContent(newNode, item, ItemAction.UPDATED);
String versionName = null;
try {
Version version = versionHandler.getCurrentVersion(newNode);
versionName = version.getName();
} catch (RepositoryException e) {
log.warn("current version of {} cannot be retreived", item.getId());
}
accountingHandler.createFileUpdated(item.getTitle(), versionName, ses, newNode, login, false);
ses.save();
}finally {
if (withLock) ses.getWorkspace().getLockManager().unlock(newNode.getPath());
} catch (Throwable t) {
log.error("error saving item", t);
} finally {
if (withLock) {
if (ses != null && ses.hasPendingChanges())
ses.save();
ses.getWorkspace().getLockManager().unlock(newNode.getPath());
}
}
}
else {
} else {
authChecker.checkWriteAuthorizationControl(ses, login, destinationNode.getIdentifier(), true);
AbstractFileItem item = fillItemWithContent(stream, storageBackend, name, description, relativePath, login);
AbstractFileItem item = fillItemWithContent(stream, sb, name, description, fileDetails, relativePath,
login);
if (withLock) {
try {
log.debug("trying to acquire lock");
Utils.acquireLockWithWait(ses, destinationNode.getPath(), false, login, 10);
}catch (LockException le) {
} catch (LockException le) {
throw new ItemLockedException(le);
}
}
try {
newNode = item2Node.getNode(destinationNode, item);
newNode = item2Node.getNode(destinationNode, item);
accountingHandler.createEntryCreate(item.getTitle(), ses, newNode, login, false);
ses.save();
}finally {
if (withLock) ses.getWorkspace().getLockManager().unlock(destinationNode.getPath());
} catch (Throwable t) {
log.error("error saving item", t);
throw new BackendGenericError(t);
} finally {
if (withLock)
ses.getWorkspace().getLockManager().unlock(destinationNode.getPath());
}
versionHandler.makeVersionableContent(newNode);
accountingHandler.createFolderAddObj(name, item.getClass().getSimpleName(), item.getContent().getMimeType(), ses, login, destinationNode, false);
accountingHandler.createFolderAddObj(name, item.getClass().getSimpleName(), item.getContent().getMimeType(),
ses, login, destinationNode, false);
}
// TODO: Utils.updateParentSize()
return newNode;
}
private AbstractFileItem fillItemWithContent(InputStream stream, StorageBackend storageBackend, String name,
		String description, FormDataContentDisposition fileDetails, String relPath, String login)
		throws BackendGenericError {
	log.trace("UPLOAD: filling content");
	ContentHandler handler = getContentHandler(stream, storageBackend, name, fileDetails, relPath, login);
	AbstractFileItem item = handler.buildItem(name, description, login);
	return item;
}
private ContentHandler getContentHandler(InputStream stream, StorageBackend storageBackend, String name,
		FormDataContentDisposition fileDetails, String relPath, String login) throws BackendGenericError {
	log.trace("UPLOAD: handling content");
	long start = System.currentTimeMillis();
	log.trace("UPLOAD: writing the stream - start");
	try {
		MetaInfo info = null;
		try {
			log.debug("UPLOAD: upload on {} - start", storageBackend.getClass());
			if (fileDetails != null && fileDetails.getSize() > 0) {
				log.debug("UPLOAD: file size set is {} Byte", fileDetails.getSize());
				info = storageBackend.upload(stream, relPath, name, fileDetails.getSize(), login);
			} else
				info = storageBackend.upload(stream, relPath, name, login);
			log.debug("UPLOAD: upload on storage - stop");
		} catch (Throwable e) {
			log.error("error writing content", e);
			throw e;
		}
		ContentHandler handler = null;
		String mimeType;
		log.debug("UPLOAD: reading the mimetype - start");
		try (InputStream is1 = new BufferedInputStream(storageBackend.download(info.getStorageId()))) {
			org.apache.tika.mime.MediaType mediaType = null;
			TikaConfig config = TikaConfig.getDefaultConfig();
			Detector detector = config.getDetector();
			TikaInputStream tikastream = TikaInputStream.get(is1);
			Metadata metadata = new Metadata();
			mediaType = detector.detect(tikastream, metadata);
			mimeType = mediaType.getBaseType().toString();
			handler = contenthandlerFactory.create(mimeType);
			log.debug("UPLOAD: reading the mimetype {} - finished in {}", mimeType,
					System.currentTimeMillis() - start);
		} catch (Throwable e) {
			log.error("error retrieving mimeType", e);
			throw new RuntimeException(e);
		}
		if (handler.requiresInputStream())
			try (InputStream is1 = new BufferedInputStream(storageBackend.download(info.getStorageId()))) {
				log.debug("UPLOAD: the file type requires an input stream");
				handler.initiliseSpecificContent(is1, name, mimeType, info.getSize());
			}
		else {
			log.debug("UPLOAD: the file type doesn't require an input stream");
			handler.initiliseSpecificContent(name, mimeType);
		}
		log.debug("UPLOAD: writing the stream - finished in {}", System.currentTimeMillis() - start);
		handler.getContent().setData(NodeConstants.CONTENT_NAME);
		handler.getContent().setStorageId(info.getStorageId());
		handler.getContent().setSize(info.getSize());
		handler.getContent().setRemotePath(info.getRemotePath());
		handler.getContent().setPayloadBackend(info.getPayloadBackend());
		log.debug("UPLOAD: content payload set as {} ", handler.getContent().getPayloadBackend());
		return handler;
	} catch (Throwable e) {
		log.error("error writing file", e);
		throw new BackendGenericError(e);
	}
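The detection step above is plain Tika usage: the payload is uploaded first, then read back and handed to the default detector to get the base media type. A standalone sketch of just that detection (the file name is illustrative):

import java.io.IOException;
import java.nio.file.Paths;
import org.apache.tika.config.TikaConfig;
import org.apache.tika.detect.Detector;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.mime.MediaType;

// Sketch only: detects the base mime type of a local file the same way the handler above does.
public class MimeTypeSketch {
	public static void main(String[] args) throws IOException {
		TikaConfig config = TikaConfig.getDefaultConfig();
		Detector detector = config.getDetector();
		try (TikaInputStream stream = TikaInputStream.get(Paths.get("sample.bin"))) {
			Metadata metadata = new Metadata();
			MediaType mediaType = detector.detect(stream, metadata);
			System.out.println(mediaType.getBaseType().toString());
		}
	}
}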

@ -83,7 +83,7 @@ public class Node2ItemConverter {
setGenericFields(node.getFrozenNode(), Content.class, null, content);
return content;
}
public Message getMessageItem(Node node) throws RepositoryException{
if (!(node.getPrimaryNodeType().getName().equals("nthl:itemSentRequest")
|| node.getPrimaryNodeType().getName().equals("nthl:itemSentRequestSH")))
@ -95,7 +95,7 @@ public class Node2ItemConverter {
}catch (Throwable e) {
msg.setWithAttachments(false);
}
setRootItemCommonFields(node, Collections.emptyList(), Message.class, msg);
return msg;
}
@ -129,7 +129,7 @@ public class Node2ItemConverter {
item.setExternalManaged(false);
}
item.setLocked(node.isLocked());
setRootItemCommonFields(node, excludes, classToHandle, item);
@ -137,7 +137,7 @@ public class Node2ItemConverter {
return item;
}
private <T extends Item> boolean hasTypedParent(Node node, Class<T> parentType) throws BackendGenericError, RepositoryException{
if(node==null) return false;
@ -146,16 +146,16 @@ public class Node2ItemConverter {
private <T extends RootItem> void setRootItemCommonFields(Node node, List<String> excludes, Class<T> classToHandle, T instance) throws RepositoryException{
try{
instance.setParentId(node.getParent().getIdentifier());
instance.setParentPath(node.getParent().getPath());
}catch (Throwable e) {
logger.trace("Root node doesn't have a parent");
}
instance.setRelatedNode(node);
instance.setId(node.getIdentifier());
instance.setName(Text.unescapeIllegalJcrChars(node.getName()));
@ -166,21 +166,11 @@ public class Node2ItemConverter {
setGenericFields(node, classToHandle, excludes, instance);
}
private <T> void setGenericFields(Node node, Class<T> classToHandle,List<String> excludes, T instance){
for (Field field : retrieveAllFields(classToHandle)){
if (field.isAnnotationPresent(Attribute.class)){
setAttributeFieldCheckingDefault(field, instance, node);
} else if (field.isAnnotationPresent(NodeAttribute.class)){
String fieldNodeName = field.getAnnotation(NodeAttribute.class).value();
//for now it excludes only first level node
@ -195,7 +185,7 @@ public class Node2ItemConverter {
}catch(PathNotFoundException e){
logger.trace("the current node dosn't contain {} node",fieldNodeName);
} catch (Exception e ) {
logger.debug("error setting value",e);
logger.trace("error setting value",e);
}
@ -207,97 +197,141 @@ public class Node2ItemConverter {
T obj = clazz.newInstance();
for (Field field : retrieveAllFields(clazz)){
	if (field.isAnnotationPresent(Attribute.class)){
		setAttributeFieldCheckingDefault(field, obj, node);
	} else if (field.isAnnotationPresent(MapAttribute.class)){
		logger.trace("found field {} of type annotated as MapAttribute in class {} and node name {}", field.getName(), clazz.getName(), node.getName());
		setMapAttribute(field, obj, node);
	} else if (field.isAnnotationPresent(ListNodes.class)){
		logger.trace("found field {} of type annotated as ListNodes in class {} on node {}", field.getName(), clazz.getName(), node.getName());
		setListNode(field, obj, node);
	} else if (field.isAnnotationPresent(NodeAttribute.class)){
		logger.trace("found field {} of type annotated as NodeAttribute in class {} on node {}", field.getName(), clazz.getName(), node.getName());
		String fieldNodeName = field.getAnnotation(NodeAttribute.class).value();
		//for now it excludes only first level node
		//if (excludes!=null && excludes.contains(fieldNodeName)) continue;
		logger.trace("retrieving field node {} on field {}", fieldNodeName, field.getName());
		field.setAccessible(true);
		try{
			Node fieldNode = node.getNode(fieldNodeName);
			logger.trace("looking in node {} searched with {}",fieldNode.getName(),fieldNodeName);
			field.set(obj, iterateNodeAttributeFields(field.getType(), fieldNode));
		}catch(PathNotFoundException e){
			logger.warn("the current node doesn't contain {} node",fieldNodeName);
		} catch (Exception e ) {
			logger.warn("error setting value",e);
		}
	}
}
return obj;
}

private <T> void setMapAttribute(Field field, T instance, Node node) throws Exception{
	field.setAccessible(true);
	String exclude = field.getAnnotation(MapAttribute.class).excludeStartWith();
	Map<String, Object> mapToset = new HashMap<String, Object>();
	PropertyIterator iterator = node.getProperties();
	if (iterator!=null) {
		while (iterator.hasNext()){
			Property prop = iterator.nextProperty();
			if (!exclude.isEmpty() && prop.getName().startsWith(exclude)) continue;
			try{
				logger.trace("adding {} in the map",prop.getName());
				mapToset.put(prop.getName(), getPropertyValue(prop));
			}catch(PathNotFoundException e){
				logger.warn("the property {} is not mapped",prop.getName());
			} catch (Exception e ) {
				logger.trace("error setting value {}",e);
			}
		}
	}
	field.set(instance, mapToset);
}

private <T> void setListNode(Field field, T instance, Node node) throws Exception{
	field.setAccessible(true);
	String exclude = field.getAnnotation(ListNodes.class).excludeTypeStartWith();
	String include = field.getAnnotation(ListNodes.class).includeTypeStartWith();
	Class listType = field.getAnnotation(ListNodes.class).listClass();
	Map<String, Class> subTypesMap = Collections.emptyMap();
	if (!typeToSubtypeMap.containsKey(listType)) {
		Configuration config = new ConfigurationBuilder().forPackages(listType.getPackage().getName());
		Reflections reflections = new Reflections(config);
		Set<Class> subTypes = reflections.getSubTypesOf(listType);
		if (subTypes.size()>0) {
			subTypesMap = new HashMap<>();
			for (Class subtype: subTypes)
				if (subtype.isAnnotationPresent(AttributeRootNode.class)) {
					AttributeRootNode attributeRootNode = (AttributeRootNode)subtype.getAnnotation(AttributeRootNode.class);
					subTypesMap.put(attributeRootNode.value(), subtype);
				}
		} else logger.trace("no subtypes found for {}",listType.getName());
		typeToSubtypeMap.put(listType, subTypesMap);
	} else {
		logger.debug("subtypes already found in cache");
		subTypesMap = typeToSubtypeMap.get(listType);
	}
	List<Object> toSetList = new ArrayList<>();
	NodeIterator iterator = node.getNodes();
	while (iterator.hasNext()){
		Node currentNode = iterator.nextNode();
		logger.trace("the current node {} has a list",currentNode.getName());
		String primaryType = currentNode.getPrimaryNodeType().getName();
		if (!include.isEmpty() && !primaryType.startsWith(include))
			continue;
		if (!exclude.isEmpty() && primaryType.startsWith(exclude))
			continue;
		if (subTypesMap.containsKey(primaryType))
			toSetList.add(iterateNodeAttributeFields(subTypesMap.get(primaryType), currentNode));
		else toSetList.add(iterateNodeAttributeFields(listType, currentNode));
	}
	if (toSetList.size()!=0) field.set(instance, toSetList);
}
private <T> void setAttributeFieldCheckingDefault(Field field, T instance, Node node) {
Attribute attribute = field.getAnnotation(Attribute.class);
field.setAccessible(true);
try{
Object propValue;
Class<?> returnType = field.getType();
if (node.hasProperty(attribute.value())) {
propValue = getPropertyValue(returnType, node.getProperty(attribute.value()));
if (!attribute.defaultValue().isEmpty() && propValue==null )
propValue = returnType.cast(attribute.defaultValue());
field.set(instance, propValue);
logger.trace("retrieve item - added field {}",field.getName());
} else if (!attribute.defaultValue().isEmpty()){
propValue = returnType.cast(attribute.defaultValue());
field.set(instance, propValue);
logger.trace("retrieve item - setting default for field {}",field.getName());
} else
logger.trace("property not found for field {}",field.getName());
} catch (Exception e ) {
logger.debug("error setting value for property {} ",attribute.value());
}
}
@SuppressWarnings({ "unchecked" })
private Object getPropertyValue(Class returnType, Property prop) throws Exception{
if (returnType.equals(String.class)) return prop.getString();

@ -28,7 +28,7 @@ public class ArchiveStructureCreationParameter extends CreateParameters {
@Override
protected boolean isValid() {
return Objects.nonNull(parentFolderName) && Objects.nonNull(stream);
}
@Override

@ -1,16 +1,18 @@
package org.gcube.data.access.storagehub.handlers.items.builders;
import java.util.Map;
import java.util.Objects;
import org.gcube.common.storagehub.model.plugins.PluginParameters;
import org.gcube.common.storagehub.model.Metadata;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
public class FolderCreationParameters extends CreateParameters {
private String name;
private String description="";
private boolean hidden = false;
private String externalPluginName = null;
private PluginParameters externalPluginParameters = null;
private PayloadBackend backend;
protected FolderCreationParameters() {}
@ -26,16 +28,8 @@ public class FolderCreationParameters extends CreateParameters {
return hidden;
}
public boolean isExternal() {
return externalPluginName!=null;
}
public String getPluginName(String name) {
return this.externalPluginName;
}
public PayloadBackend getBackend() {
	return backend;
}
@Override
@ -70,8 +64,8 @@ public class FolderCreationParameters extends CreateParameters {
return this;
}
public BackendCreationBuilder onRepository(String pluginName) {
	return new BackendCreationBuilder(pluginName, this);
}
public FolderCreationBuilder hidden(boolean hidden) {
@ -81,19 +75,26 @@ public class FolderCreationParameters extends CreateParameters {
}
public static class BackendCreationBuilder {

	FolderCreationBuilder cb;
	String plugin;

	protected BackendCreationBuilder(String pluginName, FolderCreationBuilder cb) {
		this.cb = cb;
		this.plugin = pluginName;
	}

	public FolderCreationBuilder withParameters(Map<String,Object> params){
		this.cb.parameters.backend = new PayloadBackend(plugin, new Metadata(params));
		return this.cb;
	}

	public FolderCreationBuilder withoutParameters(){
		this.cb.parameters.backend = new PayloadBackend(plugin, null);
		return this.cb;
	}
}
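With this change a caller picks the payload backend through the builder chain. A sketch of the intended usage, mirroring the VRE folder creation further down in this diff (plugin name, bucket name, parent id, author and session are placeholders):

import java.util.Collections;
import javax.jcr.Session;
import org.gcube.data.access.storagehub.handlers.items.builders.FolderCreationParameters;

// Sketch only: builds creation parameters for a folder whose payload lives on a non-default backend.
public class FolderOnBackendSketch {
	FolderCreationParameters build(Session session, String parentId, String author) {
		return FolderCreationParameters.builder()
				.onRepository("gcube-minio")
				.withParameters(Collections.singletonMap("bucketName", "my-vre-bucket"))
				.name("myFolder")
				.description("folder payload stored on minio")
				.author(author)
				.on(parentId)
				.with(session)
				.build();
	}
}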

@ -1,80 +0,0 @@
package org.gcube.data.access.storagehub.handlers.plugins;
import java.util.Collections;
import java.util.Map;
import java.util.stream.Collectors;
import javax.enterprise.inject.Default;
import javax.enterprise.inject.Instance;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import org.gcube.common.storagehub.model.Excludes;
import org.gcube.common.storagehub.model.exceptions.BackendGenericError;
import org.gcube.common.storagehub.model.exceptions.PluginInitializationException;
import org.gcube.common.storagehub.model.exceptions.PluginNotFoundException;
import org.gcube.common.storagehub.model.items.ExternalFolder;
import org.gcube.common.storagehub.model.items.Item;
import org.gcube.common.storagehub.model.plugins.FolderManager;
import org.gcube.common.storagehub.model.plugins.FolderManagerConnector;
import org.gcube.data.access.storagehub.handlers.items.Node2ItemConverter;
import org.gcube.data.access.storagehub.storage.backend.impl.GcubeFolderManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class FolderPluginHandler {
private static Logger log = LoggerFactory.getLogger(FolderPluginHandler.class);
@Inject
Node2ItemConverter node2Item;
private GcubeFolderManager defaultManager = new GcubeFolderManager();
public FolderManager getDefault() {
return defaultManager;
}
@Inject
Instance<FolderManagerConnector> connectors;
private Map<String, FolderManagerConnector> connectorsMap;
FolderPluginHandler(){
if (connectors !=null)
connectorsMap = connectors.stream().collect(Collectors.toMap(FolderManagerConnector::getName, e -> e ));
else {
log.info("connectors are null");
connectorsMap = Collections.emptyMap();
}
}
public FolderManagerConnector getConnector(String name) throws PluginNotFoundException {
if (!connectorsMap.containsKey(name)) throw new PluginNotFoundException("plugin "+name+" not found");
return connectorsMap.get(name);
}
public FolderManager getFolderManager(Item item) throws PluginInitializationException, PluginNotFoundException, RepositoryException, BackendGenericError{
if (!item.isExternalManaged())
return defaultManager;
Session session = ((Node)item.getRelatedNode()).getSession();
Item parent = null;
do {
String parentId = item.getParentId();
Node node = session.getNodeByIdentifier(parentId);
parent = node2Item.getItem(node, Excludes.ALL);
if (parent !=null && parent instanceof ExternalFolder) {
ExternalFolder extParent = (ExternalFolder) parent;
String plugin = extParent.getManagedBy();
Map<String, Object> parameters = extParent.getConnectionParameters().getMap();
return getConnector(plugin).connect(extParent, parameters);
}
} while (parent!=null);
throw new BackendGenericError("selected external managed item doesn't have a parent external folder");
}
}

@ -1,28 +0,0 @@
package org.gcube.data.access.storagehub.handlers.plugins;
import javax.inject.Inject;
import javax.inject.Singleton;
@Singleton
public class OperationMediator {
@Inject
FolderPluginHandler folderHandler;
/*
boolean onMove(Item source, Item destination, Session session) throws PluginInitializationException, PluginNotFoundException, BackendGenericError, RepositoryException{
FolderManager sourceFolderManager = folderHandler.getFolderManager(source);
FolderManager destinationFolderManager = folderHandler.getFolderManager(destination);
if (source instanceof FolderItem) {
destinationFolderManager.onCreatedFolder((FolderItem) source);
session.move(source.getPath(), destination.getPath());
sourceFolderManager.onDeletingFolder((FolderItem) source);
} else if (source instanceof AbstractFileItem){
}
}
*/
}

@ -0,0 +1,60 @@
package org.gcube.data.access.storagehub.handlers.plugins;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.PostConstruct;
import javax.enterprise.inject.Instance;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.gcube.common.storagehub.model.Constants;
import org.gcube.common.storagehub.model.exceptions.PluginNotFoundException;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class StorageBackendHandler {
private static Logger log = LoggerFactory.getLogger(StorageBackendHandler.class);
public static PayloadBackend getDefaultPayloadForFolder() {
return new PayloadBackend(Constants.DEFAULT_S3_STORAGE, null);
}
@Inject
Instance<StorageBackendFactory> factories;
Map<String, StorageBackendFactory> storagebackendMap= new HashMap<String, StorageBackendFactory>();
@PostConstruct
void init(){
if (factories !=null)
for (StorageBackendFactory connector : factories) {
if (storagebackendMap.containsKey(connector.getName())) {
log.error("multiple storage backend with the same name");
throw new RuntimeException("multiple storage backend with the same name");
}
storagebackendMap.put(connector.getName(), connector);
}
else
throw new RuntimeException("storage backend implementation not found");
}
public StorageBackendFactory get(PayloadBackend payload) throws PluginNotFoundException {
if (payload == null || !storagebackendMap.containsKey(payload.getStorageName()))
throw new PluginNotFoundException(String.format("implementation for storage %s not found", payload.getStorageName()));
return storagebackendMap.get(payload.getStorageName());
}
public StorageBackendFactory get(String storageName) throws PluginNotFoundException {
if (!storagebackendMap.containsKey(storageName))
	throw new PluginNotFoundException(String.format("implementation for storage %s not found", storageName));
return storagebackendMap.get(storageName);
}
public Collection<StorageBackendFactory> getAllImplementations() {
return storagebackendMap.values();
}
}
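Client code is expected to inject StorageBackendHandler and resolve the factory registered for a content's PayloadBackend, as StorageOperationMediator does below. A minimal sketch of that lookup (the class name and surrounding method are illustrative):

import java.io.InputStream;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler;

// Sketch only: resolves the factory registered for the content's payload backend
// and opens a download stream, the same lookup StorageOperationMediator performs.
@Singleton
public class PayloadReaderSketch {

	@Inject
	StorageBackendHandler storageBackendHandler;

	public InputStream open(Content content) throws StorageHubException {
		StorageBackendFactory factory = storageBackendHandler.get(content.getPayloadBackend());
		StorageBackend backend = factory.create(content.getPayloadBackend());
		return backend.download(content);
	}
}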

@ -0,0 +1,50 @@
package org.gcube.data.access.storagehub.handlers.plugins;
import java.io.InputStream;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class StorageOperationMediator {
Logger log = LoggerFactory.getLogger(StorageOperationMediator.class);
@Inject
StorageBackendHandler storageBackendHandler;
public MetaInfo copy(Content source, PayloadBackend destination, String newName, String newParentPath, String login) throws StorageHubException{
log.info("creating Storages for source {} and destination {}", source.getPayloadBackend(), destination.getStorageName());
StorageBackendFactory sourceSBF = storageBackendHandler.get(source.getPayloadBackend());
//TODO: add metadata taken from content node
StorageBackend sourceSB = sourceSBF.create(source.getPayloadBackend());
StorageBackendFactory destSBF = storageBackendHandler.get(destination);
StorageBackend destSB = destSBF.create(destination);
if (sourceSB.equals(destSB)) {
log.info("source and destintiona are the same storage");
return sourceSB.onCopy(source, newParentPath, newName);
}else {
log.info("source and destintiona are different storage");
InputStream stream = sourceSB.download(source);
MetaInfo info = destSB.upload(stream, newParentPath, newName, source.getSize(), login);
return info;
}
}
public boolean move(){
return true;
}
}

@ -17,9 +17,10 @@ import org.apache.jackrabbit.api.JackrabbitSession;
import org.apache.jackrabbit.api.security.user.Group;
import org.apache.jackrabbit.api.security.user.User;
import org.apache.jackrabbit.api.security.user.UserManager;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.common.scope.impl.ScopeBean.Type;
import org.gcube.common.security.ContextBean;
import org.gcube.common.security.ContextBean.Type;
import org.gcube.common.security.providers.SecretManagerProvider;
import org.gcube.common.security.secrets.Secret;
import org.gcube.common.storagehub.model.exceptions.BackendGenericError;
import org.gcube.common.storagehub.model.exceptions.InvalidCallParameters;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
@ -82,7 +83,8 @@ public class VREManager {
}
public synchronized VRE getVreFolderItem(JackrabbitSession ses, String userId, List<String> excludes ) throws RepositoryException, StorageHubException{
Secret secret = SecretManagerProvider.instance.get();
ContextBean bean = new ContextBean(secret.getContext());
if (!bean.is(Type.VRE)) throw new BackendGenericError("the current scope is not a VRE");
String entireScopeName= bean.toString().replaceAll("^/(.*)/?$", "$1").replaceAll("/", "-");
return getVreFolderItemByGroupName(ses, entireScopeName, userId, excludes);

@ -0,0 +1,41 @@
package org.gcube.data.access.storagehub.health;
import org.gcube.common.health.api.HealthCheck;
import org.gcube.common.health.api.ReadinessChecker;
import org.gcube.common.health.api.response.HealthCheckResponse;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler;
import org.gcube.data.access.storagehub.storage.backend.impl.GcubeDefaultS3StorageBackendFactory;
import org.gcube.data.access.storagehub.storage.backend.impl.S3Backend;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ReadinessChecker
public class DefaultStorageCheck implements HealthCheck{
private static Logger log = LoggerFactory.getLogger(DefaultStorageCheck.class);
PayloadBackend defaultPayload = StorageBackendHandler.getDefaultPayloadForFolder();
@Override
public String getName() {
return String.format("default storage (%s)",defaultPayload.getStorageName());
}
@Override
public HealthCheckResponse check() {
try {
GcubeDefaultS3StorageBackendFactory storageFactory =new GcubeDefaultS3StorageBackendFactory();
storageFactory.init();
if (((S3Backend)storageFactory.create(defaultPayload)).isAlive())
return HealthCheckResponse.builder(getName()).up().build();
else
return HealthCheckResponse.builder(getName()).down().error("error contacting storage").build();
} catch (Exception e) {
log.error("error checking defaultStorage",e);
return HealthCheckResponse.builder(getName()).down().error(e.getMessage()).build();
}
}
}

@ -0,0 +1,32 @@
package org.gcube.data.access.storagehub.health;
import javax.jcr.LoginException;
import javax.jcr.Session;
import org.gcube.common.health.api.HealthCheck;
import org.gcube.common.health.api.ReadinessChecker;
import org.gcube.common.health.api.response.HealthCheckResponse;
import org.gcube.data.access.storagehub.RepositoryInitializerImpl;
@ReadinessChecker
public class JCRRepositoryCheck implements HealthCheck{
@Override
public String getName() {
return "Jackrabbit repository";
}
@Override
public HealthCheckResponse check() {
try {
Session session = RepositoryInitializerImpl.get().getRepository().login();
if (session != null) session.logout();
return HealthCheckResponse.builder(getName()).up().build();
}catch (LoginException e) { }
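// a LoginException means the repository answered but refused the default credentials,
// so it is still considered reachable and the check falls through to "up"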
catch(Throwable ex) {
return HealthCheckResponse.builder(getName()).down().error(ex.getMessage()).build();
}
return HealthCheckResponse.builder(getName()).up().build();
}
}

@ -47,33 +47,39 @@ import org.glassfish.jersey.media.multipart.FormDataParam;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.webcohesion.enunciate.metadata.rs.RequestHeader;
import com.webcohesion.enunciate.metadata.rs.RequestHeaders;
@Path("items")
@ManagedBy(StorageHubAppllicationManager.class)
@RequestHeaders({
@RequestHeader( name = "Authorization", description = "Bearer token, see https://dev.d4science.org/how-to-access-resources"),
})
public class ACLManager extends Impersonable {
private static final Logger log = LoggerFactory.getLogger(ACLManager.class);
RepositoryInitializer repository = StorageHubAppllicationManager.getRepository();
@RequestScoped
@PathParam("id")
String id;
@Inject
AuthorizationChecker authChecker;
@Inject
PathUtil pathUtil;
@Context
ServletContext context;
@Inject Node2ItemConverter node2Item;
@Inject UnshareHandler unshareHandler;
@Inject ACLManagerInterface aclManagerDelegate;
/**
* returns the AccessType for all the users in a shared folder
*
@ -124,9 +130,9 @@ public class ACLManager extends Impersonable {
InnerMethodName.instance.set("setACLById");
Session ses = null;
try {
if (user.equals(currentUser)) throw new InvalidCallParameters("own ACLs cannot be modified");
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
Node node = ses.getNodeByIdentifier(id);
@ -139,28 +145,25 @@ public class ACLManager extends Impersonable {
throw new UserNotAuthorizedException("owner acl cannot be changed");
SharedFolder folder = (SharedFolder) item;
authChecker.checkAdministratorControl(ses, currentUser, folder);
if (item instanceof VreFolder || ((SharedFolder) item).isVreFolder())
	throw new InvalidCallParameters("acls in vreFolder cannot be removed with this method");
NodeIterator sharedSet = node.getSharedSet();
boolean found = false;
while (sharedSet.hasNext() && !found) {
	Node current = sharedSet.nextNode();
	if (current.getPath().startsWith(pathUtil.getWorkspacePath(user).toPath()))
		found = true;
}
if (!found)
	throw new InvalidCallParameters("shared folder with id "+folder.getId()+" is not shared with user "+user);
aclManagerDelegate.update(user, folder, accessType, ses);
}catch(RepositoryException re){
log.error("jcr error extracting archive", re);
throw new WebApplicationException(new BackendGenericError("jcr error setting acl", re));
@ -195,17 +198,17 @@ public class ACLManager extends Impersonable {
Session ses = null;
try{
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
Node node = ses.getNodeByIdentifier(id);
Item item = node2Item.getItem(node, Excludes.ALL);
if (!(item instanceof SharedFolder))
throw new InvalidItemException("the item is not a shared folder");
if (item instanceof VreFolder || ((SharedFolder) item).isVreFolder())
throw new InvalidCallParameters("acls in vreFolder cannot be removed with this method");
authChecker.checkAdministratorControl(ses, currentUser, (SharedFolder) item);
unshareHandler.unshare(ses, Collections.singleton(user), node, currentUser);

@ -0,0 +1,49 @@
package org.gcube.data.access.storagehub.services;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response.Status;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Path("api-docs")
public class DocsGenerator {
private static Logger logger = LoggerFactory.getLogger(DocsGenerator.class);
@GET
@Path("/{any: .*}")
public InputStream toDoc(@Context HttpServletRequest req) throws WebApplicationException {
logger.info(DocsGenerator.class.getSimpleName() + " toDoc called");
String pathInfo = req.getPathInfo();
logger.debug("pathInfo {}", pathInfo);
try {
if (pathInfo.endsWith("/api-docs")) {
pathInfo += "index.html";
}
if (pathInfo.endsWith("/api-docs/")) {
pathInfo += "index.html";
}
logger.info("going to {}", pathInfo);
String realPath = req.getServletContext().getRealPath(pathInfo);
return new FileInputStream(new File(realPath));
} catch (Exception e) {
logger.error("error getting the docs", e);
throw new WebApplicationException(e,Status.SERVICE_UNAVAILABLE);
}
}
}

@ -5,6 +5,7 @@ import static org.gcube.data.access.storagehub.Roles.VREMANAGER_ROLE;
import java.security.Principal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
@ -19,6 +20,7 @@ import javax.jcr.security.Privilege;
import javax.servlet.ServletContext;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
@ -39,11 +41,10 @@ import org.apache.jackrabbit.api.security.user.QueryBuilder;
import org.apache.jackrabbit.api.security.user.User;
import org.apache.jackrabbit.commons.jackrabbit.authorization.AccessControlUtils;
import org.gcube.common.authorization.control.annotations.AuthorizationControl;
import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.gxrest.response.outbound.GXOutboundErrorResponse;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.common.scope.impl.ScopeBean.Type;
import org.gcube.common.security.ContextBean;
import org.gcube.common.security.ContextBean.Type;
import org.gcube.common.security.providers.SecretManagerProvider;
import org.gcube.common.storagehub.model.Excludes;
import org.gcube.common.storagehub.model.Paths;
import org.gcube.common.storagehub.model.acls.AccessType;
@ -73,8 +74,14 @@ import org.glassfish.jersey.media.multipart.FormDataParam;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.webcohesion.enunciate.metadata.rs.RequestHeader;
import com.webcohesion.enunciate.metadata.rs.RequestHeaders;
@Path("groups")
@ManagedBy(StorageHubAppllicationManager.class)
@RequestHeaders({
@RequestHeader( name = "Authorization", description = "Bearer token, see https://dev.d4science.org/how-to-access-resources"),
})
public class GroupManager {
@ -86,7 +93,7 @@ public class GroupManager {
private static final Logger log = LoggerFactory.getLogger(GroupManager.class);
RepositoryInitializer repository = StorageHubAppllicationManager.getRepository();
@Inject
VREManager vreManager;
@ -143,7 +150,7 @@ public class GroupManager {
@Path("")
@Consumes(MediaType.MULTIPART_FORM_DATA)
@AuthorizationControl(allowedRoles={INFRASTRUCTURE_MANAGER_ROLE}, exception=MyAuthException.class)
public String createGroup(@FormDataParam("group") String group, @FormDataParam("accessType") AccessType accessType, @FormDataParam("folderOwner") String folderOwner){
public String createGroup(@FormDataParam("group") String group, @FormDataParam("accessType") AccessType accessType, @FormDataParam("folderOwner") String folderOwner, @FormDataParam("useDefaultStorage") @DefaultValue("true") boolean useDefaultStorage){
InnerMethodName.instance.set("createGroup");
@ -162,7 +169,7 @@ public class GroupManager {
User user = (User)usrManager.getAuthorizable(folderOwner);
createVreFolder(groupId, session, accessType!=null?accessType:AccessType.WRITE_OWNER, folderOwner, useDefaultStorage);
boolean success = this.internalAddUserToGroup(session, createdGroup, user);
@ -222,9 +229,9 @@ public class GroupManager {
}
public boolean isInfraManager() { return SecretManagerProvider.instance.get().getOwner().getRoles().contains(INFRASTRUCTURE_MANAGER_ROLE); }
public boolean isVREManager() { return SecretManagerProvider.instance.get().getOwner().getRoles().contains(VREMANAGER_ROLE); }
@PUT
@Path("{id}/admins")
@ -241,7 +248,9 @@ public class GroupManager {
session = (JackrabbitSession) repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
Node vreFolder = groupHandler.getFolderNodeRelatedToGroup(session, groupId);
String currentUser = SecretManagerProvider.instance.get().getOwner().getId();
if (!isInfraManager() && !(isVREManager() && isValidGroupForContext(groupId) ))
authChecker.checkAdministratorControl(session, currentUser, node2Item.getItem(vreFolder, Excludes.ALL));
@ -295,7 +304,7 @@ public class GroupManager {
session = (JackrabbitSession) repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
Node vreFolder = groupHandler.getFolderNodeRelatedToGroup(session, groupId);
String currentUser = SecretManagerProvider.instance.get().getOwner().getId();
if (!isInfraManager() && !(isVREManager() && isValidGroupForContext(groupId) ))
authChecker.checkAdministratorControl(session, currentUser, node2Item.getItem(vreFolder, Excludes.ALL));
@ -333,7 +342,7 @@ public class GroupManager {
public List<String> getAdmins(@PathParam("groupId") String groupId){
InnerMethodName.instance.set("getAdmins");
String login = SecretManagerProvider.instance.get().getOwner().getId();
JackrabbitSession session = null;
List<String> users = new ArrayList<>();
@ -402,6 +411,7 @@ public class GroupManager {
this.internalAddUserToGroup(session, group, user);
session.save();
success = true;
}catch(StorageHubException she ){
log.error(she.getErrorMessage(), she);
GXOutboundErrorResponse.throwException(she, Response.Status.fromStatusCode(she.getStatus()));
@ -483,7 +493,7 @@ public class GroupManager {
if (!isValidGroupForContext(groupId) && !isInfraManager())
throw new UserNotAuthorizedException("only VREManager of the selected VRE can execute this operation");
session = (JackrabbitSession) repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
org.apache.jackrabbit.api.security.user.UserManager usrManager = session.getUserManager();
@ -496,14 +506,12 @@ public class GroupManager {
Authorizable user = it.next();
users.add(user.getPrincipal().getName());
}
}catch(StorageHubException she ){
log.error(she.getErrorMessage(), she);
GXOutboundErrorResponse.throwException(she, Response.Status.fromStatusCode(she.getStatus()));
}catch (StorageHubException e) {
log.error("error getting users", e);
GXOutboundErrorResponse.throwException(e);
}catch(RepositoryException re ){
log.error("jcr error creating item", re);
GXOutboundErrorResponse.throwException(new BackendGenericError("jcr error creating item", re));
log.error("jcr error getting users", re);
GXOutboundErrorResponse.throwException(new BackendGenericError("jcr error getting users", re));
}finally {
if (session!=null)
session.logout();
@ -512,14 +520,14 @@ public class GroupManager {
return users;
}
private void createVreFolder(String groupId, JackrabbitSession session, AccessType defaultAccessType, String owner, boolean useDefaultStorage ) throws Exception{
Node sharedRootNode = session.getNode(Constants.SHARED_FOLDER_PATH);
String name = groupId;
String currentScope = SecretManagerProvider.instance.get().getContext();
ContextBean bean = new ContextBean(currentScope);
while (!bean.is(Type.INFRASTRUCTURE)) {
bean = bean.enclosingScope();
}
@ -527,11 +535,15 @@ public class GroupManager {
String displayName = groupId.replaceAll(root+"-[^\\-]*\\-(.*)", "$1");
log.info("creating vreFolder with name {} and title {} and owner {}", name, displayName, owner);
log.info("creating vreFolder with name {} and title {} and owner {} and default storage {}", name, displayName, owner, useDefaultStorage);
FolderCreationParameters folderParameters;
if (!useDefaultStorage)
	folderParameters = FolderCreationParameters.builder().onRepository("gcube-minio").withParameters(Collections.singletonMap("bucketName", name+"-gcube-vre")).name(name).description( "VREFolder for "+groupId).author(owner).on(sharedRootNode.getIdentifier()).with(session).build();
else
	folderParameters = FolderCreationParameters.builder().name(name).description( "VREFolder for "+groupId).author(owner).on(sharedRootNode.getIdentifier()).with(session).build();
Node folder= Utils.createFolderInternally(folderParameters, null, useDefaultStorage);
folder.setPrimaryType(PrimaryNodeType.NT_WORKSPACE_SHARED_FOLDER);
folder.setProperty(NodeProperty.IS_VRE_FOLDER.toString(), true);
folder.setProperty(NodeProperty.TITLE.toString(), name);
@ -556,7 +568,7 @@ public class GroupManager {
private boolean isValidGroupForContext(String group){
String currentContext = SecretManagerProvider.instance.get().getContext();
String expectedGroupId= currentContext.replace("/", "-").substring(1);
return group.equals(expectedGroupId);
}

@ -9,8 +9,8 @@ import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Path;
import javax.ws.rs.core.Context;
import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.authorization.library.provider.ClientInfo;
import org.gcube.common.security.Owner;
import org.gcube.common.security.providers.SecretManagerProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -26,13 +26,13 @@ public abstract class Impersonable {
public void setCurrentUser(@Context final HttpServletRequest request) {
String impersonate = request!=null ? request.getParameter("impersonate") : null ;
Owner owner = SecretManagerProvider.instance.get().getOwner();
if(impersonate!=null && owner.getRoles().contains(INFRASTRUCTURE_MANAGER_ROLE)) {
this.currentUser = impersonate;
} else
this.currentUser = owner.getId();
log.info("called with login {} and impersonate {}",owner.getId(), impersonate);
}
}

@ -55,14 +55,20 @@ import org.glassfish.jersey.media.multipart.FormDataParam;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.webcohesion.enunciate.metadata.rs.RequestHeader;
import com.webcohesion.enunciate.metadata.rs.RequestHeaders;
@Path("items")
@RequestHeaders({
@RequestHeader( name = "Authorization", description = "Bearer token, see https://dev.d4science.org/how-to-access-resources"),
})
public class ItemSharing extends Impersonable{
private static final Logger log = LoggerFactory.getLogger(ItemSharing.class);
RepositoryInitializer repository = StorageHubAppllicationManager.getRepository();
@Inject
AccountingHandler accountingHandler;
@ -73,10 +79,10 @@ public class ItemSharing extends Impersonable{
@Context
ServletContext context;
@Inject
AuthorizationChecker authChecker;
@Inject
PathUtil pathUtil;
@ -86,7 +92,7 @@ public class ItemSharing extends Impersonable{
@Inject Node2ItemConverter node2Item;
@Inject Item2NodeConverter item2Node;
@SuppressWarnings("unchecked")
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@POST
@ -94,7 +100,7 @@ public class ItemSharing extends Impersonable{
public String shareWithMap(@FormParam("mapUserPermission") String mapUserPermissionString, @FormParam("defaultAccessType") String defaultAccessTypeString){
InnerMethodName.instance.set("shareFolder");
HashMap<String,String> mapUserPermission;
Session ses = null;
String toReturn = null;
try{
@ -105,17 +111,17 @@ public class ItemSharing extends Impersonable{
}
AccessType defaultAccessType;
try {
defaultAccessType = AccessType.fromValue(defaultAccessTypeString);
}catch (IllegalArgumentException e) {
throw new InvalidCallParameters("invalid default accessType");
}
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
authChecker.checkWriteAuthorizationControl(ses, currentUser, id, false);
Item item = node2Item.getItem(ses.getNodeByIdentifier(id), Excludes.ALL);
if (mapUserPermission==null || mapUserPermission.isEmpty())
throw new InvalidCallParameters("users is empty");
@ -194,7 +200,6 @@ public class ItemSharing extends Impersonable{
@PUT
@Path("{id}/share")
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Deprecated
public String share(@FormDataParam("users") Set<String> users, @FormDataParam("defaultAccessType") AccessType accessType){
InnerMethodName.instance.set("shareFolder");
Session ses = null;
@ -211,6 +216,8 @@ public class ItemSharing extends Impersonable{
if (users==null || users.isEmpty())
throw new InvalidCallParameters("users is empty");
log.info("shared method called with users {} and default access type {} ", users, accessType.getValue());
Node nodeToShare = ses.getNodeByIdentifier(id);
boolean alreadyShared = false;
@ -244,8 +251,9 @@ public class ItemSharing extends Impersonable{
for (String user : users)
try {
addUserToSharing(sharedFolderNode, ses, user, null, userPrivileges, acls);
log.info("added user {} to the shared node",user);
}catch(Exception e){
log.warn("error adding user {} to sharing of folder {}", user, sharedFolderNode.getName());
log.warn("error adding user {} to sharing of folder {}", user, sharedFolderNode.getName(),e);
}
acm.setPolicy(sharedFolderNode.getPath(), acls);
@ -267,6 +275,9 @@ public class ItemSharing extends Impersonable{
}catch(StorageHubException she ){
log.error(she.getErrorMessage(), she);
GXOutboundErrorResponse.throwException(she, Response.Status.fromStatusCode(she.getStatus()));
}catch(Exception e){
log.error("jcr sharing", e);
GXOutboundErrorResponse.throwException(new BackendGenericError("jcr error sharing folder", e));
}finally{
if (ses!=null)
ses.logout();
@ -278,7 +289,7 @@ public class ItemSharing extends Impersonable{
private Node shareFolder(Node node, Session ses) throws RepositoryException, BackendGenericError, StorageHubException{
if (!node2Item.checkNodeType(node, FolderItem.class) || Utils.hasSharedChildren(node) || !node.getProperty(NodeProperty.PORTAL_LOGIN.toString()).getString().equals(currentUser))
throw new InvalidItemException("item with id "+id+" cannot be shared");
@ -296,30 +307,35 @@ public class ItemSharing extends Impersonable{
}
private void addUserToSharing(Node sharedFolderNode, Session ses, String user, Item itemToShare, Privilege[] userPrivileges, JackrabbitAccessControlList acls) throws RepositoryException{
try {
	String userRootWSId;
	String userPath;
	if (itemToShare==null) {
		String userRootWS = pathUtil.getWorkspacePath(user).toPath();
		userRootWSId = ses.getNode(userRootWS).getIdentifier();
		userPath = String.format("%s%s",userRootWS,sharedFolderNode.getProperty(NodeProperty.TITLE.toString()).getString());
	}
	else {
		userPath = itemToShare.getPath();
		userRootWSId = itemToShare.getParentId();
	}
	log.info("cloning directory to {} ",userPath);
	ses.getWorkspace().clone(ses.getWorkspace().getName(), sharedFolderNode.getPath(), userPath , false);
	acls.addAccessControlEntry(AccessControlUtils.getPrincipal(ses, user), userPrivileges );
	Node usersNode =null;
	if (sharedFolderNode.hasNode(NodeConstants.USERS_NAME))
		usersNode = sharedFolderNode.getNode(NodeConstants.USERS_NAME);
	else
		usersNode = sharedFolderNode.addNode(NodeConstants.USERS_NAME);
	usersNode.setProperty(user, String.format("%s/%s",userRootWSId,sharedFolderNode.getProperty(NodeProperty.TITLE.toString()).getString()));
}catch (Exception e) {
	log.error("error sharing node with user {}",user,e);
	throw new RepositoryException(e);
}
}
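The ACL side of the sharing above relies on Jackrabbit's AccessControlUtils. A self-contained sketch of granting a single privilege (path, user and privilege are placeholders):

import javax.jcr.Session;
import javax.jcr.security.Privilege;
import org.apache.jackrabbit.api.security.JackrabbitAccessControlList;
import org.apache.jackrabbit.commons.jackrabbit.authorization.AccessControlUtils;

// Sketch only: grants a user read access on a folder node with the same utilities used by addUserToSharing.
public class GrantReadSketch {

	public void grantRead(Session ses, String folderPath, String user) throws Exception {
		JackrabbitAccessControlList acl = AccessControlUtils.getAccessControlList(ses, folderPath);
		Privilege[] privileges = AccessControlUtils.privilegesFromNames(ses, Privilege.JCR_READ);
		acl.addAccessControlEntry(AccessControlUtils.getPrincipal(ses, user), privileges);
		ses.getAccessControlManager().setPolicy(folderPath, acl);
		ses.save();
	}
}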

@ -1,8 +1,11 @@
package org.gcube.data.access.storagehub.services;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URL;
import java.net.URLConnection;
import javax.inject.Inject;
import javax.jcr.RepositoryException;
@ -38,21 +41,27 @@ import org.glassfish.jersey.media.multipart.FormDataParam;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.webcohesion.enunciate.metadata.rs.RequestHeader;
import com.webcohesion.enunciate.metadata.rs.RequestHeaders;
@Path("items")
@ManagedBy(StorageHubAppllicationManager.class)
@RequestHeaders({
@RequestHeader( name = "Authorization", description = "Bearer token, see https://dev.d4science.org/how-to-access-resources"),
})
public class ItemsCreator extends Impersonable{
private static final Logger log = LoggerFactory.getLogger(ItemsCreator.class);
@Context
ServletContext context;
RepositoryInitializer repository = StorageHubAppllicationManager.getRepository();
@Inject
ItemHandler itemHandler;
@POST
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Path("/{id}/create/FOLDER")
@ -116,7 +125,7 @@ public class ItemsCreator extends Impersonable{
}
return Response.ok(toReturn).build();
}
@POST
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Path("/{id}/create/URL")
@ -127,7 +136,7 @@ public class ItemsCreator extends Impersonable{
String toReturn = null;
try{
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
ItemsParameterBuilder<URLCreationParameters> builder = URLCreationParameters.builder().name(name).description(description).url(value).on(id).with(ses).author(currentUser);
toReturn = itemHandler.create(builder.build());
@ -147,8 +156,8 @@ public class ItemsCreator extends Impersonable{
}
return Response.ok(toReturn).build();
}
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Path("/{id}/create/GCUBEITEM")
@ -161,7 +170,7 @@ public class ItemsCreator extends Impersonable{
try{
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
ItemsParameterBuilder<GCubeItemCreationParameters> builder = GCubeItemCreationParameters.builder().item(item).on(id).with(ses).author(currentUser);
toReturn = itemHandler.create(builder.build());
}catch(StorageHubException she ){
log.error(she.getErrorMessage(), she);
@ -180,6 +189,52 @@ public class ItemsCreator extends Impersonable{
}
@POST
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Path("/{id}/create/FILE")
public String createFileItemFromUrl(@PathParam("id") String id, @FormParam("name") String name,
@FormParam("description") String description,
@FormParam("url") String url){
InnerMethodName.instance.set("createItem(FILEFromUrl)");
Session ses = null;
String toReturn = null;
try{
log.debug("UPLOAD: call started");
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
URLConnection connectionURL = new URI(url).toURL().openConnection();
long fileLength = connectionURL.getContentLengthLong();
try(InputStream stream = connectionURL.getInputStream()){
ItemsParameterBuilder<FileCreationParameters> builder = FileCreationParameters.builder().name(name).fileDetails(FormDataContentDisposition.name(name).size(fileLength).build())
.description(description).stream(stream)
.on(id).with(ses).author(currentUser);
toReturn = itemHandler.create(builder.build());
}
log.debug("UPLOAD: call finished");
}catch(RepositoryException re ){
log.error("jcr error creating file item", re);
GXOutboundErrorResponse.throwException(new BackendGenericError("jcr error creating file item", re));
}catch(StorageHubException she ){
log.error(she.getErrorMessage(), she);
GXOutboundErrorResponse.throwException(she, Response.Status.fromStatusCode(she.getStatus()));
}catch(Throwable e ){
log.error("unexpected error", e);
GXOutboundErrorResponse.throwException(new BackendGenericError(e));
}finally{
if (ses!=null && ses.isLive()) {
log.info("session closed");
ses.logout();
}
}
return toReturn;
}
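The new endpoint above accepts a form-encoded POST with name, description and url. A client-side sketch using the JDK HTTP client (the base URL, folder id and token are placeholders, not values defined by this service):

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

// Sketch only: posts to items/{id}/create/FILE asking the service to import a remote url.
public class CreateFileFromUrlSketch {
	public static void main(String[] args) throws Exception {
		String base = "https://<storagehub-host>/workspace";      // placeholder base URL
		String folderId = "<destination-folder-id>";              // placeholder folder id
		String form = "name=" + URLEncoder.encode("report.pdf", StandardCharsets.UTF_8)
				+ "&description=" + URLEncoder.encode("imported from url", StandardCharsets.UTF_8)
				+ "&url=" + URLEncoder.encode("https://example.org/report.pdf", StandardCharsets.UTF_8);
		HttpRequest request = HttpRequest.newBuilder()
				.uri(URI.create(base + "/items/" + folderId + "/create/FILE"))
				.header("Authorization", "Bearer <token>")
				.header("Content-Type", "application/x-www-form-urlencoded")
				.POST(HttpRequest.BodyPublishers.ofString(form))
				.build();
		HttpResponse<String> response = HttpClient.newHttpClient()
				.send(request, HttpResponse.BodyHandlers.ofString());
		System.out.println(response.statusCode() + " " + response.body());
	}
}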
@POST
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Path("/{id}/create/FILE")
@ -191,14 +246,17 @@ public class ItemsCreator extends Impersonable{
Session ses = null;
String toReturn = null;
try{
try(InputStream is = new BufferedInputStream(stream)){
long size = fileDetail.getSize();
log.info("UPLOAD: call started with file size {}",size);
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
ItemsParameterBuilder<FileCreationParameters> builder = FileCreationParameters.builder().name(name).description(description).stream(stream).fileDetails(fileDetail)
.on(id).with(ses).author(currentUser);
log.debug("UPLOAD: item prepared");
toReturn = itemHandler.create(builder.build());
log.debug("UPLOAD: call finished");
}catch(RepositoryException re ){
log.error("jcr error creating file item", re);
GXOutboundErrorResponse.throwException(new BackendGenericError("jcr error creating file item", re));
@ -218,10 +276,42 @@ public class ItemsCreator extends Impersonable{
}
@POST
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Path("/{id}/create/ARCHIVE")
public String uploadArchiveFromURL(@PathParam("id") String id, @FormParam("parentFolderName") String parentFolderName,
@FormParam("url") String url){
InnerMethodName.instance.set("createItem(ARCHIVEFromURL)");
Session ses = null;
String toReturn = null;
try{
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
try(InputStream stream = new URI(url).toURL().openStream()){
ItemsParameterBuilder<ArchiveStructureCreationParameter> builder = ArchiveStructureCreationParameter.builder().parentName(parentFolderName).stream(stream)
.on(id).with(ses).author(currentUser);
toReturn = itemHandler.create(builder.build());
}
}catch(RepositoryException | ArchiveException | IOException re){
log.error("jcr error extracting archive", re);
GXOutboundErrorResponse.throwException(new BackendGenericError("jcr error extracting archive", re));
}catch(StorageHubException she ){
log.error(she.getErrorMessage(), she);
GXOutboundErrorResponse.throwException(she, Response.Status.fromStatusCode(she.getStatus()));
}catch(Throwable e ){
log.error("unexpected error", e);
GXOutboundErrorResponse.throwException(new BackendGenericError(e));
} finally{
if (ses!=null)
ses.logout();
}
return toReturn;
}
@POST
@Consumes(MediaType.MULTIPART_FORM_DATA)
@ -233,12 +323,12 @@ public class ItemsCreator extends Impersonable{
Session ses = null;
String toReturn = null;
try{
try(InputStream is = new BufferedInputStream(stream)){
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
ItemsParameterBuilder<ArchiveStructureCreationParameter> builder = ArchiveStructureCreationParameter.builder().parentName(parentFolderName).stream(stream).fileDetails(fileDetail)
ItemsParameterBuilder<ArchiveStructureCreationParameter> builder = ArchiveStructureCreationParameter.builder().parentName(parentFolderName).stream(is).fileDetails(fileDetail)
.on(id).with(ses).author(currentUser);
toReturn = itemHandler.create(builder.build());
}catch(RepositoryException | ArchiveException | IOException re){
@ -259,6 +349,6 @@ public class ItemsCreator extends Impersonable{
}
}

@ -1,21 +1,12 @@
package org.gcube.data.access.storagehub.services;
import static org.gcube.common.storagehub.model.Constants.enchriptedPrefix;
import static org.gcube.common.storagehub.model.Constants.versionPrefix;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
import java.util.Deque;
import java.util.LinkedList;
import java.util.List;
import java.util.zip.Deflater;
import java.util.zip.ZipOutputStream;
import javax.enterprise.context.RequestScoped;
import javax.inject.Inject;
@ -40,25 +31,16 @@ import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.StreamingOutput;
import org.apache.commons.io.FilenameUtils;
import org.gcube.common.authorization.control.annotations.AuthorizationControl;
import org.gcube.common.encryption.encrypter.StringEncrypter;
import org.gcube.common.gxrest.response.outbound.GXOutboundErrorResponse;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.common.scope.impl.ScopeBean.Type;
import org.gcube.common.storagehub.model.Excludes;
import org.gcube.common.storagehub.model.NodeConstants;
import org.gcube.common.storagehub.model.Paths;
import org.gcube.common.storagehub.model.exceptions.BackendGenericError;
import org.gcube.common.storagehub.model.exceptions.IdNotFoundException;
import org.gcube.common.storagehub.model.exceptions.InvalidCallParameters;
import org.gcube.common.storagehub.model.exceptions.InvalidItemException;
import org.gcube.common.storagehub.model.exceptions.ItemLockedException;
import org.gcube.common.storagehub.model.exceptions.PluginInitializationException;
import org.gcube.common.storagehub.model.exceptions.PluginNotFoundException;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.common.storagehub.model.items.AbstractFileItem;
import org.gcube.common.storagehub.model.items.FolderItem;
@ -66,42 +48,49 @@ import org.gcube.common.storagehub.model.items.Item;
import org.gcube.common.storagehub.model.items.SharedFolder;
import org.gcube.common.storagehub.model.items.VreFolder;
import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.plugins.FolderManager;
import org.gcube.common.storagehub.model.service.ItemList;
import org.gcube.common.storagehub.model.service.ItemWrapper;
import org.gcube.common.storagehub.model.service.VersionList;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.types.ItemAction;
import org.gcube.common.storagehub.model.types.NodeProperty;
import org.gcube.data.access.storagehub.AuthorizationChecker;
import org.gcube.data.access.storagehub.Constants;
import org.gcube.data.access.storagehub.PathUtil;
import org.gcube.data.access.storagehub.Range;
import org.gcube.data.access.storagehub.SingleFileStreamingOutput;
import org.gcube.data.access.storagehub.StorageHubAppllicationManager;
import org.gcube.data.access.storagehub.Utils;
import org.gcube.data.access.storagehub.accounting.AccountingHandler;
import org.gcube.data.access.storagehub.exception.MyAuthException;
import org.gcube.data.access.storagehub.handlers.ClassHandler;
import org.gcube.data.access.storagehub.handlers.CompressHandler;
import org.gcube.data.access.storagehub.handlers.CredentialHandler;
import org.gcube.data.access.storagehub.handlers.DownloadHandler;
import org.gcube.data.access.storagehub.handlers.PublicLinkHandler;
import org.gcube.data.access.storagehub.handlers.TrashHandler;
import org.gcube.data.access.storagehub.handlers.VersionHandler;
import org.gcube.data.access.storagehub.handlers.items.Item2NodeConverter;
import org.gcube.data.access.storagehub.handlers.items.Node2ItemConverter;
import org.gcube.data.access.storagehub.handlers.plugins.FolderPluginHandler;
import org.gcube.data.access.storagehub.handlers.plugins.StorageOperationMediator;
import org.gcube.data.access.storagehub.types.PublicLink;
import org.gcube.smartgears.annotations.ManagedBy;
import org.gcube.smartgears.utils.InnerMethodName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.webcohesion.enunciate.metadata.rs.RequestHeader;
import com.webcohesion.enunciate.metadata.rs.RequestHeaders;
@Path("items")
@ManagedBy(StorageHubAppllicationManager.class)
@RequestHeaders({
@RequestHeader( name = "Authorization", description = "Bearer token, see https://dev.d4science.org/how-to-access-resources"),
})
public class ItemsManager extends Impersonable{
private static final Logger log = LoggerFactory.getLogger(ItemsManager.class);
RepositoryInitializer repository = StorageHubAppllicationManager.repository;
RepositoryInitializer repository = StorageHubAppllicationManager.getRepository();
@Inject
AccountingHandler accountingHandler;
@ -115,26 +104,31 @@ public class ItemsManager extends Impersonable{
@Inject
AuthorizationChecker authChecker;
@Inject
VersionHandler versionHandler;
@Inject
DownloadHandler downloadHandler;
@Inject
TrashHandler trashHandler;
@Inject PathUtil pathUtil;
@Inject Node2ItemConverter node2Item;
@Inject Item2NodeConverter item2Node;
@Inject
FolderPluginHandler folderPluginHandler;
StorageOperationMediator opMediator;
@Inject
CompressHandler compressHandler;
PublicLinkHandler publicLinkHandler;
@GET
@Path("{id}")
@Produces(MediaType.APPLICATION_JSON)
@ -241,7 +235,7 @@ public class ItemsManager extends Impersonable{
InnerMethodName.instance.set("findChildrenByNamePattern");
return _findChildrenByNamePattern(excludes, name);
}
public ItemList _findChildrenByNamePattern(List<String> excludes, String name){
Session ses = null;
List<Item> toReturn = new ArrayList<>();
@ -278,9 +272,9 @@ public class ItemsManager extends Impersonable{
if (ses!=null)
ses.logout();
}
return new ItemList(toReturn);
}
@GET
@ -407,54 +401,18 @@ public class ItemsManager extends Impersonable{
Session ses = null;
try{
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
String complexId = id;
if (id.startsWith(enchriptedPrefix)) {
String currentScope = ScopeProvider.instance.get();
try {
ScopeBean bean= new ScopeBean(currentScope);
while (!bean.is(Type.INFRASTRUCTURE)) {
bean = bean.enclosingScope();
}
ScopeProvider.instance.set(bean.toString());
complexId = StringEncrypter.getEncrypter().decrypt(new String(Base64.getUrlDecoder().decode(id.replace(enchriptedPrefix, ""))));
}catch(Exception e){
throw new BackendGenericError("invalid public url",e);
}finally {
ScopeProvider.instance.set(currentScope);
}
}
String itemId = complexId;
String versionName = null;
if (complexId.contains(versionPrefix)) {
String[] split = complexId.split(versionPrefix);
itemId = split[0];
versionName = split[1];
}
log.warn("item id to retrieve is {}",itemId);
Node selectedNode;
try {
selectedNode= ses.getNodeByIdentifier(itemId);
}catch (ItemNotFoundException e) {
throw new IdNotFoundException(itemId);
PublicLink publicLink = publicLinkHandler.resolveEnchriptedId(id);
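// resolve the encrypted public-link id and dispatch to the proper download:
// volatile payload, a specific version, or the current file content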
switch (publicLink.getType()) {
case VOLATILE:
return downloadHandler.downloadFileFromStorageBackend(publicLink.getId(), publicLink.getStorageName());
case VERSIONED:
Item versionedItem = node2Item.getItem(publicLink.getId(), ses, Excludes.GET_ONLY_CONTENT);
return downloadHandler.downloadVersionedItem(ses, currentUser, (AbstractFileItem) versionedItem, publicLink.getVersion(), true);
default:
Item currentItem = node2Item.getItem(publicLink.getId(), ses, Excludes.GET_ONLY_CONTENT);
return downloadHandler.downloadFileItem(ses,(AbstractFileItem) currentItem, currentUser, true);
}
Item item = node2Item.getItem(selectedNode, Arrays.asList(NodeConstants.ACCOUNTING_NAME, NodeConstants.METADATA_NAME));
if (!(item instanceof AbstractFileItem)) throw new InvalidCallParameters("the choosen item is not a File");
if (versionName!=null)
return downloadVersionInternal(ses, currentUser, itemId, versionName, false);
else
return downloadFileInternal(ses, (AbstractFileItem) item, currentUser, true);
}catch(RepositoryException re ){
log.error("jcr error getting public link", re);
GXOutboundErrorResponse.throwException(new BackendGenericError(re));
@ -505,28 +463,10 @@ public class ItemsManager extends Impersonable{
ses.getWorkspace().getLockManager().unlock(selectedNode.getPath());
}*/
String url = null;
String currentScope = ScopeProvider.instance.get();
try {
ScopeBean bean= new ScopeBean(currentScope);
while (!bean.is(Type.INFRASTRUCTURE)) {
bean = bean.enclosingScope();
}
ScopeProvider.instance.set(bean.toString());
String toEnchript;
if(version!=null) toEnchript = String.format("%s%s%s",id, versionPrefix, version);
else toEnchript = id;
String url = version!=null ? publicLinkHandler.getForVersionedItem(id, version, context):
publicLinkHandler.getForItem(id, context);
String enchriptedQueryString = StringEncrypter.getEncrypter().encrypt(toEnchript);
url = createPublicLink(new String(Base64.getUrlEncoder().encode(enchriptedQueryString.getBytes())));
}catch(Exception e){
throw new BackendGenericError(e);
}finally {
ScopeProvider.instance.set(currentScope);
}
toReturn = new URL(url);
@ -545,11 +485,9 @@ public class ItemsManager extends Impersonable{
}
private String createPublicLink(String enchriptedString) {
String basepath = context.getInitParameter("resolver-basepath");
String filePublicUrl = String.format("%s/%s%s",basepath, enchriptedPrefix, enchriptedString);
return filePublicUrl;
}
@PUT
@Path("{id}/publish")
@ -574,7 +512,7 @@ public class ItemsManager extends Impersonable{
ses.save();
}catch(RepositoryException re ){
log.error("jcr error getting rootSharedFolder", re);
log.error("jcr error publishing folder", re);
GXOutboundErrorResponse.throwException(new BackendGenericError(re));
}catch(StorageHubException she ){
log.error(she.getErrorMessage(), she);
@ -675,8 +613,12 @@ public class ItemsManager extends Impersonable{
try{
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
authChecker.checkReadAuthorizationControl(ses, currentUser, id);
Node node = ses.getNodeByIdentifier(id);
Item currentItem = node2Item.getItem(node, Excludes.ALL);
if (!(currentItem instanceof AbstractFileItem))
throw new InvalidItemException("this item is not a file");
return downloadVersionInternal(ses, currentUser, id, versionName, true);
return downloadHandler.downloadVersionedItem(ses, currentUser, (AbstractFileItem) currentItem, versionName, true);
}catch(RepositoryException re ){
log.error("jcr error downloading version", re);
@ -691,45 +633,49 @@ public class ItemsManager extends Impersonable{
return Response.serverError().build();
}
private Response downloadVersionInternal(Session ses, String login, String id, String versionName, boolean withAccounting) throws RepositoryException, StorageHubException{
Node node = ses.getNodeByIdentifier(id);
Item currentItem = node2Item.getItem(node, Excludes.ALL);
if (!(currentItem instanceof AbstractFileItem))
throw new InvalidItemException("this item is not a file");
List<Version> jcrVersions = versionHandler.getContentVersionHistory(node);
for (Version version: jcrVersions) {
log.debug("retrieved version id {}, name {}", version.getIdentifier(), version.getName());
if (version.getName().equals(versionName)) {
Content content = node2Item.getContentFromVersion(version);
FolderManager folderManager = folderPluginHandler.getFolderManager((AbstractFileItem) currentItem);
final InputStream streamToWrite = folderManager.getStorageBackend().download(content);
log.debug("retrieved storage id is {} with storageBackend {} (stream is null? {})",content.getStorageId(), folderManager.getStorageBackend().getClass().getSimpleName(), streamToWrite==null );
String oldfilename = FilenameUtils.getBaseName(currentItem.getTitle());
String ext = FilenameUtils.getExtension(currentItem.getTitle());
String fileName = String.format("%s_v%s.%s", oldfilename, version.getName(), ext);
if (withAccounting)
accountingHandler.createReadObj(fileName, ses, node, login, true);
StreamingOutput so = new SingleFileStreamingOutput(streamToWrite);
return Response
.ok(so)
.header("content-disposition","attachment; filename = "+fileName)
.header("Content-Length", content.getSize())
.header("Content-Type", content.getMimeType())
.build();
}
@DELETE
@Path("{id}/versions/{version}")
public void deleteVersion(@PathParam("version") String versionName){
InnerMethodName.instance.set("deleteVersion");
Session ses = null;
try{
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
authChecker.checkWriteAuthorizationControl(ses, currentUser, id, false);
Node node = ses.getNodeByIdentifier(id);
Item currentItem = node2Item.getItem(node, Excludes.GET_ONLY_CONTENT);
if (!(currentItem instanceof AbstractFileItem))
throw new InvalidItemException("this item is not a file");
List<Version> versions = versionHandler.getContentVersionHistory(node);
boolean found = false;
for(Version version : versions)
if (version.getName().equals(versionName)) {
boolean currentVersion = ((AbstractFileItem)currentItem).getContent().getStorageId().equals(version.getFrozenNode().getProperty(NodeProperty.STORAGE_ID.toString()).getString());
if (currentVersion)
throw new InvalidCallParameters("current version cannot be removed");
versionHandler.removeContentVersion(node, versionName);
accountingHandler.createVersionDeleted(currentItem.getTitle(), versionName, ses, node, currentUser, false);
ses.save();
found = true;
break;
}
if (!found) throw new InvalidItemException("the version "+versionName+" is not valid or is current version for item "+currentItem.getTitle());
}catch(RepositoryException re ){
log.error("jcr error removing version", re);
GXOutboundErrorResponse.throwException(new BackendGenericError(re));
}catch(StorageHubException she ){
log.error(she.getErrorMessage(), she);
GXOutboundErrorResponse.throwException(she, Response.Status.fromStatusCode(she.getStatus()));
}finally{
if (ses!=null)
ses.logout();
}
throw new InvalidItemException("the version is not valid");
}
@GET
@Path("{id}/anchestors")
@Produces(MediaType.APPLICATION_JSON)
@ -739,7 +685,7 @@ public class ItemsManager extends Impersonable{
Session ses = null;
List<Item> toReturn = new LinkedList<>();
try{
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
authChecker.checkReadAuthorizationControl(ses, currentUser, id);
Node currentNode = ses.getNodeByIdentifier(id);
@ -792,48 +738,15 @@ public class ItemsManager extends Impersonable{
public Response download(@QueryParam("exclude") List<String> excludes){
InnerMethodName.instance.set("downloadById");
Session ses = null;
Response response = null;
try{
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
final Node node = ses.getNodeByIdentifier(id);
authChecker.checkReadAuthorizationControl(ses, currentUser, id);
final Item item = node2Item.getItem(node, null);
if (item instanceof AbstractFileItem){
return downloadFileInternal(ses, (AbstractFileItem) item, currentUser, true);
return downloadHandler.downloadFileItem(ses, (AbstractFileItem) item, currentUser, true);
} else if (item instanceof FolderItem){
try {
final Deque<Item> allNodes = compressHandler.getAllNodesForZip((FolderItem)item, ses, currentUser, accountingHandler, excludes);
final org.gcube.common.storagehub.model.Path originalPath = Paths.getPath(item.getParentPath());
StreamingOutput so = new StreamingOutput() {
@Override
public void write(OutputStream os) {
try(ZipOutputStream zos = new ZipOutputStream(os)){
long start = System.currentTimeMillis();
zos.setLevel(Deflater.BEST_COMPRESSION);
log.debug("writing StreamOutput");
compressHandler.zipNode(zos, allNodes, currentUser, originalPath);
log.debug("StreamOutput written in {}",(System.currentTimeMillis()-start));
} catch (Exception e) {
log.error("error writing stream",e);
}
}
};
response = Response
.ok(so)
.header("content-disposition","attachment; filename = "+item.getTitle()+".zip")
.header("Content-Type", "application/zip")
.header("Content-Length", -1l)
.build();
accountingHandler.createReadObj(item.getTitle(), ses, (Node) item.getRelatedNode(), currentUser, false);
}finally {
if (ses!=null) ses.save();
}
return downloadHandler.downloadFolderItem(ses, currentUser, (FolderItem)item, true);
} else throw new InvalidItemException("item type not supported for download: "+item.getClass());
}catch(RepositoryException re ){
@ -845,28 +758,7 @@ public class ItemsManager extends Impersonable{
} finally{
if (ses!=null) ses.logout();
}
return response;
}
private Response downloadFileInternal(Session ses, AbstractFileItem fileItem, String login, boolean withAccounting) throws RepositoryException, PluginInitializationException, PluginNotFoundException, BackendGenericError {
FolderManager folderManager = folderPluginHandler.getFolderManager(fileItem);
final InputStream streamToWrite = folderManager.getStorageBackend().download(fileItem.getContent());
if (withAccounting)
accountingHandler.createReadObj(fileItem.getTitle(), ses, (Node) fileItem.getRelatedNode(), login, true);
StreamingOutput so = new SingleFileStreamingOutput(streamToWrite);
return Response
.ok(so)
.header("content-disposition","attachment; filename = "+fileItem.getName())
.header("Content-Length", fileItem.getContent().getSize())
.header("Content-Type", fileItem.getContent().getMimeType())
.build();
return null;
}
@ -877,7 +769,7 @@ public class ItemsManager extends Impersonable{
Session ses = null;
try{
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
authChecker.checkMoveOpsForProtectedFolders(ses, id);
@ -922,11 +814,12 @@ public class ItemsManager extends Impersonable{
if (movingSharedItemOutside)
item2Node.updateOwnerOnSubTree(nodeToMove, currentUser);
//folderHandler.onMove(source, destination);
//TODO: add onMove handling (when it changes the remotePath) and cover the case of a different backend
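// (e.g. the remote path may need to be recomputed and the payload migrated when the
// destination folder is served by a different storage backend)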
accountingHandler.createFolderAddObj(uniqueName, item.getClass().getSimpleName(), mimeTypeForAccounting, ses, currentUser, destination, false);
accountingHandler.createFolderRemoveObj(item.getTitle(), item.getClass().getSimpleName(), mimeTypeForAccounting, ses, currentUser, originalParent, false);
ses.save();
}finally {
ses.getWorkspace().getLockManager().unlock(nodeToMove.getPath());
@ -955,7 +848,7 @@ public class ItemsManager extends Impersonable{
Session ses = null;
String newFileIdentifier = null;
try{
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
authChecker.checkWriteAuthorizationControl(ses, currentUser, destinationId, true);
@ -963,7 +856,7 @@ public class ItemsManager extends Impersonable{
final Node nodeToCopy = ses.getNodeByIdentifier(id);
final Node destination = ses.getNodeByIdentifier(destinationId);
//Item destinationItem = node2Item.getItem(destination,null);
FolderItem destinationItem = (FolderItem)node2Item.getItem(destination,null);
final Item item = node2Item.getItem(nodeToCopy, Arrays.asList(NodeConstants.ACCOUNTING_NAME, NodeConstants.METADATA_NAME));
@ -976,6 +869,7 @@ public class ItemsManager extends Impersonable{
}catch (LockException e) {
throw new ItemLockedException(e);
}
try {
String uniqueName = Utils.checkExistanceAndGetUniqueName(ses, destination, newFileName);
String newPath= String.format("%s/%s", destination.getPath(), uniqueName);
@ -983,15 +877,12 @@ public class ItemsManager extends Impersonable{
Node newNode = ses.getNode(newPath);
newFileIdentifier = newNode.getIdentifier();
//TODO: folderHandler.onCopy(source, destination);
if (item instanceof AbstractFileItem) {
FolderManager manager = folderPluginHandler.getFolderManager(item);
((AbstractFileItem) item).getContent().setRemotePath(newPath);
String newStorageID = manager.getStorageBackend().onCopy((AbstractFileItem) item);
((AbstractFileItem) item).getContent().setStorageId(newStorageID);
item2Node.replaceContent(newNode, (AbstractFileItem) item, ItemAction.CLONED);
}
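// copy the payload via the StorageOperationMediator towards the destination folder's backend,
// then update the node content from the returned MetaInfo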
Content contentToCopy = ((AbstractFileItem) item).getContent();
MetaInfo contentInfo = opMediator.copy(contentToCopy, destinationItem.getBackend(), destination.getPath(), uniqueName, currentUser);
Utils.setContentFromMetaInfo((AbstractFileItem) item, contentInfo);
item2Node.replaceContent(newNode, (AbstractFileItem) item, ItemAction.CLONED);
Utils.setPropertyOnChangeNode(newNode, currentUser, ItemAction.CLONED);
newNode.setProperty(NodeProperty.PORTAL_LOGIN.toString(), currentUser);
@ -1030,22 +921,21 @@ public class ItemsManager extends Impersonable{
Session ses = null;
try{
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
authChecker.checkMoveOpsForProtectedFolders(ses, id);
authChecker.checkWriteAuthorizationControl(ses, currentUser, id, false);
final Node nodeToMove = ses.getNodeByIdentifier(id);
final Item item = node2Item.getItem(nodeToMove, null);
if (item instanceof SharedFolder)
throw new InvalidItemException("shared folder");
if (Constants.FOLDERS_TO_EXLUDE.contains(item.getTitle()))
throw new InvalidItemException("protected folder cannot be renamed");
if (item instanceof SharedFolder)
if (getSharedParentNode(nodeToMove).getIdentifier() == item.getId())
throw new InvalidItemException("root shared folder name cannot be modfied");
String uniqueName = Utils.checkExistanceAndGetUniqueName(ses, nodeToMove.getParent(), newName);
try {
ses.getWorkspace().getLockManager().lock(nodeToMove.getPath(), true, true, 0,currentUser);
@ -1053,10 +943,7 @@ public class ItemsManager extends Impersonable{
}catch (LockException e) {
throw new ItemLockedException(e);
}
try {
String uniqueName = Utils.checkExistanceAndGetUniqueName(ses, nodeToMove.getParent(), newName);
String newPath = String.format("%s/%s", nodeToMove.getParent().getPath(), uniqueName);
nodeToMove.setProperty(NodeProperty.TITLE.toString(), uniqueName);
Utils.setPropertyOnChangeNode(nodeToMove, currentUser, ItemAction.RENAMED);
@ -1093,7 +980,7 @@ public class ItemsManager extends Impersonable{
Session ses = null;
try{
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
authChecker.checkWriteAuthorizationControl(ses, currentUser, id, false);
@ -1138,7 +1025,7 @@ public class ItemsManager extends Impersonable{
Session ses = null;
try{
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
authChecker.checkWriteAuthorizationControl(ses, currentUser, id, false);
@ -1182,7 +1069,7 @@ public class ItemsManager extends Impersonable{
Session ses = null;
try{
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
authChecker.checkWriteAuthorizationControl(ses, currentUser, id, false);
@ -1223,7 +1110,7 @@ public class ItemsManager extends Impersonable{
@Path("{id}")
public Response deleteItem(@QueryParam("force") boolean force){
InnerMethodName.instance.set("deleteItem("+force+")");
Session ses = null;
try{
@ -1243,7 +1130,7 @@ public class ItemsManager extends Impersonable{
if (itemToDelete.isExternalManaged() && !force)
throw new InvalidItemException("External managed Items cannot be moved to Trash");
log.debug("item is trashed? {}", itemToDelete.isTrashed());
if (!itemToDelete.isTrashed() && !force) {

@ -40,9 +40,13 @@ import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.common.storagehub.model.exceptions.UserNotAuthorizedException;
import org.gcube.common.storagehub.model.items.AbstractFileItem;
import org.gcube.common.storagehub.model.items.Item;
import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.items.nodes.Owner;
import org.gcube.common.storagehub.model.messages.Message;
import org.gcube.common.storagehub.model.service.ItemList;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
import org.gcube.common.storagehub.model.types.ItemAction;
import org.gcube.common.storagehub.model.types.MessageList;
import org.gcube.common.storagehub.model.types.NodeProperty;
@ -56,20 +60,26 @@ import org.gcube.data.access.storagehub.handlers.TrashHandler;
import org.gcube.data.access.storagehub.handlers.items.Item2NodeConverter;
import org.gcube.data.access.storagehub.handlers.items.Item2NodeConverter.Values;
import org.gcube.data.access.storagehub.handlers.items.Node2ItemConverter;
import org.gcube.data.access.storagehub.handlers.plugins.FolderPluginHandler;
import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler;
import org.gcube.data.access.storagehub.types.MessageSharable;
import org.gcube.smartgears.annotations.ManagedBy;
import org.gcube.smartgears.utils.InnerMethodName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.webcohesion.enunciate.metadata.rs.RequestHeader;
import com.webcohesion.enunciate.metadata.rs.RequestHeaders;
@Path("messages")
@ManagedBy(StorageHubAppllicationManager.class)
@RequestHeaders({
@RequestHeader( name = "Authorization", description = "Bearer token, see https://dev.d4science.org/how-to-access-resources"),
})
public class MessageManager extends Impersonable{
private static final Logger log = LoggerFactory.getLogger(MessageManager.class);
RepositoryInitializer repository = StorageHubAppllicationManager.repository;
RepositoryInitializer repository = StorageHubAppllicationManager.getRepository();
@Inject
AccountingHandler accountingHandler;
@ -87,7 +97,10 @@ public class MessageManager extends Impersonable{
@Inject Item2NodeConverter item2Node;
@Inject TrashHandler trashHandler;
@Inject
StorageBackendHandler storageBackendHandler;
@GET
@Path("{id}")
@Produces(MediaType.APPLICATION_JSON)
@ -128,9 +141,10 @@ public class MessageManager extends Impersonable{
Message message = node2Item.getMessageItem(messageNode);
Node personalNode = checkRights(currentUser, message);
if (countSharedSet(messageNode)>1)
personalNode.removeShare();
else {
if (countSharedSet(messageNode)>1) {
log.debug("removing node message "+personalNode.getPath());
personalNode.remove();
}else {
if (message.isWithAttachments()) {
Node attachmentNode = messageNode.getNode(Constants.ATTACHMENTNODE_NAME);
List<Item> attachments = Utils.getItemList(attachmentNode, Excludes.GET_ONLY_CONTENT, null, true, AbstractFileItem.class);
@ -139,6 +153,7 @@ public class MessageManager extends Impersonable{
messageNode.removeSharedSet();
}
ses.save();
log.debug("removing node message saved");
}catch (ItemNotFoundException e) {
log.error("id {} not found",id,e);
GXOutboundErrorResponse.throwException(new IdNotFoundException(id, e), Status.NOT_FOUND);
@ -288,6 +303,9 @@ public class MessageManager extends Impersonable{
message.setBody(body);
message.setName(UUID.randomUUID().toString());
User user = ses.getUserManager().getAuthorizable(currentUser, User.class);
if (user ==null)
throw new InvalidCallParameters("invalid storagehub user: "+currentUser);
Owner owner = new Owner();
owner.setUserId(user.getID());
owner.setUserName(user.getPrincipal().getName());
@ -325,7 +343,7 @@ public class MessageManager extends Impersonable{
private Node saveAttachments(Session ses, Node messageNode , List<String> attachments) throws RepositoryException, BackendGenericError{
private Node saveAttachments(Session ses, Node messageNode , List<String> attachments) throws RepositoryException, StorageHubException{
Node attachmentNode = messageNode.getNode(Constants.ATTACHMENTNODE_NAME);
for (String itemId: attachments) {
@ -345,7 +363,7 @@ public class MessageManager extends Impersonable{
NodeIterator nodeIt = node.getNodes();
while(nodeIt.hasNext()) {
Node child = nodeIt.nextNode();
log.info("message type "+child.getPrimaryNodeType().getName());
log.trace("message type "+child.getPrimaryNodeType().getName());
Message message = node2Item.getMessageItem(child);
if (message == null) {
log.info("message discarded");
@ -375,13 +393,13 @@ public class MessageManager extends Impersonable{
private Node checkRights(String user, Message messageItem) throws RepositoryException, StorageHubException{
Node personalNode = null;
Node messageNode = (Node) messageItem.getRelatedNode();
if (messageNode.getPath().startsWith(pathUtil.getWorkspacePath(currentUser).toPath()))
if (messageNode.getPath().startsWith(pathUtil.getInboxPath(user).toPath()))
return messageNode;
NodeIterator nodeIt = messageNode.getSharedSet();
while (nodeIt.hasNext()) {
Node node = nodeIt.nextNode();
if (node.getPath().startsWith(pathUtil.getWorkspacePath(currentUser).toPath()))
if (node.getPath().startsWith(pathUtil.getInboxPath(user).toPath()))
personalNode = node;
}
if (personalNode == null &&
@ -390,10 +408,8 @@ public class MessageManager extends Impersonable{
return personalNode== null ? messageNode : personalNode;
}
//TODO: move in a common place
@Inject FolderPluginHandler folderPluginHandler;
private Node copyNode(Session session, Node destination, Item itemToCopy) throws RepositoryException, BackendGenericError{
private Node copyNode(Session session, Node destination, Item itemToCopy) throws RepositoryException, StorageHubException{
//it needs to be locked ??
Node nodeToCopy = ((Node)itemToCopy.getRelatedNode());
String uniqueName = Utils.checkExistanceAndGetUniqueName(session, destination,itemToCopy.getName() );
@ -403,9 +419,15 @@ public class MessageManager extends Impersonable{
if (itemToCopy instanceof AbstractFileItem) {
AbstractFileItem newNodeItem = node2Item.getItem(newNode, Excludes.EXCLUDE_ACCOUNTING);
newNodeItem.getContent().setRemotePath(newPath);
String newStorageID = folderPluginHandler.getDefault().getStorageBackend().onCopy(newNodeItem);
newNodeItem.getContent().setStorageId(newStorageID);
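// resolve the StorageBackend declared by the content's PayloadBackend, copy the payload
// through it, and refresh the node content from the returned MetaInfo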
Content contentToCopy = newNodeItem.getContent();
StorageBackendFactory sbf = storageBackendHandler.get(contentToCopy.getPayloadBackend());
StorageBackend sb = sbf.create(contentToCopy.getPayloadBackend());
MetaInfo contentInfo = sb.onCopy(contentToCopy, destination.getPath(), uniqueName);
Utils.setContentFromMetaInfo(newNodeItem, contentInfo);
item2Node.replaceContent(newNode, newNodeItem, ItemAction.CLONED);
}

@ -1,10 +1,13 @@
package org.gcube.data.access.storagehub.services;
import javax.jcr.Repository;
import javax.jcr.SimpleCredentials;
public interface RepositoryInitializer {
Repository getRepository();
void initContainerAtFirstStart(SimpleCredentials credentials);
void shutdown();
}
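As an illustration of the extended contract above, a trivial implementation skeleton could look like the following. This is a sketch only, assuming the javax.jcr.Repository instance is built elsewhere and handed in; it is not the project's actual initializer.

// Illustrative only: a minimal implementation of the interface above, placed in the same package.
package org.gcube.data.access.storagehub.services;

import javax.jcr.Repository;
import javax.jcr.SimpleCredentials;

public class SimpleRepositoryInitializer implements RepositoryInitializer {

    private final Repository repository;

    public SimpleRepositoryInitializer(Repository repository) {
        // assumption: the JCR repository is created elsewhere and injected here
        this.repository = repository;
    }

    @Override
    public Repository getRepository() {
        return repository;
    }

    @Override
    public void initContainerAtFirstStart(SimpleCredentials credentials) {
        // one-off bootstrap work (registering node types, creating root folders) would go here
    }

    @Override
    public void shutdown() {
        // release repository resources (e.g. close the underlying store) here
    }
}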

@ -0,0 +1,40 @@
package org.gcube.data.access.storagehub.services;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.gcube.common.storagehub.model.storages.StorageDescriptor;
import org.gcube.data.access.storagehub.StorageHubAppllicationManager;
import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler;
import org.gcube.smartgears.annotations.ManagedBy;
import org.gcube.smartgears.utils.InnerMethodName;
import com.webcohesion.enunciate.metadata.rs.RequestHeader;
import com.webcohesion.enunciate.metadata.rs.RequestHeaders;
@Path("storages")
@ManagedBy(StorageHubAppllicationManager.class)
@RequestHeaders({
@RequestHeader( name = "Authorization", description = "Bearer token, see https://dev.d4science.org/how-to-access-resources"),
})
public class StorageManager {
@Inject
StorageBackendHandler storageBackendHandler;
@GET
@Path("/")
@Produces(MediaType.APPLICATION_JSON)
public List<StorageDescriptor> getStorages(){
InnerMethodName.instance.set("getStorages");
List<StorageDescriptor> storages = new ArrayList<>();
storageBackendHandler.getAllImplementations().forEach( f -> storages.add(new StorageDescriptor(f.getName())));
return storages;
}
}
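A hedged client-side sketch of querying the new storages endpoint; the base URL and token are placeholders, and the exact JSON produced for StorageDescriptor is not shown in this changeset.

// Hypothetical client sketch for GET /storages; base URL and token are placeholders.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ListStoragesClient {
    public static void main(String[] args) throws Exception {
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("https://storagehub.example.org/workspace/storages"))
                .header("Authorization", "Bearer YOUR-BEARER-TOKEN")
                .header("Accept", "application/json")
                .GET()
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        // prints the JSON list of configured storage descriptors
        System.out.println(response.body());
    }
}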

@ -19,6 +19,7 @@ import javax.ws.rs.DELETE;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
@ -36,12 +37,15 @@ import org.apache.jackrabbit.core.security.principal.PrincipalImpl;
import org.gcube.common.authorization.control.annotations.AuthorizationControl;
import org.gcube.common.gxrest.response.outbound.GXOutboundErrorResponse;
import org.gcube.common.storagehub.model.Excludes;
import org.gcube.common.storagehub.model.Paths;
import org.gcube.common.storagehub.model.exceptions.BackendGenericError;
import org.gcube.common.storagehub.model.exceptions.IdNotFoundException;
import org.gcube.common.storagehub.model.exceptions.InvalidCallParameters;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.common.storagehub.model.exceptions.UserNotAuthorizedException;
import org.gcube.common.storagehub.model.items.Item;
import org.gcube.common.storagehub.model.items.SharedFolder;
import org.gcube.common.storagehub.model.types.SHUBUser;
import org.gcube.data.access.storagehub.AuthorizationChecker;
import org.gcube.data.access.storagehub.Constants;
import org.gcube.data.access.storagehub.PathUtil;
@ -58,8 +62,14 @@ import org.gcube.smartgears.utils.InnerMethodName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.webcohesion.enunciate.metadata.rs.RequestHeader;
import com.webcohesion.enunciate.metadata.rs.RequestHeaders;
@Path("users")
@ManagedBy(StorageHubAppllicationManager.class)
@RequestHeaders({
@RequestHeader( name = "Authorization", description = "Bearer token, see https://dev.d4science.org/how-to-access-resources"),
})
public class UserManager {
private static final String INFRASTRUCTURE_MANAGER_ROLE = "Infrastructure-Manager";
@ -68,36 +78,29 @@ public class UserManager {
private static final Logger log = LoggerFactory.getLogger(UserManager.class);
RepositoryInitializer repository = StorageHubAppllicationManager.repository;
RepositoryInitializer repository = StorageHubAppllicationManager.getRepository();
@Inject
UnshareHandler unshareHandler;
@Inject
AuthorizationChecker authChecker;
@Inject
TrashHandler trashHandler;
@Inject
GroupHandler groupHandler;
@Inject
PathUtil pathUtil;
@GET
@Path("")
@Produces(MediaType.APPLICATION_JSON)
public List<String> getUsers(){
InnerMethodName.instance.set("getUsers");
private List<SHUBUser> retrieveUsers() throws Throwable{
JackrabbitSession session = null;
List<String> users = null;
List<SHUBUser> users = null;
try {
session = (JackrabbitSession) repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
Iterator<Authorizable> result = session.getUserManager().findAuthorizables(new Query() {
@Override
@ -105,33 +108,63 @@ public class UserManager {
builder.setSelector(User.class);
}
});
Set<String> usersSet= new HashSet<>();
Set<SHUBUser> usersSet= new HashSet<>();
String adminUser = context.getInitParameter(Constants.ADMIN_PARAM_NAME);
Node homeNode = session.getNode("/Home");
while (result.hasNext()) {
Authorizable user = result.next();
log.debug("user {} found",user.getPrincipal().getName());
if (user.getPrincipal().getName().equals(adminUser)) continue;
usersSet.add(user.getPrincipal().getName());
long homeVersion = -1;
try {
Node userHome = homeNode.getNode(user.getPrincipal().getName());
if(userHome.hasProperty(Constants.HOME_VERSION_PROP))
homeVersion = userHome.getProperty(Constants.HOME_VERSION_PROP).getLong();
else homeVersion = 0;
usersSet.add(new SHUBUser(user.getPrincipal().getName(), homeVersion));
}catch (Exception e) {
log.warn("error retrieving user {} home", user.getPrincipal().getName());
}
}
users = new ArrayList<>(usersSet);
Collections.sort(users);
}catch(Exception e) {
log.error("jcr error getting users", e);
GXOutboundErrorResponse.throwException(new BackendGenericError(e));
} finally {
if (session!=null)
session.logout();
}
return users;
}
@GET
@Path("")
@Produces(MediaType.APPLICATION_JSON)
public List<SHUBUser> getUsers(){
InnerMethodName.instance.set("getUsers");
try {
return retrieveUsers();
}catch (Throwable e) {
log.error("jcr error getting users", e);
GXOutboundErrorResponse.throwException(new BackendGenericError(e));
}
return null;
}
@GET
@Path("{user}")
public String getUser(@PathParam("user") String user){
public SHUBUser getUser(@PathParam("user") String user){
InnerMethodName.instance.set("getUser");
@ -142,9 +175,20 @@ public class UserManager {
org.apache.jackrabbit.api.security.user.UserManager usrManager = session.getUserManager();
Authorizable authorizable = usrManager.getAuthorizable(user);
if (authorizable != null && !authorizable.isGroup())
return authorizable.getPrincipal().getName();
if (authorizable != null && !authorizable.isGroup()) {
long homeVersion = -1;
try {
Node homeNode = session.getNode("/Home");
Node userHome = homeNode.getNode(authorizable.getPrincipal().getName());
if(userHome.hasProperty(Constants.HOME_VERSION_PROP))
homeVersion = userHome.getProperty(Constants.HOME_VERSION_PROP).getLong();
else homeVersion = 0;
}catch (Exception e) {
log.warn("error retrieving user {} home", authorizable.getPrincipal().getName(), e );
}
return new SHUBUser(authorizable.getPrincipal().getName(), homeVersion);
}
log.debug("user {} not found", user);
}catch(Exception e) {
@ -177,32 +221,112 @@ public class UserManager {
org.apache.jackrabbit.api.security.user.UserManager usrManager = session.getUserManager();
User createdUser = usrManager.createUser(user, password);
userId = createdUser.getID();
Node homeNode = session.getNode("/Home");
Node userHome = homeNode.addNode(user, "nthl:home");
userHome.setProperty(Constants.HOME_VERSION_PROP, 1l);
//creating workspace folder
FolderCreationParameters wsFolderParameters = FolderCreationParameters.builder().name(Constants.WORKSPACE_ROOT_FOLDER_NAME).description("workspace of "+user).author(user).on(userHome.getIdentifier()).with(session).build();
Utils.createFolderInternally(wsFolderParameters, null);
Utils.createFolderInternally(wsFolderParameters, null, true);
//creating trash folder
FolderCreationParameters trashFolderParameters = FolderCreationParameters.builder().name(Constants.TRASH_ROOT_FOLDER_NAME).description("trash of "+user).author(user).on(userHome.getIdentifier()).with(session).build();
Utils.createFolderInternally(trashFolderParameters, null);
Utils.createFolderInternally(trashFolderParameters, null, true);
//creating Vre container folder
FolderCreationParameters vreFolderParameters = FolderCreationParameters.builder().name(Constants.PERSONAL_VRES_FOLDER_PARENT_NAME).description("vre folder container of "+user).author(user).on(userHome.getIdentifier()).with(session).build();
Utils.createFolderInternally(vreFolderParameters, null);
Utils.createFolderInternally(vreFolderParameters, null, true);
//creating inbox folder
FolderCreationParameters inboxFolderParameters = FolderCreationParameters.builder().name(Constants.INBOX_FOLDER_NAME).description("inbox of "+user).author(user).on(userHome.getIdentifier()).with(session).build();
Utils.createFolderInternally(inboxFolderParameters, null);
Utils.createFolderInternally(inboxFolderParameters, null, true);
//creating outbox folder
FolderCreationParameters outboxFolderParameters = FolderCreationParameters.builder().name(Constants.OUTBOX_FOLDER_NAME).description("outbox of "+user).author(user).on(userHome.getIdentifier()).with(session).build();
Utils.createFolderInternally(outboxFolderParameters, null);
Utils.createFolderInternally(outboxFolderParameters, null, true);
session.save();
}catch(StorageHubException she ){
log.error(she.getErrorMessage(), she);
GXOutboundErrorResponse.throwException(she, Response.Status.fromStatusCode(she.getStatus()));
}catch(RepositoryException re ){
log.error("jcr error creating item", re);
GXOutboundErrorResponse.throwException(new BackendGenericError("jcr error creating item", re));
} finally {
if (session!=null)
session.logout();
}
return userId;
}
@PUT
@Path("{user}")
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@AuthorizationControl(allowedRoles={INFRASTRUCTURE_MANAGER_ROLE}, exception=MyAuthException.class)
public String updateHomeUserToLatestVersion(@PathParam("user") String user){
InnerMethodName.instance.set("updateHomeUserToLatestVersion");
JackrabbitSession session = null;
String userId = null;
try {
session = (JackrabbitSession) repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
org.apache.jackrabbit.api.security.user.UserManager usrManager = session.getUserManager();
Authorizable auth = usrManager.getAuthorizable(user);
if( auth==null || auth.isGroup())
throw new InvalidCallParameters("invalid user passed");
Node homeNode = session.getNode("/Home");
Node userHome = homeNode.getNode(user);
if (userHome == null)
throw new BackendGenericError("home for user "+user+" not found");
/*
//creating workspace folder
FolderCreationParameters wsFolderParameters = FolderCreationParameters.builder().name(Constants.WORKSPACE_ROOT_FOLDER_NAME).description("workspace of "+user).author(user).on(userHome.getIdentifier()).with(session).build();
Utils.createFolderInternally(wsFolderParameters, null, true);
*/
//updating trash folder
if (!userHome.hasProperty(Constants.HOME_VERSION_PROP) || userHome.getProperty(Constants.HOME_VERSION_PROP).getLong()<1) {
org.gcube.common.storagehub.model.Path workspacePath = Paths.append(Paths.getPath(userHome.getPath()),Constants.WORKSPACE_ROOT_FOLDER_NAME);
Boolean oldTrashExists = session.nodeExists(Paths.append(workspacePath, Constants.TRASH_ROOT_FOLDER_NAME).toPath());
if (oldTrashExists)
session.move(Paths.append(workspacePath, Constants.TRASH_ROOT_FOLDER_NAME).toPath(), Paths.append(Paths.getPath(userHome.getPath()), Constants.TRASH_ROOT_FOLDER_NAME).toPath());
else {
FolderCreationParameters trashFolderParameters = FolderCreationParameters.builder().name(Constants.TRASH_ROOT_FOLDER_NAME).description("trash of "+user).author(user).on(userHome.getIdentifier()).with(session).build();
Utils.createFolderInternally(trashFolderParameters, null, true);
}
Boolean oldVresExists = session.nodeExists(Paths.append(workspacePath, Constants.OLD_VRE_FOLDER_PARENT_NAME).toPath());
if (oldVresExists)
session.move(Paths.append(workspacePath, Constants.OLD_VRE_FOLDER_PARENT_NAME).toPath(), Paths.append(Paths.getPath(userHome.getPath()), Constants.PERSONAL_VRES_FOLDER_PARENT_NAME).toPath());
else {
//creating Vre container folder
FolderCreationParameters vreFolderParameters = FolderCreationParameters.builder().name(Constants.PERSONAL_VRES_FOLDER_PARENT_NAME).description("vre folder container of "+user).author(user).on(userHome.getIdentifier()).with(session).build();
Utils.createFolderInternally(vreFolderParameters, null, true);
}
}
/*
//creating inbox folder
FolderCreationParameters inboxFolderParameters = FolderCreationParameters.builder().name(Constants.INBOX_FOLDER_NAME).description("inbox of "+user).author(user).on(userHome.getIdentifier()).with(session).build();
Utils.createFolderInternally(inboxFolderParameters, null, true);
//creating outbox folder
FolderCreationParameters outboxFolderParameters = FolderCreationParameters.builder().name(Constants.OUTBOX_FOLDER_NAME).description("outbox of "+user).author(user).on(userHome.getIdentifier()).with(session).build();
Utils.createFolderInternally(outboxFolderParameters, null, true);
*/
userHome.setProperty(Constants.HOME_VERSION_PROP, 1l);
session.save();
}catch(StorageHubException she ){
log.error(she.getErrorMessage(), she);
@ -234,21 +358,21 @@ public class UserManager {
org.apache.jackrabbit.api.security.user.UserManager usrManager = session.getUserManager();
User authorizable = (User) usrManager.getAuthorizable(new PrincipalImpl(user));
if (authorizable!=null)
removeUserFromBelongingGroup(session, authorizable, usrManager);
else log.warn("user was already deleted from jackrabbit, trying to delete folders");
unshareUsersFolders(session, user);
removeUserHomeAndDeleteFiles(session, user);
//FINALIZE user removal
if (authorizable!=null && !authorizable.isGroup()) {
log.info("removing user {}", user);
authorizable.remove();
} else log.warn("the user {} was already deleted, it should never happen", user);
session.save();
}catch(StorageHubException she ){
log.error(she.getErrorMessage(), she);
@ -264,6 +388,37 @@ public class UserManager {
return user;
}
@GET
@Path("{user}/groups")
@Produces(MediaType.APPLICATION_JSON)
public List<String> getGroupsPerUser(@PathParam("user") final String user){
InnerMethodName.instance.set("getGroupsPerUser");
JackrabbitSession session = null;
List<String> groups= new ArrayList<>();
try {
session = (JackrabbitSession) repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
org.apache.jackrabbit.api.security.user.UserManager usrManager = session.getUserManager();
User authUser = (User) usrManager.getAuthorizable(new PrincipalImpl(user));
Iterator<Group> groupsAuth =authUser.memberOf();
while (groupsAuth.hasNext()) {
Authorizable group = groupsAuth.next();
groups.add(group.getPrincipal().getName());
}
}catch(RepositoryException re ){
log.error("jcr error creating item", re);
GXOutboundErrorResponse.throwException(new BackendGenericError("jcr error creating item", re));
} finally {
if (session!=null)
session.logout();
}
return groups;
}
private void removeUserFromBelongingGroup(JackrabbitSession session, User authorizable, org.apache.jackrabbit.api.security.user.UserManager usrManager) throws RepositoryException, StorageHubException {
Iterator<Authorizable> groups = session.getUserManager().findAuthorizables(new Query() {
@ -272,26 +427,26 @@ public class UserManager {
builder.setSelector(Group.class);
}
});
String user = authorizable.getPrincipal().getName();
while(groups.hasNext()) {
Authorizable group = groups.next();
log.info("group found {}", group.getPrincipal().getName() );
if (group.isGroup() && ((Group)group).isMember(authorizable)) {
boolean success = groupHandler.removeUserFromGroup(group.getPrincipal().getName(), user, session);
log.warn("user {} {} removed from vre {}",user,success?"":"not" ,group.getPrincipal().getName());
}
}
}
private void unshareUsersFolders(JackrabbitSession session, String user){
try {
Node sharedFolderNode = session.getNode(Constants.SHARED_FOLDER_PATH);
Predicate<Node> sharedWithUserChecker = new Predicate<Node>() {
@Override
@ -304,20 +459,20 @@ public class UserManager {
}
}
};
List<SharedFolder> items = Utils.getItemList(sharedWithUserChecker, sharedFolderNode, Excludes.ALL, null, false, SharedFolder.class);
log.debug(" Shared folder to unshare found are {}", items.size());
for (SharedFolder item: items) {
String title = item.getTitle();
log.debug("in list folder name {} with title {} and path {} ",item.getName(), title, item.getPath());
if (item.isPublicItem() && !item.getUsers().getMap().containsKey(user)) continue;
if (item.isVreFolder()) continue;
log.info("removing sharing for folder name {} with title {} and path {} ",item.getName(), title, item.getPath());
String owner = item.getOwner();
Set<String> usersToUnshare= owner.equals(user)? Collections.emptySet():Collections.singleton(user);
try {
@ -334,8 +489,8 @@ public class UserManager {
private void removeUserHomeAndDeleteFiles(JackrabbitSession session, String user) throws RepositoryException, StorageHubException {
org.gcube.common.storagehub.model.Path homePath = pathUtil.getHome(user);
org.gcube.common.storagehub.model.Path workspacePath = pathUtil.getWorkspacePath(user);
org.gcube.common.storagehub.model.Path trashPath = pathUtil.getTrashPath(user, session);
try {
Node workspaceNode = session.getNode(workspacePath.toPath());
List<Item> workspaceItems = Utils.getItemList(workspaceNode, Excludes.GET_ONLY_CONTENT, null, true, null).stream().filter(i -> !i.isShared()).collect(Collectors.toList());
@ -343,15 +498,16 @@ public class UserManager {
} catch (PathNotFoundException e) {
log.warn("{} workspace dir {} was already deleted", user, homePath.toPath());
}
try {
org.gcube.common.storagehub.model.Path trashPath = pathUtil.getTrashPath(user, session);
Node trashNode = session.getNode(trashPath.toPath());
List<Item> trashItems = Utils.getItemList(trashNode, Excludes.ALL, null, true, null);
trashHandler.removeOnlyNodesContent(session, trashItems);
} catch (PathNotFoundException e) {
log.warn("{} trash dir {} was already deleted", user, homePath.toPath());
}
try {
Node homeNode = session.getNode(homePath.toPath());
homeNode.remove();
@ -359,5 +515,5 @@ public class UserManager {
log.warn("{} home dir {} was already deleted", user, homePath.toPath());
}
}
}

@ -1,5 +1,6 @@
package org.gcube.data.access.storagehub.services;
import java.io.InputStream;
import java.util.Collections;
import java.util.List;
@ -13,6 +14,7 @@ import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
@ -32,9 +34,14 @@ import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.common.storagehub.model.exceptions.UserNotAuthorizedException;
import org.gcube.common.storagehub.model.items.FolderItem;
import org.gcube.common.storagehub.model.items.Item;
import org.gcube.common.storagehub.model.items.SharedFolder;
import org.gcube.common.storagehub.model.items.TrashItem;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.service.ItemList;
import org.gcube.common.storagehub.model.service.ItemWrapper;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
import org.gcube.data.access.storagehub.AuthorizationChecker;
import org.gcube.data.access.storagehub.Constants;
import org.gcube.data.access.storagehub.PathUtil;
@ -42,28 +49,36 @@ import org.gcube.data.access.storagehub.Range;
import org.gcube.data.access.storagehub.StorageHubAppllicationManager;
import org.gcube.data.access.storagehub.Utils;
import org.gcube.data.access.storagehub.handlers.CredentialHandler;
import org.gcube.data.access.storagehub.handlers.PublicLinkHandler;
import org.gcube.data.access.storagehub.handlers.TrashHandler;
import org.gcube.data.access.storagehub.handlers.items.Item2NodeConverter;
import org.gcube.data.access.storagehub.handlers.items.Node2ItemConverter;
import org.gcube.data.access.storagehub.handlers.items.builders.FolderCreationParameters;
import org.gcube.data.access.storagehub.handlers.plugins.FolderPluginHandler;
import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler;
import org.gcube.data.access.storagehub.handlers.vres.VRE;
import org.gcube.data.access.storagehub.handlers.vres.VREManager;
import org.gcube.data.access.storagehub.query.sql2.evaluators.Evaluators;
import org.gcube.data.access.storagehub.storage.backend.impl.GCubeVolatileStorageBackendFactory;
import org.gcube.smartgears.annotations.ManagedBy;
import org.gcube.smartgears.utils.InnerMethodName;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.glassfish.jersey.media.multipart.FormDataParam;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.webcohesion.enunciate.metadata.rs.RequestHeader;
import com.webcohesion.enunciate.metadata.rs.RequestHeaders;
@Path("/")
@ManagedBy(StorageHubAppllicationManager.class)
@RequestHeaders({
@RequestHeader( name = "Authorization", description = "Bearer token, see https://dev.d4science.org/how-to-access-resources"),
})
public class WorkspaceManager extends Impersonable{
private static final Logger log = LoggerFactory.getLogger(WorkspaceManager.class);
RepositoryInitializer repository = StorageHubAppllicationManager.repository;
RepositoryInitializer repository = StorageHubAppllicationManager.getRepository();
@Inject
Evaluators evaluator;
@ -83,16 +98,19 @@ public class WorkspaceManager extends Impersonable{
@Inject
TrashHandler trashHandler;
@Inject
StorageBackendHandler storageBackendHandler;
@Inject
PublicLinkHandler publicLinkHandler;
@RequestScoped
@QueryParam("exclude")
private List<String> excludes = Collections.emptyList();
@Inject Node2ItemConverter node2Item;
@Inject Item2NodeConverter item2Node;
@Inject
FolderPluginHandler folderHandler;
@Path("/")
@GET
@ -115,7 +133,7 @@ public class WorkspaceManager extends Impersonable{
FolderCreationParameters trashFolderParameters = FolderCreationParameters.builder().name(Constants.TRASH_ROOT_FOLDER_NAME)
.description("trash of "+currentUser)
.author(currentUser).on(wsNode.getIdentifier()).with(ses).build();
Utils.createFolderInternally(trashFolderParameters, null);
Utils.createFolderInternally(trashFolderParameters, null, true);
ses.save();
}
Node node = ses.getNode(absolutePath.toPath());
@ -135,6 +153,54 @@ public class WorkspaceManager extends Impersonable{
return new ItemWrapper<Item>(toReturn);
}
/**
 * Uploads a file to the volatile area and returns a public link to it.
 *
 * @param stream the content of the file to upload
 * @param fileDetail the multipart details of the uploaded file (name and size)
 * @return the public link of the uploaded file
 */
@POST
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Path("volatile")
public String uploadVolatileFile(
@FormDataParam("file") InputStream stream,
@FormDataParam("file") FormDataContentDisposition fileDetail){
InnerMethodName.instance.set("uploadToVolatileArea");
log.info("uploading file {} of size {} to volatile area ({} - {})", fileDetail.getFileName(), fileDetail.getSize(), fileDetail.getName(), fileDetail.getParameters().toString() );
Session ses = null;
String toReturn = null;
try{
long size = fileDetail.getSize();
PayloadBackend payloadBackend = new PayloadBackend(GCubeVolatileStorageBackendFactory.NAME, null);
StorageBackendFactory sbf = storageBackendHandler.get(payloadBackend);
StorageBackend sb = sbf.create(payloadBackend);
log.info("UPLOAD: call started with file size {}",size);
MetaInfo info = sb.upload(stream, null, fileDetail.getFileName(), currentUser);
log.debug("UPLOAD: call finished");
toReturn = publicLinkHandler.getForVolatile(info.getStorageId(), GCubeVolatileStorageBackendFactory.NAME, context);
}catch(Throwable e ){
log.error("error uploading file to volatile area", e);
GXOutboundErrorResponse.throwException(new BackendGenericError(e));
}finally{
if (ses!=null && ses.isLive()) {
log.info("session closed");
ses.logout();
}
}
return toReturn;
}
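A minimal client-side sketch (not part of this diff) of how the new volatile upload endpoint could be invoked with the same Jersey multipart stack the service already uses; the base URL, the bearer token and the local file path are placeholders.
import java.io.File;

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;

import org.glassfish.jersey.media.multipart.FormDataMultiPart;
import org.glassfish.jersey.media.multipart.MultiPartFeature;
import org.glassfish.jersey.media.multipart.file.FileDataBodyPart;

public class VolatileUploadClientSketch {

    public static void main(String[] args) throws Exception {
        Client client = ClientBuilder.newClient().register(MultiPartFeature.class);
        try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
            // the service reads both the stream and its FormDataContentDisposition
            // from this single "file" part
            multiPart.bodyPart(new FileDataBodyPart("file", new File("/tmp/report.pdf")));

            String publicLink = client
                    .target("https://<storagehub-host>/workspace")   // placeholder base URL
                    .path("volatile")
                    .request()
                    .header("Authorization", "Bearer <token>")       // placeholder token
                    .post(Entity.entity(multiPart, multiPart.getMediaType()), String.class);

            System.out.println("public link: " + publicLink);
        } finally {
            client.close();
        }
    }
}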
@Path("vrefolder")
@GET
@ -324,6 +390,7 @@ public class WorkspaceManager extends Impersonable{
return new ItemList(toReturn);
}
@Path("vrefolders/paged")
@GET
@ -351,13 +418,69 @@ public class WorkspaceManager extends Impersonable{
return new ItemList(toReturn);
}
/*
@Path("shared-by-me")
@GET
@Produces(MediaType.APPLICATION_JSON)
public ItemList getMySharedFolders(){
InnerMethodName.instance.set("getMySharedFolders");
Session ses = null;
List<? extends Item> toReturn = null;
org.gcube.common.storagehub.model.Path sharedPath = null;
try{
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
sharedPath = pathUtil.getMySharedPath(currentUser);
log.info("my shared folder path is folder path is {}",sharedPath.toPath());
toReturn = Utils.getItemList(ses.getNode(sharedPath.toPath()) , excludes, null, false, SharedFolder.class);
}catch(RepositoryException re ){
log.error("error reading my shared folder ({})",sharedPath, re);
GXOutboundErrorResponse.throwException(new BackendGenericError(re));
}catch(StorageHubException she ){
log.error(she.getErrorMessage(), she);
GXOutboundErrorResponse.throwException(she, Response.Status.fromStatusCode(she.getStatus()));
}finally{
if (ses!=null)
ses.logout();
}
return new ItemList(toReturn);
}*/
@Path("shared-with-me")
@GET
@Produces(MediaType.APPLICATION_JSON)
public ItemList getSharedWithMeFolders(){
InnerMethodName.instance.set("getSharedWithMeFolders");
Session ses = null;
List<? extends Item> toReturn = null;
org.gcube.common.storagehub.model.Path sharedPath = null;
try{
ses = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
sharedPath = pathUtil.getSharedWithMePath(currentUser);
log.info("vres folder path is {}",sharedPath.toPath());
toReturn = Utils.getItemList(ses.getNode(sharedPath.toPath()) , excludes, null, false, SharedFolder.class);
}catch(RepositoryException re ){
log.error("error reading shared with me folder ({})",sharedPath, re);
GXOutboundErrorResponse.throwException(new BackendGenericError(re));
}catch(StorageHubException she ){
log.error(she.getErrorMessage(), she);
GXOutboundErrorResponse.throwException(she, Response.Status.fromStatusCode(she.getStatus()));
}finally{
if (ses!=null)
ses.logout();
}
return new ItemList(toReturn);
}
@Path("count")
@GET
public String getTotalItemsCount(){
InnerMethodName.instance.set("getTotalItemsCount");
return folderHandler.getDefault().getStorageBackend().getTotalItemsCount();
return "1203";
}
@ -365,7 +488,7 @@ public class WorkspaceManager extends Impersonable{
@GET
public String getTotalVolume(){
InnerMethodName.instance.set("getTotalSize");
return folderHandler.getDefault().getStorageBackend().getTotalSizeStored();
return "120300000";
}

@ -0,0 +1,60 @@
package org.gcube.data.access.storagehub.services.admin;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import javax.jcr.Session;
import javax.jcr.nodetype.NodeType;
import org.apache.jackrabbit.api.JackrabbitSession;
import org.apache.jackrabbit.api.JackrabbitWorkspace;
import org.apache.jackrabbit.api.security.authorization.PrivilegeManager;
import org.apache.jackrabbit.commons.cnd.CndImporter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class InitScript {
private static Logger log = LoggerFactory.getLogger(InitScript.class);
public void init(JackrabbitSession ses) throws Exception{
log.info("init started");
try {
initNodeTypes(ses);
ses.getRootNode().addNode("Home");
ses.getRootNode().addNode("Share");
PrivilegeManager pm = ((JackrabbitWorkspace) ses.getWorkspace()).getPrivilegeManager();
pm.registerPrivilege("hl:writeAll", false, new String[0]);
ses.save();
}catch (Exception e) {
log.error("init error", e);
throw e;
}
log.info("init finished");
}
void initNodeTypes(Session ses) throws Exception{
InputStream stream = InitScript.class.getResourceAsStream("/init/NodeType.cnd");
if (stream == null)
throw new Exception("NodeType.cnd inputStream is null");
InputStreamReader inputstream = new InputStreamReader(stream, Charset.forName("UTF-8"));
// Register the custom node types defined in the CND file, using JCR Commons CndImporter
log.info("start to register the custom node types defined in the CND file...");
NodeType[] nodeTypes = CndImporter.registerNodeTypes(inputstream, ses, true);
for (NodeType nt : nodeTypes)
log.info("Registered: {} ", nt.getName());
log.info("custom node types registered");
}
}

@ -2,27 +2,33 @@ package org.gcube.data.access.storagehub.services.admin;
import static org.gcube.data.access.storagehub.Roles.INFRASTRUCTURE_MANAGER_ROLE;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.HashMap;
import java.util.UUID;
import javax.inject.Inject;
import javax.jcr.Node;
import javax.jcr.Session;
import javax.servlet.ServletContext;
import javax.ws.rs.Consumes;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import org.apache.cxf.io.ReaderInputStream;
import org.apache.jackrabbit.api.JackrabbitSession;
import org.gcube.common.authorization.control.annotations.AuthorizationControl;
import org.gcube.common.authorization.library.AuthorizedTasks;
import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.security.AuthorizedTasks;
import org.gcube.common.security.providers.SecretManagerProvider;
import org.gcube.common.storagehub.model.Paths;
import org.gcube.data.access.storagehub.PathUtil;
import org.gcube.data.access.storagehub.StorageHubAppllicationManager;
@ -35,53 +41,84 @@ import org.gcube.data.access.storagehub.handlers.items.builders.ItemsParameterBu
import org.gcube.data.access.storagehub.scripting.AbstractScript;
import org.gcube.data.access.storagehub.scripting.ScriptUtil;
import org.gcube.data.access.storagehub.services.RepositoryInitializer;
import org.gcube.data.access.storagehub.services.admin.ScriptStatus.Status;
import org.gcube.smartgears.ContextProvider;
import org.gcube.smartgears.context.application.ApplicationContext;
import org.gcube.smartgears.utils.InnerMethodName;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.glassfish.jersey.media.multipart.FormDataParam;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.webcohesion.enunciate.metadata.rs.RequestHeader;
import com.webcohesion.enunciate.metadata.rs.RequestHeaders;
@Path("admin/script")
@RequestHeaders({
@RequestHeader( name = "Authorization", description = "Bearer token, see https://dev.d4science.org/how-to-access-resources"),
})
public class ScriptManager {
private static Logger log = LoggerFactory.getLogger(ScriptManager.class);
private RepositoryInitializer repository = StorageHubAppllicationManager.repository;
private RepositoryInitializer repository = StorageHubAppllicationManager.getRepository();
@Inject
AccountingHandler accountingHandler;
@Context
ServletContext context;
@Inject
ScriptUtil scriptUtil;
@Inject
ItemHandler itemHandler;
@Inject
PathUtil pathUtil;
private static HashMap<String, ScriptStatus> scriptStatusMap = new HashMap<String, ScriptStatus>();
@POST
@Path("execute")
@AuthorizationControl(allowedRoles = {INFRASTRUCTURE_MANAGER_ROLE},exception=MyAuthException.class)
@Consumes(MediaType.MULTIPART_FORM_DATA)
public String run( @FormDataParam("name") String name,
@FormDataParam("asynch") Boolean asynch,
@Produces(MediaType.APPLICATION_JSON)
public ScriptStatus run( @FormDataParam("name") String name,
@FormDataParam("asynch") @DefaultValue("false") Boolean asynch,
@FormDataParam("writeResult") @DefaultValue("false") Boolean writeResult ,
@FormDataParam("destinationFolderId") String destinationFolderId,
@FormDataParam("file") InputStream stream,
@FormDataParam("file") FormDataContentDisposition fileDetail) {
try {
InnerMethodName.instance.set("executeScript");
ScriptClassLoader scriptClassLoader = new ScriptClassLoader(Thread.currentThread().getContextClassLoader());
Class<?> scriptClass = uploadClass(stream, scriptClassLoader, fileDetail.getFileName().replace(".class", ""));
return run(scriptClass, name, destinationFolderId, asynch!=null? asynch : false);
return internalRun(scriptClass, name, destinationFolderId, asynch, writeResult);
}catch(Throwable e) {
log.error("error executing script {}", name,e);
throw new WebApplicationException("error loading class",e);
}
}
@GET
@Path("{id}/status")
@AuthorizationControl(allowedRoles = {INFRASTRUCTURE_MANAGER_ROLE},exception=MyAuthException.class)
@Produces(MediaType.APPLICATION_JSON)
public ScriptStatus getStatus(@PathParam("id") String runningId) {
InnerMethodName.instance.set("getScriptStatus");
if (!scriptStatusMap.containsKey(runningId)) {
log.error("script with id {} not found",runningId);
throw new WebApplicationException("id "+runningId+" not found", 404);
}
ScriptStatus status = scriptStatusMap.get(runningId);
if (status.getStatus()!= Status.Running)
scriptStatusMap.remove(runningId);
return status;
}
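Since run(...) now returns a ScriptStatus and getStatus(...) exposes it under {id}/status, a client can submit a script asynchronously and then poll for its outcome. The sketch below is not part of this diff; the base URL, the bearer token and the runningId placeholder are assumptions, and the Client is expected to be registered with MultiPartFeature as in the volatile-upload sketch above.
import java.io.File;

import javax.ws.rs.client.Client;
import javax.ws.rs.client.Entity;

import org.glassfish.jersey.media.multipart.FormDataMultiPart;
import org.glassfish.jersey.media.multipart.file.FileDataBodyPart;

public class ScriptExecutionClientSketch {

    static void submitAndPoll(Client client) throws Exception {
        try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
            multiPart.field("name", "cleanup-report");        // name of the resulting item
            multiPart.field("asynch", "true");                // run in a background thread
            multiPart.field("writeResult", "true");           // persist the result in the workspace
            // "destinationFolderId" is optional; the user's workspace root is used when omitted.
            // The file name of the uploaded .class determines the class name loaded on the server.
            multiPart.bodyPart(new FileDataBodyPart("file", new File("CleanupScript.class")));

            String submitted = client
                    .target("https://<storagehub-host>/workspace")    // placeholder base URL
                    .path("admin/script/execute")
                    .request()
                    .header("Authorization", "Bearer <token>")        // placeholder token
                    .post(Entity.entity(multiPart, multiPart.getMediaType()), String.class);
            System.out.println("submitted: " + submitted);  // ScriptStatus JSON carrying the runningId

            // poll until the returned status is no longer "Running"
            String polled = client
                    .target("https://<storagehub-host>/workspace")
                    .path("admin/script/<runningId>/status")          // runningId taken from the JSON above
                    .request()
                    .header("Authorization", "Bearer <token>")
                    .get(String.class);
            System.out.println("status: " + polled);
        }
    }
}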
private Class<?> uploadClass(InputStream stream, ScriptClassLoader classLoader, String name) throws Throwable {
try(ByteArrayOutputStream buffer = new ByteArrayOutputStream()){
@ -97,33 +134,53 @@ public class ScriptManager {
}
private String run(Class<?> clazz, String name, String destinationFolderId, boolean asynch) throws Throwable {
String login = AuthorizationProvider.instance.get().getClient().getId();
private ScriptStatus internalRun(Class<?> clazz, String name, String destinationFolderId, boolean asynch, boolean writeResult) throws Throwable {
String login = SecretManagerProvider.instance.get().getOwner().getId();
log.info("script {} called by {}", clazz.getSimpleName(), login);
JackrabbitSession ses = null;
try {
ses = (JackrabbitSession) repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
String parentId = destinationFolderId!=null ? destinationFolderId : ses.getNode(pathUtil.getWorkspacePath(login).toPath()).getIdentifier();
Node parentNode = ses.getNodeByIdentifier(parentId);
String parentPath = parentNode.getPath();
String resultPath= null;
Node parentNode = null;
if (writeResult) {
JackrabbitSession ses = null;
try {
ses = (JackrabbitSession) repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
String parentId = destinationFolderId!=null ? destinationFolderId : ses.getNode(pathUtil.getWorkspacePath(login).toPath()).getIdentifier();
parentNode = ses.getNodeByIdentifier(parentId);
resultPath = Paths.append(Paths.getPath(parentNode.getPath()), name).toPath();
}finally {
if (ses!=null)
ses.logout();
}
}
if (AbstractScript.class.isAssignableFrom(clazz)) {
AbstractScript scriptInstance = (AbstractScript) clazz.newInstance();
AbstractScript scriptInstance = (AbstractScript) clazz.getDeclaredConstructor().newInstance();
ApplicationContext appContext = ContextProvider.get();
String serverHost = appContext.container().configuration().hostname();
RealRun realRun = new RealRun(ses, scriptInstance, login, parentId, name);
if (asynch) {
String runningId = UUID.randomUUID().toString();
ScriptStatus status = new ScriptStatus(runningId, resultPath, serverHost);
RealRun realRun = new RealRun(scriptInstance, login, parentNode, name, writeResult, status);
if (asynch) {
scriptStatusMap.put(runningId, status);
new Thread(AuthorizedTasks.bind(realRun)).start();
}else realRun.run();
return status;
}else {
realRun.run();
return status;
}
} else throw new Exception("class "+clazz.getSimpleName()+" does not implement AbstractScript");
return Paths.append(Paths.getPath(parentPath), name).toPath();
}catch (Throwable e) {
if (ses !=null && ses.isLive())
ses.logout();
throw e;
}
@ -133,51 +190,64 @@ public class ScriptManager {
class RealRun implements Runnable{
private JackrabbitSession ses;
AbstractScript instance;
String login;
String parentId;
Node parentNode;
String name;
boolean writeResult = true;
ScriptStatus status;
public RealRun(JackrabbitSession ses, AbstractScript instance, String login, String parentId, String name) {
public RealRun(AbstractScript instance, String login, Node parentNode, String name, boolean writeResult, ScriptStatus status) {
super();
this.ses = ses;
this.instance = instance;
this.login = login;
this.parentId = parentId;
this.parentNode = parentNode;
this.name = name;
this.writeResult = writeResult;
this.status = status;
}
@Override
public void run() {
try{
String result ="";
String result ="";
try {
JackrabbitSession executeSession = null;
try {
result = instance.run(ses, null, scriptUtil);
log.info("result is {}",result);
executeSession = (JackrabbitSession) repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
result = instance.run(executeSession, null, scriptUtil);
status.setSuccess();
}catch(Throwable t) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw, true);
t.printStackTrace(pw);
status.setFailed(sw.toString());
result+= "\n"+sw.toString();
log.warn("error executing script {}",instance.getClass().getSimpleName(), t);
}finally {
if (executeSession !=null && executeSession.isLive())
executeSession.logout();
}
try( InputStream stream = new ReaderInputStream(new StringReader(result))){
ItemsParameterBuilder<FileCreationParameters> builder = FileCreationParameters.builder().name(name).description("result of script execution "+name)
.stream(stream).on(parentId).with(ses).author(login);
itemHandler.create(builder.build());
} catch (Throwable e) {
log.error("error saving script result {} in the Workspace",name, e);
if (this.writeResult) {
Session writeSession = null;
try( InputStream stream = new ByteArrayInputStream(result.getBytes())){
writeSession = repository.getRepository().login(CredentialHandler.getAdminCredentials(context));
ItemsParameterBuilder<FileCreationParameters> builder = FileCreationParameters.builder().name(name).description("result of script execution "+name)
.stream(stream).on(parentNode.getIdentifier()).with(writeSession).author(login);
itemHandler.create(builder.build());
} catch (Throwable e) {
log.error("error saving script result {} in the Workspace",name, e);
} finally {
if (writeSession!=null)
writeSession.logout();
}
}
} finally {
if (ses!=null)
ses.logout();
}catch (Exception e) {
log.error("unexpected error executing script {}",instance.getClass().getSimpleName(),e);
}
}
}
class ScriptClassLoader extends ClassLoader{
@ -192,3 +262,6 @@ public class ScriptManager {
}
}

@ -0,0 +1,94 @@
package org.gcube.data.access.storagehub.services.admin;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
public class ScriptStatus {
enum Status {
Running, Success, Failed
}
private static final DateFormat dateFormat = new SimpleDateFormat("dd MMM yyyy HH:mm:ss:SSS Z") ;
private Status status;
private String errorMessage;
private String resultPath;
private long start;
private long finished = -1;
private String runningId;
private String executionServer;
public ScriptStatus(String runningId, String resultPath, String executionServer) {
super();
this.status = Status.Running;
this.start = System.currentTimeMillis();
this.runningId = runningId;
this.resultPath = resultPath;
this.executionServer = executionServer;
}
public void setFailed(String error) {
this.status = Status.Failed;
this.errorMessage = error;
this.finished = System.currentTimeMillis();
}
public void setSuccess() {
this.status = Status.Success;
this.finished = System.currentTimeMillis();
}
public Status getStatus() {
return status;
}
public String getErrorMessage() {
return errorMessage;
}
public String getStartDate() {
Date date = new Date(this.start);
return dateFormat.format(date);
}
public long getDurationInMillis() {
long toUse = finished;
if (finished < 0)
toUse = System.currentTimeMillis();
return toUse-start;
}
public String getHumanReadableDuration() {
long toUse = finished;
if (finished < 0)
toUse = System.currentTimeMillis();
long duration = toUse - this.start;
long minutes = (duration/1000)/60;
long seconds = (duration/1000)%60;
return String.format("%d minutes %d seconds", minutes, seconds);
}
public String getResultPath() {
return resultPath;
}
public String getRunningId() {
return runningId;
}
public String getExecutionServer() {
return executionServer;
}
}

@ -1,5 +1,6 @@
package org.gcube.data.access.storagehub.services.admin;
import java.util.Collection;
import java.util.List;
import java.util.function.Predicate;
@ -7,11 +8,19 @@ import javax.inject.Inject;
import javax.inject.Singleton;
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import org.gcube.common.storagehub.model.exceptions.BackendGenericError;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.common.storagehub.model.items.Item;
import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
import org.gcube.data.access.storagehub.Utils;
import org.gcube.data.access.storagehub.handlers.TrashHandler;
import org.gcube.data.access.storagehub.handlers.items.Item2NodeConverter;
import org.gcube.data.access.storagehub.handlers.items.Node2ItemConverter;
import org.gcube.data.access.storagehub.handlers.items.builders.FolderCreationParameters;
import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler;
import org.gcube.data.access.storagehub.scripting.ScriptUtil;
@Singleton
@ -19,6 +28,12 @@ public class ScriptUtilImpl implements ScriptUtil {
@Inject Node2ItemConverter node2Item;
@Inject Item2NodeConverter item2Node;
@Inject TrashHandler trashHandler;
@Inject StorageBackendHandler backendHandler;
@Override
public List<Item> getChildren(Predicate<Node> checker, Node parent, List<String> excludes, boolean showHidden, Class<? extends Item> nodeTypeToInclude) throws RepositoryException, BackendGenericError {
return Utils.getItemList(checker, parent, excludes, null, showHidden, nodeTypeToInclude);
@ -28,5 +43,26 @@ public class ScriptUtilImpl implements ScriptUtil {
public Item getItem(Node node, List<String> excludes) throws RepositoryException, BackendGenericError {
return node2Item.getItem(node, excludes);
}
@Override
public void removeNodes(Session ses, List<Item> itemsToDelete) throws RepositoryException, StorageHubException{
trashHandler.removeNodes(ses, itemsToDelete);
}
@Override
public Collection<StorageBackendFactory> getStorageBackendHandler() {
return backendHandler.getAllImplementations();
}
@Override
public void updateContentNode(Content content, Node node) throws Exception {
item2Node.replaceContentNodeInternal(node, content.getClass(), content);
}
@Override
public Node createInternalFolder(Session ses, String name, String description, String userOwner, String parentNodeIdentifier) throws StorageHubException {
FolderCreationParameters parameters = FolderCreationParameters.builder().name(name).description(description).author(userOwner).on(parentNodeIdentifier).with(ses).build();
return Utils.createFolderInternally(parameters, null, true);
}
}
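The new ScriptUtil methods exposed here (getChildren, removeNodes, createInternalFolder) are what a maintenance script body would typically combine; the sketch below is illustrative only, with the admin Session, the parent Node and the user name assumed to be supplied by the script infrastructure.
import java.util.Collections;
import java.util.List;

import javax.jcr.Node;
import javax.jcr.Session;

import org.gcube.common.storagehub.model.items.Item;
import org.gcube.data.access.storagehub.scripting.ScriptUtil;

public class ScriptUtilUsageSketch {

    static String cleanFolder(Session ses, ScriptUtil util, Node parent, String user) throws Exception {
        // list every child item of the parent, including hidden ones, without type filtering
        List<Item> children = util.getChildren(n -> true, parent,
                Collections.emptyList(), true, Item.class);

        // hand the listed items over to the trash handler
        util.removeNodes(ses, children);

        // record the cleanup in a system folder created without ACL checks
        Node marker = util.createInternalFolder(ses, "cleanup-" + System.currentTimeMillis(),
                "created by maintenance script", user, parent.getIdentifier());
        return "removed " + children.size() + " items, marker folder at " + marker.getPath();
    }
}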

@ -0,0 +1,2 @@
/MockStorage.java
/MockStorageFactory.java

@ -1,22 +0,0 @@
package org.gcube.data.access.storagehub.storage.backend.impl;
import java.util.Map;
import javax.inject.Singleton;
import org.gcube.common.storagehub.model.exceptions.PluginInitializationException;
import org.gcube.common.storagehub.model.items.FolderItem;
import org.gcube.common.storagehub.model.plugins.FolderManager;
import org.gcube.common.storagehub.model.plugins.FolderManagerConnector;
@Singleton
public class GCubeFolderManagerConnector implements FolderManagerConnector {
@Override
public FolderManager connect(FolderItem item, Map<String, Object> parameters) throws PluginInitializationException {
return new GcubeFolderManager();
}
}

@ -0,0 +1,116 @@
package org.gcube.data.access.storagehub.storage.backend.impl;
import java.io.InputStream;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.Map;
import java.util.UUID;
import org.gcube.common.security.providers.SecretManagerProvider;
import org.gcube.common.storagehub.model.exceptions.StorageIdNotFoundException;
import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.contentmanagement.blobstorage.service.IClient;
import org.gcube.contentmanager.storageclient.wrapper.AccessType;
import org.gcube.contentmanager.storageclient.wrapper.MemoryType;
import org.gcube.contentmanager.storageclient.wrapper.StorageClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class GCubeMongoStorageBackend extends StorageBackend {
private static final Logger log = LoggerFactory.getLogger(GCubeMongoStorageBackend.class);
private final static String SERVICE_NAME = "home-library";
private final static String SERVICE_CLASS = "org.gcube.portlets.user";
public GCubeMongoStorageBackend(PayloadBackend payloadConf) {
super(payloadConf);
}
@Override
public InputStream download(Content content) throws StorageIdNotFoundException {
return download(content.getStorageId());
}
@Override
public InputStream download(String id) throws StorageIdNotFoundException{
IClient storageClient = getStorageClient(SecretManagerProvider.instance.get().getOwner().getId()).getClient();
if (!storageClient.exist().RFile(id))
throw new StorageIdNotFoundException(id, this.getPayloadConfiguration().getStorageName());
return storageClient.get().RFileAsInputStream(id);
}
protected StorageClient getStorageClient(String login){
return new StorageClient(SERVICE_CLASS, SERVICE_NAME, login, AccessType.SHARED, MemoryType.PERSISTENT);
}
@Override
public MetaInfo onCopy(Content content, String newParentPath, String newName) {
log.info("copying storage Id {} to newPath {}", content.getStorageId(), newParentPath);
String newRemotePath = Paths.get(newParentPath, newName).toString();
String newStorageID = getStorageClient(SecretManagerProvider.instance.get().getOwner().getId()).getClient().copyFile(true).from(content.getStorageId()).to(newRemotePath);
log.info("The id returned by storage is {}", newStorageID);
return new MetaInfo(content.getSize(),newStorageID, newRemotePath, getPayloadConfiguration());
}
@Override
public MetaInfo onMove(Content content, String newParentPath) {
//new contentPath can be set as remotePath to the storage backend ?
return new MetaInfo(content.getSize(),content.getStorageId(), content.getRemotePath(), getPayloadConfiguration());
}
@Override
public MetaInfo upload(InputStream stream, String relPath, String name, String user) {
log.debug("uploading file");
IClient storageClient = getStorageClient(user).getClient();
String uid = UUID.randomUUID().toString();
String remotePath= String.format("%s/%s-%s",relPath,uid,name);
String storageId =storageClient.put(true).LFile(stream).RFile(remotePath);
long size = storageClient.getSize().RFileById(storageId);
MetaInfo info = new MetaInfo(size, storageId, remotePath, getPayloadConfiguration());
return info;
}
@Override
public MetaInfo upload(InputStream stream, String relPath, String name, Long size, String user) {
return this.upload(stream, relPath, name, user);
}
@Override
public MetaInfo upload(InputStream stream, String relativePath, String name, String storageId, Long size, String user) {
throw new UnsupportedOperationException();
}
@Override
public void delete(String storageId) {
log.debug("deleting object {} ",storageId);
IClient storageClient = getStorageClient(SecretManagerProvider.instance.get().getOwner().getId()).getClient();
storageClient.remove().RFileById(storageId);
}
@Override
public String getTotalSizeStored() {
IClient storageClient = getStorageClient(SecretManagerProvider.instance.get().getOwner().getId()).getClient();
return storageClient.getTotalUserVolume();
}
@Override
public String getTotalItemsCount() {
IClient storageClient = getStorageClient(SecretManagerProvider.instance.get().getOwner().getId()).getClient();
return storageClient.getUserTotalItems();
}
@Override
public Map<String, String> getFileMetadata(String id) {
return Collections.emptyMap();
}
}

@ -0,0 +1,29 @@
package org.gcube.data.access.storagehub.storage.backend.impl;
import javax.inject.Singleton;
import org.gcube.common.storagehub.model.Constants;
import org.gcube.common.storagehub.model.exceptions.InvalidCallParameters;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
@Singleton
public class GCubeMongoStorageBackendFactory implements StorageBackendFactory {
@Override
public String getName() {
return Constants.MONGO_STORAGE;
}
@Override
public boolean isSystemStorage() {
return true;
}
@Override
public StorageBackend create(PayloadBackend payloadConfiguration) throws InvalidCallParameters {
return new GCubeMongoStorageBackend(payloadConfiguration);
}
}

@ -1,92 +0,0 @@
package org.gcube.data.access.storagehub.storage.backend.impl;
import java.io.InputStream;
import java.util.UUID;
import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.storagehub.model.items.AbstractFileItem;
import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.contentmanagement.blobstorage.service.IClient;
import org.gcube.contentmanager.storageclient.wrapper.AccessType;
import org.gcube.contentmanager.storageclient.wrapper.MemoryType;
import org.gcube.contentmanager.storageclient.wrapper.StorageClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class GCubeStorageBackend implements StorageBackend {
private static final Logger log = LoggerFactory.getLogger(GCubeStorageBackend.class);
private final static String SERVICE_NAME = "home-library";
private final static String SERVICE_CLASS = "org.gcube.portlets.user";
protected GCubeStorageBackend() {}
@Override
public InputStream download(Content content) {
return getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient().get().RFileAsInputStream(content.getStorageId());
}
@Override
public String getName() {
return GCubeStorageBackend.class.getName();
}
@Override
public String onCopy(AbstractFileItem item) {
log.info("copying storage Id {} to newPath {}", item.getContent().getStorageId(), item.getPath());
String newStorageID = getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient().copyFile(true).from(item.getContent().getStorageId()).to(item.getPath());
log.info("The id returned by storage is {}", newStorageID);
return newStorageID;
}
@Override
public String onMove(AbstractFileItem item) {
return item.getContent().getStorageId();
}
@Override
public MetaInfo upload(InputStream stream, String relPath, String name) {
log.debug("uploading file");
IClient storageClient = getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient();
String uid = UUID.randomUUID().toString();
String remotePath= String.format("%s/%s-%s",relPath,uid,name);
String storageId =storageClient.put(true).LFile(stream).RFile(remotePath);
long size = storageClient.getSize().RFileById(storageId);
MetaInfo info = new MetaInfo();
info.setSize(size);
info.setStorageId(storageId);
info.setRemotePath(remotePath);
return info;
}
@Override
public void onDelete(Content content) {
log.debug("deleting");
IClient storageClient = getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient();
storageClient.remove().RFileById(content.getStorageId());
}
private static StorageClient getStorageClient(String login){
return new StorageClient(SERVICE_CLASS, SERVICE_NAME, login, AccessType.SHARED, MemoryType.PERSISTENT);
}
@Override
public String getTotalSizeStored() {
IClient storageClient = getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient();
return storageClient.getTotalUserVolume();
}
@Override
public String getTotalItemsCount() {
IClient storageClient = getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient();
return storageClient.getUserTotalItems();
}
}

@ -0,0 +1,75 @@
package org.gcube.data.access.storagehub.storage.backend.impl;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
import javax.annotation.PostConstruct;
import javax.inject.Singleton;
import org.gcube.common.storagehub.model.Metadata;
import org.gcube.common.storagehub.model.exceptions.InvalidCallParameters;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
@Singleton
public class GCubeVolatileStorageBackendFactory implements StorageBackendFactory {
private StorageBackend singleton;
private static final String PROP_PREFIX = "volatile.";
public static final String NAME = "volatile-minio";
@PostConstruct
public void init(){
S3Backend s3Backend = new S3Backend(new PayloadBackend(getName(), getParameters()), (String) -> UUID.randomUUID().toString());
s3Backend.setPayloadConfiguration(new PayloadBackend(getName(),null));
this.singleton = s3Backend;
}
@Override
public String getName() {
return NAME;
}
@Override
public boolean isSystemStorage() {
return true;
}
@Override
public StorageBackend create(PayloadBackend payloadConfiguration) throws InvalidCallParameters {
if (payloadConfiguration.getParameters().isEmpty())
return singleton;
throw new InvalidCallParameters("gcubeMinIo created with not empty parameters");
}
private Metadata getParameters(){
try (InputStream input = GCubeVolatileStorageBackendFactory.class.getClassLoader().getResourceAsStream("storage-settings.properties")) {
Properties prop = new Properties();
prop.load(input);
Map<String, Object> params = new HashMap<String, Object>();
prop.forEach((k,v) -> { if (k.toString().startsWith(PROP_PREFIX)) params.put(k.toString().replace(PROP_PREFIX, ""), v.toString());});
return new Metadata(params);
} catch (IOException ex) {
throw new RuntimeException("error initializing MinIO", ex);
}
}
}

@ -0,0 +1,73 @@
package org.gcube.data.access.storagehub.storage.backend.impl;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
import javax.annotation.PostConstruct;
import javax.inject.Singleton;
import org.gcube.common.storagehub.model.Constants;
import org.gcube.common.storagehub.model.Metadata;
import org.gcube.common.storagehub.model.exceptions.InvalidCallParameters;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
@Singleton
public class GcubeDefaultS3StorageBackendFactory implements StorageBackendFactory {
private StorageBackend singleton;
private static final String PROP_PREFIX = "default.";
@PostConstruct
public void init(){
S3Backend s3Backend = new S3Backend(new PayloadBackend(getName(), getParameters()), (String) -> UUID.randomUUID().toString());
s3Backend.setPayloadConfiguration(new PayloadBackend(getName(),null));
this.singleton = s3Backend;
}
@Override
public String getName() {
return Constants.DEFAULT_S3_STORAGE;
}
@Override
public boolean isSystemStorage() {
return true;
}
@Override
public StorageBackend create(PayloadBackend payloadConfiguration) throws InvalidCallParameters {
if (payloadConfiguration.getParameters().isEmpty())
return singleton;
throw new InvalidCallParameters("S3 created with not empty parameters");
}
private Metadata getParameters(){
try (InputStream input = GcubeDefaultS3StorageBackendFactory.class.getClassLoader().getResourceAsStream("storage-settings.properties")) {
Properties prop = new Properties();
prop.load(input);
Map<String, Object> params = new HashMap<String, Object>();
prop.forEach((k,v) -> { if (k.toString().startsWith(PROP_PREFIX)) params.put(k.toString().replace(PROP_PREFIX, ""), v);});
return new Metadata(params);
} catch (IOException ex) {
throw new RuntimeException("error initializing S3", ex);
}
}
}

@ -1,48 +0,0 @@
package org.gcube.data.access.storagehub.storage.backend.impl;
import javax.inject.Singleton;
import org.gcube.common.storagehub.model.items.FolderItem;
import org.gcube.common.storagehub.model.plugins.FolderManager;
import org.gcube.common.storagehub.model.storages.StorageBackend;
@Singleton
public class GcubeFolderManager implements FolderManager {
@Override
public StorageBackend getStorageBackend() {
return new GCubeStorageBackend();
}
@Override
public boolean manageVersion() {
return true;
}
@Override
public void onCreatedFolder(FolderItem folder) {
}
@Override
public void onDeletingFolder(FolderItem folder) {
}
@Override
public void onMovedFolder(FolderItem movedFolder) {
// TODO Auto-generated method stub
}
@Override
public void onCopiedFolder(FolderItem copiedFolder) {
// TODO Auto-generated method stub
}
@Override
public FolderItem getRootFolder() {
return null;
}
}

@ -0,0 +1,74 @@
package org.gcube.data.access.storagehub.storage.backend.impl;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
import javax.annotation.PostConstruct;
import javax.inject.Singleton;
import org.gcube.common.storagehub.model.Metadata;
import org.gcube.common.storagehub.model.exceptions.InvalidCallParameters;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
@Singleton
public class GcubeMinIOStorageBackendFactory implements StorageBackendFactory {
private static final String PROP_PREFIX = "default.";
private Metadata baseParameters;
@PostConstruct
public void init(){
baseParameters = getParameters();
}
@Override
public String getName() {
return "gcube-default-minio";
}
@Override
public boolean isSystemStorage() {
return true;
}
@Override
public StorageBackend create(PayloadBackend payloadConfiguration) throws InvalidCallParameters {
if (payloadConfiguration.getParameters().isEmpty())
throw new InvalidCallParameters(getName()+": null or empty parameter not allowed");
String bucketName = (String)payloadConfiguration.getParameters().get("bucketName");
if (bucketName == null || bucketName.isBlank())
throw new InvalidCallParameters(getName()+": 'bucketName' cannot be blank or empty");
Metadata metadata = new Metadata(new HashMap<>(baseParameters.getMap()));
metadata.getMap().putAll(payloadConfiguration.getParameters());
return new S3Backend(new PayloadBackend(getName(), metadata), (String) -> UUID.randomUUID().toString());
}
private Metadata getParameters(){
try (InputStream input = GcubeMinIOStorageBackendFactory.class.getClassLoader().getResourceAsStream("storage-settings.properties")) {
Properties prop = new Properties();
prop.load(input);
Map<String, Object> params = new HashMap<String, Object>();
prop.forEach((k,v) -> { if (k.toString().startsWith(PROP_PREFIX)) params.put(k.toString().replace(PROP_PREFIX, ""), v);});
return new Metadata(params);
} catch (IOException ex) {
throw new RuntimeException("error initializing MinIO", ex);
}
}
}

@ -0,0 +1,206 @@
package org.gcube.data.access.storagehub.storage.backend.impl;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import org.gcube.common.storagehub.model.exceptions.StorageIdNotFoundException;
import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.Protocol;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.S3ClientOptions;
import com.amazonaws.services.s3.model.ObjectMetadata;
public class S3Backend extends StorageBackend{
private static Logger log = LoggerFactory.getLogger(S3Backend.class);
Function<Void, String> keyGenerator;
String bucketName;
AmazonS3 client;
//private static final long PART_SIZE = 100000000;
@Override
protected void setPayloadConfiguration(PayloadBackend payloadConfiguration) {
super.setPayloadConfiguration(payloadConfiguration);
}
public S3Backend(PayloadBackend payloadConfiguration, Function<Void, String> keyGenerator) {
super(payloadConfiguration);
this.keyGenerator = keyGenerator;
Map<String, Object> parameters = payloadConfiguration.getParameters();
this.bucketName = (String)parameters.get("bucketName");
String accessKey = (String)parameters.get("key");
String secret = (String)parameters.get("secret");
String url = (String)parameters.get("url");
boolean createBucket = Boolean.valueOf((String)parameters.get("createBucket"));
log.debug("parameters are: bucketName = {}, url = {}, createBucket = {}",this.bucketName, url, createBucket);
try {
AWSCredentials credentials = new BasicAWSCredentials(accessKey, secret);
ClientConfiguration clientConfig = new ClientConfiguration();
clientConfig.setProtocol(Protocol.HTTPS);
client = new AmazonS3Client(credentials, clientConfig);
client.setEndpoint(url);
client.setS3ClientOptions(new S3ClientOptions().withPathStyleAccess(true));
if (createBucket && !client.doesBucketExistV2(bucketName)) {
client.createBucket(bucketName);
log.debug("bucket {} created",this.bucketName);
}
} catch (Exception e) {
log.error("error initializing s3",e);
throw new RuntimeException("error initializing s3", e);
}
}
public boolean isAlive() {
boolean toReturn = true;
try {
client.doesBucketExistV2(bucketName);
}catch (Exception e) {
log.error("error checking aliveness",e);
toReturn = false;
}
log.debug("the S3 backend is {} Alive",toReturn?"":"not");
return toReturn;
}
@Override
public MetaInfo onCopy(Content content, String newParentPath, String newName) {
String sourceKey = content.getStorageId();
String destinationKey = keyGenerator.apply(null);
try {
client.copyObject(bucketName, sourceKey, bucketName, destinationKey);
} catch (Exception e) {
throw new RuntimeException("error copying file on s3", e);
}
return new MetaInfo(content.getSize(), destinationKey, null, getPayloadConfiguration());
}
@Override
public MetaInfo onMove(Content content, String newParentPath) {
//new contentPath can be set as remotePath to the storage backend ?
return new MetaInfo(content.getSize(),content.getStorageId(), content.getRemotePath(), getPayloadConfiguration());
}
@Override
public void delete(String storageId) {
try {
client.deleteObject(bucketName, storageId);
} catch (Exception e) {
throw new RuntimeException("error deleting file on s3", e);
}
}
@Override
public MetaInfo upload(InputStream stream, String relativePath, String name, String user) {
return this.upload(stream, relativePath, name, null, user);
}
@Override
public MetaInfo upload(InputStream stream, String relativePath, String name, Long size, String user) {
String storageId = keyGenerator.apply(null);
return upload(stream, relativePath, name, storageId, size, user);
}
@Override
public MetaInfo upload(InputStream stream, String relativePath, String name, String storageId, Long size, String user) {
try {
/*Map<String, String> headers = new HashMap<>();
headers.put("X-Amz-Storage-Class", "REDUCED_REDUNDANCY");
Map<String, String> userMetadata = new HashMap<>();*/
ObjectMetadata objMeta = new ObjectMetadata();
objMeta.addUserMetadata("user", user);
objMeta.addUserMetadata("title", name);
if (size != null && size >0) {
objMeta.setContentLength(size);
log.info("content length set to {}",size);
} else
log.info("content length not set");
log.info("uploading file {} with id {} in bucket {} ",name, storageId, bucketName);
client.putObject(bucketName, storageId, stream, objMeta);
long fileSize;
if (size != null && size>0)
fileSize = size;
else
fileSize = client.getObjectMetadata(bucketName, storageId).getContentLength();
return new MetaInfo(fileSize,storageId, null, getPayloadConfiguration());
} catch (Exception e) {
log.error("error uploading file on s3",e);
throw new RuntimeException("error uploading file on s3", e);
}
}
@Override
public InputStream download(String id) throws StorageIdNotFoundException{
try {
InputStream inputStream = client.getObject(bucketName, id).getObjectContent();
return inputStream;
}catch (Exception e) {
log.error("error downloading file form s3");
throw new RuntimeException("error downloading file from s3",e);
}
}
@Override
public InputStream download(Content content) throws StorageIdNotFoundException {
return download(content.getStorageId());
}
@Override
public Map<String, String> getFileMetadata(String id) {
try {
ObjectMetadata objMeta = client.getObjectMetadata(bucketName, id);
Map<String, String> userMetadata = objMeta.getUserMetadata();
HashMap<String, String> toReturn = new HashMap<>(userMetadata);
toReturn.put("size" , Long.toString(objMeta.getContentLength()));
return toReturn;
} catch (Exception e) {
log.error("error getting metadata from s3");
throw new RuntimeException("error downloading file from s3",e);
}
}
@Override
public String getTotalSizeStored() {
// TODO Auto-generated method stub
return null;
}
@Override
public String getTotalItemsCount() {
// TODO Auto-generated method stub
return null;
}
}
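Outside of the factories, the same S3Backend can be wired up directly from a PayloadBackend configuration; the sketch below mirrors what the factories above do, with bucket name, credentials and endpoint URL as placeholders (it is illustrative, not part of this diff).
import java.io.ByteArrayInputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

import org.gcube.common.storagehub.model.Metadata;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.data.access.storagehub.storage.backend.impl.S3Backend;

public class S3BackendSketch {

    public static void main(String[] args) throws Exception {
        Map<String, Object> params = new HashMap<>();
        params.put("bucketName", "storagehub-sketch");       // placeholder bucket
        params.put("key", "<access-key>");                   // placeholder credentials
        params.put("secret", "<secret-key>");
        params.put("url", "https://s3.example.org");         // placeholder endpoint
        params.put("createBucket", "false");

        PayloadBackend conf = new PayloadBackend("sketch-s3", new Metadata(params));
        S3Backend backend = new S3Backend(conf, v -> UUID.randomUUID().toString());

        byte[] payload = "hello storagehub".getBytes();
        MetaInfo info = backend.upload(new ByteArrayInputStream(payload), null,
                "hello.txt", (long) payload.length, "some.user");
        System.out.println("stored with id " + info.getStorageId());

        // the same id can later be streamed back or deleted
        backend.download(info.getStorageId()).close();
        backend.delete(info.getStorageId());
    }
}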

@ -0,0 +1,8 @@
package org.gcube.data.access.storagehub.types;
public enum LinkType {
VOLATILE,
VERSIONED,
STANDARD
}

@ -0,0 +1,18 @@
package org.gcube.data.access.storagehub.types;
public interface PublicLink {
LinkType getType();
String getId();
default String getVersion() {
return null;
}
default String getStorageName() {
return null;
}
}
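For reference, a minimal implementation of the PublicLink contract above might look like the following (class and field names are illustrative, not part of this diff).
import org.gcube.data.access.storagehub.types.LinkType;
import org.gcube.data.access.storagehub.types.PublicLink;

public class VolatilePublicLink implements PublicLink {

    private final String storageId;
    private final String storageName;

    public VolatilePublicLink(String storageId, String storageName) {
        this.storageId = storageId;
        this.storageName = storageName;
    }

    @Override
    public LinkType getType() {
        return LinkType.VOLATILE;
    }

    @Override
    public String getId() {
        return storageId;
    }

    @Override
    public String getStorageName() {
        return storageName;
    }
}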

File diff suppressed because it is too large

@ -0,0 +1,397 @@
<mix = 'http://www.jcp.org/jcr/mix/1.0'>
<nt = 'http://www.jcp.org/jcr/nt/1.0'>
<nthl = 'http://ip-server:port/hl/nthl'>
<hl = 'http://ip-server:port/hl'>
[nthl:user] > nt:base
- hl:portalLogin (String)
- hl:uuid (String)
- hl:scope (String)
[nthl:applicationData] > mix:lastModified,mix:referenceable,nt:base
- hl:type (String) mandatory
- hl:data (binary) mandatory
[nthl:home] > nt:folder
- hl:scopes (String) multiple
- hl:version (Long)
[nthl:accountingEntry] > nt:base
- hl:user (String)
- hl:date (Date) mandatory
- hl:version (String)
[nthl:accountingEntryCreate] > nthl:accountingEntry
- hl:itemName (String)
[nthl:accountingEntryRead] > nthl:accountingEntry
- hl:itemName (String)
[nthl:accountingEntryEnabledPublicAccess] > nthl:accountingEntryRead
[nthl:accountingEntryDisabledPublicAccess] > nthl:accountingEntryRead
[nthl:accountingEntryPaste] > nthl:accountingEntry
- hl:fromPath (String) mandatory
[nthl:accountingEntryUpdate] > nthl:accountingEntry
- hl:itemName (String) mandatory
[nthl:accountingEntryShare] > nthl:accountingEntry
- hl:itemName (String) mandatory
- hl:members (String) multiple
[nthl:accountingEntryUnshare] > nthl:accountingEntry
- hl:itemName (String) mandatory
[nthl:accountingEntryRestore] > nthl:accountingEntry
- hl:itemName (String) mandatory
[nthl:accountingEntryDelete] > nthl:accountingEntry
- hl:itemName (String) mandatory
- hl:fromPath (String)
[nthl:accountingFolderEntryRenaming] > nthl:accountingEntry
- hl:oldItemName (String) mandatory
- hl:newItemName (String)
[nthl:accountingFolderEntryRemoval] > nthl:accountingEntry
- hl:itemType (String) mandatory
- hl:folderItemType (String)
- hl:itemName (String) mandatory
- hl:mimeType (String)
[nthl:accountingFolderEntryCut] > nthl:accountingFolderEntryRemoval
[nthl:accountingFolderEntryAdd] > nthl:accountingFolderEntryRemoval
[nthl:accountingEntryACL] > nthl:accountingEntry
- hl:itemName (String) mandatory
- hl:members (String) multiple
[nthl:accountingSet] > nt:base
+ * (nthl:accountingEntry)
[nthl:readersSet] > nt:base
+ * (nthl:accountingEntryRead)
[nthl:workspaceItem] > mix:referenceable, mix:title, mix:lastModified, nt:hierarchyNode, mix:lockable
- hl:portalLogin (String)
- hl:lastAction (String) mandatory
- hl:oldRemotePath (String)
- hl:storagePath (String)
- hl:moved (Boolean)
- hl:hidden (Boolean)
= 'false'
autocreated
- hl:IsSystemFolder (Boolean)
= 'false'
autocreated
- hl:isPublic (Boolean)
= 'false'
autocreated
+ hl:readers (nthl:readersSet)
= nthl:readersSet
autocreated
+ hl:accounting (nthl:accountingSet)
= nthl:accountingSet
autocreated
+ hl:metadata (nt:unstructured)
= nt:unstructured
autocreated
+ hl:owner(nthl:user)
= nthl:user
autocreated
+ hl:payloadBackend (nthl:payloadBackend)
= nthl:payloadBackend
mandatory autocreated
+ *
[nthl:workspaceSharedItem] > nthl:workspaceItem, mix:shareable
- hl:privilege (String)
+ hl:members (nt:unstructured)
= nt:unstructured
autocreated
- hl:isVreFolder (Boolean)
- hl:displayName (String)
+ hl:users (nt:unstructured)
= nt:unstructured
autocreated
+ * (nthl:workspaceItem)
[nthl:workspaceVreItem] > nthl:workspaceSharedItem
- hl:groupId (String)
- hl:scope (String)
[nthl:workspaceReference] > nthl:workspaceItem
- hl:reference (Reference)
[nthl:workspaceLeafItem] > nthl:workspaceItem, nt:file
- hl:workspaceItemType (String)
- hl:workflowId (String)
- hl:workflowStatus (String)
- hl:workflowData (String)
[nthl:workspaceSmartItem] > nthl:workspaceLeafItem
[nthl:itemSentRequest] > mix:created, nt:base
+ hl:owner(nthl:user)
= nthl:user
mandatory autocreated
- hl:subject (String) mandatory
- hl:body (String) mandatory
- hl:read (Boolean) mandatory
- hl:open (Boolean) mandatory
- hl:addresses (String) mandatory multiple
+ hl:attachments (nt:folder)
= nt:folder
mandatory autocreated
[nthl:itemSentRequestSH] > nthl:itemSentRequest, mix:shareable
[nthl:rootItemSentRequest] > nt:folder
+ * (nthl:itemSentRequest)
= nthl:itemSentRequest
[nthl:workspaceLeafItemContent] > nt:base
[nthl:payloadBackend] > nt:base
- hl:storageName (String)
+ hl:parameters (nt:unstructured)
= nt:unstructured
autocreated
[nthl:file] > nt:resource , mix:versionable
- hl:size (long)
- hl:remotePath (String)
- hl:storageId (String)
- hl:storageName (String)
+ hl:payloadBackend (nthl:payloadBackend)
= nthl:payloadBackend
mandatory autocreated
[nthl:image] > nthl:file
- hl:width (Long)
= '0'
mandatory autocreated
- hl:height (Long)
= '0'
mandatory autocreated
- hl:thumbnailWidth (Long)
= '0'
mandatory autocreated
- hl:thumbnailHeight (Long)
= '0'
mandatory autocreated
- hl:thumbnailData (binary)
[nthl:pdf] > nthl:file
- hl:numberOfPages (long)
- hl:version (string)
- hl:author (string)
- hl:title (string)
- hl:producer (string)
[nthl:externalFile] > nthl:workspaceLeafItem
[nthl:externalImage] > nthl:workspaceLeafItem
[nthl:externalPdf] > nthl:workspaceLeafItem
[nthl:externalLink] > nthl:workspaceLeafItem
- hl:value (String) mandatory
// DUPLICATED, MUST BE CLEANED
[nthl:ExternalLink] > nthl:workspaceLeafItem
- hl:value (String) mandatory
[nthl:gCubeItem] > nthl:workspaceItem
- hl:scopes (String) mandatory multiple
- hl:creator (String) mandatory
- hl:itemType (String) mandatory
- hl:properties (String)
- hl:isShared (Boolean)
- hl:sharedRootId (String)
+ hl:property (nt:unstructured)
= nt:unstructured
autocreated
[nthl:trashItem] > nthl:workspaceItem
- hl:name (String)
- hl:deletedBy (String)
- hl:originalParentId (String)
- hl:deletedFrom (String)
- hl:deletedTime (Date)
- hl:mimeType (String)
- hl:length (String)
- hl:isFolder (Boolean)
+ * (nthl:workspaceItem)
// TO REMOVE
[nthl:externalUrl] > nthl:workspaceLeafItem
[nthl:query] > nthl:workspaceLeafItem
[nthl:aquamapsItem] > nthl:workspaceLeafItem
[nthl:timeSeriesItem] > nthl:workspaceLeafItem
[nthl:report] > nthl:workspaceLeafItem
[nthl:reportTemplate] > nthl:workspaceLeafItem
[nthl:workflowReport] > nthl:workspaceLeafItem
[nthl:workflowTemplate] > nthl:workspaceLeafItem
[nthl:gCubeMetadata] > nthl:workspaceLeafItem
[nthl:gCubeDocument] > nthl:workspaceLeafItem
[nthl:gCubeDocumentLink] > nthl:workspaceLeafItem
[nthl:gCubeImageDocumentLink] > nthl:workspaceLeafItem
[nthl:gCubePDFDocumentLink] > nthl:workspaceLeafItem
[nthl:gCubeImageDocument] > nthl:workspaceLeafItem
[nthl:gCubePDFDocument] > nthl:workspaceLeafItem
[nthl:gCubeURLDocument] > nthl:workspaceLeafItem
[nthl:gCubeAnnotation] > nthl:workspaceLeafItem
[nthl:externalResourceLink] > nthl:workspaceLeafItem
[nthl:tabularDataLink] > nthl:workspaceLeafItem
[nthl:documentAlternativeLink] > nt:base
- hl:parentUri (String) mandatory
- hl:uri (String) mandatory
- hl:name (String) mandatory
- hl:mimeType (String) mandatory
[nthl:documentPartLink] > nthl:documentAlternativeLink
[nthl:documentItemContent] > nthl:workspaceLeafItemContent
- hl:collectionName (String) mandatory
- hl:oid (String) mandatory
+ hl:metadata (nt:unstructured)
= nt:unstructured
mandatory autocreated
+ hl:annotations (nt:unstructured)
= nt:unstructured
mandatory autocreated
+ hl:alternatives (nt:unstructured)
= nt:unstructured
mandatory autocreated
+ hl:parts (nt:unstructured)
= nt:unstructured
mandatory autocreated
[nthl:metadataItemContent] > nthl:workspaceLeafItemContent, nthl:file
- hl:schema (String) mandatory
- hl:language (String) mandatory
- hl:collectionName (String) mandatory
- hl:oid (String) mandatory
[nthl:annotationItemContet] > nthl:workspaceLeafItemContent
- hl:oid (String) mandatory
+ hl:annotations (nt:unstructured)
= nt:unstructured
mandatory autocreated
[nthl:queryItemContent] > nthl:workspaceLeafItemContent
- hl:query (String) mandatory
- hl:queryType (String) mandatory
[nthl:aquamapsItemContent] > nthl:workspaceLeafItemContent, nthl:file
- hl:mapName (String) mandatory
- hl:mapType (String) mandatory
- hl:author (String) mandatory
- hl:numberOfSpecies (Long) mandatory
- hl:boundingBox (String) mandatory
- hl:PSOThreshold (Double) mandatory
- hl:numberOfImages (Long) mandatory
+ hl:images(nt:unstructured)
= nt:unstructured
mandatory autocreated
[nthl:timeSeriesItemContent] > nthl:workspaceLeafItemContent, nthl:file
- hl:id (String) mandatory
- hl:title (String) mandatory
- hl:description (String) mandatory
- hl:creator (String) mandatory
- hl:created (String) mandatory
- hl:publisher (String) mandatory
- hl:sourceId (String) mandatory
- hl:sourceName (String) mandatory
- hl:rights (String) mandatory
- hl:dimension (Long) mandatory
- hl:headerLabels (String)
[nthl:reportItemContent] > nthl:workspaceLeafItemContent, nthl:file
- hl:created (Date) mandatory
- hl:lastEdit (Date) mandatory
- hl:author (String) mandatory
- hl:lastEditBy (String) mandatory
- hl:templateName (String) mandatory
- hl:numberOfSection (Long) mandatory
- hl:status (String) mandatory
[nthl:reportTemplateContent] > nthl:workspaceLeafItemContent, nthl:file
- hl:created (Date) mandatory
- hl:lastEdit (Date) mandatory
- hl:author (String) mandatory
- hl:lastEditBy (String) mandatory
- hl:numberOfSection (Long) mandatory
- hl:status (String) mandatory
[nthl:externalResourceLinkContent] > nthl:workspaceLeafItemContent
- hl:mimeType (String)
- hl:size (long) mandatory
- hl:resourceId (String) mandatory
- hl:servicePlugin (String) mandatory
[nthl:tabularDataLinkContent] > nthl:workspaceLeafItemContent
- hl:tableID (String) mandatory
- hl:tableTemplateID (String) mandatory
- hl:provenance (String) mandatory
- hl:runtimeResourceID (String) mandatory
- hl:operator (String)
[nthl:smartFolderContent] > nt:base
- hl:query (String) mandatory
- hl:folderId (String)
[nthl:folderBulkCreator] > nt:base
- hl:folderId (String) mandatory
- hl:status (Long)
= '0'
mandatory autocreated
- hl:failures (Long)
= '0'
mandatory autocreated
- hl:requests (Long) mandatory
[nthl:rootFolderBulkCreator] > nt:folder
+ * (nthl:folderBulkCreator)
= nthl:folderBulkCreator

@ -0,0 +1,9 @@
default.bucketName=storagehub-dev
default.key=18eb719ebffb4cd0ab78f9343f8aedd2
default.secret=e7b6178dd61d4e0dbbc37ff7cb941aed
default.url=https://isti-cloud.isti.cnr.it:13808/
default.createBucket=false
volatile.bucketName=shub-volatile-dev
volatile.key=18eb719ebffb4cd0ab78f9343f8aedd2
volatile.secret=e7b6178dd61d4e0dbbc37ff7cb941aed
volatile.url=https://isti-cloud.isti.cnr.it:13808/

@ -1,313 +0,0 @@
European Union Public Licence V. 1.1
EUPL © the European Community 2007
This European Union Public Licence (the “EUPL”) applies to the Work or Software
(as defined below) which is provided under the terms of this Licence. Any use of
the Work, other than as authorised under this Licence is prohibited (to the
extent such use is covered by a right of the copyright holder of the Work).
The Original Work is provided under the terms of this Licence when the Licensor
(as defined below) has placed the following notice immediately following the
copyright notice for the Original Work:
Licensed under the EUPL V.1.1
or has expressed by any other mean his willingness to license under the EUPL.
1. Definitions
In this Licence, the following terms have the following meaning:
- The Licence: this Licence.
- The Original Work or the Software: the software distributed and/or
communicated by the Licensor under this Licence, available as Source Code and
also as Executable Code as the case may be.
- Derivative Works: the works or software that could be created by the Licensee,
based upon the Original Work or modifications thereof. This Licence does not
define the extent of modification or dependence on the Original Work required
in order to classify a work as a Derivative Work; this extent is determined by
copyright law applicable in the country mentioned in Article 15.
- The Work: the Original Work and/or its Derivative Works.
- The Source Code: the human-readable form of the Work which is the most
convenient for people to study and modify.
- The Executable Code: any code which has generally been compiled and which is
meant to be interpreted by a computer as a program.
- The Licensor: the natural or legal person that distributes and/or communicates
the Work under the Licence.
- Contributor(s): any natural or legal person who modifies the Work under the
Licence, or otherwise contributes to the creation of a Derivative Work.
- The Licensee or “You”: any natural or legal person who makes any usage of the
Software under the terms of the Licence.
- Distribution and/or Communication: any act of selling, giving, lending,
renting, distributing, communicating, transmitting, or otherwise making
available, on-line or off-line, copies of the Work or providing access to its
essential functionalities at the disposal of any other natural or legal
person.
2. Scope of the rights granted by the Licence
The Licensor hereby grants You a world-wide, royalty-free, non-exclusive,
sub-licensable licence to do the following, for the duration of copyright vested
in the Original Work:
- use the Work in any circumstance and for all usage, reproduce the Work, modify
- the Original Work, and make Derivative Works based upon the Work, communicate
- to the public, including the right to make available or display the Work or
- copies thereof to the public and perform publicly, as the case may be, the
- Work, distribute the Work or copies thereof, lend and rent the Work or copies
- thereof, sub-license rights in the Work or copies thereof.
Those rights can be exercised on any media, supports and formats, whether now
known or later invented, as far as the applicable law permits so.
In the countries where moral rights apply, the Licensor waives his right to
exercise his moral right to the extent allowed by law in order to make effective
the licence of the economic rights here above listed.
The Licensor grants to the Licensee royalty-free, non exclusive usage rights to
any patents held by the Licensor, to the extent necessary to make use of the
rights granted on the Work under this Licence.
3. Communication of the Source Code
The Licensor may provide the Work either in its Source Code form, or as
Executable Code. If the Work is provided as Executable Code, the Licensor
provides in addition a machine-readable copy of the Source Code of the Work
along with each copy of the Work that the Licensor distributes or indicates, in
a notice following the copyright notice attached to the Work, a repository where
the Source Code is easily and freely accessible for as long as the Licensor
continues to distribute and/or communicate the Work.
4. Limitations on copyright
Nothing in this Licence is intended to deprive the Licensee of the benefits from
any exception or limitation to the exclusive rights of the rights owners in the
Original Work or Software, of the exhaustion of those rights or of other
applicable limitations thereto.
5. Obligations of the Licensee
The grant of the rights mentioned above is subject to some restrictions and
obligations imposed on the Licensee. Those obligations are the following:
Attribution right: the Licensee shall keep intact all copyright, patent or
trademarks notices and all notices that refer to the Licence and to the
disclaimer of warranties. The Licensee must include a copy of such notices and a
copy of the Licence with every copy of the Work he/she distributes and/or
communicates. The Licensee must cause any Derivative Work to carry prominent
notices stating that the Work has been modified and the date of modification.
Copyleft clause: If the Licensee distributes and/or communicates copies of the
Original Works or Derivative Works based upon the Original Work, this
Distribution and/or Communication will be done under the terms of this Licence
or of a later version of this Licence unless the Original Work is expressly
distributed only under this version of the Licence. The Licensee (becoming
Licensor) cannot offer or impose any additional terms or conditions on the Work
or Derivative Work that alter or restrict the terms of the Licence.
Compatibility clause: If the Licensee Distributes and/or Communicates Derivative
Works or copies thereof based upon both the Original Work and another work
licensed under a Compatible Licence, this Distribution and/or Communication can
be done under the terms of this Compatible Licence. For the sake of this clause,
“Compatible Licence” refers to the licences listed in the appendix attached to
this Licence. Should the Licensee's obligations under the Compatible Licence
conflict with his/her obligations under this Licence, the obligations of the
Compatible Licence shall prevail.
Provision of Source Code: When distributing and/or communicating copies of the
Work, the Licensee will provide a machine-readable copy of the Source Code or
indicate a repository where this Source will be easily and freely available for
as long as the Licensee continues to distribute and/or communicate the Work.
Legal Protection: This Licence does not grant permission to use the trade names,
trademarks, service marks, or names of the Licensor, except as required for
reasonable and customary use in describing the origin of the Work and
reproducing the content of the copyright notice.
6. Chain of Authorship
The original Licensor warrants that the copyright in the Original Work granted
hereunder is owned by him/her or licensed to him/her and that he/she has the
power and authority to grant the Licence.
Each Contributor warrants that the copyright in the modifications he/she brings
to the Work are owned by him/her or licensed to him/her and that he/she has the
power and authority to grant the Licence.
Each time You accept the Licence, the original Licensor and subsequent
Contributors grant You a licence to their contributions to the Work, under the
terms of this Licence.
7. Disclaimer of Warranty
The Work is a work in progress, which is continuously improved by numerous
contributors. It is not a finished work and may therefore contain defects or
“bugs” inherent to this type of software development.
For the above reason, the Work is provided under the Licence on an “as is” basis
and without warranties of any kind concerning the Work, including without
limitation merchantability, fitness for a particular purpose, absence of defects
or errors, accuracy, non-infringement of intellectual property rights other than
copyright as stated in Article 6 of this Licence.
This disclaimer of warranty is an essential part of the Licence and a condition
for the grant of any rights to the Work.
8. Disclaimer of Liability
Except in the cases of wilful misconduct or damages directly caused to natural
persons, the Licensor will in no event be liable for any direct or indirect,
material or moral, damages of any kind, arising out of the Licence or of the use
of the Work, including without limitation, damages for loss of goodwill, work
stoppage, computer failure or malfunction, loss of data or any commercial
damage, even if the Licensor has been advised of the possibility of such
damage. However, the Licensor will be liable under statutory product liability
laws as far such laws apply to the Work.
9. Additional agreements
While distributing the Original Work or Derivative Works, You may choose to
conclude an additional agreement to offer, and charge a fee for, acceptance of
support, warranty, indemnity, or other liability obligations and/or services
consistent with this Licence. However, in accepting such obligations, You may
act only on your own behalf and on your sole responsibility, not on behalf of
the original Licensor or any other Contributor, and only if You agree to
indemnify, defend, and hold each Contributor harmless for any liability incurred
by, or claims asserted against such Contributor by the fact You have accepted
any such warranty or additional liability.
10. Acceptance of the Licence
The provisions of this Licence can be accepted by clicking on an icon “I agree”
placed under the bottom of a window displaying the text of this Licence or by
affirming consent in any other similar way, in accordance with the rules of
applicable law. Clicking on that icon indicates your clear and irrevocable
acceptance of this Licence and all of its terms and conditions.
Similarly, you irrevocably accept this Licence and all of its terms and
conditions by exercising any rights granted to You by Article 2 of this Licence,
such as the use of the Work, the creation by You of a Derivative Work or the
Distribution and/or Communication by You of the Work or copies thereof.
11. Information to the public
In case of any Distribution and/or Communication of the Work by means of
electronic communication by You (for example, by offering to download the Work
from a remote location) the distribution channel or media (for example, a
website) must at least provide to the public the information requested by the
applicable law regarding the Licensor, the Licence and the way it may be
accessible, concluded, stored and reproduced by the Licensee.
12. Termination of the Licence
The Licence and the rights granted hereunder will terminate automatically upon
any breach by the Licensee of the terms of the Licence.
Such a termination will not terminate the licences of any person who has
received the Work from the Licensee under the Licence, provided such persons
remain in full compliance with the Licence.
13. Miscellaneous
Without prejudice of Article 9 above, the Licence represents the complete
agreement between the Parties as to the Work licensed hereunder.
If any provision of the Licence is invalid or unenforceable under applicable
law, this will not affect the validity or enforceability of the Licence as a
whole. Such provision will be construed and/or reformed so as necessary to make
it valid and enforceable.
The European Commission may publish other linguistic versions and/or new
versions of this Licence, so far this is required and reasonable, without
reducing the scope of the rights granted by the Licence. New versions of the
Licence will be published with a unique version number.
All linguistic versions of this Licence, approved by the European Commission,
have identical value. Parties can take advantage of the linguistic version of
their choice.
14. Jurisdiction
Any litigation resulting from the interpretation of this License, arising
between the European Commission, as a Licensor, and any Licensee, will be
subject to the jurisdiction of the Court of Justice of the European Communities,
as laid down in article 238 of the Treaty establishing the European Community.
Any litigation arising between Parties, other than the European Commission, and
resulting from the interpretation of this License, will be subject to the
exclusive jurisdiction of the competent court where the Licensor resides or
conducts its primary business.
15. Applicable Law
This Licence shall be governed by the law of the European Union country where
the Licensor resides or has his registered office.
This licence shall be governed by the Belgian law if:
- a litigation arises between the European Commission, as a Licensor, and any
  Licensee;
- the Licensor, other than the European Commission, has no residence or
  registered office inside a European Union country.
===
Appendix
“Compatible Licences” according to article 5 EUPL are:
- GNU General Public License (GNU GPL) v. 2
- Open Software License (OSL) v. 2.1, v. 3.0
- Common Public License v. 1.0
- Eclipse Public License v. 1.0
- Cecill v. 2.0

@@ -1,79 +0,0 @@
The gCube System - storagehub
--------------------------------------------------
REST web service for Jackrabbit
This software is part of the gCube Framework (https://www.gcube-system.org/): an
open-source software toolkit used for building and operating Hybrid Data
Infrastructures enabling the dynamic deployment of Virtual Research Environments
by favouring the realisation of reuse oriented policies.
The projects leading to this software have received funding from a series of
European Union programmes including:
* the Sixth Framework Programme for Research and Technological Development -
DILIGENT (grant no. 004260);
* the Seventh Framework Programme for research, technological development and
demonstration - D4Science (grant no. 212488), D4Science-II (grant no.
239019),ENVRI (grant no. 283465), EUBrazilOpenBio (grant no. 288754), iMarine
(grant no. 283644);
* the H2020 research and innovation programme - BlueBRIDGE (grant no. 675680),
EGIEngage (grant no. 654142), ENVRIplus (grant no. 654182), Parthenos (grant
no. 654119), SoBigData (grant no. 654024), AGINFRA PLUS (grant no. 731001).
Version
--------------------------------------------------
1.3.1 (20210910-085653)
Please see the file named "changelog.xml" in this directory for the release notes.
Authors
--------------------------------------------------
* Lucio Lelii (lucio.lelii-AT-isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo".
Maintainers
-----------
* Lucio Lelii (lucio.lelii-AT-isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo".
Download information
--------------------------------------------------
Source code is available from SVN:
https://code-repo.d4science.org/gCubeSystem/storagehub
Binaries can be downloaded from the gCube website:
https://www.gcube-system.org/
Installation
--------------------------------------------------
Installation documentation is available on-line in the gCube Wiki:
https://wiki.gcube-system.org/gcube/index.php/Home_Library_2.0_API_Framework_Specification
Documentation
--------------------------------------------------
Documentation is available on-line in the gCube Wiki:
https://wiki.gcube-system.org/gcube/index.php/StorageHub_API_Framework_Specification
Support
--------------------------------------------------
Bugs and support requests can be reported in the gCube issue tracking tool:
https://support.d4science.org/projects/gcube/
Licensing
--------------------------------------------------
This software is licensed under the terms you may find in the file named "LICENSE" in this directory.

@@ -1,10 +0,0 @@
<ReleaseNotes>
<Changeset component="org.gcube.data-access.storagehub-webapp.1.0.5"
date="2019-04-04">
<Change>Active wait for lock in case of item creation added</Change>
</Changeset>
<Changeset component="org.gcube.data-access.storagehub-webapp.1.0.0"
date="2015-07-01">
<Change>First commit</Change>
</Changeset>
</ReleaseNotes>

@@ -1,32 +0,0 @@
<assembly
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
<id>servicearchive</id>
<formats>
<format>tar.gz</format>
</formats>
<baseDirectory>/</baseDirectory>
<fileSets>
<fileSet>
<directory>/home/lucio/eclipse-workspace/storagehub-webapp_BRANCH/distro</directory>
<outputDirectory>/</outputDirectory>
<useDefaultExcludes>true</useDefaultExcludes>
<includes>
<include>README</include>
<include>LICENSE</include>
<include>changelog.xml</include>
<include>profile.xml</include>
</includes>
<fileMode>755</fileMode>
<filtered>true</filtered>
</fileSet>
</fileSets>
<files>
<file>
<source>target/storagehub.war</source>
<outputDirectory>/storagehub</outputDirectory>
</file>
</files>
</assembly>

@@ -1,7 +0,0 @@
<application mode='online'>
<name>StorageHub</name>
<group>DataAccess</group>
<version>1.3.1</version>
<description>Storage Hub webapp</description>
<local-persistence location='target' />
</application>

@@ -1,25 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Resource xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<ID></ID>
<Type>Service</Type>
<Profile>
<Description>Storage Hub Webapp</Description>
<Class>DataAccess</Class>
<Name>storagehub</Name>
<Version>1.0.0</Version>
<Packages>
<Software>
<Name>storagehub</Name>
<Version>1.0.7-SNAPSHOT</Version>
<MavenCoordinates>
<groupId>org.gcube.data.access</groupId>
<artifactId>storagehub</artifactId>
<version>1.0.7-SNAPSHOT</version>
</MavenCoordinates>
<Files>
<File>storagehub.jar</File>
</Files>
</Software>
</Packages>
</Profile>
</Resource>

@@ -0,0 +1,25 @@
.d4science_intro {
top: 0;
z-index: 2000;
position: fixed;
display: block ruby;
padding: 10px;
background: white;
width: 100%;
}
.navbar-fixed-top {
top: 100px !important;
}
.sidebar {
top: 160px !important;
}
.navbar {
margin-bottom: 40px !important;
}
.main {
top: 90px;
}

@@ -0,0 +1,26 @@
.d4science_intro {
top: 0;
z-index: 2000;
position: fixed;
display: block ruby;
padding: 10px;
background: white;
width: 100%;
height: 100px;
}
.navbar-fixed-top {
top: 100px !important;
}
.sidebar {
top: 160px !important;
}
.navbar {
margin-bottom: 40px !important;
}
.main {
top: 90px;
}

@@ -1,56 +0,0 @@
package org.gcube.data.access.fs;

import java.util.Calendar;

import javax.inject.Inject;

import org.gcube.common.storagehub.model.expressions.Expression;
import org.gcube.common.storagehub.model.expressions.GenericSearchableItem;
import org.gcube.common.storagehub.model.expressions.date.Before;
import org.gcube.common.storagehub.model.expressions.logical.And;
import org.gcube.common.storagehub.model.expressions.text.Contains;
import org.gcube.data.access.storagehub.Constants;
import org.gcube.data.access.storagehub.query.sql2.evaluators.Evaluators;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

//@RunWith(WeldJunit4Runner.class)
public class Expressions {

	/*
	private static Logger log = LoggerFactory.getLogger(Expression.class);

	@Inject
	Evaluators evaluators;

	public void test() {
		evaluators.getEvaluators().forEach(s -> System.out.println(s.getType().toString()));
		Expression<Boolean> cont1 = new Contains(GenericSearchableItem.get().title, "Data");
		Expression<Boolean> before = new Before(GenericSearchableItem.get().creationTime, Calendar.getInstance());
		Expression<Boolean> andExpr = new And(cont1, before);
		System.out.println(evaluators.evaluate(andExpr));
	}
	*/

	@Test
	public void test() {
		String entirePath = "sp2/comic/";
		/*String[] parentPathSplit = entirePath.split("/");
		System.out.println(parentPathSplit.length);
		for (String v: parentPathSplit)
			System.out.println(v);
		*/
		String name = entirePath.replaceAll("([^/]*/)*(.*)", "$2");
		String parentPath = entirePath.replaceAll("(([^/]*/)*)(.*)", "$1");
		System.out.println(entirePath + " --" + name + "-- " + parentPath);
	}
}

Some files were not shown because too many files have changed in this diff.