Compare commits
86 Commits
master
...
multipleSt
@ -1,40 +1,39 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<classpath>
|
||||
<classpathentry kind="src" output="target/classes" path="src/main/java">
|
||||
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
|
||||
<attributes>
|
||||
<attribute name="optional" value="true"/>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
|
||||
<classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources">
|
||||
<attributes>
|
||||
<attribute name="test" value="true"/>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
|
||||
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
|
||||
<attributes>
|
||||
<attribute name="test" value="true"/>
|
||||
<attribute name="optional" value="true"/>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
<attribute name="org.eclipse.jst.component.dependency" value="/WEB-INF/lib"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources">
|
||||
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-11">
|
||||
<attributes>
|
||||
<attribute name="test" value="true"/>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
|
||||
<classpathentry kind="src" output="target/classes" path="src/main/java">
|
||||
<attributes>
|
||||
<attribute name="optional" value="true"/>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
<attribute name="org.eclipse.jst.component.dependency" value="/WEB-INF/lib"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8">
|
||||
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
|
||||
<attributes>
|
||||
<attribute name="optional" value="true"/>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
<attribute name="test" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry combineaccessrules="false" kind="src" path="/storagehub-model"/>
|
||||
<classpathentry kind="output" path="target/classes"/>
|
||||
</classpath>
|
||||
|
@ -1 +1,2 @@
|
||||
target
|
||||
/Storagehub-TODO
|
||||
|
@ -1,8 +1,11 @@
|
||||
eclipse.preferences.version=1
|
||||
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8
|
||||
org.eclipse.jdt.core.compiler.compliance=1.8
|
||||
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
|
||||
org.eclipse.jdt.core.compiler.codegen.targetPlatform=11
|
||||
org.eclipse.jdt.core.compiler.compliance=11
|
||||
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
|
||||
org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled
|
||||
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
|
||||
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
|
||||
org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=ignore
|
||||
org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=warning
|
||||
org.eclipse.jdt.core.compiler.release=disabled
|
||||
org.eclipse.jdt.core.compiler.source=1.8
|
||||
org.eclipse.jdt.core.compiler.source=11
|
||||
|
@ -1,8 +1,8 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<faceted-project>
|
||||
<fixed facet="wst.jsdt.web"/>
|
||||
<installed facet="java" version="1.8"/>
|
||||
<installed facet="jst.web" version="3.0"/>
|
||||
<installed facet="jst.jaxrs" version="2.0"/>
|
||||
<installed facet="wst.jsdt.web" version="1.0"/>
|
||||
<installed facet="java" version="11"/>
|
||||
</faceted-project>
|
||||
|
@ -0,0 +1,12 @@
|
||||
FROM smartgears-distribution:4.0.0-java11-tomcat9
|
||||
ARG REPOUSER=admin
|
||||
ARG REPOPWD=admin
|
||||
COPY ./target/storagehub.war /tomcat/webapps/
|
||||
COPY ./docker/jackrabbit /app/jackrabbit
|
||||
COPY ./docker/storagehub.xml /tomcat/conf/Catalina/localhost/
|
||||
COPY ./docker/logback.xml /etc/
|
||||
COPY ./docker/container.ini /etc/
|
||||
RUN unzip /tomcat/webapps/storagehub.war -d /tomcat/webapps/storagehub
|
||||
RUN rm /tomcat/webapps/storagehub.war
|
||||
COPY ./docker/storage-settings.properties /tomcat/webapps/storagehub/WEB-INF/classes/
|
||||
RUN sed -i "s/{{adminId}}/$REPOUSER/g; s/{{adminPwd}}/$REPOPWD/g" /tomcat/webapps/storagehub/WEB-INF/web.xml
|
@ -0,0 +1,12 @@
|
||||
FROM smartgears-distribution:4.0.0-java11-tomcat9
|
||||
ARG REPOUSER=admin
|
||||
ARG REPOPWD=admin
|
||||
COPY ./target/storagehub-test-storages.war /tomcat/webapps/storagehub.war
|
||||
COPY ./docker/jackrabbit /app/jackrabbit
|
||||
COPY ./docker/storagehub.xml /tomcat/conf/Catalina/localhost/
|
||||
COPY ./docker/logback.xml /etc/
|
||||
COPY ./docker/container.ini /etc/
|
||||
RUN unzip /tomcat/webapps/storagehub.war -d /tomcat/webapps/storagehub
|
||||
RUN rm /tomcat/webapps/storagehub.war
|
||||
COPY ./docker/storage-settings.properties /tomcat/webapps/storagehub/WEB-INF/classes/
|
||||
RUN sed -i "s/{{adminId}}/$REPOUSER/g; s/{{adminPwd}}/$REPOPWD/g" /tomcat/webapps/storagehub/WEB-INF/web.xml
|
@ -0,0 +1,3 @@
|
||||
nodeType to remove on new import from a backup:
|
||||
externalUrl
|
||||
|
@ -1 +0,0 @@
|
||||
${gcube.license}
|
@ -1,66 +0,0 @@
|
||||
The gCube System - ${name}
|
||||
--------------------------------------------------
|
||||
|
||||
${description}
|
||||
|
||||
|
||||
${gcube.description}
|
||||
|
||||
${gcube.funding}
|
||||
|
||||
|
||||
Version
|
||||
--------------------------------------------------
|
||||
|
||||
${version} (${buildDate})
|
||||
|
||||
Please see the file named "changelog.xml" in this directory for the release notes.
|
||||
|
||||
|
||||
Authors
|
||||
--------------------------------------------------
|
||||
|
||||
|
||||
* Lucio Lelii (lucio.lelii-AT-isti.cnr.it), CNR Pisa,
|
||||
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo".
|
||||
|
||||
Maintainers
|
||||
-----------
|
||||
|
||||
|
||||
* Lucio Lelii (lucio.lelii-AT-isti.cnr.it), CNR Pisa,
|
||||
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo".
|
||||
|
||||
Download information
|
||||
--------------------------------------------------
|
||||
|
||||
Source code is available from SVN:
|
||||
${scm.url}
|
||||
|
||||
Binaries can be downloaded from the gCube website:
|
||||
${gcube.website}
|
||||
|
||||
|
||||
Installation
|
||||
--------------------------------------------------
|
||||
|
||||
Installation documentation is available on-line in the gCube Wiki:
|
||||
${gcube.wikiRoot}/Home_Library_2.0_API_Framework_Specification
|
||||
|
||||
Documentation
|
||||
--------------------------------------------------
|
||||
|
||||
Documentation is available on-line in the gCube Wiki:
|
||||
${gcube.wikiRoot}/StorageHub_API_Framework_Specification
|
||||
|
||||
Support
|
||||
--------------------------------------------------
|
||||
|
||||
Bugs and support requests can be reported in the gCube issue tracking tool:
|
||||
${gcube.issueTracking}
|
||||
|
||||
|
||||
Licensing
|
||||
--------------------------------------------------
|
||||
|
||||
This software is licensed under the terms you may find in the file named "LICENSE" in this directory.
|
@ -1,7 +0,0 @@
|
||||
<application mode='online'>
|
||||
<name>StorageHub</name>
|
||||
<group>DataAccess</group>
|
||||
<version>${version}</version>
|
||||
<description>Storage Hub webapp</description>
|
||||
<local-persistence location='target' />
|
||||
</application>
|
@ -0,0 +1,39 @@
|
||||
version: '3.7'
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:10.5
|
||||
restart: always
|
||||
environment:
|
||||
- POSTGRES_DB=workspace-db
|
||||
- POSTGRES_USER=ws-db-user
|
||||
- POSTGRES_PASSWORD=dbPwd
|
||||
logging:
|
||||
options:
|
||||
max-size: 10m
|
||||
max-file: "3"
|
||||
ports:
|
||||
- '5423:5432'
|
||||
volumes:
|
||||
- ./postgres-data:/var/lib/postgresql/data
|
||||
copy the sql script to create tables
|
||||
- ./sql/create_tables.sql:/docker-entrypoint-initdb.d/create_tables.sql
|
||||
storagehub:
|
||||
build:
|
||||
dockerfile: ./Dockerfile-test
|
||||
ports:
|
||||
- '8081:8080'
|
||||
minio:
|
||||
image: minio/minio
|
||||
ports:
|
||||
- "9000:9000"
|
||||
- "9001:9001"
|
||||
volumes:
|
||||
- minio_storage:/data
|
||||
environment:
|
||||
MINIO_ROOT_USER: SHUBTEST
|
||||
MINIO_ROOT_PASSWORD: wJalrXUtnFEMI/K7MDENG/bPxRfiCY
|
||||
command: server --console-address ":9001" /data
|
||||
|
||||
volumes:
|
||||
minio_storage: {}
|
||||
|
@ -0,0 +1,38 @@
|
||||
version: '3.7'
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:10.5
|
||||
restart: always
|
||||
environment:
|
||||
- POSTGRES_DB=workspace-db
|
||||
- POSTGRES_USER=ws-db-user
|
||||
- POSTGRES_PASSWORD=dbPwd
|
||||
logging:
|
||||
options:
|
||||
max-size: 10m
|
||||
max-file: "3"
|
||||
ports:
|
||||
- '5423:5432'
|
||||
volumes:
|
||||
- ./postgres-data:/var/lib/postgresql/data
|
||||
copy the sql script to create tables
|
||||
- ./sql/create_tables.sql:/docker-entrypoint-initdb.d/create_tables.sql
|
||||
storagehub:
|
||||
build: .
|
||||
ports:
|
||||
- '8081:8080'
|
||||
minio:
|
||||
image: minio/minio
|
||||
ports:
|
||||
- "9000:9000"
|
||||
- "9001:9001"
|
||||
volumes:
|
||||
- minio_storage:/data
|
||||
environment:
|
||||
MINIO_ROOT_USER: SHUBTEST
|
||||
MINIO_ROOT_PASSWORD: wJalrXUtnFEMI/K7MDENG/bPxRfiCY
|
||||
command: server --console-address ":9001" /data
|
||||
|
||||
volumes:
|
||||
minio_storage: {}
|
||||
|
@ -0,0 +1,23 @@
|
||||
[node]
|
||||
mode = offline
|
||||
hostname = dlib29.isti.cnr.it
|
||||
protocol= http
|
||||
port = 8080
|
||||
infrastructure = gcube
|
||||
authorizeChildrenContext = true
|
||||
publicationFrequencyInSeconds = 60
|
||||
|
||||
[properties]
|
||||
SmartGearsDistribution = 4.0.0-SNAPSHOT
|
||||
SmartGearsDistributionBundle = UnBundled
|
||||
|
||||
[site]
|
||||
country = it
|
||||
location = pisa
|
||||
|
||||
[authorization]
|
||||
factory = org.gcube.smartgears.security.defaults.DefaultAuthorizationProviderFactory
|
||||
factory.endpoint = https://accounts.dev.d4science.org/auth/realms/d4science/protocol/openid-connect/token
|
||||
credentials.class = org.gcube.smartgears.security.SimpleCredentials
|
||||
credentials.clientID = node-whn-test-uno-d-d4s.d4science.org
|
||||
credentials.secret = 979bd3bc-5cc4-11ec-bf63-0242ac130002
|
@ -0,0 +1,11 @@
|
||||
#bootstrap properties for the repository startup servlet.
|
||||
#Fri Jul 21 05:19:29 CEST 2017
|
||||
java.naming.factory.initial=org.apache.jackrabbit.core.jndi.provider.DummyInit$
|
||||
repository.home=jackrabbit
|
||||
rmi.enabled=true
|
||||
repository.config=jackrabbit/repository.xml
|
||||
repository.name=jackrabbit.repository
|
||||
rmi.host=localhost
|
||||
java.naming.provider.url=http\://www.apache.org/jackrabbit
|
||||
jndi.enabled=true
|
||||
rmi.port=0
|
@ -0,0 +1,110 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor
|
||||
license agreements. See the NOTICE file distributed with this work for additional
|
||||
information regarding copyright ownership. The ASF licenses this file to
|
||||
You under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
this file except in compliance with the License. You may obtain a copy of
|
||||
the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required
|
||||
by applicable law or agreed to in writing, software distributed under the
|
||||
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
|
||||
OF ANY KIND, either express or implied. See the License for the specific
|
||||
language governing permissions and limitations under the License. -->
|
||||
<!DOCTYPE Repository PUBLIC "-//The Apache Software Foundation//DTD Jackrabbit 2.0//EN" "http://jackrabbit.apache.org/dtd/repository-2.0.dtd">
|
||||
<Repository>
|
||||
<!-- virtual file system where the repository stores global state (e.g.
|
||||
registered namespaces, custom node types, etc.) -->
|
||||
<FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
|
||||
<param name="driver" value="org.postgresql.Driver" />
|
||||
<param name="schema" value="postgresql" />
|
||||
<param name="url" value="jdbc:postgresql://postgres:5432/workspace-db" />
|
||||
<param name="user" value="ws-db-user" />
|
||||
<param name="password" value="dbPwd" />
|
||||
<param name="schemaObjectPrefix" value="rep_" />
|
||||
</FileSystem>
|
||||
<!-- data store configuration -->
|
||||
<DataStore class="org.apache.jackrabbit.core.data.db.DbDataStore">
|
||||
<param name="driver" value="org.postgresql.Driver" />
|
||||
<param name="databaseType" value="postgresql" />
|
||||
<param name="url" value="jdbc:postgresql://postgres:5432/workspace-db" />
|
||||
<param name="user" value="ws-db-user" />
|
||||
<param name="password" value="dbPwd" />
|
||||
<param name="minRecordLength" value="1024" />
|
||||
<param name="maxConnections" value="3" />
|
||||
<param name="copyWhenReading" value="true" />
|
||||
<param name="tablePrefix" value="datastore_" />
|
||||
<param name="schemaObjectPrefix" value="" />
|
||||
</DataStore>
|
||||
<!-- security configuration -->
|
||||
<Security appName="Jackrabbit">
|
||||
<SecurityManager class="org.apache.jackrabbit.core.DefaultSecurityManager" />
|
||||
<AccessManager class="org.apache.jackrabbit.core.security.DefaultAccessManager" />
|
||||
<LoginModule class="org.apache.jackrabbit.core.security.authentication.DefaultLoginModule">
|
||||
<param name="adminId" value="admin" />
|
||||
</LoginModule>
|
||||
</Security>
|
||||
<!-- location of workspaces root directory and name of default workspace -->
|
||||
<Workspaces rootPath="${rep.home}/workspaces" defaultWorkspace="default" />
|
||||
<Workspace name="${wsp.name}">
|
||||
<FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
|
||||
<param name="path" value="${wsp.home}" />
|
||||
</FileSystem>
|
||||
<PersistenceManager class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
|
||||
<param name="driver" value="org.postgresql.Driver" />
|
||||
<param name="url" value="jdbc:postgresql://postgres:5432/workspace-db" />
|
||||
<param name="schema" value="postgresql" />
|
||||
<param name="user" value="ws-db-user" />
|
||||
<param name="password" value="dbPwd" />
|
||||
<param name="schemaObjectPrefix" value="pm_${wsp.name}_" />
|
||||
<param name="bundleCacheSize" value="600" />
|
||||
<param name="errorHandling" value="IGNORE_MISSING_BLOBS" />
|
||||
<param name="consistencyFix" value="false" />
|
||||
<param name="consistencyCheck" value="false" />
|
||||
</PersistenceManager>
|
||||
<!-- Search index and the file system it uses. class: FQN of class implementing
|
||||
the QueryHandler interface -->
|
||||
<SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
|
||||
<param name="path" value="${wsp.home}/index" />
|
||||
<param name="supportHighlighting" value="true" />
|
||||
<param name="autoRepair" value="true" />
|
||||
<param name="onWorkspaceInconsistency" value="log" />
|
||||
<param name="indexingConfiguration" value="${rep.home}/indexing_configuration.xml" />
|
||||
<param name="resultFetchSize" value="50" />
|
||||
<param name="cacheSize" value="100000" />
|
||||
<param name="enableConsistencyCheck" value="false" />
|
||||
<param name="forceConsistencyCheck" value="false" />
|
||||
</SearchIndex>
|
||||
</Workspace>
|
||||
<!-- Configures the versioning -->
|
||||
<Versioning rootPath="${rep.home}/version">
|
||||
<!-- Configures the filesystem to use for versioning for the respective
|
||||
persistence manager -->
|
||||
<FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
|
||||
<param name="path" value="${rep.home}/version" />
|
||||
</FileSystem>
|
||||
<PersistenceManager class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
|
||||
<param name="driver" value="org.postgresql.Driver" />
|
||||
<param name="url" value="jdbc:postgresql://postgres:5432/workspace-db" />
|
||||
<param name="schema" value="postgresql" />
|
||||
<param name="user" value="ws-db-user" />
|
||||
<param name="password" value="dbPwd" />
|
||||
<param name="schemaObjectPrefix" value="pm_version_" />
|
||||
<param name="bundleCacheSize" value="600" />
|
||||
<param name="consistencyFix" value="false" />
|
||||
<param name="consistencyCheck" value="false" />
|
||||
</PersistenceManager>
|
||||
</Versioning>
|
||||
<!-- Cluster configuration -->
|
||||
<!-- Cluster id="storagehub1.d4science.org" syncDelay="2000">
|
||||
<Journal class="org.apache.jackrabbit.core.journal.DatabaseJournal">
|
||||
<param name="driver" value="org.postgresql.Driver" />
|
||||
<param name="url" value="jdbc:postgresql://postgres/workspace-db" />
|
||||
<param name="databaseType" value="postgresql" />
|
||||
<param name="schemaObjectPrefix" value="journal_" />
|
||||
<param name="user" value="ws-db-user" />
|
||||
<param name="password" value="dbPwd" />
|
||||
<param name="revision" value="${rep.home}/revision.log" />
|
||||
<param name="janitorEnabled" value="false"/>
|
||||
<set to true if you want to daily clean the journal table https://wiki.apache.org/jackrabbit/Clustering#Removing_Old_Revisions>
|
||||
</Journal>
|
||||
</Cluster > -->
|
||||
</Repository>
|
@ -0,0 +1,25 @@
|
||||
<configuration scan="true" debug="true">
|
||||
|
||||
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
|
||||
<encoder>Ï
|
||||
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
|
||||
</encoder>
|
||||
</appender>
|
||||
|
||||
|
||||
|
||||
<logger name="org.gcube" level="DEBUG" />
|
||||
<logger name="org.gcube.smartgears" level="TRACE" />
|
||||
<logger name="org.gcube.smartgears.handlers" level="TRACE"/>
|
||||
<logger name="org.gcube.common.events" level="WARN" />
|
||||
<logger name="org.gcube.data.publishing" level="ERROR" />
|
||||
<logger name="org.gcube.documentstore" level="ERROR" />
|
||||
<logger name="org.gcube.common.core.publisher.is.legacy" level="TRACE" />
|
||||
<logger name="org.gcube.data.access" level="TRACE" />
|
||||
<logger name="org.gcube.data.access.storagehub.handlers" level="DEBUG"/>
|
||||
|
||||
<root level="WARN">
|
||||
<appender-ref ref="STDOUT" />
|
||||
</root>
|
||||
|
||||
</configuration>
|
@ -0,0 +1,6 @@
|
||||
${{adminId}}=workspace
|
||||
${{adminPwd}}=gcube
|
||||
${{db-host}}=postgres
|
||||
${{ws-db}}=workspace-db
|
||||
${{dbUser}}=ws-db-user
|
||||
${{dbPwd}}=dbPwd
|
@ -0,0 +1,19 @@
|
||||
#default.bucketName=storagehub-dev
|
||||
#default.key=SHUBTEST
|
||||
#default.secret=wJalrXUtnFEMI/K7MDENG/bPxRfiCY
|
||||
#default.url=http://minio:9000
|
||||
#default.createBucket=true
|
||||
#volatile.bucketName=storagehub-volatile-dev
|
||||
#volatile.key=SHUBTEST
|
||||
#volatile.secret=wJalrXUtnFEMI/K7MDENG/bPxRfiCY
|
||||
#volatile.url=http://minio:9000
|
||||
#volatile.createBucket=true
|
||||
default.bucketName=storagehub-dev
|
||||
default.key=18eb719ebffb4cd0ab78f9343f8aedd2
|
||||
default.secret=e7b6178dd61d4e0dbbc37ff7cb941aed
|
||||
default.url=https://isti-cloud.isti.cnr.it:13808/
|
||||
default.createBucket=false
|
||||
volatile.bucketName=shub-volatile-dev
|
||||
volatile.key=18eb719ebffb4cd0ab78f9343f8aedd2
|
||||
volatile.secret=e7b6178dd61d4e0dbbc37ff7cb941aed
|
||||
volatile.url=https://isti-cloud.isti.cnr.it:13808/
|
@ -0,0 +1,10 @@
|
||||
<Context path="/storagehub">
|
||||
<Resource
|
||||
name="jcr/repository"
|
||||
auth="Container"
|
||||
type="javax.jcr.Repository"
|
||||
factory="org.apache.jackrabbit.core.jndi.BindableRepositoryFactory"
|
||||
configFilePath="/app/jackrabbit/repository.xml"
|
||||
repHomeDir="/app/jackrabbit/workspaces"
|
||||
/>
|
||||
</Context>
|
@ -0,0 +1,24 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<enunciate
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:noNamespaceSchemaLocation="http://enunciate.webcohesion.com/schemas/enunciate-2.14.0.xsd">
|
||||
<api-classes>
|
||||
<!-- Use patterns to exclude classes... e.g. for URI-Resolver <exclude
|
||||
pattern="org.gcube.datatransfer.resolver.services.DocsGenerator" /> -->
|
||||
</api-classes>
|
||||
<modules>
|
||||
<gwt-json-overlay disabled="true" />
|
||||
<php-json-client disabled="true" />
|
||||
<ruby-json-client disabled="true" />
|
||||
<java-json-client disabled="true" />
|
||||
<javascript-client disabled="true" />
|
||||
<docs docsDir="${project.build.directory}" docsSubdir="api-docs" />
|
||||
|
||||
<docs
|
||||
freemarkerTemplate="${project.basedir}/src/main/resources/META-INF/enunciate/d4science_docs.fmt">
|
||||
<additional-css
|
||||
file="css/d4science_enunciate_custom.css" />
|
||||
</docs>
|
||||
<swagger basePath="/workspace" />
|
||||
</modules>
|
||||
</enunciate>
|
@ -0,0 +1,6 @@
|
||||
name: StorageHub
|
||||
group: DataAccess
|
||||
version: ${version}
|
||||
description: ${description}
|
||||
excludes:
|
||||
- path: /workspace/api-docs/*
|
@ -1,141 +0,0 @@
|
||||
package org.gcube.data.access.storagehub;
|
||||
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.PipedInputStream;
|
||||
import java.io.PipedOutputStream;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class MultipleOutputStream {
|
||||
|
||||
private Logger logger = LoggerFactory.getLogger(MultipleOutputStream.class);
|
||||
|
||||
private MyPipedInputStream[] pipedInStreams;
|
||||
|
||||
private InputStream is;
|
||||
|
||||
private MyPipedOututStream[] pipedOutStreams;
|
||||
|
||||
private int index=0;
|
||||
|
||||
public MultipleOutputStream(InputStream is, int number) throws IOException{
|
||||
this.is = is;
|
||||
|
||||
|
||||
logger.debug("requested {} piped streams ",number);
|
||||
|
||||
pipedInStreams = new MyPipedInputStream[number];
|
||||
pipedOutStreams = new MyPipedOututStream[number];
|
||||
|
||||
for (int i =0; i<number; i++) {
|
||||
pipedOutStreams[i] = new MyPipedOututStream();
|
||||
pipedInStreams[i] = new MyPipedInputStream(pipedOutStreams[i]);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public void startWriting() throws IOException{
|
||||
|
||||
BufferedInputStream bis = new BufferedInputStream(is);
|
||||
byte[] buf = new byte[1024*64];
|
||||
int read=-1;
|
||||
int writeTot = 0;
|
||||
while ((read =bis.read(buf))!=-1){
|
||||
for (int i=0; i< pipedInStreams.length; i++) {
|
||||
if (!pipedInStreams[i].isClosed()) {
|
||||
pipedOutStreams[i].write(buf, 0, read);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
writeTot+= read;
|
||||
if (allOutStreamClosed())
|
||||
break;
|
||||
|
||||
}
|
||||
|
||||
for (int i=0; i< pipedOutStreams.length; i++) {
|
||||
if (!pipedOutStreams[i].isClosed()) {
|
||||
logger.debug("closing outputstream {}",i);
|
||||
pipedOutStreams[i].close();
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug("total written {} ",writeTot);
|
||||
|
||||
}
|
||||
|
||||
|
||||
private boolean allOutStreamClosed() {
|
||||
for (int i=0; i<pipedOutStreams.length; i++) {
|
||||
if (!pipedOutStreams[i].isClosed())
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
public synchronized InputStream get() {
|
||||
logger.debug("requesting piped streams {}",index);
|
||||
if (index>=pipedInStreams.length) return null;
|
||||
return pipedInStreams[index++];
|
||||
}
|
||||
|
||||
|
||||
public class MyPipedOututStream extends PipedOutputStream{
|
||||
|
||||
boolean close = false;
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
this.close = true;
|
||||
super.close();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the close
|
||||
*/
|
||||
public boolean isClosed() {
|
||||
return close;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(byte[] b, int off, int len) throws IOException {
|
||||
try{
|
||||
super.write(b, off, len);
|
||||
}catch(IOException io){
|
||||
this.close = true;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
public class MyPipedInputStream extends PipedInputStream{
|
||||
|
||||
boolean close = false;
|
||||
|
||||
public MyPipedInputStream(PipedOutputStream src) throws IOException {
|
||||
super(src);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
this.close = true;
|
||||
logger.debug(Thread.currentThread().getName()+" close MyPipedInputStream");
|
||||
super.close();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the close
|
||||
*/
|
||||
public boolean isClosed() {
|
||||
return close;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
@ -1,36 +0,0 @@
|
||||
package org.gcube.data.access.storagehub;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.WeakHashMap;
|
||||
|
||||
import org.gcube.common.authorization.library.provider.AuthorizationProvider;
|
||||
import org.gcube.contentmanagement.blobstorage.service.IClient;
|
||||
import org.gcube.contentmanager.storageclient.model.protocol.smp.Handler;
|
||||
import org.gcube.contentmanager.storageclient.wrapper.AccessType;
|
||||
import org.gcube.contentmanager.storageclient.wrapper.MemoryType;
|
||||
import org.gcube.contentmanager.storageclient.wrapper.StorageClient;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class StorageFactory {
|
||||
|
||||
public final static String SERVICE_NAME = "home-library";
|
||||
public final static String SERVICE_CLASS = "org.gcube.portlets.user";
|
||||
|
||||
private static Map<String, IClient> clientUserMap = new WeakHashMap<String, IClient>();
|
||||
|
||||
private static Logger log = LoggerFactory.getLogger(StorageFactory.class);
|
||||
|
||||
public static IClient getGcubeStorage(){
|
||||
String login = AuthorizationProvider.instance.get().getClient().getId();
|
||||
if (!clientUserMap.containsKey(login)){
|
||||
IClient storage = new StorageClient(SERVICE_CLASS, SERVICE_NAME,
|
||||
login, AccessType.SHARED, MemoryType.PERSISTENT).getClient();
|
||||
log.info("******* Storage activateProtocol for Storage **********");
|
||||
Handler.activateProtocol();
|
||||
clientUserMap.put(login, storage);
|
||||
return storage;
|
||||
} else return clientUserMap.get(login);
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,211 @@
|
||||
package org.gcube.data.access.storagehub.handlers;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.util.Deque;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.zip.Deflater;
|
||||
import java.util.zip.ZipOutputStream;
|
||||
|
||||
import javax.inject.Inject;
|
||||
import javax.inject.Singleton;
|
||||
import javax.jcr.Node;
|
||||
import javax.jcr.RepositoryException;
|
||||
import javax.jcr.Session;
|
||||
import javax.jcr.version.Version;
|
||||
import javax.ws.rs.core.Response;
|
||||
import javax.ws.rs.core.StreamingOutput;
|
||||
|
||||
import org.apache.commons.io.FilenameUtils;
|
||||
import org.gcube.common.storagehub.model.Constants;
|
||||
import org.gcube.common.storagehub.model.Excludes;
|
||||
import org.gcube.common.storagehub.model.Paths;
|
||||
import org.gcube.common.storagehub.model.exceptions.InvalidItemException;
|
||||
import org.gcube.common.storagehub.model.exceptions.PluginInitializationException;
|
||||
import org.gcube.common.storagehub.model.exceptions.PluginNotFoundException;
|
||||
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
|
||||
import org.gcube.common.storagehub.model.exceptions.StorageIdNotFoundException;
|
||||
import org.gcube.common.storagehub.model.items.AbstractFileItem;
|
||||
import org.gcube.common.storagehub.model.items.FolderItem;
|
||||
import org.gcube.common.storagehub.model.items.Item;
|
||||
import org.gcube.common.storagehub.model.items.nodes.Content;
|
||||
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
|
||||
import org.gcube.common.storagehub.model.storages.StorageBackend;
|
||||
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
|
||||
import org.gcube.data.access.storagehub.SingleFileStreamingOutput;
|
||||
import org.gcube.data.access.storagehub.accounting.AccountingHandler;
|
||||
import org.gcube.data.access.storagehub.handlers.items.Node2ItemConverter;
|
||||
import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@Singleton
|
||||
public class DownloadHandler {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(DownloadHandler.class);
|
||||
|
||||
@Inject
|
||||
private AccountingHandler accountingHandler;
|
||||
|
||||
@Inject
|
||||
private StorageBackendHandler storageBackendHandler;
|
||||
|
||||
@Inject
|
||||
private CompressHandler compressHandler;
|
||||
|
||||
@Inject
|
||||
private VersionHandler versionHandler;
|
||||
|
||||
@Inject
|
||||
private Node2ItemConverter node2Item;
|
||||
|
||||
public Response downloadFolderItem(Session ses, String login, FolderItem item, boolean withAccounting ) throws StorageHubException, RepositoryException {
|
||||
try {
|
||||
final Deque<Item> allNodes = compressHandler.getAllNodesForZip((FolderItem)item, login, ses, accountingHandler, Excludes.GET_ONLY_CONTENT);
|
||||
final org.gcube.common.storagehub.model.Path originalPath = Paths.getPath(item.getParentPath());
|
||||
StreamingOutput so = new StreamingOutput() {
|
||||
|
||||
@Override
|
||||
public void write(OutputStream os) {
|
||||
|
||||
try(ZipOutputStream zos = new ZipOutputStream(os)){
|
||||
long start = System.currentTimeMillis();
|
||||
zos.setLevel(Deflater.BEST_COMPRESSION);
|
||||
log.debug("writing StreamOutput");
|
||||
compressHandler.zipNode(zos, allNodes, originalPath);
|
||||
log.debug("StreamOutput written in {}",(System.currentTimeMillis()-start));
|
||||
} catch (Exception e) {
|
||||
log.error("error writing stream",e);
|
||||
}
|
||||
|
||||
}
|
||||
};
|
||||
|
||||
Response response = Response
|
||||
.ok(so)
|
||||
.header("content-disposition","attachment; filename = "+item.getTitle()+".zip")
|
||||
.header("Content-Type", "application/zip")
|
||||
.header("Content-Length", -1l)
|
||||
.build();
|
||||
|
||||
if (withAccounting)
|
||||
accountingHandler.createReadObj(item.getTitle(), null, ses, (Node) item.getRelatedNode(), login, false);
|
||||
return response;
|
||||
}finally {
|
||||
if (ses!=null) ses.save();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public Response downloadFileItem(Session ses, AbstractFileItem fileItem, String login, boolean withAccounting) throws RepositoryException, PluginInitializationException, PluginNotFoundException, StorageHubException {
|
||||
|
||||
Content content = fileItem.getContent();
|
||||
|
||||
StorageBackendFactory sbf = storageBackendHandler.get(content.getPayloadBackend());
|
||||
|
||||
StorageBackend sb = sbf.create(content.getPayloadBackend());
|
||||
|
||||
InputStream streamToWrite = sb.download(content);
|
||||
|
||||
if (withAccounting) {
|
||||
String versionName = null;
|
||||
try {
|
||||
Version version = versionHandler.getCurrentVersion((Node) fileItem.getRelatedNode());
|
||||
versionName = version.getName();
|
||||
}catch(RepositoryException e) {
|
||||
log.warn("current version of {} cannot be retreived", fileItem.getId());
|
||||
}
|
||||
accountingHandler.createReadObj(fileItem.getTitle(), versionName, ses, (Node) fileItem.getRelatedNode(), login, true);
|
||||
}
|
||||
StreamingOutput so = new SingleFileStreamingOutput(streamToWrite);
|
||||
|
||||
return Response
|
||||
.ok(so)
|
||||
.header("content-disposition","attachment; filename = "+fileItem.getName())
|
||||
.header("Content-Length", fileItem.getContent().getSize())
|
||||
.header("Content-Type", fileItem.getContent().getMimeType())
|
||||
.build();
|
||||
|
||||
}
|
||||
|
||||
public Response downloadVersionedItem(Session ses, String login, AbstractFileItem currentItem, String versionName, boolean withAccounting) throws RepositoryException, StorageHubException{
|
||||
|
||||
|
||||
List<Version> jcrVersions = versionHandler.getContentVersionHistory((Node)currentItem.getRelatedNode());
|
||||
|
||||
for (Version version: jcrVersions) {
|
||||
log.debug("retrieved version id {}, name {}", version.getIdentifier(), version.getName());
|
||||
if (version.getName().equals(versionName)) {
|
||||
Content content = node2Item.getContentFromVersion(version);
|
||||
|
||||
StorageBackendFactory sbf = storageBackendHandler.get(content.getPayloadBackend());
|
||||
StorageBackend sb = sbf.create(content.getPayloadBackend());
|
||||
|
||||
InputStream streamToWrite = null;
|
||||
try {
|
||||
streamToWrite = sb.download(content);
|
||||
}catch (StorageIdNotFoundException e) {
|
||||
//TODO: temporary code, it will last until the MINIO porting will not finish
|
||||
if (sbf.getName().equals(Constants.MONGO_STORAGE)) {
|
||||
sbf = storageBackendHandler.get(Constants.DEFAULT_S3_STORAGE);
|
||||
sbf.create(new PayloadBackend(Constants.DEFAULT_S3_STORAGE, null));
|
||||
} else
|
||||
throw e;
|
||||
}
|
||||
|
||||
log.debug("retrieved storage id is {} with storageBackend {} (stream is null? {})",content.getStorageId(), sbf.getName(), streamToWrite==null );
|
||||
|
||||
String oldfilename = FilenameUtils.getBaseName(currentItem.getTitle());
|
||||
String ext = FilenameUtils.getExtension(currentItem.getTitle());
|
||||
|
||||
String fileName = String.format("%s_v%s.%s", oldfilename, version.getName(), ext);
|
||||
|
||||
if (withAccounting)
|
||||
accountingHandler.createReadObj(currentItem.getTitle(), versionName, ses, (Node) currentItem.getRelatedNode(), login, true);
|
||||
|
||||
|
||||
StreamingOutput so = new SingleFileStreamingOutput(streamToWrite);
|
||||
|
||||
return Response
|
||||
.ok(so)
|
||||
.header("content-disposition","attachment; filename = "+fileName)
|
||||
.header("Content-Length", content.getSize())
|
||||
.header("Content-Type", content.getMimeType())
|
||||
.build();
|
||||
}
|
||||
}
|
||||
throw new InvalidItemException("the version is not valid");
|
||||
}
|
||||
|
||||
|
||||
/**
 * Streams a raw payload directly from a named storage backend, bypassing the JCR tree.
 * Attachment name, size and content type are taken from the backend's user metadata
 * (keys "title", "size", "content-type" — as read below).
 *
 * @param storageId   backend-specific identifier of the payload
 * @param storageName name of the registered storage backend implementation
 * @return a 200 response streaming the payload
 */
public Response downloadFileFromStorageBackend(String storageId, String storageName) throws RepositoryException, PluginInitializationException, PluginNotFoundException, StorageHubException {

	StorageBackendFactory sbf = storageBackendHandler.get(storageName);

	// no payload configuration available here: create the backend with defaults
	StorageBackend sb = sbf.create(new PayloadBackend(storageName, null));

	InputStream streamToWrite = sb.download(storageId);

	Map<String, String> userMetadata = sb.getFileMetadata(storageId);

	log.info("returned metadata from storageBackend are: {}", userMetadata);

	// NOTE(review): assumes the backend always returns a numeric "size" entry —
	// a missing/garbled value makes parseLong throw; verify against the backends
	long size = Long.parseLong(userMetadata.get("size"));

	String title = userMetadata.get("title");
	String contentType = userMetadata.get("content-type");

	StreamingOutput so = new SingleFileStreamingOutput(streamToWrite);

	return Response
			.ok(so)
			.header("content-disposition","attachment; filename = "+title)
			.header("Content-Length", size)
			.header("Content-Type", contentType)
			.build();

}
|
||||
|
||||
|
||||
}
|
@ -0,0 +1,143 @@
|
||||
package org.gcube.data.access.storagehub.handlers;

import static org.gcube.common.storagehub.model.Constants.enchriptedPrefix;
import static org.gcube.common.storagehub.model.Constants.enchriptedVolatile;
import static org.gcube.common.storagehub.model.Constants.versionPrefix;

import java.util.Base64;

import javax.inject.Singleton;
import javax.servlet.ServletContext;

import org.gcube.common.encryption.encrypter.StringEncrypter;
import org.gcube.common.security.AuthorizedTasks;
import org.gcube.common.security.secrets.Secret;
import org.gcube.common.storagehub.model.exceptions.BackendGenericError;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.data.access.storagehub.types.LinkType;
import org.gcube.data.access.storagehub.types.PublicLink;
import org.gcube.smartgears.ContextProvider;

/**
 * Creates and resolves public download links.
 * <p>
 * A link embeds an item id (optionally with a version, or a volatile storage key)
 * encrypted with the infrastructure-context secret and Base64-url encoded, prefixed
 * by a marker that distinguishes standard from volatile links.
 */
@Singleton
public class PublicLinkHandler {

	/** Public link for a workspace item (current version). */
	public String getForItem(String itemId, ServletContext context) throws BackendGenericError{
		return getUrl(itemId, enchriptedPrefix, context);
	}

	/** Public link for a specific version: payload is "<itemId><versionPrefix><version>". */
	public String getForVersionedItem(String itemId, String version, ServletContext context) throws BackendGenericError {
		return getUrl(String.format("%s%s%s",itemId, versionPrefix, version), enchriptedPrefix, context);
	}

	/** Public link for a volatile file: payload is "<fileId>_<storageName>". */
	public String getForVolatile(String fileId, String storageName, ServletContext context) throws BackendGenericError {
		return getUrl(String.format("%s_%s",fileId, storageName), enchriptedVolatile, context);
	}

	/**
	 * Decodes and decrypts an id produced by {@link #getUrl}, returning a typed
	 * {@link PublicLink} (volatile, versioned or standard).
	 *
	 * @param enchriptedId the opaque id taken from the public URL
	 * @throws StorageHubException BackendGenericError when decryption fails
	 */
	public PublicLink resolveEnchriptedId(String enchriptedId) throws StorageHubException {

		String complexId = enchriptedId;
		boolean isVolatile = false;
		// ids without a known prefix are passed through as plain (already decrypted) ids
		if (enchriptedId.startsWith(enchriptedPrefix) || enchriptedId.startsWith(enchriptedVolatile) ) {
			final String enchriptedValue = enchriptedId.startsWith(enchriptedPrefix) ? enchriptedPrefix : enchriptedVolatile;
			isVolatile = enchriptedId.startsWith(enchriptedVolatile);

			try {
				// decryption must run under the infrastructure context, not the caller's
				String infraContext = String.format("/%s", ContextProvider.get().container().configuration().infrastructure());

				Secret infraSecret = ContextProvider.get().container().authorizationProvider().getSecretForContext(infraContext);

				complexId = AuthorizedTasks.executeSafely(() -> {
					return StringEncrypter.getEncrypter().decrypt(
							new String(Base64.getUrlDecoder().decode(enchriptedId.replace(enchriptedValue, ""))));
				}, infraSecret);

			}catch(Throwable e){
				throw new BackendGenericError("invalid public url",e);
			}
		}

		if (isVolatile) {
			// decrypted volatile payload format: "<storageKey>_<storageName>"
			String[] volatileIdSplit = complexId.split("_");
			return new VolatilePublicLink(volatileIdSplit[0], volatileIdSplit[1]);
		}else {
			if (complexId.contains(versionPrefix)) {
				// decrypted versioned payload format: "<itemId><versionPrefix><versionName>"
				String[] split = complexId.split(versionPrefix);
				String itemId = split[0];
				String versionName = split[1];

				return new ItemPublicLink(itemId, versionName);
			} else
				return new ItemPublicLink(complexId);
		}
	}

	/**
	 * Encrypts {@code toEnchript} with the infrastructure secret and builds the
	 * public URL "<resolver-basepath>/<prefix><base64url(encrypted)>".
	 */
	private String getUrl(String toEnchript, String prefix, ServletContext context) throws BackendGenericError{
		String infraContext = String.format("/%s", ContextProvider.get().container().configuration().infrastructure());
		Secret infraSecret = ContextProvider.get().container().authorizationProvider().getSecretForContext(infraContext);

		try {
			String enchriptedQueryString = AuthorizedTasks.executeSafely(
					() -> {return StringEncrypter.getEncrypter().encrypt(toEnchript);},infraSecret);

			String basepath = context.getInitParameter("resolver-basepath");
			String filePublicUrl = String.format("%s/%s%s",basepath, prefix, Base64.getUrlEncoder().encodeToString(enchriptedQueryString.getBytes()));

			return filePublicUrl;
		}catch (Throwable e) {
			throw new BackendGenericError("error encrypting item id",e );
		}
	}

	/** Link to a file in volatile storage: carries the storage key and backend name. */
	public static class VolatilePublicLink implements PublicLink {

		private String storageKey;

		private String storageName;

		protected VolatilePublicLink(String storageKey, String storageName){
			this.storageKey = storageKey;
			this.storageName = storageName;
		}

		@Override
		public LinkType getType() {return LinkType.VOLATILE;}

		@Override
		public String getId() { return storageKey; }

		@Override
		public String getStorageName() { return storageName; }
	}

	/** Link to a workspace item, either its current version (STANDARD) or a named one (VERSIONED). */
	public static class ItemPublicLink implements PublicLink {

		private String itemId;
		private String version;
		private LinkType type;

		protected ItemPublicLink(String itemId){
			this.itemId = itemId;
			this.type = LinkType.STANDARD;
		}

		protected ItemPublicLink(String itemId, String version){
			this.itemId = itemId;
			this.version = version;
			this.type = LinkType.VERSIONED;
		}

		@Override
		public LinkType getType() {return type;}

		@Override
		public String getId() { return itemId; }

		@Override
		public String getVersion() { return version; }

	}

}
|
@ -1,80 +0,0 @@
|
||||
package org.gcube.data.access.storagehub.handlers.plugins;

import java.util.Collections;
import java.util.Map;
import java.util.stream.Collectors;

import javax.annotation.PostConstruct;
import javax.enterprise.inject.Default;
import javax.enterprise.inject.Instance;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.jcr.Session;

import org.gcube.common.storagehub.model.Excludes;
import org.gcube.common.storagehub.model.exceptions.BackendGenericError;
import org.gcube.common.storagehub.model.exceptions.PluginInitializationException;
import org.gcube.common.storagehub.model.exceptions.PluginNotFoundException;
import org.gcube.common.storagehub.model.items.ExternalFolder;
import org.gcube.common.storagehub.model.items.Item;
import org.gcube.common.storagehub.model.plugins.FolderManager;
import org.gcube.common.storagehub.model.plugins.FolderManagerConnector;
import org.gcube.data.access.storagehub.handlers.items.Node2ItemConverter;
import org.gcube.data.access.storagehub.storage.backend.impl.GcubeFolderManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Resolves the {@link FolderManager} responsible for an item: the default gCube
 * manager for ordinary items, or a plugin connector for externally managed folders.
 */
@Singleton
public class FolderPluginHandler {

	private static Logger log = LoggerFactory.getLogger(FolderPluginHandler.class);

	@Inject
	Node2ItemConverter node2Item;

	private GcubeFolderManager defaultManager = new GcubeFolderManager();

	/** @return the built-in manager used for non-externally-managed items */
	public FolderManager getDefault() {
		return defaultManager;
	}

	@Inject
	Instance<FolderManagerConnector> connectors;

	private Map<String, FolderManagerConnector> connectorsMap;

	/**
	 * Builds the name → connector map.
	 * FIX: this ran in the constructor before, where the @Inject-ed
	 * {@code connectors} field is always null (field injection happens after
	 * construction), so the map was always empty. @PostConstruct runs after
	 * injection — the same pattern StorageBackendHandler uses.
	 */
	@PostConstruct
	void init(){
		if (connectors !=null)
			connectorsMap = connectors.stream().collect(Collectors.toMap(FolderManagerConnector::getName, e -> e ));
		else {
			log.info("connectors are null");
			connectorsMap = Collections.emptyMap();
		}
	}

	/**
	 * @throws PluginNotFoundException when no connector is registered under {@code name}
	 */
	public FolderManagerConnector getConnector(String name) throws PluginNotFoundException {
		if (!connectorsMap.containsKey(name)) throw new PluginNotFoundException("plugin "+name+" not found");
		return connectorsMap.get(name);
	}

	/**
	 * Walks up the tree from {@code item} to the first {@link ExternalFolder}
	 * ancestor and connects its plugin; returns the default manager for
	 * non-externally-managed items.
	 *
	 * @throws BackendGenericError when an externally managed item has no external ancestor
	 */
	public FolderManager getFolderManager(Item item) throws PluginInitializationException, PluginNotFoundException, RepositoryException, BackendGenericError{
		if (!item.isExternalManaged())
			return defaultManager;
		Session session = ((Node)item.getRelatedNode()).getSession();
		Item current = item;
		Item parent = null;
		do {
			String parentId = current.getParentId();
			Node node = session.getNodeByIdentifier(parentId);
			parent = node2Item.getItem(node, Excludes.ALL);

			if (parent !=null && parent instanceof ExternalFolder) {
				ExternalFolder extParent = (ExternalFolder) parent;
				String plugin = extParent.getManagedBy();
				Map<String, Object> parameters = extParent.getConnectionParameters().getMap();
				return getConnector(plugin).connect(extParent, parameters);
			}
			// FIX: advance to the parent; the original kept re-reading the same
			// item's parent, looping forever when it was non-null and not external
			current = parent;
		} while (parent!=null);
		throw new BackendGenericError("selected external managed item doesn't have a parent external folder");
	}
}
|
@ -1,28 +0,0 @@
|
||||
package org.gcube.data.access.storagehub.handlers.plugins;

import javax.inject.Inject;
import javax.inject.Singleton;

/**
 * Placeholder for mediating item operations (move/copy) across folder plugins.
 * Currently holds only the injected handler; the intended move logic is kept
 * below as commented-out draft code.
 */
@Singleton
public class OperationMediator {

	@Inject
	FolderPluginHandler folderHandler;

	/*
	boolean onMove(Item source, Item destination, Session session) throws PluginInitializationException, PluginNotFoundException, BackendGenericError, RepositoryException{
		FolderManager sourceFolderManager = folderHandler.getFolderManager(source);
		FolderManager destinationFolderManager = folderHandler.getFolderManager(destination);

		if (source instanceof FolderItem) {
			destinationFolderManager.onCreatedFolder((FolderItem) source);


			session.move(source.getPath(), destination.getPath());
			sourceFolderManager.onDeletingFolder((FolderItem) source);
		} else if (source instanceof AbstractFileItem){

		}
	}
	*/
}
|
@ -0,0 +1,60 @@
|
||||
package org.gcube.data.access.storagehub.handlers.plugins;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

import javax.annotation.PostConstruct;
import javax.enterprise.inject.Instance;
import javax.inject.Inject;
import javax.inject.Singleton;

import org.gcube.common.storagehub.model.Constants;
import org.gcube.common.storagehub.model.exceptions.PluginNotFoundException;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Registry of the {@link StorageBackendFactory} implementations discovered via CDI,
 * keyed by factory name. Fails fast at startup on duplicate or missing implementations.
 */
@Singleton
public class StorageBackendHandler {

	private static Logger log = LoggerFactory.getLogger(StorageBackendHandler.class);

	/** @return the payload configuration newly created folders use by default */
	public static PayloadBackend getDefaultPayloadForFolder() {
		return new PayloadBackend(Constants.DEFAULT_S3_STORAGE, null);
	}

	@Inject
	Instance<StorageBackendFactory> factories;

	Map<String, StorageBackendFactory> storagebackendMap= new HashMap<String, StorageBackendFactory>();

	// runs after injection: the factories field is null during construction
	@PostConstruct
	void init(){
		if (factories !=null)
			for (StorageBackendFactory connector : factories) {
				if (storagebackendMap.containsKey(connector.getName())) {
					log.error("multiple storage backend with the same name");
					throw new RuntimeException("multiple storage backend with the same name");
				}
				storagebackendMap.put(connector.getName(), connector);
			}
		else
			throw new RuntimeException("storage backend implementation not found");
	}

	/**
	 * @throws PluginNotFoundException when {@code payload} is null or no factory
	 *         is registered under its storage name
	 */
	public StorageBackendFactory get(PayloadBackend payload) throws PluginNotFoundException {
		if (payload == null || !storagebackendMap.containsKey(payload.getStorageName()))
			throw new PluginNotFoundException(String.format("implementation for storage %s not found", payload.getStorageName()));
		return storagebackendMap.get(payload.getStorageName());
	}

	/**
	 * @throws PluginNotFoundException when no factory is registered under
	 *         {@code storageName}.
	 *         FIX: the original declared this exception but silently returned
	 *         null instead of throwing it, unlike the PayloadBackend overload —
	 *         deferring the failure to an NPE at the caller.
	 */
	public StorageBackendFactory get(String storageName) throws PluginNotFoundException {
		StorageBackendFactory factory = storagebackendMap.get(storageName);
		if (factory == null)
			throw new PluginNotFoundException(String.format("implementation for storage %s not found", storageName));
		return factory;
	}

	/** @return all registered factories (live view of the internal map's values) */
	public Collection<StorageBackendFactory> getAllImplementations() {
		return storagebackendMap.values();
	}
}
|
@ -0,0 +1,50 @@
|
||||
package org.gcube.data.access.storagehub.handlers.plugins;

import java.io.InputStream;

import javax.inject.Inject;
import javax.inject.Singleton;

import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.common.storagehub.model.items.nodes.Content;
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
import org.gcube.common.storagehub.model.storages.MetaInfo;
import org.gcube.common.storagehub.model.storages.StorageBackend;
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Mediates payload copies between storage backends: a same-backend copy is
 * delegated to the backend's native copy, a cross-backend copy is performed
 * as a download/upload round trip.
 */
@Singleton
public class StorageOperationMediator {

	Logger log = LoggerFactory.getLogger(StorageOperationMediator.class);

	@Inject
	StorageBackendHandler storageBackendHandler;

	/**
	 * Copies {@code source} to {@code destination}.
	 *
	 * @param newName       file name in the destination
	 * @param newParentPath destination parent path
	 * @param login         user performing the copy (passed to the upload)
	 * @return metadata of the newly written payload
	 */
	public MetaInfo copy(Content source, PayloadBackend destination, String newName, String newParentPath, String login) throws StorageHubException{

		log.info("creating Storages for source {} and destination {}", source.getPayloadBackend(), destination.getStorageName());

		StorageBackendFactory sourceSBF = storageBackendHandler.get(source.getPayloadBackend());
		//TODO: add metadata taken from content node
		StorageBackend sourceSB = sourceSBF.create(source.getPayloadBackend());

		StorageBackendFactory destSBF = storageBackendHandler.get(destination);
		StorageBackend destSB = destSBF.create(destination);

		if (sourceSB.equals(destSB)) {
			// FIX: corrected the misspelled log messages ("destintiona")
			log.info("source and destination are the same storage");
			// same backend: let it copy natively, no data transfer through this JVM
			return sourceSB.onCopy(source, newParentPath, newName);
		}else {
			log.info("source and destination are different storage");
			InputStream stream = sourceSB.download(source);
			MetaInfo info = destSB.upload(stream, newParentPath, newName, source.getSize(), login);
			return info;
		}
	}

	// NOTE(review): placeholder — move is not implemented yet, always reports success
	public boolean move(){
		return true;
	}
}
|
@ -0,0 +1,41 @@
|
||||
package org.gcube.data.access.storagehub.health;
|
||||
|
||||
import org.gcube.common.health.api.HealthCheck;
|
||||
import org.gcube.common.health.api.ReadinessChecker;
|
||||
import org.gcube.common.health.api.response.HealthCheckResponse;
|
||||
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
|
||||
import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler;
|
||||
import org.gcube.data.access.storagehub.storage.backend.impl.GcubeDefaultS3StorageBackendFactory;
|
||||
import org.gcube.data.access.storagehub.storage.backend.impl.S3Backend;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@ReadinessChecker
|
||||
public class DefaultStorageCheck implements HealthCheck{
|
||||
|
||||
private static Logger log = LoggerFactory.getLogger(DefaultStorageCheck.class);
|
||||
|
||||
PayloadBackend defaultPayload = StorageBackendHandler.getDefaultPayloadForFolder();
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return String.format("default storage (%s)",defaultPayload.getStorageName());
|
||||
}
|
||||
|
||||
@Override
|
||||
public HealthCheckResponse check() {
|
||||
try {
|
||||
GcubeDefaultS3StorageBackendFactory storageFactory =new GcubeDefaultS3StorageBackendFactory();
|
||||
storageFactory.init();
|
||||
if (((S3Backend)storageFactory.create(defaultPayload)).isAlive())
|
||||
return HealthCheckResponse.builder(getName()).up().build();
|
||||
else
|
||||
return HealthCheckResponse.builder(getName()).down().error("error contacting storage").build();
|
||||
} catch (Exception e) {
|
||||
log.error("error checking defaultStorage",e);
|
||||
return HealthCheckResponse.builder(getName()).down().error(e.getMessage()).build();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,32 @@
|
||||
package org.gcube.data.access.storagehub.health;
|
||||
|
||||
import javax.jcr.LoginException;
|
||||
import javax.jcr.Session;
|
||||
|
||||
import org.gcube.common.health.api.HealthCheck;
|
||||
import org.gcube.common.health.api.ReadinessChecker;
|
||||
import org.gcube.common.health.api.response.HealthCheckResponse;
|
||||
import org.gcube.data.access.storagehub.RepositoryInitializerImpl;
|
||||
|
||||
@ReadinessChecker
|
||||
public class JCRRepositoryCheck implements HealthCheck{
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "Jackrabbit repository";
|
||||
}
|
||||
|
||||
@Override
|
||||
public HealthCheckResponse check() {
|
||||
try {
|
||||
Session session = RepositoryInitializerImpl.get().getRepository().login();
|
||||
if (session != null) session.logout();
|
||||
return HealthCheckResponse.builder(getName()).up().build();
|
||||
}catch (LoginException e) { }
|
||||
catch(Throwable ex) {
|
||||
return HealthCheckResponse.builder(getName()).down().error(ex.getMessage()).build();
|
||||
}
|
||||
return HealthCheckResponse.builder(getName()).up().build();
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,49 @@
|
||||
package org.gcube.data.access.storagehub.services;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response.Status;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Serves the static API documentation bundled in the webapp, defaulting to
 * index.html when the bare "/api-docs" path is requested.
 */
@Path("api-docs")
public class DocsGenerator {

	private static Logger logger = LoggerFactory.getLogger(DocsGenerator.class);

	/**
	 * Streams the requested doc file from the webapp's real path.
	 *
	 * @throws WebApplicationException 503 when the file cannot be opened
	 */
	@GET
	@Path("/{any: .*}")
	public InputStream toDoc(@Context HttpServletRequest req) throws WebApplicationException {
		logger.info(DocsGenerator.class.getSimpleName() + " toDoc called");

		String pathInfo = req.getPathInfo();
		logger.debug("pathInfo {}", pathInfo);
		try {

			if (pathInfo.endsWith("/api-docs")) {
				// FIX: the original appended "index.html" without a separator,
				// producing ".../api-docsindex.html"
				pathInfo += "/index.html";
			}

			if (pathInfo.endsWith("/api-docs/")) {
				pathInfo += "index.html";
			}

			logger.info("going to {}", pathInfo);

			// NOTE(review): pathInfo is client-controlled and reaches the filesystem
			// unnormalized — confirm the container rejects "../" segments, or add a
			// base-path containment check here (path traversal risk).
			String realPath = req.getServletContext().getRealPath(pathInfo);
			return new FileInputStream(new File(realPath));

		} catch (Exception e) {
			logger.error("error getting the docs", e);
			throw new WebApplicationException(e,Status.SERVICE_UNAVAILABLE);
		}
	}
}
|
@ -1,10 +1,13 @@
|
||||
package org.gcube.data.access.storagehub.services;

import javax.jcr.Repository;
import javax.jcr.SimpleCredentials;

/**
 * Lifecycle facade for the JCR repository: exposes the repository instance,
 * one-time container bootstrap, and shutdown.
 */
public interface RepositoryInitializer {

	/** @return the initialized JCR repository */
	Repository getRepository();

	/** Bootstraps the container on first start using the given admin credentials. */
	void initContainerAtFirstStart(SimpleCredentials credentials);

	/** Releases the repository resources. */
	void shutdown();
}
|
||||
|
@ -0,0 +1,40 @@
|
||||
package org.gcube.data.access.storagehub.services;

import java.util.ArrayList;
import java.util.List;

import javax.inject.Inject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

import org.gcube.common.storagehub.model.storages.StorageDescriptor;
import org.gcube.data.access.storagehub.StorageHubAppllicationManager;
import org.gcube.data.access.storagehub.handlers.plugins.StorageBackendHandler;
import org.gcube.smartgears.annotations.ManagedBy;
import org.gcube.smartgears.utils.InnerMethodName;

import com.webcohesion.enunciate.metadata.rs.RequestHeader;
import com.webcohesion.enunciate.metadata.rs.RequestHeaders;

/**
 * REST endpoint listing the storage backend implementations registered
 * in this service instance.
 */
@Path("storages")
@ManagedBy(StorageHubAppllicationManager.class)
@RequestHeaders({
	@RequestHeader( name = "Authorization", description = "Bearer token, see https://dev.d4science.org/how-to-access-resources"),
})
public class StorageManager {

	@Inject
	StorageBackendHandler storageBackendHandler;

	/**
	 * @return one descriptor (name only) per registered storage backend factory
	 */
	@GET
	@Path("/")
	@Produces(MediaType.APPLICATION_JSON)
	public List<StorageDescriptor> getStorages(){
		InnerMethodName.instance.set("getStorages");
		List<StorageDescriptor> storages = new ArrayList<>();
		storageBackendHandler.getAllImplementations().forEach( f -> storages.add(new StorageDescriptor(f.getName())));
		return storages;
	}
}
|
@ -0,0 +1,60 @@
|
||||
package org.gcube.data.access.storagehub.services.admin;

import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;

import javax.jcr.Session;
import javax.jcr.nodetype.NodeType;

import org.apache.jackrabbit.api.JackrabbitSession;
import org.apache.jackrabbit.api.JackrabbitWorkspace;
import org.apache.jackrabbit.api.security.authorization.PrivilegeManager;
import org.apache.jackrabbit.commons.cnd.CndImporter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * One-time repository bootstrap: registers the custom node types from the
 * bundled CND file, creates the Home/Share root nodes and the hl:writeAll
 * privilege, then saves the session.
 */
public class InitScript {

	private static Logger log = LoggerFactory.getLogger(InitScript.class);

	/**
	 * Runs the full bootstrap on the given admin session.
	 *
	 * @throws Exception on any failure (logged and rethrown so startup aborts)
	 */
	public void init(JackrabbitSession ses) throws Exception{
		log.info("init started");
		try {
			initNodeTypes(ses);
			ses.getRootNode().addNode("Home");
			ses.getRootNode().addNode("Share");
			PrivilegeManager pm = ((JackrabbitWorkspace) ses.getWorkspace()).getPrivilegeManager();
			pm.registerPrivilege("hl:writeAll", false, new String[0]);
			ses.save();
		}catch (Exception e) {
			log.error("init error", e);
			throw e;
		}
		log.info("init finished");
	}

	/**
	 * Registers the custom node types defined in /init/NodeType.cnd.
	 * FIX: the classpath stream was never closed — wrapped in try-with-resources.
	 */
	void initNodeTypes(Session ses) throws Exception{
		// try-with-resources closes the stream (and its reader) on all paths
		try (InputStream stream = InitScript.class.getResourceAsStream("/init/NodeType.cnd")) {

			if (stream == null)
				throw new Exception("NodeType.cnd inputStream is null");

			// StandardCharsets.UTF_8 instead of Charset.forName("UTF-8"): same
			// charset, no lookup, no possible UnsupportedCharsetException path
			InputStreamReader inputstream = new InputStreamReader(stream, StandardCharsets.UTF_8);

			// Register the custom node types defined in the CND file, using JCR Commons CndImporter
			log.info("start to register the custom node types defined in the CND file...");

			NodeType[] nodeTypes = CndImporter.registerNodeTypes(inputstream, ses, true);

			for (NodeType nt : nodeTypes)
				log.info("Registered: {} ", nt.getName());

			log.info("custom node types registered");
		}
	}
}
|
@ -0,0 +1,94 @@
|
||||
package org.gcube.data.access.storagehub.services.admin;
|
||||
|
||||
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;
|
||||
|
||||
/**
 * Mutable status of a long-running admin script execution: lifecycle state
 * (Running → Success/Failed), timing, and bookkeeping identifiers.
 */
public class ScriptStatus {

	enum Status {
		Running, Success, Failed
	}

	// FIX: the original shared a static SimpleDateFormat, which is not
	// thread-safe; DateTimeFormatter is immutable and safe to share.
	private static final DateTimeFormatter DATE_FORMAT = DateTimeFormatter.ofPattern("dd MMM yyyy HH:mm:ss:SSS Z");

	private Status status;

	private String errorMessage;

	private String resultPath;

	// epoch millis of construction
	private long start;

	// epoch millis of completion; -1 while still running
	private long finished = -1;

	private String runningId;

	private String executionServer;

	/**
	 * Creates a status in the Running state, timestamped now.
	 *
	 * @param runningId       identifier of this execution
	 * @param resultPath      where the script writes its result
	 * @param executionServer host executing the script
	 */
	public ScriptStatus(String runningId, String resultPath, String executionServer) {
		super();
		this.status = Status.Running;
		this.start = System.currentTimeMillis();
		this.runningId = runningId;
		this.resultPath = resultPath;
		this.executionServer = executionServer;
	}

	/** Marks the execution failed with the given error and stamps the end time. */
	public void setFailed(String error) {
		this.status = Status.Failed;
		this.errorMessage = error;
		this.finished = System.currentTimeMillis();
	}

	/** Marks the execution successful and stamps the end time. */
	public void setSuccess() {
		this.status = Status.Success;
		this.finished = System.currentTimeMillis();
	}

	public Status getStatus() {
		return status;
	}

	public String getErrorMessage() {
		return errorMessage;
	}

	/** @return the start instant formatted as "dd MMM yyyy HH:mm:ss:SSS Z" in the system zone */
	public String getStartDate() {
		return DATE_FORMAT.format(Instant.ofEpochMilli(this.start).atZone(ZoneId.systemDefault()));
	}

	/** @return elapsed millis; measured against "now" while still running */
	public long getDurationInMillis() {
		long toUse = finished;
		if (finished < 0)
			toUse = System.currentTimeMillis();
		return toUse-start;
	}

	/** @return elapsed time as "M minutes S seconds"; measured against "now" while running */
	public String getHumanReadableDuration() {
		long toUse = finished;
		if (finished < 0)
			toUse = System.currentTimeMillis();

		long duration = toUse - this.start;

		long minutes = (duration/1000)/60;
		long seconds = (duration/1000)%60;

		return String.format("%d minutes %d seconds", minutes, seconds);
	}

	public String getResultPath() {
		return resultPath;
	}

	public String getRunningId() {
		return runningId;
	}

	public String getExecutionServer() {
		return executionServer;
	}

}
|
@ -0,0 +1,2 @@
|
||||
/MockStorage.java
|
||||
/MockStorageFactory.java
|
@ -1,22 +0,0 @@
|
||||
package org.gcube.data.access.storagehub.storage.backend.impl;

import java.util.Map;

import javax.inject.Singleton;

import org.gcube.common.storagehub.model.exceptions.PluginInitializationException;
import org.gcube.common.storagehub.model.items.FolderItem;
import org.gcube.common.storagehub.model.plugins.FolderManager;
import org.gcube.common.storagehub.model.plugins.FolderManagerConnector;

/**
 * Connector for the built-in gCube folder manager: ignores the folder item
 * and its parameters and always hands back a fresh {@link GcubeFolderManager}.
 */
@Singleton
public class GCubeFolderManagerConnector implements FolderManagerConnector {

	@Override
	public FolderManager connect(FolderItem item, Map<String, Object> parameters) throws PluginInitializationException {
		// no connection state needed for the default implementation
		return new GcubeFolderManager();
	}

}
|
@ -0,0 +1,116 @@
|
||||
package org.gcube.data.access.storagehub.storage.backend.impl;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Collections;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
import org.gcube.common.security.providers.SecretManagerProvider;
|
||||
import org.gcube.common.storagehub.model.exceptions.StorageIdNotFoundException;
|
||||
import org.gcube.common.storagehub.model.items.nodes.Content;
|
||||
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
|
||||
import org.gcube.common.storagehub.model.storages.MetaInfo;
|
||||
import org.gcube.common.storagehub.model.storages.StorageBackend;
|
||||
import org.gcube.contentmanagement.blobstorage.service.IClient;
|
||||
import org.gcube.contentmanager.storageclient.wrapper.AccessType;
|
||||
import org.gcube.contentmanager.storageclient.wrapper.MemoryType;
|
||||
import org.gcube.contentmanager.storageclient.wrapper.StorageClient;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class GCubeMongoStorageBackend extends StorageBackend {
|
||||
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(GCubeMongoStorageBackend.class);
|
||||
|
||||
private final static String SERVICE_NAME = "home-library";
|
||||
private final static String SERVICE_CLASS = "org.gcube.portlets.user";
|
||||
|
||||
|
||||
public GCubeMongoStorageBackend(PayloadBackend payloadConf) {
|
||||
super(payloadConf);
|
||||
}
|
||||
|
||||
@Override
|
||||
public InputStream download(Content content) throws StorageIdNotFoundException {
|
||||
return download(content.getStorageId());
|
||||
}
|
||||
|
||||
@Override
|
||||
public InputStream download(String id) throws StorageIdNotFoundException{
|
||||
IClient storageClient = getStorageClient(SecretManagerProvider.instance.get().getOwner().getId()).getClient();
|
||||
if (!storageClient.exist().RFile(id))
|
||||
throw new StorageIdNotFoundException(id, this.getPayloadConfiguration().getStorageName());
|
||||
return storageClient.get().RFileAsInputStream(id);
|
||||
}
|
||||
|
||||
protected StorageClient getStorageClient(String login){
|
||||
return new StorageClient(SERVICE_CLASS, SERVICE_NAME, login, AccessType.SHARED, MemoryType.PERSISTENT);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public MetaInfo onCopy(Content content, String newParentPath, String newName) {
|
||||
log.info("copying storage Id {} to newPath {}", content.getStorageId(), newParentPath);
|
||||
String newRemotePath = Paths.get(newParentPath, newName).toString();
|
||||
String newStorageID = getStorageClient(SecretManagerProvider.instance.get().getOwner().getId()).getClient().copyFile(true).from(content.getStorageId()).to(newRemotePath);
|
||||
log.info("The id returned by storage is {}", newStorageID);
|
||||
return new MetaInfo(content.getSize(),newStorageID, newRemotePath, getPayloadConfiguration());
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetaInfo onMove(Content content, String newParentPath) {
|
||||
//new contentPath can be set as remotePath to the storage backend ?
|
||||
return new MetaInfo(content.getSize(),content.getStorageId(), content.getRemotePath(), getPayloadConfiguration());
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetaInfo upload(InputStream stream, String relPath, String name, String user) {
|
||||
log.debug("uploading file");
|
||||
IClient storageClient = getStorageClient(user).getClient();
|
||||
String uid = UUID.randomUUID().toString();
|
||||
String remotePath= String.format("%s/%s-%s",relPath,uid,name);
|
||||
String storageId =storageClient.put(true).LFile(stream).RFile(remotePath);
|
||||
long size = storageClient.getSize().RFileById(storageId);
|
||||
MetaInfo info = new MetaInfo(size, storageId, remotePath, getPayloadConfiguration());
|
||||
return info;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public MetaInfo upload(InputStream stream, String relPath, String name, Long size, String user) {
|
||||
return this.upload(stream, relPath, name, user);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public MetaInfo upload(InputStream stream, String relativePath, String name, String storageId, Long size, String user) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete(String storageId) {
|
||||
log.debug("deleting object {} ",storageId);
|
||||
IClient storageClient = getStorageClient(SecretManagerProvider.instance.get().getOwner().getId()).getClient();
|
||||
storageClient.remove().RFileById(storageId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTotalSizeStored() {
|
||||
IClient storageClient = getStorageClient(SecretManagerProvider.instance.get().getOwner().getId()).getClient();
|
||||
return storageClient.getTotalUserVolume();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTotalItemsCount() {
|
||||
IClient storageClient = getStorageClient(SecretManagerProvider.instance.get().getOwner().getId()).getClient();
|
||||
return storageClient.getUserTotalItems();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, String> getFileMetadata(String id) {
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
|
||||
|
||||
}
|
@ -0,0 +1,29 @@
|
||||
package org.gcube.data.access.storagehub.storage.backend.impl;
|
||||
|
||||
import javax.inject.Singleton;
|
||||
|
||||
import org.gcube.common.storagehub.model.Constants;
|
||||
import org.gcube.common.storagehub.model.exceptions.InvalidCallParameters;
|
||||
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
|
||||
import org.gcube.common.storagehub.model.storages.StorageBackend;
|
||||
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
|
||||
|
||||
@Singleton
|
||||
public class GCubeMongoStorageBackendFactory implements StorageBackendFactory {
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return Constants.MONGO_STORAGE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isSystemStorage() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public StorageBackend create(PayloadBackend payloadConfiguration) throws InvalidCallParameters {
|
||||
return new GCubeMongoStorageBackend(payloadConfiguration);
|
||||
}
|
||||
|
||||
}
|
@ -1,92 +0,0 @@
|
||||
package org.gcube.data.access.storagehub.storage.backend.impl;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.common.authorization.library.provider.AuthorizationProvider;
|
||||
import org.gcube.common.storagehub.model.items.AbstractFileItem;
|
||||
import org.gcube.common.storagehub.model.items.nodes.Content;
|
||||
import org.gcube.common.storagehub.model.storages.MetaInfo;
|
||||
import org.gcube.common.storagehub.model.storages.StorageBackend;
|
||||
import org.gcube.contentmanagement.blobstorage.service.IClient;
|
||||
import org.gcube.contentmanager.storageclient.wrapper.AccessType;
|
||||
import org.gcube.contentmanager.storageclient.wrapper.MemoryType;
|
||||
import org.gcube.contentmanager.storageclient.wrapper.StorageClient;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class GCubeStorageBackend implements StorageBackend {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(GCubeStorageBackend.class);
|
||||
|
||||
private final static String SERVICE_NAME = "home-library";
|
||||
private final static String SERVICE_CLASS = "org.gcube.portlets.user";
|
||||
|
||||
|
||||
|
||||
protected GCubeStorageBackend() {}
|
||||
|
||||
@Override
|
||||
public InputStream download(Content content) {
|
||||
return getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient().get().RFileAsInputStream(content.getStorageId());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return GCubeStorageBackend.class.getName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String onCopy(AbstractFileItem item) {
|
||||
log.info("copying storage Id {} to newPath {}", item.getContent().getStorageId(), item.getPath());
|
||||
String newStorageID = getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient().copyFile(true).from(item.getContent().getStorageId()).to(item.getPath());
|
||||
log.info("The id returned by storage is {}", newStorageID);
|
||||
return newStorageID;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String onMove(AbstractFileItem item) {
|
||||
return item.getContent().getStorageId();
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetaInfo upload(InputStream stream, String relPath, String name) {
|
||||
log.debug("uploading file");
|
||||
IClient storageClient = getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient();
|
||||
String uid = UUID.randomUUID().toString();
|
||||
String remotePath= String.format("%s/%s-%s",relPath,uid,name);
|
||||
String storageId =storageClient.put(true).LFile(stream).RFile(remotePath);
|
||||
long size = storageClient.getSize().RFileById(storageId);
|
||||
MetaInfo info = new MetaInfo();
|
||||
info.setSize(size);
|
||||
info.setStorageId(storageId);
|
||||
info.setRemotePath(remotePath);
|
||||
return info;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDelete(Content content) {
|
||||
log.debug("deleting");
|
||||
IClient storageClient = getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient();
|
||||
storageClient.remove().RFileById(content.getStorageId());
|
||||
}
|
||||
|
||||
private static StorageClient getStorageClient(String login){
|
||||
return new StorageClient(SERVICE_CLASS, SERVICE_NAME, login, AccessType.SHARED, MemoryType.PERSISTENT);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTotalSizeStored() {
|
||||
IClient storageClient = getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient();
|
||||
return storageClient.getTotalUserVolume();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTotalItemsCount() {
|
||||
IClient storageClient = getStorageClient(AuthorizationProvider.instance.get().getClient().getId()).getClient();
|
||||
return storageClient.getUserTotalItems();
|
||||
}
|
||||
|
||||
|
||||
}
|
@ -0,0 +1,75 @@
|
||||
package org.gcube.data.access.storagehub.storage.backend.impl;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
import java.util.UUID;
|
||||
|
||||
import javax.annotation.PostConstruct;
|
||||
import javax.inject.Singleton;
|
||||
|
||||
import org.gcube.common.storagehub.model.Metadata;
|
||||
import org.gcube.common.storagehub.model.exceptions.InvalidCallParameters;
|
||||
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
|
||||
import org.gcube.common.storagehub.model.storages.StorageBackend;
|
||||
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
|
||||
|
||||
|
||||
|
||||
@Singleton
|
||||
public class GCubeVolatileStorageBackendFactory implements StorageBackendFactory {
|
||||
|
||||
private StorageBackend singleton;
|
||||
|
||||
private static final String PROP_PREFIX = "volatile.";
|
||||
|
||||
public static final String NAME = "volatile-minio";
|
||||
|
||||
@PostConstruct
|
||||
public void init(){
|
||||
S3Backend s3Backend = new S3Backend(new PayloadBackend(getName(), getParameters()), (String) -> UUID.randomUUID().toString());
|
||||
s3Backend.setPayloadConfiguration(new PayloadBackend(getName(),null));
|
||||
this.singleton = s3Backend;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isSystemStorage() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public StorageBackend create(PayloadBackend payloadConfiguration) throws InvalidCallParameters {
|
||||
if (payloadConfiguration.getParameters().isEmpty())
|
||||
return singleton;
|
||||
|
||||
throw new InvalidCallParameters("gcubeMinIo created with not empty parameters");
|
||||
}
|
||||
|
||||
private Metadata getParameters(){
|
||||
try (InputStream input = GCubeVolatileStorageBackendFactory.class.getClassLoader().getResourceAsStream("storage-settings.properties")) {
|
||||
|
||||
Properties prop = new Properties();
|
||||
|
||||
prop.load(input);
|
||||
|
||||
Map<String, Object> params = new HashMap<String, Object>();
|
||||
|
||||
prop.forEach((k,v) -> { if (k.toString().startsWith(PROP_PREFIX)) params.put(k.toString().replace(PROP_PREFIX, ""), v.toString());});
|
||||
|
||||
return new Metadata(params);
|
||||
|
||||
} catch (IOException ex) {
|
||||
throw new RuntimeException("error initializing MinIO", ex);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,73 @@
|
||||
package org.gcube.data.access.storagehub.storage.backend.impl;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
import java.util.UUID;
|
||||
|
||||
import javax.annotation.PostConstruct;
|
||||
import javax.inject.Singleton;
|
||||
|
||||
import org.gcube.common.storagehub.model.Constants;
|
||||
import org.gcube.common.storagehub.model.Metadata;
|
||||
import org.gcube.common.storagehub.model.exceptions.InvalidCallParameters;
|
||||
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
|
||||
import org.gcube.common.storagehub.model.storages.StorageBackend;
|
||||
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
|
||||
|
||||
@Singleton
|
||||
public class GcubeDefaultS3StorageBackendFactory implements StorageBackendFactory {
|
||||
|
||||
private StorageBackend singleton;
|
||||
|
||||
private static final String PROP_PREFIX = "default.";
|
||||
|
||||
@PostConstruct
|
||||
public void init(){
|
||||
S3Backend s3Backend = new S3Backend(new PayloadBackend(getName(), getParameters()), (String) -> UUID.randomUUID().toString());
|
||||
s3Backend.setPayloadConfiguration(new PayloadBackend(getName(),null));
|
||||
this.singleton = s3Backend;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return Constants.DEFAULT_S3_STORAGE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isSystemStorage() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public StorageBackend create(PayloadBackend payloadConfiguration) throws InvalidCallParameters {
|
||||
if (payloadConfiguration.getParameters().isEmpty())
|
||||
return singleton;
|
||||
|
||||
throw new InvalidCallParameters("S3 created with not empty parameters");
|
||||
}
|
||||
|
||||
private Metadata getParameters(){
|
||||
try (InputStream input = GcubeDefaultS3StorageBackendFactory.class.getClassLoader().getResourceAsStream("storage-settings.properties")) {
|
||||
|
||||
Properties prop = new Properties();
|
||||
|
||||
prop.load(input);
|
||||
|
||||
Map<String, Object> params = new HashMap<String, Object>();
|
||||
|
||||
|
||||
prop.forEach((k,v) -> { if (k.toString().startsWith(PROP_PREFIX)) params.put(k.toString().replace(PROP_PREFIX, ""), v);});
|
||||
|
||||
return new Metadata(params);
|
||||
|
||||
} catch (IOException ex) {
|
||||
throw new RuntimeException("error initializing S3", ex);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
}
|
@ -1,48 +0,0 @@
|
||||
package org.gcube.data.access.storagehub.storage.backend.impl;
|
||||
|
||||
import javax.inject.Singleton;
|
||||
|
||||
import org.gcube.common.storagehub.model.items.FolderItem;
|
||||
import org.gcube.common.storagehub.model.plugins.FolderManager;
|
||||
import org.gcube.common.storagehub.model.storages.StorageBackend;
|
||||
|
||||
@Singleton
|
||||
public class GcubeFolderManager implements FolderManager {
|
||||
|
||||
@Override
|
||||
public StorageBackend getStorageBackend() {
|
||||
return new GCubeStorageBackend();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean manageVersion() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCreatedFolder(FolderItem folder) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDeletingFolder(FolderItem folder) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onMovedFolder(FolderItem movedFolder) {
|
||||
// TODO Auto-generated method stub
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCopiedFolder(FolderItem copiedFolder) {
|
||||
// TODO Auto-generated method stub
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public FolderItem getRootFolder() {
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,74 @@
|
||||
package org.gcube.data.access.storagehub.storage.backend.impl;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
import java.util.UUID;
|
||||
|
||||
import javax.annotation.PostConstruct;
|
||||
import javax.inject.Singleton;
|
||||
|
||||
import org.gcube.common.storagehub.model.Metadata;
|
||||
import org.gcube.common.storagehub.model.exceptions.InvalidCallParameters;
|
||||
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
|
||||
import org.gcube.common.storagehub.model.storages.StorageBackend;
|
||||
import org.gcube.common.storagehub.model.storages.StorageBackendFactory;
|
||||
|
||||
@Singleton
|
||||
public class GcubeMinIOStorageBackendFactory implements StorageBackendFactory {
|
||||
|
||||
private static final String PROP_PREFIX = "default.";
|
||||
|
||||
private Metadata baseParameters;
|
||||
|
||||
@PostConstruct
|
||||
public void init(){
|
||||
baseParameters = getParameters();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "gcube-default-minio";
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isSystemStorage() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public StorageBackend create(PayloadBackend payloadConfiguration) throws InvalidCallParameters {
|
||||
if (payloadConfiguration.getParameters().isEmpty())
|
||||
throw new InvalidCallParameters(getName()+": null or empty parameter not allowed");
|
||||
String bucketName = (String)payloadConfiguration.getParameters().get("bucketName");
|
||||
if (bucketName == null || bucketName.isBlank())
|
||||
throw new InvalidCallParameters(getName()+": 'bucketName' cannot be blank or empty");
|
||||
|
||||
Metadata metadata = new Metadata(new HashMap<>(baseParameters.getMap()));
|
||||
metadata.getMap().putAll(payloadConfiguration.getParameters());
|
||||
return new S3Backend(new PayloadBackend(getName(), metadata), (String) -> UUID.randomUUID().toString());
|
||||
}
|
||||
|
||||
private Metadata getParameters(){
|
||||
try (InputStream input = GcubeMinIOStorageBackendFactory.class.getClassLoader().getResourceAsStream("storage-settings.properties")) {
|
||||
|
||||
Properties prop = new Properties();
|
||||
|
||||
prop.load(input);
|
||||
|
||||
Map<String, Object> params = new HashMap<String, Object>();
|
||||
|
||||
|
||||
prop.forEach((k,v) -> { if (k.toString().startsWith(PROP_PREFIX)) params.put(k.toString().replace(PROP_PREFIX, ""), v);});
|
||||
|
||||
return new Metadata(params);
|
||||
|
||||
} catch (IOException ex) {
|
||||
throw new RuntimeException("error initializing MinIO", ex);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
}
|
@ -0,0 +1,206 @@
|
||||
package org.gcube.data.access.storagehub.storage.backend.impl;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.gcube.common.storagehub.model.exceptions.StorageIdNotFoundException;
|
||||
import org.gcube.common.storagehub.model.items.nodes.Content;
|
||||
import org.gcube.common.storagehub.model.items.nodes.PayloadBackend;
|
||||
import org.gcube.common.storagehub.model.storages.MetaInfo;
|
||||
import org.gcube.common.storagehub.model.storages.StorageBackend;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.amazonaws.ClientConfiguration;
|
||||
import com.amazonaws.Protocol;
|
||||
import com.amazonaws.auth.AWSCredentials;
|
||||
import com.amazonaws.auth.BasicAWSCredentials;
|
||||
import com.amazonaws.services.s3.AmazonS3;
|
||||
import com.amazonaws.services.s3.AmazonS3Client;
|
||||
import com.amazonaws.services.s3.S3ClientOptions;
|
||||
import com.amazonaws.services.s3.model.ObjectMetadata;
|
||||
|
||||
|
||||
public class S3Backend extends StorageBackend{
|
||||
|
||||
private static Logger log = LoggerFactory.getLogger(S3Backend.class);
|
||||
|
||||
Function<Void, String> keyGenerator;
|
||||
String bucketName;
|
||||
AmazonS3 client;
|
||||
|
||||
//private static final long PART_SIZE = 100000000;
|
||||
|
||||
|
||||
@Override
|
||||
protected void setPayloadConfiguration(PayloadBackend payloadConfiguration) {
|
||||
super.setPayloadConfiguration(payloadConfiguration);
|
||||
}
|
||||
|
||||
public S3Backend(PayloadBackend payloadConfiguration, Function<Void, String> keyGenerator) {
|
||||
super(payloadConfiguration);
|
||||
this.keyGenerator = keyGenerator;
|
||||
Map<String, Object> parameters = payloadConfiguration.getParameters();
|
||||
this.bucketName = (String)parameters.get("bucketName");
|
||||
String accessKey = (String)parameters.get("key");
|
||||
String secret = (String)parameters.get("secret");
|
||||
String url = (String)parameters.get("url");
|
||||
boolean createBucket = Boolean.valueOf((String)parameters.get("createBucket"));
|
||||
|
||||
log.debug("parameters are: bucketName = {}, url = {}, createBucket = {}",this.bucketName, url, createBucket);
|
||||
|
||||
try {
|
||||
AWSCredentials credentials = new BasicAWSCredentials(accessKey, secret);
|
||||
ClientConfiguration clientConfig = new ClientConfiguration();
|
||||
clientConfig.setProtocol(Protocol.HTTPS);
|
||||
|
||||
client = new AmazonS3Client(credentials, clientConfig);
|
||||
client.setEndpoint(url);
|
||||
client.setS3ClientOptions(new S3ClientOptions().withPathStyleAccess(true));
|
||||
|
||||
if (createBucket && !client.doesBucketExistV2(bucketName)) {
|
||||
client.createBucket(bucketName);
|
||||
log.debug("bucket {} created",this.bucketName);
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("error initializing s3",e);
|
||||
throw new RuntimeException("error initializing s3", e);
|
||||
}
|
||||
}
|
||||
|
||||
public boolean isAlive() {
|
||||
|
||||
boolean toReturn = true;
|
||||
try {
|
||||
client.doesBucketExistV2(bucketName);
|
||||
}catch (Exception e) {
|
||||
log.error("error checking aliveness",e);
|
||||
toReturn = false;
|
||||
}
|
||||
log.debug("the S3 backend is {} Alive",toReturn?"":"not");
|
||||
return toReturn;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetaInfo onCopy(Content content, String newParentPath, String newName) {
|
||||
|
||||
String sourceKey = content.getStorageId();
|
||||
String destinationKey = keyGenerator.apply(null);
|
||||
try {
|
||||
client.copyObject(bucketName, sourceKey, bucketName, destinationKey);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException("error copying file on s3", e);
|
||||
}
|
||||
return new MetaInfo(content.getSize(), destinationKey, null, getPayloadConfiguration());
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetaInfo onMove(Content content, String newParentPath) {
|
||||
//new contentPath can be set as remotePath to the storage backend ?
|
||||
return new MetaInfo(content.getSize(),content.getStorageId(), content.getRemotePath(), getPayloadConfiguration());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete(String storageId) {
|
||||
try {
|
||||
client.deleteObject(bucketName, storageId);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException("error deleting file on s3", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public MetaInfo upload(InputStream stream, String relativePath, String name, String user) {
|
||||
return this.upload(stream, relativePath, name, null, user);
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetaInfo upload(InputStream stream, String relativePath, String name, Long size, String user) {
|
||||
String storageId = keyGenerator.apply(null);
|
||||
return upload(stream, relativePath, name, storageId, size, user);
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetaInfo upload(InputStream stream, String relativePath, String name, String storageId, Long size, String user) {
|
||||
try {
|
||||
|
||||
/*Map<String, String> headers = new HashMap<>();
|
||||
headers.put("X-Amz-Storage-Class", "REDUCED_REDUNDANCY");
|
||||
Map<String, String> userMetadata = new HashMap<>();*/
|
||||
ObjectMetadata objMeta = new ObjectMetadata();
|
||||
objMeta.addUserMetadata("user", user);
|
||||
objMeta.addUserMetadata("title", name);
|
||||
|
||||
if (size != null && size >0) {
|
||||
objMeta.setContentLength(size);
|
||||
log.info("content length set to {}",size);
|
||||
} else
|
||||
log.info("content length not set");
|
||||
|
||||
log.info("uploading file {} with id {} in bucket {} ",name, storageId, bucketName);
|
||||
|
||||
client.putObject(bucketName, storageId, stream, objMeta);
|
||||
|
||||
long fileSize;
|
||||
if (size != null && size>0)
|
||||
fileSize = size;
|
||||
else
|
||||
fileSize = client.getObjectMetadata(bucketName, storageId).getContentLength();
|
||||
|
||||
|
||||
return new MetaInfo(fileSize,storageId, null, getPayloadConfiguration());
|
||||
} catch (Exception e) {
|
||||
log.error("error uploading file on s3",e);
|
||||
throw new RuntimeException("error uploading file on s3", e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public InputStream download(String id) throws StorageIdNotFoundException{
|
||||
try {
|
||||
InputStream inputStream = client.getObject(bucketName, id).getObjectContent();
|
||||
return inputStream;
|
||||
}catch (Exception e) {
|
||||
log.error("error downloading file form s3");
|
||||
throw new RuntimeException("error downloading file from s3",e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public InputStream download(Content content) throws StorageIdNotFoundException {
|
||||
return download(content.getStorageId());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, String> getFileMetadata(String id) {
|
||||
|
||||
try {
|
||||
ObjectMetadata objMeta = client.getObjectMetadata(bucketName, id);
|
||||
Map<String, String> userMetadata = objMeta.getUserMetadata();
|
||||
HashMap<String, String> toReturn = new HashMap<>(userMetadata);
|
||||
toReturn.put("size" , Long.toString(objMeta.getContentLength()));
|
||||
return toReturn;
|
||||
} catch (Exception e) {
|
||||
log.error("error getting metadata from s3");
|
||||
throw new RuntimeException("error downloading file from s3",e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTotalSizeStored() {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTotalItemsCount() {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,8 @@
|
||||
package org.gcube.data.access.storagehub.types;
|
||||
|
||||
/** Kinds of public links that can be issued for a workspace item. */
public enum LinkType {

    /** Link backed by the volatile storage. */
    VOLATILE,
    /** Link bound to a specific item version. */
    VERSIONED,
    /** Plain link to the item. */
    STANDARD
}
|
@ -0,0 +1,18 @@
|
||||
package org.gcube.data.access.storagehub.types;
|
||||
|
||||
public interface PublicLink {
|
||||
|
||||
|
||||
LinkType getType();
|
||||
|
||||
String getId();
|
||||
|
||||
default String getVersion() {
|
||||
return null;
|
||||
}
|
||||
|
||||
default String getStorageName() {
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,397 @@
|
||||
<mix = 'http://www.jcp.org/jcr/mix/1.0'>
|
||||
<nt = 'http://www.jcp.org/jcr/nt/1.0'>
|
||||
<nthl = 'http://ip-server:port/hl/nthl'>
|
||||
<hl = 'http://ip-server:port/hl'>
|
||||
|
||||
[nthl:user] > nt:base
|
||||
- hl:portalLogin (String)
|
||||
- hl:uuid (String)
|
||||
- hl:scope (String)
|
||||
|
||||
[nthl:applicationData] > mix:lastModified,mix:referenceable,nt:base
|
||||
- hl:type (String) mandatory
|
||||
- hl:data (binary) mandatory
|
||||
|
||||
[nthl:home] > nt:folder
|
||||
- hl:scopes (String) multiple
|
||||
- hl:version (Long)
|
||||
|
||||
[nthl:accountingEntry] > nt:base
|
||||
- hl:user (String)
|
||||
- hl:date (Date) mandatory
|
||||
- hl:version (String)
|
||||
|
||||
[nthl:accountingEntryCreate] > nthl:accountingEntry
|
||||
- hl:itemName (String)
|
||||
|
||||
[nthl:accountingEntryRead] > nthl:accountingEntry
|
||||
- hl:itemName (String)
|
||||
|
||||
[nthl:accountingEntryEnabledPublicAccess] > nthl:accountingEntryRead
|
||||
|
||||
[nthl:accountingEntryDisabledPublicAccess] > nthl:accountingEntryRead
|
||||
|
||||
[nthl:accountingEntryPaste] > nthl:accountingEntry
|
||||
- hl:fromPath (String) mandatory
|
||||
|
||||
[nthl:accountingEntryUpdate] > nthl:accountingEntry
|
||||
- hl:itemName (String) mandatory
|
||||
|
||||
[nthl:accountingEntryShare] > nthl:accountingEntry
|
||||
- hl:itemName (String) mandatory
|
||||
- hl:members (String) multiple
|
||||
|
||||
[nthl:accountingEntryUnshare] > nthl:accountingEntry
|
||||
- hl:itemName (String) mandatory
|
||||
|
||||
[nthl:accountingEntryRestore] > nthl:accountingEntry
|
||||
- hl:itemName (String) mandatory
|
||||
|
||||
[nthl:accountingEntryDelete] > nthl:accountingEntry
|
||||
- hl:itemName (String) mandatory
|
||||
- hl:fromPath (String)
|
||||
|
||||
[nthl:accountingFolderEntryRenaming] > nthl:accountingEntry
|
||||
- hl:oldItemName (String) mandatory
|
||||
- hl:newItemName (String)
|
||||
|
||||
[nthl:accountingFolderEntryRemoval] > nthl:accountingEntry
|
||||
- hl:itemType (String) mandatory
|
||||
- hl:folderItemType (String)
|
||||
- hl:itemName (String) mandatory
|
||||
- hl:mimeType (String)
|
||||
|
||||
[nthl:accountingFolderEntryCut] > nthl:accountingFolderEntryRemoval
|
||||
|
||||
[nthl:accountingFolderEntryAdd] > nthl:accountingFolderEntryRemoval
|
||||
|
||||
[nthl:accountingEntryACL] > nthl:accountingEntry
|
||||
- hl:itemName (String) mandatory
|
||||
- hl:members (String) multiple
|
||||
|
||||
[nthl:accountingSet] > nt:base
|
||||
+ * (nthl:accountingEntry)
|
||||
|
||||
[nthl:readersSet] > nt:base
|
||||
+ * (nthl:accountingEntryRead)
|
||||
|
||||
[nthl:workspaceItem] > mix:referenceable, mix:title, mix:lastModified, nt:hierarchyNode, mix:lockable
|
||||
- hl:portalLogin (String)
|
||||
- hl:lastAction (String) mandatory
|
||||
- hl:oldRemotePath (String)
|
||||
- hl:storagePath (String)
|
||||
- hl:moved (Boolean)
|
||||
|
||||
- hl:hidden (Boolean)
|
||||
= 'false'
|
||||
autocreated
|
||||
|
||||
- hl:IsSystemFolder (Boolean)
|
||||
= 'false'
|
||||
autocreated
|
||||
|
||||
- hl:isPublic (Boolean)
|
||||
= 'false'
|
||||
autocreated
|
||||
|
||||
+ hl:readers (nthl:readersSet)
|
||||
= nthl:readersSet
|
||||
autocreated
|
||||
|
||||
+ hl:accounting (nthl:accountingSet)
|
||||
= nthl:accountingSet
|
||||
autocreated
|
||||
|
||||
+ hl:metadata (nt:unstructured)
|
||||
= nt:unstructured
|
||||
autocreated
|
||||
|
||||
+ hl:owner(nthl:user)
|
||||
= nthl:user
|
||||
autocreated
|
||||
|
||||
+ hl:payloadBackend (nthl:payloadBackend)
|
||||
= nthl:payloadBackend
|
||||
mandatory autocreated
|
||||
|
||||
+ *
|
||||
|
||||
[nthl:workspaceSharedItem] > nthl:workspaceItem, mix:shareable
|
||||
- hl:privilege (String)
|
||||
+ hl:members (nt:unstructured)
|
||||
= nt:unstructured
|
||||
autocreated
|
||||
|
||||
- hl:isVreFolder (Boolean)
|
||||
- hl:displayName (String)
|
||||
|
||||
+ hl:users (nt:unstructured)
|
||||
= nt:unstructured
|
||||
autocreated
|
||||
|
||||
+ * (nthl:workspaceItem)
|
||||
|
||||
[nthl:workspaceVreItem] > nthl:workspaceSharedItem
|
||||
- hl:groupId (String)
|
||||
- hl:scope (String)
|
||||
|
||||
[nthl:workspaceReference] > nthl:workspaceItem
|
||||
- hl:reference (Reference)
|
||||
|
||||
[nthl:workspaceLeafItem] > nthl:workspaceItem, nt:file
|
||||
- hl:workspaceItemType (String)
|
||||
- hl:workflowId (String)
|
||||
- hl:workflowStatus (String)
|
||||
- hl:workflowData (String)
|
||||
|
||||
[nthl:workspaceSmartItem] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:itemSentRequest] > mix:created, nt:base
|
||||
+ hl:owner(nthl:user)
|
||||
= nthl:user
|
||||
mandatory autocreated
|
||||
- hl:subject (String) mandatory
|
||||
- hl:body (String) mandatory
|
||||
- hl:read (Boolean) mandatory
|
||||
- hl:open (Boolean) mandatory
|
||||
- hl:addresses (String) mandatory multiple
|
||||
+ hl:attachments (nt:folder)
|
||||
= nt:folder
|
||||
mandatory autocreated
|
||||
|
||||
[nthl:itemSentRequestSH] > nthl:itemSentRequest, mix:shareable
|
||||
|
||||
[nthl:rootItemSentRequest] > nt:folder
|
||||
+ * (nthl:itemSentRequest)
|
||||
= nthl:itemSentRequest
|
||||
|
||||
[nthl:workspaceLeafItemContent] > nt:base
|
||||
|
||||
[nthl:payloadBackend] > nt:base
|
||||
- hl:storageName (String)
|
||||
+ hl:parameters (nt:unstructured)
|
||||
= nt:unstructured
|
||||
autocreated
|
||||
|
||||
[nthl:file] > nt:resource , mix:versionable
|
||||
- hl:size (long)
|
||||
- hl:remotePath (String)
|
||||
- hl:storageId (String)
|
||||
- hl:storageName (String)
|
||||
+ hl:payloadBackend (nthl:payloadBackend)
|
||||
= nthl:payloadBackend
|
||||
mandatory autocreated
|
||||
|
||||
[nthl:image] > nthl:file
|
||||
- hl:width (Long)
|
||||
= '0'
|
||||
mandatory autocreated
|
||||
- hl:height (Long)
|
||||
= '0'
|
||||
mandatory autocreated
|
||||
- hl:thumbnailWidth (Long)
|
||||
= '0'
|
||||
mandatory autocreated
|
||||
- hl:thumbnailHeight (Long)
|
||||
= '0'
|
||||
mandatory autocreated
|
||||
- hl:thumbnailData (binary)
|
||||
|
||||
[nthl:pdf] > nthl:file
|
||||
- hl:numberOfPages (long)
|
||||
- hl:version (string)
|
||||
- hl:author (string)
|
||||
- hl:title (string)
|
||||
- hl:producer (string)
|
||||
|
||||
|
||||
|
||||
[nthl:externalFile] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:externalImage] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:externalPdf] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:externalLink] > nthl:workspaceLeafItem
|
||||
- hl:value (String) mandatory
|
||||
|
||||
// DUPLICATED, MUST BE CLEANED
|
||||
[nthl:ExternalLink] > nthl:workspaceLeafItem
|
||||
- hl:value (String) mandatory
|
||||
|
||||
[nthl:gCubeItem] > nthl:workspaceItem
|
||||
- hl:scopes (String) mandatory multiple
|
||||
- hl:creator (String) mandatory
|
||||
- hl:itemType (String) mandatory
|
||||
- hl:properties (String)
|
||||
- hl:isShared (Boolean)
|
||||
- hl:sharedRootId (String)
|
||||
+ hl:property (nt:unstructured)
|
||||
= nt:unstructured
|
||||
autocreated
|
||||
|
||||
[nthl:trashItem] > nthl:workspaceItem
|
||||
- hl:name (String)
|
||||
- hl:deletedBy (String)
|
||||
- hl:originalParentId (String)
|
||||
- hl:deletedFrom (String)
|
||||
- hl:deletedTime (Date)
|
||||
- hl:mimeType (String)
|
||||
- hl:length (String)
|
||||
- hl:isFolder (Boolean)
|
||||
|
||||
+ * (nthl:workspaceItem)
|
||||
|
||||
|
||||
// TO REMOVE
|
||||
[nthl:externalUrl] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:query] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:aquamapsItem] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:timeSeriesItem] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:report] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:reportTemplate] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:workflowReport] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:workflowTemplate] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:gCubeMetadata] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:gCubeDocument] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:gCubeDocumentLink] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:gCubeImageDocumentLink] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:gCubePDFDocumentLink] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:gCubeImageDocument] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:gCubePDFDocument] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:gCubeURLDocument] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:gCubeAnnotation] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:externalResourceLink] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:tabularDataLink] > nthl:workspaceLeafItem
|
||||
|
||||
[nthl:documentAlternativeLink] > nt:base
|
||||
- hl:parentUri (String) mandatory
|
||||
- hl:uri (String) mandatory
|
||||
- hl:name (String) mandatory
|
||||
- hl:mimeType (String) mandatory
|
||||
|
||||
[nthl:documentPartLink] > nthl:documentAlternativeLink
|
||||
|
||||
[nthl:documentItemContent] > nthl:workspaceLeafItemContent
|
||||
- hl:collectionName (String) mandatory
|
||||
- hl:oid (String) mandatory
|
||||
+ hl:metadata (nt:unstructured)
|
||||
= nt:unstructured
|
||||
mandatory autocreated
|
||||
+ hl:annotations (nt:unstructured)
|
||||
= nt:unstructured
|
||||
mandatory autocreated
|
||||
+ hl:alternatives (nt:unstructured)
|
||||
= nt:unstructured
|
||||
mandatory autocreated
|
||||
+ hl:parts (nt:unstructured)
|
||||
= nt:unstructured
|
||||
mandatory autocreated
|
||||
|
||||
[nthl:metadataItemContent] > nthl:workspaceLeafItemContent, nthl:file
|
||||
- hl:schema (String) mandatory
|
||||
- hl:language (String) mandatory
|
||||
- hl:collectionName (String) mandatory
|
||||
- hl:oid (String) mandatory
|
||||
|
||||
[nthl:annotationItemContet] > nthl:workspaceLeafItemContent
|
||||
- hl:oid (String) mandatory
|
||||
+ hl:annotations (nt:unstructured)
|
||||
= nt:unstructured
|
||||
mandatory autocreated
|
||||
|
||||
[nthl:queryItemContent] > nthl:workspaceLeafItemContent
|
||||
- hl:query (String) mandatory
|
||||
- hl:queryType (String) mandatory
|
||||
|
||||
[nthl:aquamapsItemContent] > nthl:workspaceLeafItemContent, nthl:file
|
||||
- hl:mapName (String) mandatory
|
||||
- hl:mapType (String) mandatory
|
||||
- hl:author (String) mandatory
|
||||
- hl:numberOfSpecies (Long) mandatory
|
||||
- hl:boundingBox (String) mandatory
|
||||
- hl:PSOThreshold (Double) mandatory
|
||||
- hl:numberOfImages (Long) mandatory
|
||||
+ hl:images(nt:unstructured)
|
||||
= nt:unstructured
|
||||
mandatory autocreated
|
||||
|
||||
[nthl:timeSeriesItemContent] > nthl:workspaceLeafItemContent, nthl:file
|
||||
- hl:id (String) mandatory
|
||||
- hl:title (String) mandatory
|
||||
- hl:description (String) mandatory
|
||||
- hl:creator (String) mandatory
|
||||
- hl:created (String) mandatory
|
||||
- hl:publisher (String) mandatory
|
||||
- hl:sourceId (String) mandatory
|
||||
- hl:sourceName (String) mandatory
|
||||
- hl:rights (String) mandatory
|
||||
- hl:dimension (Long) mandatory
|
||||
- hl:headerLabels (String)
|
||||
|
||||
[nthl:reportItemContent] > nthl:workspaceLeafItemContent, nthl:file
|
||||
- hl:created (Date) mandatory
|
||||
- hl:lastEdit (Date) mandatory
|
||||
- hl:author (String) mandatory
|
||||
- hl:lastEditBy (String) mandatory
|
||||
- hl:templateName (String) mandatory
|
||||
- hl:numberOfSection (Long) mandatory
|
||||
- hl:status (String) mandatory
|
||||
|
||||
[nthl:reportTemplateContent] > nthl:workspaceLeafItemContent, nthl:file
|
||||
- hl:created (Date) mandatory
|
||||
- hl:lastEdit (Date) mandatory
|
||||
- hl:author (String) mandatory
|
||||
- hl:lastEditBy (String) mandatory
|
||||
- hl:numberOfSection (Long) mandatory
|
||||
- hl:status (String) mandatory
|
||||
|
||||
[nthl:externalResourceLinkContent] > nthl:workspaceLeafItemContent
|
||||
- hl:mimeType (String)
|
||||
- hl:size (long) mandatory
|
||||
- hl:resourceId (String) mandatory
|
||||
- hl:servicePlugin (String) mandatory
|
||||
|
||||
[nthl:tabularDataLinkContent] > nthl:workspaceLeafItemContent
|
||||
- hl:tableID (String) mandatory
|
||||
- hl:tableTemplateID (String) mandatory
|
||||
- hl:provenance (String) mandatory
|
||||
- hl:runtimeResourceID (String) mandatory
|
||||
- hl:operator (String)
|
||||
|
||||
[nthl:smartFolderContent] > nt:base
|
||||
- hl:query (String) mandatory
|
||||
- hl:folderId (String)
|
||||
|
||||
[nthl:folderBulkCreator] > nt:base
|
||||
- hl:folderId (String) mandatory
|
||||
- hl:status (Long)
|
||||
= '0'
|
||||
mandatory autocreated
|
||||
- hl:failures (Long)
|
||||
= '0'
|
||||
mandatory autocreated
|
||||
- hl:requests (Long) mandatory
|
||||
|
||||
[nthl:rootFolderBulkCreator] > nt:folder
|
||||
+ * (nthl:folderBulkCreator)
|
||||
= nthl:folderBulkCreator
|
||||
|
@ -0,0 +1,9 @@
|
||||
default.bucketName=storagehub-dev
|
||||
default.key=18eb719ebffb4cd0ab78f9343f8aedd2
|
||||
default.secret=e7b6178dd61d4e0dbbc37ff7cb941aed
|
||||
default.url=https://isti-cloud.isti.cnr.it:13808/
|
||||
default.createBucket=false
|
||||
volatile.bucketName=shub-volatile-dev
|
||||
volatile.key=18eb719ebffb4cd0ab78f9343f8aedd2
|
||||
volatile.secret=e7b6178dd61d4e0dbbc37ff7cb941aed
|
||||
volatile.url=https://isti-cloud.isti.cnr.it:13808/
|
@ -1,79 +0,0 @@
|
||||
The gCube System - storagehub
|
||||
--------------------------------------------------
|
||||
|
||||
REST web service for Jackrabbit
|
||||
|
||||
|
||||
This software is part of the gCube Framework (https://www.gcube-system.org/): an
|
||||
open-source software toolkit used for building and operating Hybrid Data
|
||||
Infrastructures enabling the dynamic deployment of Virtual Research Environments
|
||||
by favouring the realisation of reuse-oriented policies.
|
||||
|
||||
The projects leading to this software have received funding from a series of
|
||||
European Union programmes including:
|
||||
* the Sixth Framework Programme for Research and Technological Development -
|
||||
DILIGENT (grant no. 004260);
|
||||
* the Seventh Framework Programme for research, technological development and
|
||||
demonstration - D4Science (grant no. 212488), D4Science-II (grant no.
|
||||
239019), ENVRI (grant no. 283465), EUBrazilOpenBio (grant no. 288754), iMarine
|
||||
(grant no. 283644);
|
||||
* the H2020 research and innovation programme - BlueBRIDGE (grant no. 675680),
|
||||
EGIEngage (grant no. 654142), ENVRIplus (grant no. 654182), Parthenos (grant
|
||||
no. 654119), SoBigData (grant no. 654024), AGINFRA PLUS (grant no. 731001).
|
||||
|
||||
|
||||
Version
|
||||
--------------------------------------------------
|
||||
|
||||
1.3.1 (20210910-085653)
|
||||
|
||||
Please see the file named "changelog.xml" in this directory for the release notes.
|
||||
|
||||
|
||||
Authors
|
||||
--------------------------------------------------
|
||||
|
||||
|
||||
* Lucio Lelii (lucio.lelii-AT-isti.cnr.it), CNR Pisa,
|
||||
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo".
|
||||
|
||||
Maintainers
|
||||
-----------
|
||||
|
||||
|
||||
* Lucio Lelii (lucio.lelii-AT-isti.cnr.it), CNR Pisa,
|
||||
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo".
|
||||
|
||||
Download information
|
||||
--------------------------------------------------
|
||||
|
||||
Source code is available from SVN:
|
||||
https://code-repo.d4science.org/gCubeSystem/storagehub
|
||||
|
||||
Binaries can be downloaded from the gCube website:
|
||||
https://www.gcube-system.org/
|
||||
|
||||
|
||||
Installation
|
||||
--------------------------------------------------
|
||||
|
||||
Installation documentation is available on-line in the gCube Wiki:
|
||||
https://wiki.gcube-system.org/gcube/index.php/Home_Library_2.0_API_Framework_Specification
|
||||
|
||||
Documentation
|
||||
--------------------------------------------------
|
||||
|
||||
Documentation is available on-line in the gCube Wiki:
|
||||
https://wiki.gcube-system.org/gcube/index.php/StorageHub_API_Framework_Specification
|
||||
|
||||
Support
|
||||
--------------------------------------------------
|
||||
|
||||
Bugs and support requests can be reported in the gCube issue tracking tool:
|
||||
https://support.d4science.org/projects/gcube/
|
||||
|
||||
|
||||
Licensing
|
||||
--------------------------------------------------
|
||||
|
||||
This software is licensed under the terms you may find in the file named "LICENSE" in this directory.
|
@ -1,10 +0,0 @@
|
||||
<ReleaseNotes>
|
||||
<Changeset component="org.gcube.data-access.storagehub-webapp.1.0.5"
|
||||
date="2019-04-04">
|
||||
<Change>Active wait for lock in case of item creation added</Change>
|
||||
</Changeset>
|
||||
<Changeset component="org.gcube.data-access.storagehub-webapp.1.0.0"
|
||||
date="2015-07-01">
|
||||
<Change>First commit</Change>
|
||||
</Changeset>
|
||||
</ReleaseNotes>
|
@ -1,32 +0,0 @@
|
||||
<assembly
|
||||
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
|
||||
<id>servicearchive</id>
|
||||
<formats>
|
||||
<format>tar.gz</format>
|
||||
</formats>
|
||||
<baseDirectory>/</baseDirectory>
|
||||
<fileSets>
|
||||
<fileSet>
|
||||
<directory>${project.basedir}/distro</directory>
|
||||
<outputDirectory>/</outputDirectory>
|
||||
<useDefaultExcludes>true</useDefaultExcludes>
|
||||
<includes>
|
||||
<include>README</include>
|
||||
<include>LICENSE</include>
|
||||
<include>changelog.xml</include>
|
||||
<include>profile.xml</include>
|
||||
</includes>
|
||||
<fileMode>755</fileMode>
|
||||
<filtered>true</filtered>
|
||||
</fileSet>
|
||||
</fileSets>
|
||||
<files>
|
||||
<file>
|
||||
<source>target/storagehub.war</source>
|
||||
<outputDirectory>/storagehub</outputDirectory>
|
||||
</file>
|
||||
</files>
|
||||
|
||||
</assembly>
|
@ -1,7 +0,0 @@
|
||||
<application mode='online'>
|
||||
<name>StorageHub</name>
|
||||
<group>DataAccess</group>
|
||||
<version>1.3.1</version>
|
||||
<description>Storage Hub webapp</description>
|
||||
<local-persistence location='target' />
|
||||
</application>
|
@ -1,25 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Resource xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
|
||||
<ID></ID>
|
||||
<Type>Service</Type>
|
||||
<Profile>
|
||||
<Description>Storage Hub Webapp</Description>
|
||||
<Class>DataAccess</Class>
|
||||
<Name>storagehub</Name>
|
||||
<Version>1.0.0</Version>
|
||||
<Packages>
|
||||
<Software>
|
||||
<Name>storagehub</Name>
|
||||
<Version>1.0.7-SNAPSHOT</Version>
|
||||
<MavenCoordinates>
|
||||
<groupId>org.gcube.data.access</groupId>
|
||||
<artifactId>storagehub</artifactId>
|
||||
<version>1.0.7-SNAPSHOT</version>
|
||||
</MavenCoordinates>
|
||||
<Files>
|
||||
<File>storagehub.jar</File>
|
||||
</Files>
|
||||
</Software>
|
||||
</Packages>
|
||||
</Profile>
|
||||
</Resource>
|
@ -0,0 +1,25 @@
|
||||
.d4science_intro {
|
||||
top: 0;
|
||||
z-index: 2000;
|
||||
position: fixed;
|
||||
display: block ruby;
|
||||
padding: 10px;
|
||||
background: white;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.navbar-fixed-top {
|
||||
top: 100px !important;
|
||||
}
|
||||
|
||||
.sidebar {
|
||||
top: 160px !important;
|
||||
}
|
||||
|
||||
.navbar {
|
||||
margin-bottom: 40px !important;
|
||||
}
|
||||
|
||||
.main {
|
||||
top: 90px;
|
||||
}
|
@ -0,0 +1,26 @@
|
||||
.d4science_intro {
|
||||
top: 0;
|
||||
z-index: 2000;
|
||||
position: fixed;
|
||||
display: block ruby;
|
||||
padding: 10px;
|
||||
background: white;
|
||||
width: 100%;
|
||||
height: 100px;
|
||||
}
|
||||
|
||||
.navbar-fixed-top {
|
||||
top: 100px !important;
|
||||
}
|
||||
|
||||
.sidebar {
|
||||
top: 160px !important;
|
||||
}
|
||||
|
||||
.navbar {
|
||||
margin-bottom: 40px !important;
|
||||
}
|
||||
|
||||
.main {
|
||||
top: 90px;
|
||||
}
|
@ -1,56 +0,0 @@
|
||||
package org.gcube.data.access.fs;
|
||||
|
||||
import java.util.Calendar;
|
||||
|
||||
import javax.inject.Inject;
|
||||
|
||||
import org.gcube.common.storagehub.model.expressions.Expression;
|
||||
import org.gcube.common.storagehub.model.expressions.GenericSearchableItem;
|
||||
import org.gcube.common.storagehub.model.expressions.date.Before;
|
||||
import org.gcube.common.storagehub.model.expressions.logical.And;
|
||||
import org.gcube.common.storagehub.model.expressions.text.Contains;
|
||||
import org.gcube.data.access.storagehub.Constants;
|
||||
import org.gcube.data.access.storagehub.query.sql2.evaluators.Evaluators;
|
||||
import org.junit.Test;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
//@RunWith(WeldJunit4Runner.class)
|
||||
public class Expressions {
|
||||
/*
|
||||
private static Logger log = LoggerFactory.getLogger(Expression.class);
|
||||
|
||||
@Inject
|
||||
Evaluators evaluators;
|
||||
|
||||
|
||||
|
||||
public void test() {
|
||||
|
||||
evaluators.getEvaluators().forEach(s-> System.out.println(s.getType().toString()));
|
||||
|
||||
Expression<Boolean> cont1 = new Contains(GenericSearchableItem.get().title, "Data");
|
||||
Expression<Boolean> before = new Before(GenericSearchableItem.get().creationTime, Calendar.getInstance());
|
||||
Expression<Boolean> andExpr = new And(cont1, before);
|
||||
System.out.println(evaluators.evaluate(andExpr));
|
||||
|
||||
}
|
||||
|
||||
*/
|
||||
@Test
|
||||
public void test() {
|
||||
String entirePath = "sp2/comic/";
|
||||
/*String[] parentPathSplit = entirePath.split("/");
|
||||
System.out.println(parentPathSplit.length);
|
||||
for (String v: parentPathSplit)
|
||||
System.out.println(v);
|
||||
*/
|
||||
|
||||
String name = entirePath.replaceAll("([^/]*/)*(.*)", "$2");
|
||||
String parentPath = entirePath.replaceAll("(([^/]*/)*)(.*)", "$1");
|
||||
System.out.println(entirePath+" --"+name+"-- "+parentPath);
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue