Compare commits

..

7 Commits

55 changed files with 1282 additions and 2002 deletions

1
.gitignore vendored
View File

@ -1,4 +1,3 @@
target
.classpath
.project
/.DS_Store

View File

@ -1 +0,0 @@
/org.eclipse.jdt.core.prefs

View File

@ -1,6 +1,4 @@
eclipse.preferences.version=1
encoding//src/main/java=UTF-8
encoding//src/main/resources=UTF-8
encoding//src/test/java=UTF-8
encoding//src/test/resources=UTF-8
encoding/<project>=UTF-8

View File

@ -0,0 +1,2 @@
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning

View File

@ -1,23 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?><project-modules id="moduleCoreId" project-version="1.5.0">
<wb-module deploy-name="grsf-publisher-ws-1.12.1-SNAPSHOT">
<wb-module deploy-name="grsf-publisher-ws">
<wb-resource deploy-path="/" source-path="/target/m2e-wtp/web-resources"/>
<wb-resource deploy-path="/" source-path="/src/main/webapp" tag="defaultRootSource"/>
<wb-resource deploy-path="/WEB-INF/classes" source-path="/src/main/java"/>
<dependent-module archiveName="storagehub-model-1.1.0.jar" deploy-path="/WEB-INF/lib" handle="module:/resource/storagehub-model/storagehub-model">
<dependent-module archiveName="grsf-common-library-1.0.3-SNAPSHOT.jar" deploy-path="/WEB-INF/lib" handle="module:/resource/grsf-common-library/grsf-common-library">
<dependency-type>uses</dependency-type>
</dependent-module>
<dependent-module archiveName="common-gcube-calls-1.3.1-SNAPSHOT.jar" deploy-path="/WEB-INF/lib" handle="module:/resource/common-gcube-calls/common-gcube-calls">
<dependency-type>uses</dependency-type>
</dependent-module>
<property name="context-root" value="grsf-publisher-ws"/>
<property name="java-output-path" value="/grsf-publisher-ws/target/classes"/>
</wb-module>
</project-modules>

View File

@ -1,139 +0,0 @@
# Changelog
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [v1.13.3]
- Adding support for "FAO SDG 14.4.1 Questionnaire" source [#23670]
## [v1.13.2]
- Migrated request to social-service-client [#23679]
- Added "Assessment Methods" as Group [#23409]
## [v1.13.1]
- Aligned code and wiki to the new requirements [#23167]
- Changed group assign strategy [#23211] [#23215]
- Tag are added also to legacy records [#23216]
- Fixed code which generated groups id from name [#23215]
## [v1.13.0]
### Added
- Added support to not include in records time dependant metadata [#21995]
### Changed
- Switched dependency management to gcube-bom 2.0.0
- Migrated code to storagehub [#21432]
## [v1.12.0] - 2020-06-19
### Added
**Features**
- [#19166] Added support for GRSF_pre VRE with the behaviour of GRSF Admin
## [v1.11.0] - 2020-03-30
### Changed
- [#18293] Traceability flag is assigned only to Fishery records
## [v1.10.0] - 2019-11-11
### Changed
- [#17965] Tags longer than 100 characters are truncated instead of skipped
## [v1.9.0] - 2019-05-27
### Changed
- [#13347] refers_to can be null while publishing legacy records
- [#12421] Removed the non-ascii clean from extra fields
- [#12421] Properly supporting UTF-8 characters
- [#16395] Title is updated according to Stock/Fishery Name
## [v1.8.0] - 2019-02-26
### Changed
- [#12861] The sources in GRSF VRE are calculated using 'database_sources' field
## [v1.7.0] - 2018-10-10
### Changed
- [#12510] Fixed pom to exclude libraries already provided by the container
## [v1.6.0] - 2018-07-18
### Changed
- [#11464] Added biomass timeseries support to stock data
- [#11749] Added 'With Similarities'-'No Similarities' tag to GRSF Records
- [#11748] Added Tag for 'Fishing Gears' and 'Flag State' fields
- [#11766] Added 'Connected'-'Not Connected' tag to GRSF Records
- [#11767] Added group for SDG flag
- [#11811] Added citation field
- [#11832] Added sub-groups support for available time series related to GRSF Type "Assessment Unit"
- [#11967] Added Biomass group
- [#11968] Changed 'State and trend of Marine Resource' to 'State and Trend'
- [#11969] Changed 'Scientific advice' to 'Scientific Advice'
## [v1.5.0] - 2017-01-10
### Changed
- Model enhancements
## [v1.4.0] - 2017-11-02
### Changed
- Some fixes and improvements: added a common library between service and management widget
## [v1.3.0] - 2017-08-01
### Changed
- Model upgrade
## [v1.2.0] - 2017-07-01
### Changed
- [#8719] Model changed
## [v1.1.2] - 2017-05-15
- [#8719] Updates for ticket
- Minor fixes
## [v1.1.1] - 2017-04-02
- Minor fixes
## [v1.1.0] - 2017-02-28
- Model update
## [v1.0.1] - 2017-02-10
- Minor fixes
## [v1.0.0] - 2016-12-10
- First Release

View File

@ -1,312 +0,0 @@
# European Union Public Licence V. 1.1
EUPL © the European Community 2007
This European Union Public Licence (the “EUPL”) applies to the Work or Software
(as defined below) which is provided under the terms of this Licence. Any use of
the Work, other than as authorised under this Licence is prohibited (to the
extent such use is covered by a right of the copyright holder of the Work).
The Original Work is provided under the terms of this Licence when the Licensor
(as defined below) has placed the following notice immediately following the
copyright notice for the Original Work:
Licensed under the EUPL V.1.1
or has expressed by any other mean his willingness to license under the EUPL.
## 1. Definitions
In this Licence, the following terms have the following meaning:
- The Licence: this Licence.
- The Original Work or the Software: the software distributed and/or
communicated by the Licensor under this Licence, available as Source Code and
also as Executable Code as the case may be.
- Derivative Works: the works or software that could be created by the Licensee,
based upon the Original Work or modifications thereof. This Licence does not
define the extent of modification or dependence on the Original Work required
in order to classify a work as a Derivative Work; this extent is determined by
copyright law applicable in the country mentioned in Article 15.
- The Work: the Original Work and/or its Derivative Works.
- The Source Code: the human-readable form of the Work which is the most
convenient for people to study and modify.
- The Executable Code: any code which has generally been compiled and which is
meant to be interpreted by a computer as a program.
- The Licensor: the natural or legal person that distributes and/or communicates
the Work under the Licence.
- Contributor(s): any natural or legal person who modifies the Work under the
Licence, or otherwise contributes to the creation of a Derivative Work.
- The Licensee or “You”: any natural or legal person who makes any usage of the
Software under the terms of the Licence.
- Distribution and/or Communication: any act of selling, giving, lending,
renting, distributing, communicating, transmitting, or otherwise making
available, on-line or off-line, copies of the Work or providing access to its
essential functionalities at the disposal of any other natural or legal
person.
## 2. Scope of the rights granted by the Licence
The Licensor hereby grants You a world-wide, royalty-free, non-exclusive,
sub-licensable licence to do the following, for the duration of copyright vested
in the Original Work:
- use the Work in any circumstance and for all usage, reproduce the Work, modify
- the Original Work, and make Derivative Works based upon the Work, communicate
- to the public, including the right to make available or display the Work or
- copies thereof to the public and perform publicly, as the case may be, the
- Work, distribute the Work or copies thereof, lend and rent the Work or copies
- thereof, sub-license rights in the Work or copies thereof.
Those rights can be exercised on any media, supports and formats, whether now
known or later invented, as far as the applicable law permits so.
In the countries where moral rights apply, the Licensor waives his right to
exercise his moral right to the extent allowed by law in order to make effective
the licence of the economic rights here above listed.
The Licensor grants to the Licensee royalty-free, non exclusive usage rights to
any patents held by the Licensor, to the extent necessary to make use of the
rights granted on the Work under this Licence.
## 3. Communication of the Source Code
The Licensor may provide the Work either in its Source Code form, or as
Executable Code. If the Work is provided as Executable Code, the Licensor
provides in addition a machine-readable copy of the Source Code of the Work
along with each copy of the Work that the Licensor distributes or indicates, in
a notice following the copyright notice attached to the Work, a repository where
the Source Code is easily and freely accessible for as long as the Licensor
continues to distribute and/or communicate the Work.
## 4. Limitations on copyright
Nothing in this Licence is intended to deprive the Licensee of the benefits from
any exception or limitation to the exclusive rights of the rights owners in the
Original Work or Software, of the exhaustion of those rights or of other
applicable limitations thereto.
## 5. Obligations of the Licensee
The grant of the rights mentioned above is subject to some restrictions and
obligations imposed on the Licensee. Those obligations are the following:
Attribution right: the Licensee shall keep intact all copyright, patent or
trademarks notices and all notices that refer to the Licence and to the
disclaimer of warranties. The Licensee must include a copy of such notices and a
copy of the Licence with every copy of the Work he/she distributes and/or
communicates. The Licensee must cause any Derivative Work to carry prominent
notices stating that the Work has been modified and the date of modification.
Copyleft clause: If the Licensee distributes and/or communicates copies of the
Original Works or Derivative Works based upon the Original Work, this
Distribution and/or Communication will be done under the terms of this Licence
or of a later version of this Licence unless the Original Work is expressly
distributed only under this version of the Licence. The Licensee (becoming
Licensor) cannot offer or impose any additional terms or conditions on the Work
or Derivative Work that alter or restrict the terms of the Licence.
Compatibility clause: If the Licensee Distributes and/or Communicates Derivative
Works or copies thereof based upon both the Original Work and another work
licensed under a Compatible Licence, this Distribution and/or Communication can
be done under the terms of this Compatible Licence. For the sake of this clause,
“Compatible Licence” refers to the licences listed in the appendix attached to
this Licence. Should the Licensees obligations under the Compatible Licence
conflict with his/her obligations under this Licence, the obligations of the
Compatible Licence shall prevail.
Provision of Source Code: When distributing and/or communicating copies of the
Work, the Licensee will provide a machine-readable copy of the Source Code or
indicate a repository where this Source will be easily and freely available for
as long as the Licensee continues to distribute and/or communicate the Work.
Legal Protection: This Licence does not grant permission to use the trade names,
trademarks, service marks, or names of the Licensor, except as required for
reasonable and customary use in describing the origin of the Work and
reproducing the content of the copyright notice.
## 6. Chain of Authorship
The original Licensor warrants that the copyright in the Original Work granted
hereunder is owned by him/her or licensed to him/her and that he/she has the
power and authority to grant the Licence.
Each Contributor warrants that the copyright in the modifications he/she brings
to the Work are owned by him/her or licensed to him/her and that he/she has the
power and authority to grant the Licence.
Each time You accept the Licence, the original Licensor and subsequent
Contributors grant You a licence to their contributions to the Work, under the
terms of this Licence.
## 7. Disclaimer of Warranty
The Work is a work in progress, which is continuously improved by numerous
contributors. It is not a finished work and may therefore contain defects or
“bugs” inherent to this type of software development.
For the above reason, the Work is provided under the Licence on an “as is” basis
and without warranties of any kind concerning the Work, including without
limitation merchantability, fitness for a particular purpose, absence of defects
or errors, accuracy, non-infringement of intellectual property rights other than
copyright as stated in Article 6 of this Licence.
This disclaimer of warranty is an essential part of the Licence and a condition
for the grant of any rights to the Work.
## 8. Disclaimer of Liability
Except in the cases of wilful misconduct or damages directly caused to natural
persons, the Licensor will in no event be liable for any direct or indirect,
material or moral, damages of any kind, arising out of the Licence or of the use
of the Work, including without limitation, damages for loss of goodwill, work
stoppage, computer failure or malfunction, loss of data or any commercial
damage, even if the Licensor has been advised of the possibility of such
damage. However, the Licensor will be liable under statutory product liability
laws as far such laws apply to the Work.
## 9. Additional agreements
While distributing the Original Work or Derivative Works, You may choose to
conclude an additional agreement to offer, and charge a fee for, acceptance of
support, warranty, indemnity, or other liability obligations and/or services
consistent with this Licence. However, in accepting such obligations, You may
act only on your own behalf and on your sole responsibility, not on behalf of
the original Licensor or any other Contributor, and only if You agree to
indemnify, defend, and hold each Contributor harmless for any liability incurred
by, or claims asserted against such Contributor by the fact You have accepted
any such warranty or additional liability.
## 10. Acceptance of the Licence
The provisions of this Licence can be accepted by clicking on an icon “I agree”
placed under the bottom of a window displaying the text of this Licence or by
affirming consent in any other similar way, in accordance with the rules of
applicable law. Clicking on that icon indicates your clear and irrevocable
acceptance of this Licence and all of its terms and conditions.
Similarly, you irrevocably accept this Licence and all of its terms and
conditions by exercising any rights granted to You by Article 2 of this Licence,
such as the use of the Work, the creation by You of a Derivative Work or the
Distribution and/or Communication by You of the Work or copies thereof.
## 11. Information to the public
In case of any Distribution and/or Communication of the Work by means of
electronic communication by You (for example, by offering to download the Work
from a remote location) the distribution channel or media (for example, a
website) must at least provide to the public the information requested by the
applicable law regarding the Licensor, the Licence and the way it may be
accessible, concluded, stored and reproduced by the Licensee.
## 12. Termination of the Licence
The Licence and the rights granted hereunder will terminate automatically upon
any breach by the Licensee of the terms of the Licence.
Such a termination will not terminate the licences of any person who has
received the Work from the Licensee under the Licence, provided such persons
remain in full compliance with the Licence.
## 13. Miscellaneous
Without prejudice of Article 9 above, the Licence represents the complete
agreement between the Parties as to the Work licensed hereunder.
If any provision of the Licence is invalid or unenforceable under applicable
law, this will not affect the validity or enforceability of the Licence as a
whole. Such provision will be construed and/or reformed so as necessary to make
it valid and enforceable.
The European Commission may publish other linguistic versions and/or new
versions of this Licence, so far this is required and reasonable, without
reducing the scope of the rights granted by the Licence. New versions of the
Licence will be published with a unique version number.
All linguistic versions of this Licence, approved by the European Commission,
have identical value. Parties can take advantage of the linguistic version of
their choice.
## 14. Jurisdiction
Any litigation resulting from the interpretation of this License, arising
between the European Commission, as a Licensor, and any Licensee, will be
subject to the jurisdiction of the Court of Justice of the European Communities,
as laid down in article 238 of the Treaty establishing the European Community.
Any litigation arising between Parties, other than the European Commission, and
resulting from the interpretation of this License, will be subject to the
exclusive jurisdiction of the competent court where the Licensor resides or
conducts its primary business.
## 15. Applicable Law
This Licence shall be governed by the law of the European Union country where
the Licensor resides or has his registered office.
This licence shall be governed by the Belgian law if:
- a litigation arises between the European Commission, as a Licensor, and any
- Licensee; the Licensor, other than the European Commission, has no residence
- or registered office inside a European Union country.
## Appendix
“Compatible Licences” according to article 5 EUPL are:
- GNU General Public License (GNU GPL) v. 2
- Open Software License (OSL) v. 2.1, v. 3.0
- Common Public License v. 1.0
- Eclipse Public License v. 1.0
- Cecill v. 2.0

View File

@ -1,69 +0,0 @@
# GRSF Publisher Service
This service allows any client to publish on GRSF Catalogue.
## Built With
* [OpenJDK](https://openjdk.java.net/) - The JDK used
* [Maven](https://maven.apache.org/) - Dependency Management
## Documentation
[GRSF Publisher Service](https://wiki.gcube-system.org/gcube/GCube_Data_Catalogue_for_GRSF)
## Change log
See [Releases](https://code-repo.d4science.org/gCubeSystem/grsf-publisher-ws/releases).
## Authors
* **Luca Frosini** ([ORCID](https://orcid.org/0000-0003-3183-2291)) - [ISTI-CNR Infrascience Group](http://nemis.isti.cnr.it/groups/infrascience)
## How to Cite this Software
Tell people how to cite this software.
* Cite an associated paper?
* Use a specific BibTeX entry for the software?
@Manual{,
title = {GRSF Publisher Service},
author = {{Perciante, Costantino}, {Frosini, Luca}},
organization = {ISTI - CNR},
address = {Pisa, Italy},
year = 2019,
url = {http://www.gcube-system.org/}
}
## License
This project is licensed under the EUPL V.1.1 License - see the [LICENSE.md](LICENSE.md) file for details.
## About the gCube Framework
This software is part of the [gCubeFramework](https://www.gcube-system.org/ "gCubeFramework"): an
open-source software toolkit used for building and operating Hybrid Data
Infrastructures enabling the dynamic deployment of Virtual Research Environments
by favouring the realisation of reuse oriented policies.
The projects leading to this software have received funding from a series of European Union programmes including:
- the Sixth Framework Programme for Research and Technological Development
- DILIGENT (grant no. 004260).
- the Seventh Framework Programme for research, technological development and demonstration
- D4Science (grant no. 212488);
- D4Science-II (grant no.239019);
- ENVRI (grant no. 283465);
- iMarine(grant no. 283644);
- EUBrazilOpenBio (grant no. 288754).
- the H2020 research and innovation programme
- SoBigData (grant no. 654024);
- PARTHENOS (grant no. 654119);
- EGIEngage (grant no. 654142);
- ENVRIplus (grant no. 654182);
- BlueBRIDGE (grant no. 675680);
- PerformFish (grant no. 727610);
- AGINFRAplus (grant no. 731001);
- DESIRA (grant no. 818194);
- ARIADNEplus (grant no. 823914);
- RISIS2 (grant no. 824091);

1
distro/LICENSE Normal file
View File

@ -0,0 +1 @@
${gcube.license}

62
distro/README Normal file
View File

@ -0,0 +1,62 @@
The gCube System - ${name}
--------------------------------------------------
${description}
${gcube.description}
${gcube.funding}
Version
--------------------------------------------------
${version} (${buildDate})
Please see the file named "changelog.xml" in this directory for the release notes.
Authors
--------------------------------------------------
* Costantino Perciante (costantino.perciante-AT-isti.cnr.it), Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" - CNR, Pisa (Italy).
Maintainers
-----------
* Costantino Perciante (costantino.perciante-AT-isti.cnr.it), Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" - CNR, Pisa (Italy).
Download information
--------------------------------------------------
Source code is available from SVN:
${scm.url}
Binaries can be downloaded from the gCube website:
${gcube.website}
Installation
--------------------------------------------------
Installation documentation is available on-line in the gCube Wiki:
${gcube.wikiRoot}
Documentation
--------------------------------------------------
Documentation is available on-line in the gCube Wiki:
${gcube.wikiRoot}
Support
--------------------------------------------------
Bugs and support requests can be reported in the gCube issue tracking tool:
${gcube.issueTracking}
Licensing
--------------------------------------------------
This software is licensed under the terms you may find in the file named "LICENSE" in this directory.

View File

@ -1,10 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE xml>
<ReleaseNotes>
<Changeset component="org.gcube.data-catalogue.grsf-publisher-ws.1-12-0" date="${buildDate}">
<Change>Added support for GRSF_pre VRE with the behaviour of GRSF Admin #19166</Change>
</Changeset>
<Changeset component="org.gcube.data-catalogue.grsf-publisher-ws.1-11-0" date="2020-03-30">
<Changeset component="org.gcube.data-catalogue.grsf-publisher-ws.1-11-0" date="${buildDate}">
<Change>Traceability flag is assigned only to Fishery records refs #18293</Change>
</Changeset>
<Changeset component="org.gcube.data-catalogue.grsf-publisher-ws.1-10-0" date="2019-11-11">

View File

@ -13,10 +13,10 @@
<outputDirectory>/</outputDirectory>
<useDefaultExcludes>true</useDefaultExcludes>
<includes>
<include>LICENSE.md</include>
<include>README.md</include>
<include>CHANGELOG.md</include>
<include>gcube-app.xml</include>
<include>README</include>
<include>LICENSE</include>
<include>changelog.xml</include>
<include>profile.xml</include>
</includes>
<fileMode>755</fileMode>
<filtered>true</filtered>

25
distro/profile.xml Normal file
View File

@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<Resource xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<ID></ID>
<Type>Library</Type>
<Profile>
<Description>${description}</Description>
<Class>DataCatalogue</Class>
<Name>${artifactId}</Name>
<Version>1.0.0</Version>
<Packages>
<Software>
<Name>${artifactId}</Name>
<Version>${version}</Version>
<MavenCoordinates>
<groupId>${groupId}</groupId>
<artifactId>${artifactId}</artifactId>
<version>${version}</version>
</MavenCoordinates>
<Files>
<File>${build.finalName}.war</File>
</Files>
</Software>
</Packages>
</Profile>
</Resource>

View File

@ -1,10 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE xml>
<application mode='online'>
<name>GRSFPublisher</name>
<group>Data-Catalogue</group>
<version>${project.version}</version>
<description>${project.description}</description>
<local-persistence location='target' />
</application>

127
pom.xml
View File

@ -11,7 +11,7 @@
<groupId>org.gcube.data-catalogue</groupId>
<artifactId>grsf-publisher-ws</artifactId>
<version>1.13.3</version>
<version>1.12.0</version>
<packaging>war</packaging>
<name>grsf-publisher-ws</name>
<description>Utility library to publish GRSF products on GRSF catalogue.</description>
@ -19,14 +19,12 @@
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<version.jersey>2.22.4</version.jersey>
<version.jackson>2.8.11</version.jackson>
<version.jackson>2.6.0</version.jackson>
<distroDirectory>${project.basedir}/distro</distroDirectory>
<webappDirectory>${project.build.directory}/${project.build.finalName}</webappDirectory>
<distroDirectory>distro</distroDirectory>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<jsoup.version>1.10.1</jsoup.version>
<serviceClass>DataPublishing</serviceClass>
<maven.compiler.release>8</maven.compiler.release>
</properties>
<scm>
@ -36,11 +34,18 @@
</scm>
<dependencyManagement>
<dependencies>
<dependencies>
<dependency>
<groupId>org.gcube.distribution</groupId>
<artifactId>gcube-bom</artifactId>
<version>1.3.1</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.gcube.distribution</groupId>
<artifactId>gcube-smartgears-bom</artifactId>
<version>2.2.0</version>
<version>1.0.2</version>
<type>pom</type>
<scope>import</scope>
</dependency>
@ -51,16 +56,7 @@
<dependency>
<groupId>org.gcube.data-catalogue</groupId>
<artifactId>grsf-common-library</artifactId>
<version>[2.0.0, 3.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>storagehub-client-library</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.social-networking</groupId>
<artifactId>social-service-client</artifactId>
<version>[1.0.0, 2.0.0-SNAPSHOT)</version>
<version>[1-0-0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
</dependency>
<!-- jsoup HTML parser library @ http://jsoup.org/ -->
<dependency>
@ -110,7 +106,12 @@
<artifactId>authorization-client</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>home-library-jcr</artifactId>
<version>[2.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>common-authorization</artifactId>
@ -119,26 +120,32 @@
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet-core</artifactId>
<version>${version.jersey}</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-jackson</artifactId>
<version>${version.jersey}</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-processing</artifactId>
<version>${version.jersey}</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-multipart</artifactId>
<version>${version.jersey}</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-sse</artifactId>
<version>${version.jersey}</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.ext</groupId>
<artifactId>jersey-bean-validation</artifactId>
<version>${version.jersey}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
@ -156,37 +163,99 @@
<artifactId>jackson-jaxrs-json-provider</artifactId>
<version>${version.jackson}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<scope>provided</scope>
</dependency>
<!-- SmartGears -->
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-smartgears</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>home-library</artifactId>
<version>[2.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.test-framework.providers</groupId>
<artifactId>jersey-test-framework-provider-jetty</artifactId>
<version>2.23.2</version>
<scope>test</scope>
</dependency>
<!-- Added to support Java 11 JDK -->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<scope>provided</scope>
</dependency>
<!-- END Added to support Java 11 JDK -->
</dependencies>
<build>
<finalName>${name}</finalName>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<artifactId>maven-war-plugin</artifactId>
<version>2.1.1</version>
<executions>
<execution>
<id>make-servicearchive</id>
<phase>package</phase>
<phase>compile</phase>
</execution>
</executions>
<configuration>
<webappDirectory>${webappDirectory}</webappDirectory>
</configuration>
</plugin>
<!-- SA Plugin -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<descriptors>
<descriptor>${distroDirectory}/descriptor.xml</descriptor>
</descriptors>
</configuration>
<executions>
<execution>
<id>servicearchive</id>
<phase>install</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<version>2.5</version>
<executions>
<execution>
<id>copy-profile</id>
<phase>install</phase>
<goals>
<goal>copy-resources</goal>
</goals>
<configuration>
<outputDirectory>target</outputDirectory>
<resources>
<resource>
<directory>${distroDirectory}</directory>
<filtering>true</filtering>
<includes>
<include>profile.xml</include>
</includes>
</resource>
</resources>
</configuration>
</execution>
</executions>
</plugin>

View File

@ -1,8 +1,8 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import javax.validation.constraints.Size;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.json.simple.JSONObject;
import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.record;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.util.ArrayList;
import java.util.Arrays;
@ -8,7 +8,7 @@ import java.util.Map;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -202,18 +202,12 @@ public abstract class Base {
@Override
public String toString() {
return "Base ["
+ "catalogId=" + catalogId
+ ", description=" + description
+ ", license=" + license
+ ", author=" + author
+ ", authorContact=" + authorContact
+ ", version=" + version
+ ", maintainer=" + maintainer
+ ", maintainerContact=" + maintainerContact
+ ", extrasFields=" + extrasFields
+ ", extrasResources=" + extrasResources
+ "]";
return "Base [catalogId=" + catalogId + ", description=" + description
+ ", license=" + license + ", author=" + author
+ ", authorContact=" + authorContact + ", version=" + version
+ ", maintainer=" + maintainer + ", maintainerContact="
+ maintainerContact + ", extrasFields=" + extrasFields
+ ", extrasResources=" + extrasResources + "]";
}
}

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.record;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.util.List;
@ -6,19 +6,9 @@ import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CkanResource;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.TimeSeries;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.AnnotationBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.RefersToBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.SimilarRecordBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Sources;
import org.gcube.datacatalogue.common.enums.Status;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.gcube.datacatalogue.common.AAA_PORTED.Status;
import com.fasterxml.jackson.annotation.JsonProperty;
@ -28,14 +18,13 @@ import com.fasterxml.jackson.annotation.JsonProperty;
* @author Luca Frosini (ISTI - CNR)
*
*/
public abstract class Common extends Base {
public abstract class Common extends Base{
@JsonProperty(Constants.DATA_OWNER_JSON_KEY)
@CustomField(key=Constants.DATA_OWNER_CUSTOM_KEY)
private List<String> dataOwner;
@JsonProperty(Constants.DATABASE_SOURCES_JSON_KEY)
@CustomField(key=Constants.DATABASE_SOURCES_CUSTOM_KEY)
@CkanResource
@Valid
private List<Resource<Sources>> databaseSources;
@ -55,7 +44,7 @@ public abstract class Common extends Base {
@JsonProperty(Constants.SDG_FLAG_JSON_KEY)
@CustomField(key=Constants.SDG_FLAG_CUSTOM_KEY)
@Group(condition="true", groupNameOverValue=Constants.SDG_FLAG_GROUP_NAME, prependSourceToGroupName=false) // record is added to group grsf-sdg-flag if sdg Flag is true
@Group(condition="true", groupNameOverValue=Constants.SDG_FLAG_GROUP_NAME) // record is added to group grsf-sdg-flag if sdg Flag is true
private Boolean sdgFlag;
@JsonProperty(Constants.STATUS_OF_THE_GRSF_RECORD_JSON_KEY)
@ -89,6 +78,10 @@ public abstract class Common extends Base {
@CustomField(key=Constants.SIMILAR_GRSF_RECORDS_CUSTOM_KEY)
private List<SimilarRecordBean> similarGRSFRecords;
@JsonProperty(Constants.SIMILAR_SOURCE_RECORDS_JSON_KEY)
@CustomField(key=Constants.SIMILAR_SOURCE_RECORDS_CUSTOM_KEY)
private List<SimilarRecordBean> similarSourceRecords;
// automatically set
@CustomField(key=Constants.DOMAIN_CUSTOM_KEY)
private String domain;
@ -110,11 +103,7 @@ public abstract class Common extends Base {
@JsonProperty(Constants.CONNECTED_JSON_KEY)
@CustomField(key=Constants.CONNECTED_CUSTOM_KEY)
private List<String> connectedBeans;
@JsonProperty(Constants.CONNECTIONS_INDICATOR)
@Tag
private String connectionsIndicator;
@JsonProperty(Constants.ANNOTATION_PUBLISHER_JSON_KEY)
@CustomField(key=Constants.ANNOTATION_PUBLISHER_CUSTOM_KEY)
private List<AnnotationBean> annotations;
@ -123,6 +112,10 @@ public abstract class Common extends Base {
@Tag
private String similaritiesIndicator;
@JsonProperty(Constants.CONNECTIONS_INDICATOR)
@Tag
private String connectionsIndicator;
@JsonProperty(Constants.CITATION_JSON_KEY)
@CustomField(key=Constants.CITATION_CUSTOM_KEY)
private String citation;
@ -177,6 +170,7 @@ public abstract class Common extends Base {
this.landings = landings;
this.species = species;
this.similarGRSFRecords = similarGRSFRecords;
this.similarSourceRecords = similarSourceRecords;
this.domain = domain;
this.uuid = uuid;
this.managementBodyAuthorities = managementBodyAuthorities;
@ -224,7 +218,6 @@ public abstract class Common extends Base {
public void setRefersTo(List<RefersToBean> refersTo) {
this.refersTo = refersTo;
}
public List<Resource<Sources>> getDatabaseSources() {
return databaseSources;
}
@ -297,6 +290,14 @@ public abstract class Common extends Base {
this.similarGRSFRecords = similarGRSFRecords;
}
public List<SimilarRecordBean> getSimilarSourceRecords() {
return similarSourceRecords;
}
public void setSimilarSourceRecords(List<SimilarRecordBean> similarSourceRecords) {
this.similarSourceRecords = similarSourceRecords;
}
public String getSpatial() {
return spatial;
}
@ -356,28 +357,17 @@ public abstract class Common extends Base {
@Override
public String toString() {
return super.toString() + " - Common ["
+ " dataOwner=" + dataOwner
+ ", databaseSources=" + databaseSources
+ ", sourceOfInformation=" + sourceOfInformation
+ ", refersTo=" + refersTo
+ ", shortName=" + shortName
+ ", sdgFlag=" + sdgFlag
+ ", status=" + status
+ ", systemType=" + systemType
+ ", catches=" + catches
+ ", landings=" + landings
+ ", species=" + species
return "Common [dataOwner=" + dataOwner + ", databaseSources="
+ databaseSources + ", sourceOfInformation="
+ sourceOfInformation + ", refersTo=" + refersTo
+ ", shortName=" + shortName + ", sdgFlag=" + sdgFlag + ", status="
+ status + ", systemType=" + systemType + ", catches="
+ catches + ", landings=" + landings + ", species=" + species
+ ", similarGRSFRecords=" + similarGRSFRecords
+ ", domain=" + domain
+ ", uuid=" + uuid
+ ", similarSourceRecords=" + similarSourceRecords
+ ", domain=" + domain + ", uuid=" + uuid
+ ", managementBodyAuthorities=" + managementBodyAuthorities
+ ", spatial=" + spatial
+ ", connectedBeans=" + connectedBeans
+ ", annotations=" + annotations
+ ", similaritiesIndicator=" + similaritiesIndicator
+ ", connectionsIndicator=" + connectionsIndicator
+ ", citation=" + citation
+ "]";
+ ", spatial=" + spatial + ", connectedBeans=" + connectedBeans
+ ", annotations=" + annotations + "]";
}
}

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;

View File

@ -1,16 +1,13 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.record;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.util.List;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Fishery_Type;
import org.gcube.datacatalogue.common.enums.Production_System_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Fishery_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Production_System_Type;
import com.fasterxml.jackson.annotation.JsonProperty;
@ -21,35 +18,38 @@ import com.fasterxml.jackson.annotation.JsonProperty;
*/
public class FisheryRecord extends Common {
@JsonProperty(Constants.TRACEABILITY_FLAG_JSON_KEY)
@CustomField(key=Constants.TRACEABILITY_FLAG_CUSTOM_KEY)
@Group(condition="true", groupNameOverValue=Constants.TRACEABILITY_FLAG_GROUP_NAME) // record is added to group grsf-traceability-flag if Traceability Flag is true
private Boolean traceabilityFlag;
@JsonProperty(Constants.FISHERY_NAME_JSON_KEY)
@CustomField(key=Constants.FISHERY_NAME_CUSTOM_KEY)
@NotNull(message="fishery_name cannot be null")
@Size(min=1, message="fishery_name cannot be empty")
@CustomField(key=Constants.FISHERY_NAME_CUSTOM_KEY)
private String fisheryName;
@JsonProperty(Constants.GRSF_SEMANTIC_IDENTIFIER_JSON_KEY)
@CustomField(key=Constants.GRSF_SEMANTIC_IDENTIFIER_CUSTOM_KEY)
private String fisheryId;
@JsonProperty(Constants.TRACEABILITY_FLAG_JSON_KEY)
@CustomField(key=Constants.TRACEABILITY_FLAG_CUSTOM_KEY)
@Group(condition="true", groupNameOverValue=Constants.TRACEABILITY_FLAG_GROUP_NAME, prependSourceToGroupName=false) // record is added to group grsf-traceability-flag if Traceability Flag is true
private Boolean traceabilityFlag;
@JsonProperty(Constants.FISHING_AREA_JSON_KEY)
@CustomField(key=Constants.FISHING_AREA_CUSTOM_KEY)
@Tag
private List<String> fishingArea;
@JsonProperty(Constants.JURISDICTION_AREA_JSON_KEY)
@CustomField(key=Constants.JURISDICTION_AREA_CUSTOM_KEY)
@Tag
private List<String> jurisdictionArea;
@JsonProperty(Constants.RESOURCES_EXPLOITED_JSON_KEY)
@CustomField(key=Constants.RESOURCES_EXPLOITED_CUSTOM_KEY)
private List<String> resourcesExploited;
@JsonProperty(Constants.JURISDICTION_AREA_JSON_KEY)
@CustomField(key=Constants.JURISDICTION_AREA_CUSTOM_KEY)
private List<String> jurisdictionArea;
@JsonProperty(Constants.PRODUCTION_SYSTEM_TYPE_JSON_KEY)
@CustomField(key=Constants.PRODUCTION_SYSTEM_TYPE_CUSTOM_KEY)
private List<Production_System_Type> productionSystemType;
@JsonProperty(Constants.FLAG_STATE_JSON_KEY)
@CustomField(key=Constants.FLAG_STATE_CUSTOM_KEY)
@Tag
@ -62,6 +62,7 @@ public class FisheryRecord extends Common {
@JsonProperty(Constants.GRSF_TYPE_JSON_KEY)
@CustomField(key=Constants.GRSF_TYPE_CUSTOM_KEY)
@Group
@Tag
private Fishery_Type type;
@ -93,6 +94,7 @@ public class FisheryRecord extends Common {
this.fishingArea = fishingArea;
this.resourcesExploited = resourcesExploited;
this.jurisdictionArea = jurisdictionArea;
this.productionSystemType = productionSystemType;
this.flagState = flagState;
this.fishingGear = fishingGear;
this.type = type;
@ -160,6 +162,15 @@ public class FisheryRecord extends Common {
this.resourcesExploited = resourcesExploited;
}
public List<Production_System_Type> getProductionSystemType() {
return productionSystemType;
}
public void setProductionSystemType(
List<Production_System_Type> productionSystemType) {
this.productionSystemType = productionSystemType;
}
public List<String> getFlagState() {
return flagState;
}
@ -183,6 +194,7 @@ public class FisheryRecord extends Common {
+ ", traceabilityFlag=" + traceabilityFlag
+ ", resourcesExploited=" + resourcesExploited
+ ", jurisdictionArea=" + jurisdictionArea
+ ", productionSystemType=" + productionSystemType
+ ", flagState=" + flagState + ", fishingGear=" + fishingGear
+ ", type=" + type + "]";
}

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
@ -21,8 +21,8 @@ public @interface Group {
String condition() default "";
/**
* If this value is set, it is used as name of the group in place of the filed value.
* (apart the source, i.e. "grsf", "ram", "firms", "fishsource" that will be prepended depending on prependSourceToGroupName)
* If this value is set, it is the name of the group (apart the source, i.e. "grsf", "ram", "firms", "fishsource" that will be prepended) to which the
* record needs to be put.
* @return
*/
String groupNameOverValue() default "";
@ -32,6 +32,6 @@ public @interface Group {
* Set to false to avoid source prepending
* @return
*/
boolean prependSourceToGroupName() default false;
boolean prependSourceToGroupName() default true;
}

View File

@ -1,8 +1,8 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import javax.validation.constraints.NotNull;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -1,10 +1,10 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Sources;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -1,6 +1,6 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.json.simple.JSONObject;
import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.record;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.util.List;
@ -6,15 +6,10 @@ import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.TimeSeries;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Abundance_Level;
import org.gcube.datacatalogue.common.enums.Fishing_Pressure;
import org.gcube.datacatalogue.common.enums.Stock_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Abundance_Level;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Fishing_Pressure;
import org.gcube.datacatalogue.common.AAA_PORTED.Stock_Type;
import com.fasterxml.jackson.annotation.JsonProperty;
@ -26,9 +21,9 @@ import com.fasterxml.jackson.annotation.JsonProperty;
public class StockRecord extends Common{
@JsonProperty(Constants.STOCK_NAME_JSON_KEY)
@CustomField(key=Constants.STOCK_NAME_CUSTOM_KEY)
@NotNull(message=Constants.STOCK_NAME_JSON_KEY + " cannot be null")
@Size(min=2, message=Constants.STOCK_NAME_JSON_KEY + " cannot be empty")
@CustomField(key=Constants.STOCK_NAME_CUSTOM_KEY)
private String stockName;
@JsonProperty(Constants.GRSF_SEMANTIC_IDENTIFIER_JSON_KEY)
@ -46,7 +41,6 @@ public class StockRecord extends Common{
@JsonProperty(Constants.ASSESSMENT_METHODS_JSON_KEY)
@CustomField(key=Constants.ASSESSMENT_METHODS_CUSTOM_KEY)
@Group(groupNameOverValue=Constants.ASSESSMENT_METHODS_CUSTOM_KEY, prependSourceToGroupName=false)
private List<String> assessmentMethods;
@JsonProperty(Constants.FIRMS_ABUNDANCE_LEVEL_JSON_KEY)
@ -100,9 +94,8 @@ public class StockRecord extends Common{
@JsonProperty(Constants.SCIENTIFIC_ADVICE_JSON_KEY)
@CustomField(key=Constants.SCIENTIFIC_ADVICE_CUSTOM_KEY)
@TimeSeries
@Group(groupNameOverValue=Constants.SCIENTIFIC_ADVICE_CUSTOM_KEY, prependSourceToGroupName=false)
private List<TimeSeriesBean<String, Void>> scientificAdvice;
private List<String> scientificAdvice;
@JsonProperty(Constants.ASSESSOR_JSON_KEY)
@CustomField(key=Constants.ASSESSOR_CUSTOM_KEY)
@ -110,6 +103,7 @@ public class StockRecord extends Common{
@JsonProperty(Constants.GRSF_TYPE_JSON_KEY)
@CustomField(key=Constants.GRSF_TYPE_CUSTOM_KEY)
@Group
@Tag
private Stock_Type type;
@ -147,7 +141,7 @@ public class StockRecord extends Common{
List<TimeSeriesBean<String, String>> fishingPressure,
List<TimeSeriesBean<String, Void>> narrativeState,
List<TimeSeriesBean<String, Void>> faoState,
List<TimeSeriesBean<String, Void>> scientificAdvice, String assessor, Stock_Type type) {
List<String> scientificAdvice, String assessor, Stock_Type type) {
super();
this.stockName = stockName;
this.stockId = stockId;
@ -263,11 +257,11 @@ public class StockRecord extends Common{
this.narrativeState = narrativeState;
}
public List<TimeSeriesBean<String, Void>> getScientificAdvice() {
public List<String> getScientificAdvice() {
return scientificAdvice;
}
public void setScientificAdvice(List<TimeSeriesBean<String, Void>> scientificAdvice) {
public void setScientificAdvice(List<String> scientificAdvice) {
this.scientificAdvice = scientificAdvice;
}

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;

View File

@ -1,6 +1,6 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -1,11 +1,10 @@
package org.gcube.data_catalogue.grsf_publish_ws.ex;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseBean;
import org.slf4j.LoggerFactory;
/**

View File

@ -1,9 +1,9 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.utils;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import static org.gcube.resources.discovery.icclient.ICFactory.client;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.output;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
/**
* Response bean to be used by the service.

View File

@ -1,6 +1,6 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.output;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -1,10 +1,10 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Status;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Status;
import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.ex;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException;
@ -7,7 +7,6 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseBean;
import org.slf4j.LoggerFactory;
/**

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.utils.threads;
package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import java.util.List;

View File

@ -29,19 +29,19 @@ import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.authorization.library.utils.Caller;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.DeleteRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.UpdateRecordStatus;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.DeleteRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.UpdateRecordStatus;
import org.gcube.data_catalogue.grsf_publish_ws.utils.CommonServiceUtils;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean;
import org.gcube.datacatalogue.ckanutillibrary.shared.RolesCkanGroupOrOrg;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Product_Type;
import org.gcube.datacatalogue.common.enums.Sources;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Product_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.slf4j.LoggerFactory;
import eu.trentorise.opendata.jackan.model.CkanDataset;
@ -51,7 +51,7 @@ import eu.trentorise.opendata.jackan.model.CkanDataset;
* @author Costantino Perciante (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
@Path("{source:firms|FIRMS|grsf|GRSF|FishSource|fishsource|sdg|SDG}/fishery/")
@Path("{source:firms|FIRMS|grsf|GRSF|FishSource|fishsource}/fishery/")
public class GrsfPublisherFisheryService {
// the context
@ -61,33 +61,33 @@ public class GrsfPublisherFisheryService {
// Logger
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(GrsfPublisherFisheryService.class);
@GET
@Path("hello")
@Produces(MediaType.TEXT_PLAIN)
public Response hello() {
return Response.ok("Hello.. Fishery service is here").build();
}
@GET
@Path("get-licenses")
@Produces(MediaType.APPLICATION_JSON)
public Response getLicenses() {
Status status = Status.OK;
String context = ScopeProvider.instance.get();
DataCatalogue catalogue;
try {
catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
Map<String,String> licenses = CommonServiceUtils.getLicenses(catalogue);
if(licenses == null)
status = Status.INTERNAL_SERVER_ERROR;
return Response.status(status).entity(licenses).build();
} catch(Exception e) {
status = Status.INTERNAL_SERVER_ERROR;
return Response.status(status)
.entity(new ResponseBean(false, "Unable to retrieve license list " + e.getLocalizedMessage(), null))
.build();
}
}
// @GET
// @Path("hello")
// @Produces(MediaType.TEXT_PLAIN)
// public Response hello() {
// return Response.ok("Hello.. Fishery service is here").build();
// }
//
// @GET
// @Path("get-licenses")
// @Produces(MediaType.APPLICATION_JSON)
// public Response getLicenses() {
// Status status = Status.OK;
// String context = ScopeProvider.instance.get();
// DataCatalogue catalogue;
// try {
//// catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// Map<String,String> licenses = CommonServiceUtils.getLicenses(catalogue);
// if(licenses == null)
// status = Status.INTERNAL_SERVER_ERROR;
// return Response.status(status).entity(licenses).build();
// } catch(Exception e) {
// status = Status.INTERNAL_SERVER_ERROR;
// return Response.status(status)
// .entity(new ResponseBean(false, "Unable to retrieve license list " + e.getLocalizedMessage(), null))
// .build();
// }
// }
@POST
@Path("publish-product")
@ -113,36 +113,43 @@ public class GrsfPublisherFisheryService {
// Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source);
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
throw new Exception("There was a problem while serving your request. No catalogue instance was found!");
} else {
DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// if(catalogue == null) {
// throw new Exception("There was a problem while serving your request. No catalogue instance was found!");
// } else {
String apiKey = catalogue.getApiKeyFromUsername(username);
String organization = HelperMethods.retrieveOrgNameFromScope(context);
String apiKey = "";
String organization = "";
// String apiKey = catalogue.getApiKeyFromUsername(username);
// String organization = HelperMethods.retrieveOrgNameFromScope(context);
// CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
//
// // extend this role to the other organizations in this context
// CommonServiceUtils.extendRoleToOtherOrganizations(username, catalogue, organization,
// RolesCkanGroupOrOrg.ADMIN);
CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// extend this role to the other organizations in this context
CommonServiceUtils.extendRoleToOtherOrganizations(username, catalogue, organization,
RolesCkanGroupOrOrg.ADMIN);
String authorMail = "";
String authorFullname = "";
// retrieve the user's email and fullname
String authorMail = HelperMethods.getUserEmail(context, token);
String authorFullname = HelperMethods.getUserFullname(context, token);
if(authorMail == null || authorFullname == null) {
throw new Exception("Sorry but it was not possible to retrieve your fullname/email!");
}
// String authorMail = HelperMethods.getUserEmail(context, token);
// String authorFullname = HelperMethods.getUserFullname(context, token);
//
// if(authorMail == null || authorFullname == null) {
// throw new Exception("Sorry but it was not possible to retrieve your fullname/email!");
// }
// The name of the product will be the uuid of the kb. The title will be the fishery's fishery_name. Fishery has also the constraint that
// fishing area and jurisdiction area cannot be empty at the same time
String futureName = record.getUuid();
String futureTitle = record.getFisheryName();
String futureName = "";
String futureTitle = "";
// String futureName = record.getUuid();
// String futureTitle = record.getFisheryName();
// check name
CommonServiceUtils.checkName(futureName, catalogue);
// CommonServiceUtils.checkName(futureName, catalogue);
Map<String,List<String>> customFields = record.getExtrasFields();
Set<String> tags = new HashSet<String>();
@ -154,26 +161,31 @@ public class GrsfPublisherFisheryService {
Product_Type.FISHERY, tags, customFields, groups, resources, username, futureTitle);
// check the license id
String license = null;
if(record.getLicense() == null || record.getLicense().isEmpty())
license = Constants.DEFAULT_LICENSE;
else if(HelperMethods.existsLicenseId(record.getLicense(), catalogue))
license = record.getLicense();
else
throw new Exception("Please check the license id!");
String license = "";
// String license = null;
// if(record.getLicense() == null || record.getLicense().isEmpty())
// license = Constants.DEFAULT_LICENSE;
// else if(HelperMethods.existsLicenseId(record.getLicense(), catalogue))
// license = record.getLicense();
// else
// throw new Exception("Please check the license id!");
//
long version = record.getVersion() == null ? 1 : record.getVersion();
// set the visibility of the datatest according the context
boolean publicDataset = context
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY));
// convert extras' keys to keys with namespace
Map<String,String> namespaces = HelperMethods
.getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_FISHERY);
if(namespaces == null)
throw new Exception("Failed to retrieve the namespaces for the key fields!");
// convert extras' keys to keys with namespace
Map<String,String> namespaces = new HashMap<>();
// Map<String,String> namespaces = HelperMethods
// .getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_FISHERY);
// if(namespaces == null)
// throw new Exception("Failed to retrieve the namespaces for the key fields!");
customFields = HelperMethods.replaceFieldsKey(customFields, namespaces,
!sourceInPath.equals(Sources.GRSF));
@ -182,6 +194,7 @@ public class GrsfPublisherFisheryService {
logger.info("Invoking create method..");
// create the product
id = catalogue.createCKanDatasetMultipleCustomFields(apiKey, futureTitle, futureName,
publishInOrganization, authorFullname, authorMail,
@ -190,7 +203,7 @@ public class GrsfPublisherFisheryService {
null, license, new ArrayList<String>(tags), customFields, resources, publicDataset);
// post actions
if(id != null) {
// if(id != null) {
logger.info("Created record with identifier " + id);
String description = Constants.SHORT_NAME_CUSTOM_KEY + ": " + record.getShortName() + "\n";
@ -205,10 +218,10 @@ public class GrsfPublisherFisheryService {
status = Status.CREATED;
} else {
throw new Exception("There was an error during the record generation, sorry");
}
}
// } else {
// throw new Exception("There was an error during the record generation, sorry");
// }
// }
} catch(Exception e) {
logger.error("Failed to create fishery record" + e);
status = Status.INTERNAL_SERVER_ERROR;
@ -218,68 +231,69 @@ public class GrsfPublisherFisheryService {
return Response.status(status).entity(responseBean).build();
}
@DELETE
@Path("delete-product")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response deleteFishery(@NotNull(message = "input value is missing") @Valid DeleteRecord recordToDelete,
@PathParam("source") String source) throws ValidationException {
// retrieve context and username
Caller caller = AuthorizationProvider.instance.get();
String username = caller.getClient().getId();
String context = ScopeProvider.instance.get();
ResponseCreationBean responseBean = new ResponseCreationBean();
Status status = Status.INTERNAL_SERVER_ERROR;
// check it is a fishery ...
logger.info(
"Received call to delete product with id " + recordToDelete.getId() + ", checking if it is a fishery");
try {
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source);
logger.debug("The request is to delete a fishery object of source " + sourceInPath);
// retrieve the catalogue instance
String apiKey = catalogue.getApiKeyFromUsername(username);
CkanDataset fisheryInCkan = catalogue.getDataset(recordToDelete.getId(), apiKey);
if(fisheryInCkan == null) {
status = Status.NOT_FOUND;
throw new Exception("There was a problem while serving your request. This item was not found");
}
// check it is in the right source and it is a fishery
String type = fisheryInCkan.getExtrasAsHashMap().get(Constants.DOMAIN_CUSTOM_KEY);
if((fisheryInCkan.getOrganization().getName().equalsIgnoreCase(source)
|| fisheryInCkan.getOrganization().getName().toLowerCase().startsWith(source.toLowerCase()))
&& Product_Type.FISHERY.getOrigName().equals(type)) {
logger.debug("Ok, this is a fishery of the right source, removing it");
boolean deleted = catalogue.deleteProduct(fisheryInCkan.getId(), apiKey, true);
if(deleted) {
logger.info("Fishery DELETED AND PURGED!");
status = Status.OK;
responseBean.setId(fisheryInCkan.getId());
}
} else {
status = Status.BAD_REQUEST;
throw new Exception(
"The id you are using doesn't belong to a Fishery item having source " + source + "!");
}
} catch(Exception e) {
logger.error("Failed to delete this", e);
responseBean.setError(e.getMessage());
}
return Response.status(status).entity(responseBean).build();
}
// @DELETE
// @Path("delete-product")
// @Consumes(MediaType.APPLICATION_JSON)
// @Produces(MediaType.APPLICATION_JSON)
// public Response deleteFishery(@NotNull(message = "input value is missing") @Valid DeleteRecord recordToDelete,
// @PathParam("source") String source) throws ValidationException {
//
// // retrieve context and username
// Caller caller = AuthorizationProvider.instance.get();
// String username = caller.getClient().getId();
// String context = ScopeProvider.instance.get();
//
// ResponseCreationBean responseBean = new ResponseCreationBean();
// Status status = Status.INTERNAL_SERVER_ERROR;
//
// // check it is a fishery ...
// logger.info(
// "Received call to delete product with id " + recordToDelete.getId() + ", checking if it is a fishery");
// try {
//
// DataCatalogue catalogue = null;
//// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
//
// // Cast the source to the accepted ones
// Sources sourceInPath = Sources.onDeserialize(source);
// logger.debug("The request is to delete a fishery object of source " + sourceInPath);
//
// // retrieve the catalogue instance
// String apiKey = catalogue.getApiKeyFromUsername(username);
// CkanDataset fisheryInCkan = catalogue.getDataset(recordToDelete.getId(), apiKey);
//
// if(fisheryInCkan == null) {
// status = Status.NOT_FOUND;
// throw new Exception("There was a problem while serving your request. This item was not found");
// }
//
// // check it is in the right source and it is a fishery
// String type = fisheryInCkan.getExtrasAsHashMap().get(Constants.DOMAIN_CUSTOM_KEY);
//
// if((fisheryInCkan.getOrganization().getName().equalsIgnoreCase(source)
// || fisheryInCkan.getOrganization().getName().toLowerCase().contains(source))
// && Product_Type.FISHERY.getOrigName().equals(type)) {
//
// logger.debug("Ok, this is a fishery of the right source, removing it");
// boolean deleted = catalogue.deleteProduct(fisheryInCkan.getId(), apiKey, true);
//
// if(deleted) {
// logger.info("Fishery DELETED AND PURGED!");
// status = Status.OK;
// responseBean.setId(fisheryInCkan.getId());
// }
// } else {
// status = Status.BAD_REQUEST;
// throw new Exception(
// "The id you are using doesn't belong to a Fishery item having source " + source + "!");
// }
// } catch(Exception e) {
// logger.error("Failed to delete this", e);
// responseBean.setError(e.getMessage());
// }
//
// return Response.status(status).entity(responseBean).build();
// }
@GET
@Path("get-fisheries-ids")
@ -300,11 +314,12 @@ public class GrsfPublisherFisheryService {
// Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source);
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
throw new Exception("There was a problem while serving your request");
}
DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// if(catalogue == null) {
// throw new Exception("There was a problem while serving your request");
// }
//
// if it is a request for GRSF records, we have Fishery - Stock groups, so it is easy.
// For other cases, records needs to be parsed
if(sourceInPath.equals(Sources.GRSF))
@ -347,10 +362,13 @@ public class GrsfPublisherFisheryService {
logger.info("Received call to get the catalogue identifier for the product with name " + name);
try {
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
throw new Exception("There was a problem while serving your request");
}
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// if(catalogue == null) {
// throw new Exception("There was a problem while serving your request");
// }
//
DataCatalogue catalogue = null;
CkanDataset dataset = catalogue.getDataset(name, catalogue.getApiKeyFromUsername(username));
if(dataset != null) {
Map<String,String> result = new HashMap<String,String>();
@ -397,33 +415,40 @@ public class GrsfPublisherFisheryService {
throw new Exception("Please specify the '" + Constants.CATALOG_ID + "' property");
}
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
throw new Exception(
"There was a problem while serving your request. No catalogue instance was found in this context!");
} else {
DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// if(catalogue == null) {
// throw new Exception(
// "There was a problem while serving your request. No catalogue instance was found in this context!");
// } else {
// get already published record and modify it
String apiKey = catalogue.getApiKeyFromUsername(username);
String apiKey = "";
// String apiKey = catalogue.getApiKeyFromUsername(username);
CkanDataset recordPublished = catalogue.getDataset(catalogId, apiKey);
if(recordPublished == null)
throw new Exception("A record with catalogue id " + catalogId + " does not exist!");
// retrieve the user's email and fullname
String authorMail = HelperMethods.getUserEmail(context, token);
String authorFullname = HelperMethods.getUserFullname(context, token);
String authorMail = "";
String authorFullname = "";
if(authorMail == null || authorFullname == null) {
logger.debug("Author fullname or mail missing, cannot continue");
throw new Exception("Sorry but there was not possible to retrieve your fullname/email!");
}
// // retrieve the user's email and fullname
// String authorMail = HelperMethods.getUserEmail(context, token);
// String authorFullname = HelperMethods.getUserFullname(context, token);
//
// if(authorMail == null || authorFullname == null) {
// logger.debug("Author fullname or mail missing, cannot continue");
// throw new Exception("Sorry but there was not possible to retrieve your fullname/email!");
// }
String organization = HelperMethods.retrieveOrgNameFromScope(context);
String organization = "";
// String organization = HelperMethods.retrieveOrgNameFromScope(context);
// check he/she has admin role
CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// name, product url and are going to remain unchanged (so we keep them from the already published record);
String name = recordPublished.getName();
@ -461,11 +486,12 @@ public class GrsfPublisherFisheryService {
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY));
// convert extras' keys to keys with namespace
Map<String,String> namespaces = HelperMethods
.getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_FISHERY);
if(namespaces == null)
throw new Exception("Failed to retrieve the namespaces for the key fields!");
Map<String,String> namespaces = new HashMap<>();
// Map<String,String> namespaces = HelperMethods
// .getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_FISHERY);
//
// if(namespaces == null)
// throw new Exception("Failed to retrieve the namespaces for the key fields!");
// retrieve the already generated url
String modifiedUUIDKey = namespaces.containsKey(Constants.ITEM_URL_FIELD)
@ -504,7 +530,7 @@ public class GrsfPublisherFisheryService {
} else {
throw new Exception("There was an error during the item updated, sorry");
}
}
// }
} catch(Exception e) {
logger.error("Failed to update fishery record" + e);
responseBean.setError(e.getMessage());
@ -529,14 +555,15 @@ public class GrsfPublisherFisheryService {
ResponseCreationBean responseBean = new ResponseCreationBean();
Status status = Status.INTERNAL_SERVER_ERROR;
try {
// try {
DataCatalogue catalogue = null;
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
throw new Exception(
"There was a problem while serving your request. No catalogue instance was found in this context!");
} else {
// if(catalogue == null) {
// throw new Exception(
// "There was a problem while serving your request. No catalogue instance was found in this context!");
// } else {
// catalog id must be reported
String uuid = bean.getUuid();
@ -546,21 +573,24 @@ public class GrsfPublisherFisheryService {
String apiKeyUser = catalogue.getApiKeyFromUsername(username);
CkanDataset record = catalogue.getDataset(uuid, apiKeyUser);
if(record == null)
throw new Exception("A record with knowledge_base_id id " + uuid + " does not exist!");
if(record == null){
// throw new Exception("A record with knowledge_base_id id " + uuid + " does not exist!");
}
// check system type
boolean isGRSF = !record.getExtrasAsHashMap().get(Constants.SYSTEM_TYPE_CUSTOM_KEY)
.equals(Constants.SYSTEM_TYPE_LEGACY_RECORD);
.equals(Constants.SYSTEM_TYPE_FOR_SOURCES_VALUE);
if(!isGRSF)
throw new Exception("You are trying to modify a Legacy record!");
if(!isGRSF) {
// throw new Exception("You are trying to modify a Legacy record!");
}
boolean rightDomain = record.getExtrasAsHashMap().get(Constants.DOMAIN_CUSTOM_KEY)
.equalsIgnoreCase(Product_Type.FISHERY.getOrigName());
if(!rightDomain)
throw new Exception("This is not a Fishery record!");
if(!rightDomain) {
// throw new Exception("This is not a Fishery record!");
}
// update it
Map<String,List<String>> updateStatus = new HashMap<String,List<String>>(1);
@ -572,11 +602,11 @@ public class GrsfPublisherFisheryService {
responseBean.setId(record.getId());
responseBean.setItemUrl(record.getExtrasAsHashMap().get(Constants.ITEM_URL_FIELD));
}
} catch(Exception e) {
logger.error("Failed to update fishery record's status", e);
responseBean.setError(e.getMessage());
}
// }
// } catch(Exception e) {
// logger.error("Failed to update fishery record's status", e);
// responseBean.setError(e.getMessage());
// }
return Response.status(status).entity(responseBean).build();

View File

@ -29,19 +29,19 @@ import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.authorization.library.utils.Caller;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.DeleteRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.UpdateRecordStatus;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.DeleteRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.UpdateRecordStatus;
import org.gcube.data_catalogue.grsf_publish_ws.utils.CommonServiceUtils;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean;
import org.gcube.datacatalogue.ckanutillibrary.shared.RolesCkanGroupOrOrg;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Product_Type;
import org.gcube.datacatalogue.common.enums.Sources;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Product_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.slf4j.LoggerFactory;
import eu.trentorise.opendata.jackan.model.CkanDataset;
@ -51,7 +51,7 @@ import eu.trentorise.opendata.jackan.model.CkanDataset;
* @author Costantino Perciante (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
@Path("{source:firms|FIRMS|ram|RAM|grsf|GRSF|FishSource|fishsource|sdg|SDG}/stock/")
@Path("{source:firms|FIRMS|ram|RAM|grsf|GRSF|FishSource|fishsource}/stock/")
public class GrsfPublisherStockService {
// the context
@ -61,33 +61,33 @@ public class GrsfPublisherStockService {
// Logger
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(GrsfPublisherStockService.class);
@GET
@Path("hello")
@Produces(MediaType.TEXT_PLAIN)
public Response hello() {
return Response.ok("Hello.. Stock service is here").build();
}
@GET
@Path("get-licenses")
@Produces(MediaType.APPLICATION_JSON)
public Response getLicenses() {
Status status = Status.OK;
String context = ScopeProvider.instance.get();
DataCatalogue catalogue;
try {
catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
Map<String,String> licenses = CommonServiceUtils.getLicenses(catalogue);
if(licenses == null)
status = Status.INTERNAL_SERVER_ERROR;
return Response.status(status).entity(licenses).build();
} catch(Exception e) {
status = Status.INTERNAL_SERVER_ERROR;
return Response.status(status)
.entity(new ResponseBean(false, "Unable to retrieve license list " + e.getLocalizedMessage(), null))
.build();
}
}
// @GET
// @Path("hello")
// @Produces(MediaType.TEXT_PLAIN)
// public Response hello() {
// return Response.ok("Hello.. Stock service is here").build();
// }
//
// @GET
// @Path("get-licenses")
// @Produces(MediaType.APPLICATION_JSON)
// public Response getLicenses() {
// Status status = Status.OK;
// String context = ScopeProvider.instance.get();
// DataCatalogue catalogue;
// try {
// catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// Map<String,String> licenses = CommonServiceUtils.getLicenses(catalogue);
// if(licenses == null)
// status = Status.INTERNAL_SERVER_ERROR;
// return Response.status(status).entity(licenses).build();
// } catch(Exception e) {
// status = Status.INTERNAL_SERVER_ERROR;
// return Response.status(status)
// .entity(new ResponseBean(false, "Unable to retrieve license list " + e.getLocalizedMessage(), null))
// .build();
// }
// }
@POST
@Path("publish-product")
@ -113,35 +113,46 @@ public class GrsfPublisherStockService {
// Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source);
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
throw new Exception("There was a problem while serving your request. No catalogue instance was found!");
} else {
DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// if(catalogue == null) {
// throw new Exception("There was a problem while serving your request. No catalogue instance was found!");
// } else {
//
String apiKey = catalogue.getApiKeyFromUsername(username);
String organization = HelperMethods.retrieveOrgNameFromScope(context); //"grsf_admin";
String apiKey = "";
String organization = "";
// String apiKey = catalogue.getApiKeyFromUsername(username);
// String organization = HelperMethods.retrieveOrgNameFromScope(context); //"grsf_admin";
// check it has admin role or throw exception
CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// extend this role to the other organizations in this context
CommonServiceUtils.extendRoleToOtherOrganizations(username, catalogue, organization,
RolesCkanGroupOrOrg.ADMIN);
// To support this gCat must be modified according to the following ticket
// https://support.d4science.org/issues/19365
// CommonServiceUtils.extendRoleToOtherOrganizations(username, catalogue, organization,
// RolesCkanGroupOrOrg.ADMIN);
String authorMail = "";
String authorFullname = "";
// retrieve the user's email and fullname
String authorMail = HelperMethods.getUserEmail(context, token);
String authorFullname = HelperMethods.getUserFullname(context, token);
if(authorMail == null || authorFullname == null) {
throw new Exception("Sorry but it was not possible to retrieve your fullname/email!");
}
// String authorMail = HelperMethods.getUserEmail(context, token);
// String authorFullname = HelperMethods.getUserFullname(context, token);
//
// if(authorMail == null || authorFullname == null) {
// throw new Exception("Sorry but it was not possible to retrieve your fullname/email!");
// }
// check the record has a name, at least
String futureName = record.getUuid();
String futureTitle = record.getStockName();
// check name and throws exception
CommonServiceUtils.checkName(futureName, catalogue);
// CommonServiceUtils.checkName(futureName, catalogue);
// load other information
Map<String,List<String>> customFields = record.getExtrasFields();
@ -170,11 +181,12 @@ public class GrsfPublisherStockService {
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY));
// convert extras' keys to keys with namespace
Map<String,String> namespaces = HelperMethods
.getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_STOCK);
if(namespaces == null)
throw new Exception("Failed to retrieve the namespaces for the key fields!");
Map<String,String> namespaces = new HashMap<>();
// Map<String,String> namespaces = HelperMethods
// .getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_STOCK);
//
// if(namespaces == null)
// throw new Exception("Failed to retrieve the namespaces for the key fields!");
customFields = HelperMethods.replaceFieldsKey(customFields, namespaces,
!sourceInPath.equals(Sources.GRSF));
@ -209,7 +221,7 @@ public class GrsfPublisherStockService {
} else
throw new Exception(
"There was an error during the product generation, sorry! Unable to create the dataset");
}
// }
} catch(Exception e) {
logger.error("Failed to create stock record", e);
status = Status.INTERNAL_SERVER_ERROR;
@ -238,11 +250,13 @@ public class GrsfPublisherStockService {
"Received call to delete product with id " + recordToDelete.getId() + ", checking if it is a stock");
try {
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
status = Status.INTERNAL_SERVER_ERROR;
throw new Exception("There was a problem while serving your request");
}
DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// if(catalogue == null) {
// status = Status.INTERNAL_SERVER_ERROR;
// throw new Exception("There was a problem while serving your request");
// }
// Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source);
@ -260,7 +274,7 @@ public class GrsfPublisherStockService {
// check it is in the right source and it is a stock
String type = stockInCkan.getExtrasAsHashMap().get(Constants.DOMAIN_CUSTOM_KEY);
if((stockInCkan.getOrganization().getName().equalsIgnoreCase(source)
|| stockInCkan.getOrganization().getName().toLowerCase().startsWith(source.toLowerCase()))
|| stockInCkan.getOrganization().getName().toLowerCase().contains(source))
&& Product_Type.STOCK.getOrigName().equals(type)) {
logger.debug("Ok, this is a stock of the right type, removing it");
@ -304,11 +318,13 @@ public class GrsfPublisherStockService {
// Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source);
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
status = Status.INTERNAL_SERVER_ERROR;
throw new Exception("There was a problem while serving your request");
}
DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// if(catalogue == null) {
// status = Status.INTERNAL_SERVER_ERROR;
// throw new Exception("There was a problem while serving your request");
// }
// if it is a request for GRSF records, we have Fishery - Stock groups, so it is easy.
// For other cases, records needs to be parsed
@ -348,10 +364,13 @@ public class GrsfPublisherStockService {
Status status = Status.INTERNAL_SERVER_ERROR;
logger.info("Received call to get the catalogue identifier for the product with name " + name);
try {
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
throw new Exception("There was a problem while serving your request");
}
DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// if(catalogue == null) {
// throw new Exception("There was a problem while serving your request");
// }
CkanDataset dataset = catalogue.getDataset(name, catalogue.getApiKeyFromUsername(username));
if(dataset != null) {
Map<String,String> result = new HashMap<String,String>();
@ -398,32 +417,40 @@ public class GrsfPublisherStockService {
throw new Exception("Please specify the 'catalog_id' property");
}
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
throw new Exception(
"There was a problem while serving your request. No catalogue instance was found in this context!");
} else {
// if(catalogue == null) {
// throw new Exception(
// "There was a problem while serving your request. No catalogue instance was found in this context!");
// } else {
// get already published record and modify it
String apiKey = catalogue.getApiKeyFromUsername(username);
String apiKey = "";
// String apiKey = catalogue.getApiKeyFromUsername(username);
CkanDataset recordPublished = catalogue.getDataset(catalogId, apiKey);
if(recordPublished == null)
throw new Exception("A record with id " + catalogId + " does not exist!");
// retrieve the user's email and fullname
String authorMail = HelperMethods.getUserEmail(context, token);
String authorFullname = HelperMethods.getUserFullname(context, token);
String authorMail = "";
String authorFullname = "";
if(authorMail == null || authorFullname == null) {
logger.debug("Author fullname or mail missing, cannot continue");
throw new Exception("Sorry but there was not possible to retrieve your fullname/email!");
}
String organization = HelperMethods.retrieveOrgNameFromScope(context); //"grsf_admin";
// retrieve the user's email and fullname
// String authorMail = HelperMethods.getUserEmail(context, token);
// String authorFullname = HelperMethods.getUserFullname(context, token);
//
// if(authorMail == null || authorFullname == null) {
// logger.debug("Author fullname or mail missing, cannot continue");
// throw new Exception("Sorry but there was not possible to retrieve your fullname/email!");
// }
String organization = "";
// String organization = HelperMethods.retrieveOrgNameFromScope(context); //"grsf_admin";
// check he/she has admin role
CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// name, product url and are going to remain unchanged (so we keep them from the publisher record);
String name = recordPublished.getName();
@ -462,11 +489,12 @@ public class GrsfPublisherStockService {
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY));
// convert extras' keys to keys with namespace
Map<String,String> namespaces = HelperMethods
.getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_STOCK);
if(namespaces == null)
throw new Exception("Failed to retrieve the namespaces for the key fields!");
Map<String,String> namespaces = new HashMap<>();
// Map<String,String> namespaces = HelperMethods
// .getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_STOCK);
//
// if(namespaces == null)
// throw new Exception("Failed to retrieve the namespaces for the key fields!");
// retrieve the url
String modifiedUUIDKey = namespaces.containsKey(Constants.ITEM_URL_FIELD)
@ -506,7 +534,7 @@ public class GrsfPublisherStockService {
} else {
throw new Exception("There was an error during the item updated, sorry");
}
}
// }
} catch(Exception e) {
logger.error("Failed to update stock record", e);
responseBean.setError(e.getMessage());
@ -534,12 +562,13 @@ public class GrsfPublisherStockService {
try {
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) {
throw new Exception(
"There was a problem while serving your request. No catalogue instance was found in this context!");
} else {
DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
//
// if(catalogue == null) {
// throw new Exception(
// "There was a problem while serving your request. No catalogue instance was found in this context!");
// } else {
// catalog id must be reported
String uuid = bean.getUuid();
@ -554,7 +583,7 @@ public class GrsfPublisherStockService {
// check system type
boolean isGRSF = !record.getExtrasAsHashMap().get(Constants.SYSTEM_TYPE_CUSTOM_KEY)
.equals(Constants.SYSTEM_TYPE_LEGACY_RECORD);
.equals(Constants.SYSTEM_TYPE_FOR_SOURCES_VALUE);
if(!isGRSF)
throw new Exception("You are trying to modify a Legacy record!");
@ -574,7 +603,7 @@ public class GrsfPublisherStockService {
responseBean.setKbUuid(uuid);
responseBean.setId(record.getId());
responseBean.setItemUrl(record.getExtrasAsHashMap().get(Constants.ITEM_URL_FIELD));
}
// }
} catch(Exception e) {
logger.error("Failed to update stock record's status", e);
responseBean.setError(e.getMessage());

View File

@ -4,6 +4,7 @@ import java.beans.PropertyDescriptor;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@ -13,34 +14,31 @@ import java.util.concurrent.ConcurrentHashMap;
import javax.servlet.ServletContext;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CkanResource;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.RefersToBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.Base;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.Common;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Base;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.CkanResource;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Common;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Group;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.RefersToBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Tag;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.TimeSeriesBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.WritePostCatalogueManagerThread;
import org.gcube.data_catalogue.grsf_publish_ws.utils.csv.ManageTimeSeriesThread;
import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.AssociationToGroupThread;
import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.WritePostCatalogueManagerThread;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean;
import org.gcube.datacatalogue.ckanutillibrary.shared.RolesCkanGroupOrOrg;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Product_Type;
import org.gcube.datacatalogue.common.enums.Sources;
import org.gcube.datacatalogue.common.enums.Status;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.AAA_PORTED.Product_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.gcube.datacatalogue.common.AAA_PORTED.Status;
import org.gcube.datacatalogue.common.AAA_PORTED.Stock_Type;
import org.json.simple.JSONObject;
import org.slf4j.LoggerFactory;
import eu.trentorise.opendata.jackan.model.CkanDataset;
import eu.trentorise.opendata.jackan.model.CkanLicense;
/**
* Services common utils.
* @author Costantino Perciante (ISTI - CNR)
@ -53,20 +51,20 @@ public class CommonServiceUtils {
private static final int TAG_MAX_SIZE = 100;
private static Map<String,Boolean> extensionsCheck = new ConcurrentHashMap<>();
/**
* Retrieve the list of licenses for stocks and fisheries
* @return
*/
public static Map<String,String> getLicenses(DataCatalogue catalogue) {
logger.info("Requested licenses...");
Map<String,String> toReturn = new HashMap<String,String>();
List<CkanLicense> licenses = catalogue.getLicenses();
for(CkanLicense ckanLicense : licenses) {
toReturn.put(ckanLicense.getId(), ckanLicense.getTitle());
}
return toReturn;
}
// /**
// * Retrieve the list of licenses for stocks and fisheries
// * @return
// */
// public static Map<String,String> getLicenses(DataCatalogue catalogue) {
// logger.info("Requested licenses...");
// Map<String,String> toReturn = new HashMap<String,String>();
// List<CkanLicense> licenses = catalogue.getLicenses();
//
// for(CkanLicense ckanLicense : licenses) {
// toReturn.put(ckanLicense.getId(), ckanLicense.getTitle());
// }
// return toReturn;
// }
/**
* Validate an aggregated GRSF record. TODO use @Valid tags
@ -217,90 +215,6 @@ public class CommonServiceUtils {
}
}
public static final String GROUP_SUFFIX = "-group";
/**
* Convert a group name to its id on ckan
* @param origName
* @return
*/
private static String getGroupIDOnCkan(String origName){
if(origName == null) {
throw new IllegalArgumentException("origName cannot be null");
}
String modified = origName.replaceAll("\\(", "");
modified = modified.replaceAll("\\)", "");
modified = modified.trim().toLowerCase().replaceAll("[^A-Za-z0-9-]", "-");
if(modified.startsWith("-")) {
modified = modified.substring(1);
}
if(modified.endsWith("-")) {
modified = modified.substring(0, modified.length() -1);
}
return modified;
}
public static String getGroupId(String groupName) {
StringBuffer stringBuffer = new StringBuffer();
stringBuffer.append(groupName);
/*
* The "_group" suffix is added to all groups to
* avoid issues on groups and organizations having the same name
* e.g. RAM organization (id=ram) and RAM group (id=ram_group)
*/
if(!groupName.endsWith(GROUP_SUFFIX)) {
stringBuffer.append(GROUP_SUFFIX);
}
return getGroupIDOnCkan(stringBuffer.toString());
}
private static void addGroup(Group group, Sources source, String value, Set<String> groups) {
String conditionToCheck = group.condition();
String groupNameOverValue = group.groupNameOverValue();
boolean prependSource = group.prependSourceToGroupName();
boolean match = conditionToCheck.isEmpty() ? true
: value.matches(conditionToCheck);
if(match) {
StringBuffer stringBuffer = new StringBuffer();
if(prependSource) {
stringBuffer.append(source.getURLPath());
stringBuffer.append(" ");
}
if(groupNameOverValue.isEmpty()) {
stringBuffer.append(value);
}else {
stringBuffer.append(groupNameOverValue);
}
String groupId = getGroupId(stringBuffer.toString());
groups.add(groupId);
}
}
/**
 * Add the record to the groups derived from its system type, product type and sources.
 * @param groups set collecting the group ids
 * @param sourcesList the record's source names
 * @param productType stock or fishery
 * @param sourceInPath the source declared in the request path
 */
private static void addRecordToGroups(Set<String> groups, Set<String> sourcesList, Product_Type productType, Sources sourceInPath) {
// GRSF records go into the grsf group, every other record into the legacy one.
String systemGroup = sourceInPath == Sources.GRSF
? Sources.GRSF.getURLPath()
: Constants.SYSTEM_TYPE_LEGACY_RECORD;
groups.add(getGroupId(systemGroup));
// e.g. stock-group or fishery-group
groups.add(getGroupId(productType.getOrigName()));
// e.g. firms-group, fishsource-group, ram-group
for(String source : sourcesList) {
groups.add(getGroupId(source));
}
}
/**
* Retrieve the list of groups' names for this object
*/
@ -308,22 +222,74 @@ public class CommonServiceUtils {
Sources source) {
if(field.isAnnotationPresent(Group.class)) {
Group group = field.getAnnotation(Group.class);
String conditionToCheck = group.condition();
String groupNameOverValue = group.groupNameOverValue();
// See https://support.d4science.org/issues/11832
boolean assessmentUnit = false;
boolean prependSource = group.prependSourceToGroupName();
if(record instanceof StockRecord) {
StockRecord stockRecord = (StockRecord) record;
Stock_Type stock_Type = stockRecord.getType();
if(stock_Type != Stock_Type.Assessment_Unit) {
prependSource = false;
}else {
assessmentUnit = true;
}
}
// end patch for https://support.d4science.org/issues/11832
try {
Object f = new PropertyDescriptor(field.getName(), current).getReadMethod().invoke(record);
if(f != null) {
if(f instanceof List<?>) {
List asList = ((List) f);
if(!asList.isEmpty()) {
logger.debug("The object annotated with @Group is a list. Adding ... ");
// add all the available elements
// else add all the available elements
for(int i = 0; i < asList.size(); i++) {
String value = asList.get(i).toString().trim();
addGroup(group, source, value, groups);
boolean match = conditionToCheck.isEmpty() ? true
: asList.get(i).toString().trim().matches(conditionToCheck);
if(match) {
String groupName = groupNameOverValue.isEmpty()
? HelperMethods.getGroupNameOnCkan(source.toString().toLowerCase() + "-"
+ asList.get(i).toString().trim())
: source.toString().toLowerCase() + "-" + groupNameOverValue;
if(assessmentUnit && !prependSource) {
groups.add(groupNameOverValue);
}else {
groups.add(groupName);
}
}
}
}
} else {
String value = f.toString().trim();
addGroup(group, source, value, groups);
// also convert to the group name that should be on ckan
boolean match = conditionToCheck.isEmpty() ? true
: f.toString().trim().matches(conditionToCheck);
if(match) {
String groupName = groupNameOverValue.isEmpty()
? HelperMethods.getGroupNameOnCkan(
source.toString().toLowerCase() + "-" + f.toString().trim())
: source.toString().toLowerCase() + "-" + groupNameOverValue;
if(assessmentUnit && !prependSource) {
groups.add(groupNameOverValue);
}else {
groups.add(groupName);
}
}
}
}
@ -331,6 +297,7 @@ public class CommonServiceUtils {
logger.error("Failed to read value for field " + field.getName() + " skipping", e);
}
}
}
/**
@ -450,45 +417,46 @@ public class CommonServiceUtils {
}
}
/**
 * Ensure the user has the Catalogue-Administrator role in the given organization.
 * @throws Exception if the user lacks the admin role
 */
public static void hasAdminRole(String username, DataCatalogue catalogue, String apiKey, String organization)
throws Exception {
String role = catalogue.getRoleOfUserInOrganization(username, organization, apiKey);
logger.info("Role of the user " + username + " is " + role + " in " + organization);
// Only a non-empty role equal (case-insensitively) to ADMIN is authorized.
boolean isAdmin = role != null && !role.isEmpty()
&& role.equalsIgnoreCase(RolesCkanGroupOrOrg.ADMIN.toString());
if(!isAdmin)
throw new Exception(
"You are not authorized to create a product. Please check you have the Catalogue-Administrator role!");
}
/**
 * Validate this record's future name and make sure no product already uses it.
 * @param futureName the candidate name
 * @param catalogue the catalogue to check against
 * @throws Exception when the name is malformed or already taken
 */
public static void checkName(String futureName, DataCatalogue catalogue) throws Exception {
if(!HelperMethods.isNameValid(futureName)) {
throw new Exception(
"The 'uuid_knowledge_base' must contain only alphanumeric characters, and symbols like '.' or '_', '-'");
}
// The name is syntactically fine: make sure it is not already taken.
logger.debug("Checking if such name [" + futureName + "] doesn't exist ...");
if(catalogue.existProductWithNameOrId(futureName)) {
logger.debug("A product with 'uuid_knowledge_base' " + futureName + " already exists");
throw new Exception("A product with 'uuid_knowledge_base' " + futureName + " already exists");
}
}
// /**
// * Evaluate if the user has the admin role
// * Throws exception if he/she doesn't
// */
// public static void hasAdminRole(String username, DataCatalogue catalogue, String apiKey, String organization)
// throws Exception {
//
// String role = catalogue.getRoleOfUserInOrganization(username, organization, apiKey);
// logger.info("Role of the user " + username + " is " + role + " in " + organization);
//
// if(role == null || role.isEmpty() || !role.equalsIgnoreCase(RolesCkanGroupOrOrg.ADMIN.toString()))
// throw new Exception(
// "You are not authorized to create a product. Please check you have the Catalogue-Administrator role!");
//
// }
// /**
// * Check this record's name
// * @param futureName
// * @param catalogue
// * @throws Exception on name check
// */
// public static void checkName(String futureName, DataCatalogue catalogue) throws Exception {
//
// if(!HelperMethods.isNameValid(futureName)) {
// throw new Exception(
// "The 'uuid_knowledge_base' must contain only alphanumeric characters, and symbols like '.' or '_', '-'");
// } else {
//
// logger.debug("Checking if such name [" + futureName + "] doesn't exist ...");
// boolean alreadyExists = catalogue.existProductWithNameOrId(futureName);
//
// if(alreadyExists) {
// logger.debug("A product with 'uuid_knowledge_base' " + futureName + " already exists");
// throw new Exception("A product with 'uuid_knowledge_base' " + futureName + " already exists");
//
// }
// }
// }
/**
* Validate and check sources
@ -514,20 +482,39 @@ public class CommonServiceUtils {
// validate the record if it is a GRSF one and set the record type and in manage context
// Status field is needed only in the Manage context for GRSF records
if( (context.equals((String) contextServlet.getInitParameter(HelperMethods.MANAGE_CONTEX_KEY))) || (context.equals((String) contextServlet.getInitParameter(HelperMethods.PREVALIDATE_CONTEX_KEY)))) {
// In web.xml a parameter indicates the Admin VRE as full path.
if(context.equals((String) contextServlet.getInitParameter(HelperMethods.MANAGE_CONTEX_KEY))) {
// If we are in Admin VRE and the source is GRSF
if(sourceInPath.equals(Sources.GRSF)) {
// RefersTo cannot be empty or null in GRSF_Admin for a GRSF record
List<RefersToBean> refersTo = record.getRefersTo();
if(refersTo == null || refersTo.isEmpty())
throw new Exception("refers_to is empty for a GRSF record");
// For each RefersTo a Resource is created in the record. The resource point to the referred record.
// We have also to set database sources
String databaseSource = "";
// we have the id within the catalog of this record. This means that we can retrieve the record and its system:type
for(RefersToBean refersToBean : refersTo) {
String sourceOrganization = getRecordOrganization(refersToBean.getId(), apiKey, context);
String sourceOrganization = "";
// Here a lookup to the referred records is performed:
// getRecordOrganization reads the record and gets its organization
// String sourceOrganization = getRecordOrganization(refersToBean.getId(), apiKey, context);
resources.add(new ResourceBean(refersToBean.getUrl(), sourceOrganization, "", null, username, null,
null));
sourcesList.add(sourceOrganization.toLowerCase());
// Concatenating the organizations into a single string provides the databaseSource value.
// Better to use a list.
databaseSource += sourceOrganization + " ";
}
@ -542,13 +529,13 @@ public class CommonServiceUtils {
if(databaseSources!=null) {
for(Resource<Sources> source : databaseSources) {
Sources sourceName = source.getName();
sourcesList.add(sourceName.getURLPath());
sourcesList.add(sourceName.getOrigName().toLowerCase());
}
}
}
addRecordToGroups(groups, sourcesList, productType, sourceInPath);
// append to groups: we need to add this record to the corresponding group of the sources
addRecordToGroupSources(groups, new ArrayList(sourcesList), productType, sourceInPath);
// validate
CommonServiceUtils.validateAggregatedRecord(record, sourceInPath);
@ -561,20 +548,37 @@ public class CommonServiceUtils {
sourceInPath.equals(Sources.GRSF)
? productType.equals(Product_Type.FISHERY) ? ((FisheryRecord) record).getType().getOrigName()
: ((StockRecord) record).getType().getOrigName()
: Constants.SYSTEM_TYPE_LEGACY_RECORD);
: Constants.SYSTEM_TYPE_FOR_SOURCES_VALUE);
logger.debug("Domain is " + productType.getOrigName() + " and system type " + record.getSystemType());
/*
* It has been decided to add tags also for legacy records see #23216
* boolean skipTags = !sourceInPath.equals(Sources.GRSF); // no tags for the Original records
*/
CommonServiceUtils.getTagsGroupsResourcesExtrasByRecord(tags, false, groups, false, resources, false,
// evaluate the custom fields/tags, resources and groups
groups.add(sourceInPath.getOrigName().toLowerCase() + "-" + productType.getOrigName().toLowerCase()); //e.g. grsf-fishery
boolean skipTags = !sourceInPath.equals(Sources.GRSF); // no tags for the Original records
CommonServiceUtils.getTagsGroupsResourcesExtrasByRecord(tags, skipTags, groups, false, resources, false,
customFields, record, username, sourceInPath);
}
/**
 * Add the record to the single group representing its combination of sources:
 * the group name is the source path, the product type and the sorted list of
 * sources joined by '-' (e.g. "grsf-stock-firms-ram").
 * @param groups set collecting the group names
 * @param sourcesList the record's source names; NOTE: sorted in place
 * @param productType stock or fishery
 * @param sourceInPath the source declared in the request path
 */
private static void addRecordToGroupSources(Set<String> groups, List<String> sourcesList, Product_Type productType,
Sources sourceInPath) {
// Be sure the names are sorted, because the groups have been generated this way.
Collections.sort(sourcesList);
// Build the name with a StringBuilder instead of repeated String '+=' concatenation.
StringBuilder groupName = new StringBuilder();
groupName.append(sourceInPath.getOrigName().toLowerCase());
groupName.append("-").append(productType.getOrigName().toLowerCase());
for(String source : sourcesList) {
groupName.append("-").append(source);
}
groups.add(groupName.toString());
}
// /**
// * Fetch the system:type property from a record
// * @param itemIdOrName
@ -583,28 +587,27 @@ public class CommonServiceUtils {
// * @throws Exception
// */
// public static String getSystemTypeValue(String itemIdOrName, String apiKey, String context) throws Exception {
//
// DataCatalogue catalog = HelperMethods.getDataCatalogueRunningInstance(context);
// CkanDataset dataset = catalog.getDataset(itemIdOrName, apiKey);
// if(dataset == null) {
// if(dataset == null)
// throw new Exception("Unable to find record with id or name " + itemIdOrName);
// }
// String systemTypeValue = dataset.getExtrasAsHashMap().get(Constants.SYSTEM_TYPE_CUSTOM_KEY);
// if(systemTypeValue == null || systemTypeValue.isEmpty()) {
// if(systemTypeValue == null || systemTypeValue.isEmpty())
// throw new Exception(Constants.SYSTEM_TYPE_CUSTOM_KEY + " property not set in record " + itemIdOrName);
// }else {
// else
// return systemTypeValue;
// }
//
// }
/**
 * Resolve the title of the organization owning the given catalogue record.
 * @param itemIdOrName the record's id or name on CKAN
 * @param apiKey the caller's CKAN api key
 * @param context the gCube context used to locate the catalogue instance
 * @return the organization title of the record
 * @throws Exception when no record matches itemIdOrName
 */
public static String getRecordOrganization(String itemIdOrName, String apiKey, String context) throws Exception {
DataCatalogue catalog = HelperMethods.getDataCatalogueRunningInstance(context);
CkanDataset dataset = catalog.getDataset(itemIdOrName, apiKey);
if(dataset != null)
return dataset.getOrganization().getTitle();
throw new Exception("Unable to find record with id or name " + itemIdOrName);
}
//
// public static String getRecordOrganization(String itemIdOrName, String apiKey, String context) throws Exception {
// DataCatalogue catalog = HelperMethods.getDataCatalogueRunningInstance(context);
// CkanDataset dataset = catalog.getDataset(itemIdOrName, apiKey);
// if(dataset == null)
// throw new Exception("Unable to find record with id or name " + itemIdOrName);
// else
// return dataset.getOrganization().getTitle();
// }
/**
* Actions to execute once the dataset has been updated or created.
@ -670,12 +673,12 @@ public class CommonServiceUtils {
new ManageTimeSeriesThread(record, futureName, username, catalogue, context, token).start();
// write a post if the product has been published in grsf context
if(catalogue.isSocialPostEnabled() && !isUpdated && context
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY))) {
new WritePostCatalogueManagerThread(context, token, futureTitle, itemUrlForThread, true,
new ArrayList<String>(), authorFullname).start();
logger.info("Thread to write a post about the new product has been launched");
}
// if(catalogue.isSocialPostEnabled() && !isUpdated && context
// .equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY))) {
// new WritePostCatalogueManagerThread(context, token, futureTitle, itemUrlForThread, true,
// new ArrayList<String>(), authorFullname).start();
// logger.info("Thread to write a post about the new product has been launched");
// }
} catch(InterruptedException e) {
logger.error("Error", e);
}
@ -683,25 +686,25 @@ public class CommonServiceUtils {
}).start();
}
/**
 * Extend the user's roles to the other organizations, at most once per user.
 * @param username the user whose roles are extended
 * @param catalogue the catalogue instance performing the assignment
 * @param organization the organization the roles originate from
 * @param admin the role to assign
 */
public static void extendRoleToOtherOrganizations(String username, DataCatalogue catalogue, String organization,
RolesCkanGroupOrOrg admin) {
logger.debug("Checking if role extension is needed here");
// Skip users whose roles were already extended.
Boolean alreadyExtended = extensionsCheck.get(username);
if(alreadyExtended != null && alreadyExtended)
return;
catalogue.assignRolesOtherOrganization(username, organization, admin);
extensionsCheck.put(username, true);
}
// /**
// * Extend roles to other organization
// * @param username
// * @param catalogue
// * @param organization
// * @param admin
// */
// public static void extendRoleToOtherOrganizations(String username, DataCatalogue catalogue, String organization,
// RolesCkanGroupOrOrg admin) {
//
// logger.debug("Checking if role extension is needed here");
// if(extensionsCheck.containsKey(username) && extensionsCheck.get(username))
// return;
// else {
// catalogue.assignRolesOtherOrganization(username, organization, admin);
// extensionsCheck.put(username, true);
// }
//
// }
/**
* Evaluate in which organization a record has to be published. The only exception is when grsf_admin is involved.
@ -710,14 +713,9 @@ public class CommonServiceUtils {
* @return
*/
public static String evaluateOrganization(String organization, Sources sourceInPath) {
if(sourceInPath.equals(Sources.GRSF) &&
(organization.compareTo(Constants.GRSF_ADMIN_ORGANIZATION_NAME)==0 || organization.compareTo(Constants.GRSF_PRE_ORGANIZATION_NAME)==0)){
if(organization.compareTo(Constants.GRSF_ADMIN_ORGANIZATION_NAME)==0) {
return Constants.GRSF_ADMIN_ORGANIZATION_NAME;
}else {
return Constants.GRSF_PRE_ORGANIZATION_NAME;
}
}else
return sourceInPath.getURLPath();
if(sourceInPath.equals(Sources.GRSF) && organization.equals(Constants.GRSF_ADMIN_ORGANIZATION_NAME))
return Constants.GRSF_ADMIN_ORGANIZATION_NAME;
else
return sourceInPath.getOrigName().toLowerCase();
}
}

View File

@ -5,6 +5,7 @@ import static org.gcube.resources.discovery.icclient.ICFactory.client;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringReader;
@ -19,19 +20,23 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.homelibrary.home.exceptions.InternalErrorException;
import org.gcube.common.homelibrary.home.workspace.WorkspaceFolder;
import org.gcube.common.homelibrary.home.workspace.WorkspaceItem;
import org.gcube.common.homelibrary.home.workspace.exceptions.InsufficientPrivilegesException;
import org.gcube.common.homelibrary.home.workspace.exceptions.ItemAlreadyExistException;
import org.gcube.common.homelibrary.home.workspace.exceptions.ItemNotFoundException;
import org.gcube.common.homelibrary.home.workspace.folder.items.ExternalFile;
import org.gcube.common.resources.gcore.utils.XPathHelper;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.storagehub.client.dsl.FileContainer;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.GcoreEndPointReaderSocial;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueFactory;
import org.gcube.datacatalogue.common.caches.CacheImpl;
import org.gcube.datacatalogue.common.caches.CacheInterface;
import org.gcube.datacatalogue.common.AAA_UNNEEDED.CacheImpl;
import org.gcube.datacatalogue.common.AAA_UNNEEDED.CacheInterface;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.Query;
import org.gcube.resources.discovery.client.queries.impl.QueryBox;
import org.gcube.social_networking.social_networking_client_library.UserClient;
import org.jsoup.Jsoup;
import org.jsoup.safety.Whitelist;
import org.slf4j.LoggerFactory;
@ -68,12 +73,9 @@ public abstract class HelperMethods {
// to be retrieved from the web.xml
public static final String MANAGE_CONTEX_KEY = "ManageVRE";
public static final String PREVALIDATE_CONTEX_KEY = "PreValidateVRE";
public static final String PUBLIC_CONTEX_KEY = "PublicVRE";
private static final String CSV_MIME = "text/csv";
private static final String PATH_SEPARATOR = "/";
// This key is used in replaceFieldsKey() function to indicate to remove the metadata field
private static final String NONE_KEY = "none:none";
// caches
private static CacheInterface<String, String> userEmailCache = new CacheImpl<String, String>(1000 * 60 * 60 * 24);
@ -82,38 +84,59 @@ public abstract class HelperMethods {
private static CacheInterface<String, DataCatalogue> catalogueCache = new CacheImpl<String, DataCatalogue>(1000 * 60 * 60 * 24);
/**
 * Retrieve the running instance of the data catalogue for this scope.
 * The instance is cached per scope to avoid repeated factory lookups.
 * @param scope the scope whose catalogue instance is wanted
 * @return the (possibly cached) DataCatalogue instance
 * @throws Exception if the catalogue library cannot be instantiated
 */
public static DataCatalogue getDataCatalogueRunningInstance(String scope) throws Exception{
// Read the cache once: calling get() twice on a TTL cache could return a
// non-null value on the first call and null on the second if the entry expires.
DataCatalogue cached = catalogueCache.get(scope);
if(cached != null)
return cached;
try{
DataCatalogue instance = DataCatalogueFactory.getFactory().getUtilsPerScope(scope);
catalogueCache.insert(scope, instance);
return instance;
}catch(Exception e){
logger.error("Failed to instantiate data catalogue lib", e);
throw new Exception("Failed to retrieve catalogue information");
}
}
/**
* Retrieve the organization name in which the user wants to publish starting from the scope
* @param contextInWhichPublish
* Convert a group name to its id on ckan
* @param origName
* @return
*/
public static String retrieveOrgNameFromScope(String scope) {
public static String getGroupNameOnCkan(String origName){
String[] splittedScope = scope.split("/");
return splittedScope[splittedScope.length - 1].toLowerCase();
if(origName == null)
throw new IllegalArgumentException("origName cannot be null");
String modified = origName.trim().toLowerCase().replaceAll("[^A-Za-z0-9-]", "-");
if(modified.startsWith("-"))
modified = modified.substring(1);
if(modified.endsWith("-"))
modified = modified.substring(0, modified.length() -1);
logger.info("Group name generated is " + modified);
return modified;
}
// /**
// * Retrieve the running instance of the data catalogue for this scope
// * @return
// * @throws Exception
// */
// public static DataCatalogue getDataCatalogueRunningInstance(String scope) throws Exception{
//
// if(catalogueCache.get(scope) != null)
// return catalogueCache.get(scope);
// else{
// try{
// DataCatalogue instance = DataCatalogueFactory.getFactory().getUtilsPerScope(scope);
// catalogueCache.insert(scope, instance);
// return instance;
// }catch(Exception e){
// logger.error("Failed to instantiate data catalogue lib", e);
// throw new Exception("Failed to retrieve catalogue information");
// }
// }
// }
// /**
// * Retrieve the organization name in which the user wants to publish starting from the scope
// * @param contextInWhichPublish
// * @return
// */
// public static String retrieveOrgNameFromScope(String scope) {
//
// String[] splittedScope = scope.split("/");
// return splittedScope[splittedScope.length - 1].toLowerCase();
//
// }
/**
* Validate the name the product will have
* @param futureName
@ -128,52 +151,59 @@ public abstract class HelperMethods {
}
}
/**
/* *//**
* Retrieve the user's email given his/her username
* @param context
* @param token
* @return
* @throws Exception
*/
public static String getUserEmail(String context, String token) throws Exception{
*//*
public static String getUserEmail(String context, String token){
// check in cache
String email = null;
if((email = (String) userEmailCache.get(token)) != null){
return email;
String result = null;
if((result = (String) userEmailCache.get(token)) != null){
return result;
}else{
UserClient userClient = new UserClient();
email = userClient.getEmail();
userEmailCache.insert(token, email);
String baseUrl = new GcoreEndPointReaderSocial(context).getBasePath();
String url = baseUrl.endsWith("/") ? baseUrl + "users/getUserEmail?gcube-token=" + token :
baseUrl + "/users/getUserEmail?gcube-token=" + token;
logger.debug("Request url is " + url);
result = executGETHttpRequest(url, 200);
userEmailCache.insert(token, result);
}
return email;
return result;
}
/**
*//**
* Retrieve the user's fullname given his/her username
* @param context
* @param token
* @return
* @throws Exception
*/
public static String getUserFullname(String context, String token) throws Exception{
*//*
public static String getUserFullname(String context, String token){
// check in cache
String fullName = null;
if((fullName = (String) userFullnameCache.get(token)) != null){
return fullName;
String result = null;
if((result = (String) userFullnameCache.get(token)) != null){
return result;
}else{
UserClient userClient = new UserClient();
fullName = userClient.getFullName();
userFullnameCache.insert(token, fullName);
String baseUrl = new GcoreEndPointReaderSocial(context).getBasePath();
String url = baseUrl.endsWith("/") ? baseUrl + "users/getUserFullname?gcube-token=" + token :
baseUrl + "/users/getUserFullname?gcube-token=" + token;
logger.debug("Request url is " + url);
result = executGETHttpRequest(url, 200);
userFullnameCache.insert(token, result);
}
return fullName;
return result;
}
/**
*//**
* Execute the GET http request at this url, and return the result as string
* @return
*/
*//*
private static String executGETHttpRequest(String url, int expectedCodeOnSuccess){
try(CloseableHttpClient client = HttpClientBuilder.create().build();){
@ -203,7 +233,7 @@ public abstract class HelperMethods {
return null;
}
}*/
/**
* Check that the given license id is in CKAN
@ -229,20 +259,29 @@ public abstract class HelperMethods {
* @param csvFile
* @return
*/
public static FileContainer uploadExternalFile(FolderContainer resourceFormatFolder, String resourceToAttachName, String description, File csvFile) {
public static ExternalFile uploadExternalFile(WorkspaceFolder resourceFormatFolder, String resourceToAttachName, String description, File csvFile) {
try {
try (InputStream is= new FileInputStream(csvFile)) {
FileContainer fileContainer = resourceFormatFolder.uploadFile(is , resourceToAttachName, description);
return fileContainer;
}
} catch (StorageHubException she) {
logger.error("Failed to upload the file into the workspace shared folder for " + resourceToAttachName, she);
} catch (Exception e) {
WorkspaceItem existsFile = resourceFormatFolder.find(resourceToAttachName);
if(existsFile == null)
return resourceFormatFolder.createExternalFileItem(resourceToAttachName, description, CSV_MIME, csvFile);
else{
InputStream targetStream = new FileInputStream(csvFile);
existsFile.updateItem(targetStream);
return (ExternalFile)existsFile;
}
} catch (InsufficientPrivilegesException | ItemAlreadyExistException
| InternalErrorException e) {
logger.error("Failed to upload the file into the workspace shared folder for " + resourceToAttachName, e);
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} catch (ItemNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
@ -253,10 +292,9 @@ public abstract class HelperMethods {
* @param subPath
* @return null if an error occurred
*/
public static FolderContainer createOrGetSubFoldersByPath(FolderContainer folder, String subPath){
FolderContainer parentFolder = folder;
public static WorkspaceFolder createOrGetSubFoldersByPath(WorkspaceFolder folder, String subPath){
WorkspaceFolder parentFolder = folder;
if(folder == null)
throw new IllegalArgumentException("Root folder is null!");
@ -275,9 +313,8 @@ public abstract class HelperMethods {
String[] splittedPaths = subPath.split(PATH_SEPARATOR);
for (String path : splittedPaths) {
FolderContainer createdFolder = getFolderOrCreate(parentFolder, path, "");
logger.debug("Created subfolder with path " + createdFolder.get().getPath());
WorkspaceFolder createdFolder = getFolderOrCreate(parentFolder, path, "");
logger.debug("Created subfolder with path " + createdFolder.getPath());
parentFolder = createdFolder;
}
@ -293,18 +330,23 @@ public abstract class HelperMethods {
* Get a folder within the catalogue folder or create it if it doesn't exist.
* @return
*/
public static FolderContainer getFolderOrCreate(FolderContainer folder, String relativePath, String descriptionFolder){
FolderContainer result = null;
public static WorkspaceFolder getFolderOrCreate(WorkspaceFolder folder, String relativePath, String descriptionFolder){
WorkspaceFolder result = null;
try {
result = folder.openByRelativePath(relativePath).asFolder();
WorkspaceItem foundFolder = folder.find(relativePath);
if(foundFolder != null && foundFolder.isFolder())
result = (WorkspaceFolder)foundFolder;
if(result != null)
logger.debug("Folder found with name " + result.getName() + ", it has id " + result.getId());
else
throw new Exception("There is no folder with name " + relativePath + " under folder " + folder.getName());
} catch (Exception e) {
logger.debug("Probably the folder doesn't exist");
try{
result = folder.newFolder(relativePath, descriptionFolder);
} catch (StorageHubException se) {
logger.error("Failed to get or generate this folder", se);
result = folder.createFolder(relativePath, descriptionFolder);
} catch (InsufficientPrivilegesException | InternalErrorException | ItemAlreadyExistException e2) {
logger.error("Failed to get or generate this folder", e2);
}
}
return result;
@ -476,56 +518,56 @@ public abstract class HelperMethods {
}
/**
 * Return a map for converting a key to a namespace:key format by reading a generic resource.
 * <p>
 * The mapping is read from an "ApplicationProfile" generic resource whose Name equals
 * {@code resourceName}: the {@code //originalKey} nodes are paired positionally with the
 * {@code //modifiedKey} nodes. Results are cached per resource name. On any failure the
 * method logs the error and returns {@code null} (callers must handle a null map).
 * @param resourceName the name of the ApplicationProfile generic resource to read
 * @return the originalKey -&gt; modifiedKey map, or null if the lookup or parsing failed
 */
public static Map<String, String> getFieldToFieldNameSpaceMapping(String resourceName){
Map<String, String> toReturn = new HashMap<String, String>();
// check if data are in cache
if(namespacesCache.get(resourceName) != null){
return namespacesCache.get(resourceName);
}
else{
try {
// XQuery against the infrastructure's generic-resource collection
Query q = new QueryBox("for $profile in collection('/db/Profiles/GenericResource')//Resource " +
"where $profile/Profile/SecondaryType/string() eq '"+ "ApplicationProfile" + "' and $profile/Profile/Name/string() " +
" eq '" + resourceName + "'" +
"return $profile");
DiscoveryClient<String> client = client();
List<String> appProfile = client.submit(q);
if (appProfile == null || appProfile.size() == 0)
throw new Exception("Your applicationProfile is not registered in the infrastructure");
else {
// Only the first matching profile is considered.
String elem = appProfile.get(0);
DocumentBuilder docBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
Node node = docBuilder.parse(new InputSource(new StringReader(elem))).getDocumentElement();
XPathHelper helper = new XPathHelper(node);
NodeList nodeListKeys = helper.evaluateForNodes("//originalKey");
NodeList nodeListModifiedKeys = helper.evaluateForNodes("//modifiedKey");
int sizeKeys = nodeListKeys != null ? nodeListKeys.getLength() : 0;
int sizeKeysModifed = nodeListModifiedKeys != null ? nodeListModifiedKeys.getLength() : 0;
// The two node lists are positional: entry i of originalKey maps to entry i of modifiedKey,
// so their sizes must agree.
if(sizeKeys != sizeKeysModifed)
throw new Exception("Malformed XML");
logger.debug("Size is " + sizeKeys);
for (int i = 0; i < sizeKeys; i++) {
toReturn.put(nodeListKeys.item(i).getTextContent(), nodeListModifiedKeys.item(i).getTextContent());
}
}
logger.debug("Map is " + toReturn);
// Cache the (possibly empty) map only after a fully successful parse.
namespacesCache.insert(resourceName, toReturn);
return toReturn;
} catch (Exception e) {
logger.error("Error while trying to fetch applicationProfile profile from the infrastructure", e);
return null;
}
}
}
// /**
// * Return a map for converting a key to a namespace:key format by reading a generic resource.
// * @return a map
// */
// public static Map<String, String> getFieldToFieldNameSpaceMapping(String resourceName){
// Map<String, String> toReturn = new HashMap<String, String>();
//
// // check if data are in cache
// if(namespacesCache.get(resourceName) != null){
// return namespacesCache.get(resourceName);
// }
// else{
// try {
// Query q = new QueryBox("for $profile in collection('/db/Profiles/GenericResource')//Resource " +
// "where $profile/Profile/SecondaryType/string() eq '"+ "ApplicationProfile" + "' and $profile/Profile/Name/string() " +
// " eq '" + resourceName + "'" +
// "return $profile");
//
// DiscoveryClient<String> client = client();
// List<String> appProfile = client.submit(q);
//
// if (appProfile == null || appProfile.size() == 0)
// throw new Exception("Your applicationProfile is not registered in the infrastructure");
// else {
//
// String elem = appProfile.get(0);
// DocumentBuilder docBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
// Node node = docBuilder.parse(new InputSource(new StringReader(elem))).getDocumentElement();
// XPathHelper helper = new XPathHelper(node);
//
// NodeList nodeListKeys = helper.evaluateForNodes("//originalKey");
// NodeList nodeListModifiedKeys = helper.evaluateForNodes("//modifiedKey");
// int sizeKeys = nodeListKeys != null ? nodeListKeys.getLength() : 0;
// int sizeKeysModifed = nodeListModifiedKeys != null ? nodeListModifiedKeys.getLength() : 0;
// if(sizeKeys != sizeKeysModifed)
// throw new Exception("Malformed XML");
// logger.debug("Size is " + sizeKeys);
// for (int i = 0; i < sizeKeys; i++) {
// toReturn.put(nodeListKeys.item(i).getTextContent(), nodeListModifiedKeys.item(i).getTextContent());
// }
// }
// logger.debug("Map is " + toReturn);
// namespacesCache.insert(resourceName, toReturn);
// return toReturn;
// } catch (Exception e) {
// logger.error("Error while trying to fetch applicationProfile profile from the infrastructure", e);
// return null;
// }
// }
// }
/**
* Replace the extras' keys if needed
@ -548,18 +590,16 @@ public abstract class HelperMethods {
if(namespaces.containsKey(entry.getKey())){
usedKey = namespaces.get(entry.getKey());
} else{
}
else{
usedKey = entry.getKey();
}
if(isSourceRecord) {
if(isSourceRecord)
usedKey = usedKey.replace("GRSF", "").trim();
}
// When the replaced key is none the metadata field must be removed
if(usedKey.compareTo(NONE_KEY)!=0) {
toReturn.put(usedKey, entry.getValue());
}
toReturn.put(usedKey, entry.getValue());
}
return toReturn;

View File

@ -8,8 +8,8 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.TimeSeriesBean;
import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.slf4j.LoggerFactory;

View File

@ -1,5 +1,6 @@
package org.gcube.data_catalogue.grsf_publish_ws.utils.csv;
import java.beans.IntrospectionException;
import java.beans.PropertyDescriptor;
import java.io.File;
import java.lang.reflect.Field;
@ -7,20 +8,27 @@ import java.lang.reflect.InvocationTargetException;
import java.util.List;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.homelibrary.home.HomeLibrary;
import org.gcube.common.homelibrary.home.exceptions.HomeNotFoundException;
import org.gcube.common.homelibrary.home.exceptions.InternalErrorException;
import org.gcube.common.homelibrary.home.exceptions.UserNotFoundException;
import org.gcube.common.homelibrary.home.workspace.Workspace;
import org.gcube.common.homelibrary.home.workspace.WorkspaceFolder;
import org.gcube.common.homelibrary.home.workspace.WorkspaceSharedFolder;
import org.gcube.common.homelibrary.home.workspace.catalogue.WorkspaceCatalogue;
import org.gcube.common.homelibrary.home.workspace.exceptions.ItemNotFoundException;
import org.gcube.common.homelibrary.home.workspace.exceptions.WorkspaceFolderNotFoundException;
import org.gcube.common.homelibrary.home.workspace.folder.items.ExternalFile;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.storagehub.client.dsl.FileContainer;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.TimeSeries;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.Common;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Common;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.TimeSeries;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.common.caches.CacheImpl;
import org.gcube.datacatalogue.common.caches.CacheInterface;
import org.gcube.datacatalogue.common.AAA_UNNEEDED.CacheImpl;
import org.gcube.datacatalogue.common.AAA_UNNEEDED.CacheInterface;
import org.slf4j.LoggerFactory;
import eu.trentorise.opendata.jackan.model.CkanResourceBase;
@ -34,15 +42,13 @@ public class ManageTimeSeriesThread extends Thread{
private static final String PATH_SEPARATOR = "/";
private static final String CATALOGUE_FOLDER = ".catalogue";
// Logger
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(ManageTimeSeriesThread.class);
// try to attach the source at most CHANCES times ..
private static final int CHANCES = 10;
private static CacheInterface<String, FolderContainer> vreFolderCache = new CacheImpl<String, FolderContainer>(1000 * 60 * 60 * 24);
private static CacheInterface<String, WorkspaceCatalogue> vreFolderCache = new CacheImpl<String, WorkspaceCatalogue>(1000 * 60 * 60 * 24);
private static final int MAX_NAME_CSV_FILE_WITHOUT_MEASURE = 50;
@ -90,7 +96,17 @@ public class ManageTimeSeriesThread extends Thread{
logger.error("Error was " + e.getMessage());
} catch (InvocationTargetException e) {
logger.error("Error was " + e.getMessage());
} catch (StorageHubException e) {
} catch (WorkspaceFolderNotFoundException e) {
logger.error("Error was " + e.getMessage());
} catch (ItemNotFoundException e) {
logger.error("Error was " + e.getMessage());
} catch (IntrospectionException e) {
logger.error("Error was " + e.getMessage());
} catch (InternalErrorException e) {
logger.error("Error was " + e.getMessage());
} catch (HomeNotFoundException e) {
logger.error("Error was " + e.getMessage());
} catch (UserNotFoundException e) {
logger.error("Error was " + e.getMessage());
} catch (Exception e) {
logger.error("Error was " + e.getMessage());
@ -114,26 +130,17 @@ public class ManageTimeSeriesThread extends Thread{
if(record == null)
throw new IllegalArgumentException("The given record is null!!");
StorageHubClient shClient = new StorageHubClient();
String token = SecurityTokenProvider.instance.get();
FolderContainer catalogueFolder = null;
//WorkspaceCatalogue catalogueFolder = null;
WorkspaceCatalogue catalogueFolder = null;
if((catalogueFolder = vreFolderCache.get(token)) == null){
//Workspace ws = HomeLibrary.getHomeManagerFactory().getHomeManager().getHome().getWorkspace();
FolderContainer vreFolder = shClient.openVREFolder();
try {
catalogueFolder = vreFolder.openByRelativePath(CATALOGUE_FOLDER).asFolder();
}catch (StorageHubException e) {
catalogueFolder = vreFolder.newHiddenFolder(CATALOGUE_FOLDER, "catalogue folder");
}
Workspace ws = HomeLibrary.getHomeManagerFactory().getHomeManager().getHome().getWorkspace();
WorkspaceSharedFolder vreFolder = ws.getVREFolderByScope(ScopeProvider.instance.get());
catalogueFolder = vreFolder.getVRECatalogue();
vreFolderCache.insert(token, catalogueFolder);
}
logger.debug("Catalogue folder in vre has path " + catalogueFolder.get().getPath());
logger.debug("Catalogue folder in vre has path " + catalogueFolder.getPath());
// the structure under the .catalogue will be as follows:
// .catalogue:
@ -160,7 +167,7 @@ public class ManageTimeSeriesThread extends Thread{
// the whole path of the directory is going to be...
String csvDirectoryForThisProduct = recordTypeFolderName + PATH_SEPARATOR + firstLetter + PATH_SEPARATOR + replaceIllegalChars(uuidKB, "_") + PATH_SEPARATOR + CSVUtils.CSV_EXTENSION.replace(".", "");
logger.debug("The path under which the time series are going to be saved is " + csvDirectoryForThisProduct);
FolderContainer csvFolder = HelperMethods.createOrGetSubFoldersByPath(catalogueFolder, csvDirectoryForThisProduct);
WorkspaceFolder csvFolder = HelperMethods.createOrGetSubFoldersByPath(catalogueFolder, csvDirectoryForThisProduct);
if(csvFolder == null)
logger.error("Failed to create directory where csv files will be deployed in the workspace!!");
@ -185,7 +192,7 @@ public class ManageTimeSeriesThread extends Thread{
String resourceToAttachOnCkanDescription = productName;
CkanResourceBase ckanResource = null;
FileContainer createdFileOnWorkspace = null;
ExternalFile createdFileOnWorkspace = null;
String[] relevantSources = new String[1];
File csvFile = CSVUtils.listToCSV(asList, relevantSources);
if(csvFile != null){
@ -205,7 +212,7 @@ public class ManageTimeSeriesThread extends Thread{
+ customAnnotation.key() + CSVUtils.CSV_EXTENSION, resourceToAttachOnCkanDescription, csvFile);
if(createdFileOnWorkspace != null){
String publicUrlToSetOnCkan = createdFileOnWorkspace.getPublicLink().toString();
String publicUrlToSetOnCkan = createdFileOnWorkspace.getPublicLink(true);
// wait for patching..
Thread.sleep(1500);

View File

@ -1,94 +0,0 @@
<Resource version="0.4.x">
<ID>85480b75-62f5-4708-acd9-382b22cffc90</ID>
<Type>GenericResource</Type>
<Scopes>
<Scope>/d4science.research-infrastructures.eu/FARM/GRSF</Scope>
</Scopes>
<Profile>
<SecondaryType>ApplicationProfile</SecondaryType>
<Name>GRSF Fishery</Name>
<Description>GRSF mapping between fields and namespaces for Fishery
records</Description>
<Body>
<fields>
<field>
<originalKey>Data Owner</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Database Source</originalKey>
<modifiedKey>fishery_identity:Database Source</modifiedKey>
</field>
<field>
<originalKey>Short Name</originalKey>
<modifiedKey>fishery_identity:Short Name</modifiedKey>
</field>
<field>
<originalKey>Catch</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Landing</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Species</originalKey>
<modifiedKey>fishery_identity:Species</modifiedKey>
</field>
<field>
<originalKey>Similar GRSF Record</originalKey>
<modifiedKey>fishery_identity:Similar GRSF Record</modifiedKey>
</field>
<field>
<originalKey>Management Body/Authority</originalKey>
<modifiedKey>fishery_identity:Management Body/Authority</modifiedKey>
</field>
<field>
<originalKey>Connected Record</originalKey>
<modifiedKey>fishery_identity:Connected Record</modifiedKey>
</field>
<field>
<originalKey>Annotation</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>GRSF Fishery Name</originalKey>
<modifiedKey>fishery_identity:GRSF Fishery Name</modifiedKey>
</field>
<field>
<originalKey>GRSF Semantic Identifier</originalKey>
<modifiedKey>fishery_identity:GRSF Semantic Identifier
</modifiedKey>
</field>
<field>
<originalKey>Traceability Flag</originalKey>
<modifiedKey>fishery_identity:Traceability Flag</modifiedKey>
</field>
<field>
<originalKey>Fishing Area</originalKey>
<modifiedKey>fishery_identity:Fishing Area</modifiedKey>
</field>
<field>
<originalKey>Jurisdiction Area</originalKey>
<modifiedKey>fishery_identity:Jurisdiction Area</modifiedKey>
</field>
<field>
<originalKey>Resources Exploited</originalKey>
<modifiedKey>fishery_identity:Resources Exploited</modifiedKey>
</field>
<field>
<originalKey>Flag State</originalKey>
<modifiedKey>fishery_identity:Flag State</modifiedKey>
</field>
<field>
<originalKey>Fishing Gear</originalKey>
<modifiedKey>fishery_identity:Fishing Gear</modifiedKey>
</field>
<field>
<originalKey>GRSF Type</originalKey>
<modifiedKey>fishery_identity:GRSF Type</modifiedKey>
</field>
</fields>
</Body>
</Profile>
</Resource>

View File

@ -1,91 +0,0 @@
<Resource version="0.4.x">
<ID>c3326373-c620-45e8-a7e3-a25a1a2a970b</ID>
<Type>GenericResource</Type>
<Scopes>
<Scope>/d4science.research-infrastructures.eu/FARM/GRSF_Admin</Scope>
<Scope>/d4science.research-infrastructures.eu/FARM</Scope>
</Scopes>
<Profile>
<SecondaryType>ApplicationProfile</SecondaryType>
<Name>GRSF Fishery</Name>
<Description>GRSF mapping between fields and namespaces for Fishery
records</Description>
<Body>
<fields>
<field>
<originalKey>Data Owner</originalKey>
<modifiedKey>fishery_data:Data Owner</modifiedKey>
</field>
<field>
<originalKey>Database Source</originalKey>
<modifiedKey>fishery_identity:Database Source</modifiedKey>
</field>
<field>
<originalKey>Short Name</originalKey>
<modifiedKey>fishery_identity:Short Name</modifiedKey>
</field>
<field>
<originalKey>Catch</originalKey>
<modifiedKey>fishery_data:Catch</modifiedKey>
</field>
<field>
<originalKey>Landing</originalKey>
<modifiedKey>fishery_data:Landing</modifiedKey>
</field>
<field>
<originalKey>Species</originalKey>
<modifiedKey>fishery_identity:Species</modifiedKey>
</field>
<field>
<originalKey>Similar GRSF Record</originalKey>
<modifiedKey>fishery_identity:Similar GRSF Record</modifiedKey>
</field>
<field>
<originalKey>Management Body/Authority</originalKey>
<modifiedKey>fishery_identity:Management Body/Authority</modifiedKey>
</field>
<field>
<originalKey>Connected Record</originalKey>
<modifiedKey>fishery_identity:Connected Record</modifiedKey>
</field>
<field>
<originalKey>GRSF Fishery Name</originalKey>
<modifiedKey>fishery_identity:GRSF Fishery Name</modifiedKey>
</field>
<field>
<originalKey>GRSF Semantic Identifier</originalKey>
<modifiedKey>fishery_identity:GRSF Semantic Identifier
</modifiedKey>
</field>
<field>
<originalKey>Traceability Flag</originalKey>
<modifiedKey>fishery_identity:Traceability Flag</modifiedKey>
</field>
<field>
<originalKey>Fishing Area</originalKey>
<modifiedKey>fishery_identity:Fishing Area</modifiedKey>
</field>
<field>
<originalKey>Jurisdiction Area</originalKey>
<modifiedKey>fishery_identity:Jurisdiction Area</modifiedKey>
</field>
<field>
<originalKey>Resources Exploited</originalKey>
<modifiedKey>fishery_identity:Resources Exploited</modifiedKey>
</field>
<field>
<originalKey>Flag State</originalKey>
<modifiedKey>fishery_identity:Flag State</modifiedKey>
</field>
<field>
<originalKey>Fishing Gear</originalKey>
<modifiedKey>fishery_identity:Fishing Gear</modifiedKey>
</field>
<field>
<originalKey>GRSF Type</originalKey>
<modifiedKey>fishery_identity:GRSF Type</modifiedKey>
</field>
</fields>
</Body>
</Profile>
</Resource>

View File

@ -1,118 +0,0 @@
<Resource version="0.4.x">
<ID>512cca02-e178-420c-a766-b47171e154e2</ID>
<Type>GenericResource</Type>
<Scopes>
<Scope>/d4science.research-infrastructures.eu/FARM/GRSF</Scope>
</Scopes>
<Profile>
<SecondaryType>ApplicationProfile</SecondaryType>
<Name>GRSF Stock</Name>
<Description>GRSF mapping between fields and namespaces for Stock
records
</Description>
<Body>
<fields>
<field>
<originalKey>Data Owner</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Database Source</originalKey>
<modifiedKey>stock_identity:Database Source</modifiedKey>
</field>
<field>
<originalKey>Short Name</originalKey>
<modifiedKey>stock_identity:Short Name</modifiedKey>
</field>
<field>
<originalKey>Catch</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Landing</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Species</originalKey>
<modifiedKey>stock_identity:Species</modifiedKey>
</field>
<field>
<originalKey>Similar GRSF Record</originalKey>
<modifiedKey>stock_identity:Similar GRSF Record</modifiedKey>
</field>
<field>
<originalKey>Management Body/Authority</originalKey>
<modifiedKey>stock_identity:Management Body/Authority</modifiedKey>
</field>
<field>
<originalKey>Connected Record</originalKey>
<modifiedKey>stock_identity:Connected Record</modifiedKey>
</field>
<field>
<originalKey>Annotation</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>GRSF Stock Name</originalKey>
<modifiedKey>stock_identity:GRSF Stock Name</modifiedKey>
</field>
<field>
<originalKey>GRSF Semantic Identifier</originalKey>
<modifiedKey>stock_identity:GRSF Semantic Identifier</modifiedKey>
</field>
<field>
<originalKey>Assessment Area</originalKey>
<modifiedKey>stock_identity:Assessment Area</modifiedKey>
</field>
<field>
<originalKey>Exploiting Fishery</originalKey>
<modifiedKey>stock_identity:Exploiting Fishery</modifiedKey>
</field>
<field>
<originalKey>Assessment Method</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Abundance Level (FIRMS Standard)</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Abundance Level</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Biomass</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Fishing Pressure (FIRMS Standard)</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Fishing Pressure</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>State and Trend</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>FAO Stock Status Category</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Scientific Advice</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Assessor</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>GRSF Type</originalKey>
<modifiedKey>stock_identity:GRSF Type</modifiedKey>
</field>
</fields>
</Body>
</Profile>
</Resource>

View File

@ -1,115 +0,0 @@
<Resource version="0.4.x">
<ID>c9cad3f8-5773-4ca7-95a5-4692db5eae2e</ID>
<Type>GenericResource</Type>
<Scopes>
<Scope>/d4science.research-infrastructures.eu/FARM</Scope>
<Scope>/d4science.research-infrastructures.eu/FARM/GRSF_Admin</Scope>
</Scopes>
<Profile>
<SecondaryType>ApplicationProfile</SecondaryType>
<Name>GRSF Stock</Name>
<Description>GRSF mapping between fields and namespaces for Stock
records</Description>
<Body>
<fields>
<field>
<originalKey>Data Owner</originalKey>
<modifiedKey>stock_data:Data Owner</modifiedKey>
</field>
<field>
<originalKey>Database Source</originalKey>
<modifiedKey>stock_identity:Database Source</modifiedKey>
</field>
<field>
<originalKey>Short Name</originalKey>
<modifiedKey>stock_identity:Short Name</modifiedKey>
</field>
<field>
<originalKey>Catch</originalKey>
<modifiedKey>stock_data:Catch</modifiedKey>
</field>
<field>
<originalKey>Landing</originalKey>
<modifiedKey>stock_data:Landing</modifiedKey>
</field>
<field>
<originalKey>Species</originalKey>
<modifiedKey>stock_identity:Species</modifiedKey>
</field>
<field>
<originalKey>Similar GRSF Record</originalKey>
<modifiedKey>stock_identity:Similar GRSF Record</modifiedKey>
</field>
<field>
<originalKey>Management Body/Authority</originalKey>
<modifiedKey>stock_identity:Management Body/Authority</modifiedKey>
</field>
<field>
<originalKey>Connected Record</originalKey>
<modifiedKey>stock_identity:Connected Record</modifiedKey>
</field>
<field>
<originalKey>GRSF Stock Name</originalKey>
<modifiedKey>stock_identity:GRSF Stock Name</modifiedKey>
</field>
<field>
<originalKey>GRSF Semantic Identifier</originalKey>
<modifiedKey>stock_identity:GRSF Semantic Identifier</modifiedKey>
</field>
<field>
<originalKey>Assessment Area</originalKey>
<modifiedKey>stock_identity:Assessment Area</modifiedKey>
</field>
<field>
<originalKey>Exploiting Fishery</originalKey>
<modifiedKey>stock_identity:Exploiting Fishery</modifiedKey>
</field>
<field>
<originalKey>Assessment Method</originalKey>
<modifiedKey>stock_data:Assessment Method</modifiedKey>
</field>
<field>
<originalKey>Abundance Level (FIRMS Standard)</originalKey>
<modifiedKey>stock_data:Abundance Level (FIRMS Standard)
</modifiedKey>
</field>
<field>
<originalKey>Abundance Level</originalKey>
<modifiedKey>stock_data:Abundance Level</modifiedKey>
</field>
<field>
<originalKey>Biomass</originalKey>
<modifiedKey>stock_data:Biomass</modifiedKey>
</field>
<field>
<originalKey>Fishing Pressure (FIRMS Standard)</originalKey>
<modifiedKey>stock_data:Fishing Pressure (FIRMS Standard)</modifiedKey>
</field>
<field>
<originalKey>Fishing Pressure</originalKey>
<modifiedKey>stock_data:Fishing Pressure</modifiedKey>
</field>
<field>
<originalKey>State and Trend</originalKey>
<modifiedKey>stock_data:State and Trend</modifiedKey>
</field>
<field>
<originalKey>FAO Stock Status Category</originalKey>
<modifiedKey>stock_data:FAO Stock Status Category</modifiedKey>
</field>
<field>
<originalKey>Scientific Advice</originalKey>
<modifiedKey>stock_data:Scientific Advice</modifiedKey>
</field>
<field>
<originalKey>Assessor</originalKey>
<modifiedKey>stock_data:Assessor</modifiedKey>
</field>
<field>
<originalKey>GRSF Type</originalKey>
<modifiedKey>stock_identity:GRSF Type</modifiedKey>
</field>
</fields>
</Body>
</Profile>
</Resource>

View File

@ -1,40 +0,0 @@
<Resource version="0.4.x">
<ID>7ee9c6e9-ff73-4428-88e4-185aeb4b3742</ID>
<Type>GenericResource</Type>
<Scopes>
<Scope>/d4science.research-infrastructures.eu/FARM/GRSF_Pre</Scope>
<Scope>/d4science.research-infrastructures.eu/FARM/GRSF_Admin</Scope>
</Scopes>
<Profile>
<SecondaryType>ApplicationProfile</SecondaryType>
<Name>GRSFManageEntries</Name>
<Description>A list of extras metadata to be looked up and prompted by the GRSF Manage widget. The body reports the key values.</Description>
<Body>
fishery_identity:GRSF Type,
fishery_identity:Short Name,
fishery_identity:Database Source,
fishery_identity:GRSF Semantic Identifier,
fishery_data:Catch,
fishery_data:Landing,
stock_identity:GRSF Type,
stock_identity:Short Name,
stock_identity:Database Source,
stock_identity:GRSF Semantic Identifier,
stock_data:Catch,
stock_data:Landing
</Body>
</Profile>
</Resource>

View File

@ -1,7 +1,7 @@
<application mode='online'>
<name>GRSFPublisher</name>
<group>Data-Catalogue</group>
<version>1.13.0</version>
<version>1.5.0</version>
<description>Data Catalogue Service</description>
<local-persistence location='target' />
<exclude>/rest/</exclude>

View File

@ -33,12 +33,6 @@
<!-- <param-value>/gcube/devsec/devVRE</param-value> -->
<param-value>/d4science.research-infrastructures.eu/FARM/GRSF</param-value>
</context-param>
<context-param>
<description>Context of pending products under manage activities for prevalidation VRE</description>
<param-name>PreValidateVRE</param-name>
<!-- <param-value>/gcube/devNext/NextNext</param-value> -->
<param-value>/d4science.research-infrastructures.eu/FARM/GRSF_Pre</param-value>
</context-param>
<welcome-file-list>
<welcome-file>index.jsp</welcome-file>
</welcome-file-list>

View File

@ -8,15 +8,15 @@ import javax.ws.rs.core.Application;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.services.GrsfPublisherFisheryService;
import org.gcube.data_catalogue.grsf_publish_ws.services.GrsfPublisherStockService;
import org.gcube.datacatalogue.common.enums.Fishery_Type;
import org.gcube.datacatalogue.common.enums.Sources;
import org.gcube.datacatalogue.common.enums.Status;
import org.gcube.datacatalogue.common.enums.Stock_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Fishery_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.gcube.datacatalogue.common.AAA_PORTED.Status;
import org.gcube.datacatalogue.common.AAA_PORTED.Stock_Type;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.test.JerseyTest;
import org.glassfish.jersey.test.TestProperties;

View File

@ -13,22 +13,33 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.homelibrary.home.HomeLibrary;
import org.gcube.common.homelibrary.home.exceptions.InternalErrorException;
import org.gcube.common.homelibrary.home.workspace.Workspace;
import org.gcube.common.homelibrary.home.workspace.WorkspaceFolder;
import org.gcube.common.homelibrary.home.workspace.WorkspaceItem;
import org.gcube.common.homelibrary.home.workspace.WorkspaceSharedFolder;
import org.gcube.common.homelibrary.home.workspace.catalogue.WorkspaceCatalogue;
import org.gcube.common.homelibrary.home.workspace.exceptions.InsufficientPrivilegesException;
import org.gcube.common.homelibrary.home.workspace.exceptions.ItemAlreadyExistException;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Group;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Tag;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.TimeSeriesBean;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.data_catalogue.grsf_publish_ws.utils.csv.CSVUtils;
import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.AssociationToGroupThread;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueFactory;
import org.gcube.datacatalogue.common.enums.Abundance_Level;
import org.gcube.datacatalogue.common.enums.Fishery_Type;
import org.gcube.datacatalogue.common.enums.Sources;
import org.gcube.datacatalogue.common.enums.Status;
import org.gcube.datacatalogue.common.AAA_PORTED.Abundance_Level;
import org.gcube.datacatalogue.common.AAA_PORTED.Fishery_Type;
import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.gcube.datacatalogue.common.AAA_PORTED.Status;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
@ -227,8 +238,129 @@ public class JTests {
instance.uploadResourceFile(csvFile, datasetName, instance.getApiKeyFromUsername("costantino.perciante"), "random_name.csv", null, null, null);
}
// @Test
/**
 * Exploratory test: binds scope and token, opens the VRE shared folder for the
 * dev scope and resolves its catalogue folder; the folder-creation experiments
 * below are intentionally kept commented out for reference.
 * NOTE(review): `token` is blank here and must be filled with a valid gCube
 * token before running — the method is deliberately not annotated with @Test.
 */
public void sharedVREFolderWriteTest() throws Exception{
// NOTE(review): empty token — fill in before executing against the infrastructure
String token = "";
String context = "/gcube/devNext/NextNext";
// Bind scope and security token to the current thread for the workspace calls below
ScopeProvider.instance.set(context);
SecurityTokenProvider.instance.set(token);
Workspace ws = HomeLibrary.getHomeManagerFactory().getHomeManager().getHome().getWorkspace();
// Get a VRE folder by scope
WorkspaceSharedFolder vreFolder = ws.getVREFolderByScope(context);
//Get the VRE Folder catalogue
WorkspaceCatalogue catalogueFolder = vreFolder.getVRECatalogue();
logger.debug("Catalogue folder retrieved " + catalogueFolder.getName());
// WorkspaceItem stockFolder = catalogueFolder.find("stock");
// vreFolder.removeChild(stockFolder);
/**
* Test is
* .catalogue:
* -test
* - a
* -aproductwiththisname
* - csv
* - testfile.csv
*/
// Path used by the commented folder-creation experiments kept below
String allSubPath = "/test/a/aproductwiththisname/";
//WorkspaceFolder lastFolder = createGetSubFoldersByPath(catalogueFolder, allSubPath);
// WorkspaceFolder recordFolder = (WorkspaceFolder)getFolderOrCreate(catalogueFolder, "test", "");
// String firstLetter = "a";
// WorkspaceFolder firstLetterFolder = (WorkspaceFolder)getFolderOrCreate(recordFolder, firstLetter, "");
// String folderPath = "aproductwiththisname";
// WorkspaceFolder productFolder = (WorkspaceFolder)getFolderOrCreate(firstLetterFolder, folderPath, "");
//logger.debug("Test folder created/get..its path is " + lastFolder.getPath());
// String ccsvUnderProductFolderName = productFolderName + "/" + "csv";
// WorkspaceFolder csvUnderProductFolder = (WorkspaceFolder)getFolderOrCreate(catalogueFolder, ccsvUnderProductFolderName, "");
//
// logger.debug("FOLDERS created " + csvUnderProductFolder.getPath());
// treeCheck(catalogueFolder);
}
/**
 * Recursively walks the workspace tree rooted at the given folder, logging the
 * name of every folder encountered.
 *
 * @param rootFolder the folder whose subtree is visited
 * @throws InternalErrorException if the children of a folder cannot be listed
 */
public void treeCheck(WorkspaceFolder rootFolder) throws InternalErrorException{
	for (WorkspaceItem child : rootFolder.getChildren()) {
		if (!child.isFolder()) {
			continue; // files are ignored; only folders are logged and descended into
		}
		logger.debug("children folder is " + child.getName());
		treeCheck((WorkspaceFolder) child);
	}
}
/**
* Create subfolders in cascade, returning the last created ones
* It could be also used for getting them if they already exists
* @param folder
* @param subPath
* @return
*/
/**
 * Creates the sub-folders of {@code subPath} in cascade under {@code folder},
 * returning the deepest one; existing folders along the path are reused
 * (delegating to {@code getFolderOrCreate} per path segment).
 *
 * @param folder  root folder under which the hierarchy is created; must not be null
 * @param subPath slash-separated relative path (leading/trailing separators tolerated)
 * @return the deepest created/retrieved folder, or the last folder successfully
 *         reached (possibly {@code folder} itself) if creation fails midway
 */
private static WorkspaceFolder createGetSubFoldersByPath(WorkspaceFolder folder, String subPath){
	String pathSeparator = "/";
	WorkspaceFolder parentFolder = folder;
	if(folder == null)
		throw new IllegalArgumentException("Root folder is null!");
	if(subPath == null || subPath.isEmpty())
		throw new IllegalArgumentException("subPath is null/empty!");
	try{
		if(subPath.startsWith(pathSeparator))
			subPath = subPath.replaceFirst(pathSeparator, "");
		// BUG FIX: the original condition was subPath.endsWith(subPath), which is
		// always true (every string ends with itself) and therefore unconditionally
		// chopped the last character, corrupting the final folder name whenever the
		// path had no trailing separator. Only strip an actual trailing separator.
		if(subPath.endsWith(pathSeparator))
			subPath = subPath.substring(0, subPath.length() - 1);
		logger.debug("Splitting path " + subPath);
		String[] splittedPaths = subPath.split(pathSeparator);
		for (String path : splittedPaths) {
			WorkspaceFolder createdFolder = getFolderOrCreate(parentFolder, path, "");
			logger.debug("Created subfolder with path " + createdFolder.getPath());
			parentFolder = createdFolder;
		}
	}catch(Exception e){
		logger.error("Failed to create the subfolders by path " + subPath);
	}
	return parentFolder;
}
/**
* Get a folder within the catalogue folder or create it if it doesn't exist.
* @return
*/
/**
 * Returns the folder named {@code relativePath} under {@code folder}, creating
 * it with the given description when it does not already exist (or when the
 * existing item with that name is not a folder).
 *
 * @param folder            parent folder to search/create under
 * @param relativePath      name of the child folder
 * @param descriptionFolder description used only when the folder is created
 * @return the found or created folder, or {@code null} if creation also failed
 */
private static WorkspaceFolder getFolderOrCreate(WorkspaceFolder folder, String relativePath, String descriptionFolder){
	WorkspaceFolder result = null;
	try {
		// Single lookup instead of the original double find() round-trip
		if(folder.exists(relativePath)){
			WorkspaceItem item = folder.find(relativePath);
			if(item.isFolder())
				result = (WorkspaceFolder) item;
		}
		if(result != null)
			logger.debug("Folder found with name " + result.getName() + ", it has id " + result.getId());
		else
			// Typo fix: "foler" -> "folder" in the original message
			throw new Exception("There is no folder with name " + relativePath + " under folder " + folder.getName());
	} catch (Exception e) {
		// Exception-driven fallback kept from the original: a miss above triggers creation
		logger.debug("Probably the folder doesn't exist", e);
		try{
			result = folder.createFolder(relativePath, descriptionFolder);
		} catch (InsufficientPrivilegesException | InternalErrorException | ItemAlreadyExistException e2) {
			logger.error("Failed to get or generate this folder", e2);
		}
	}
	return result;
}
//@Test
public void GRSFServiceUrl() throws Exception{
@ -246,39 +378,39 @@ public class JTests {
}
//@Test
/**
 * Resolves the catalogue instance for the dev scope and asks for the group
 * hierarchy of the same group name listed twice, logging the resulting list.
 */
public void testHierarchy() throws Exception{
	String groupName = "low-abundance";
	DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance("/gcube/devNext/NextNext");
	// Mutable list on purpose: findHierarchy may rewrite its content in place
	List<String> groups = new ArrayList<String>();
	groups.add(groupName);
	groups.add(groupName);
	String apiKey = catalogue.getApiKeyFromUsername("costantino_perciante");
	AssociationToGroupThread.findHierarchy(groups, catalogue, apiKey);
	logger.debug("Hierarchy is " + groups);
}
//@Test
/**
 * Spawns the group-association worker thread for a single group and blocks
 * until it terminates.
 */
public void testAssociationThread() throws Exception{
	String groupName = "low-abundance";
	DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance("/gcube/devNext/NextNext");
	AssociationToGroupThread worker = new AssociationToGroupThread(Arrays.asList(groupName), "another-test-test-please-ignore", "grsf", "costantino_perciante", catalogue, "apiKey");
	worker.start();
	worker.join(); // wait for the association to complete before logging
	logger.info("Thread stopped!");
}
//@Test
/**
 * Repeatedly performs the user-email lookup so that successive calls exercise
 * the caching layer behind HelperMethods.getUserEmail.
 */
public void testCaches() throws Exception{
	final String context = "/gcube/devNext/NextNext";
	final String token = "";
	int iteration = 0;
	while (iteration < 1000) {
		logger.debug(HelperMethods.getUserEmail(context, token));
		iteration++;
	}
}
// //@Test
// public void testHierarchy() throws Exception{
// String name = "low-abundance";
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance("/gcube/devNext/NextNext");
// List<String> uniqueGroups = new ArrayList<String>();
// uniqueGroups.add(name);
// uniqueGroups.add(name);
// AssociationToGroupThread.findHierarchy(uniqueGroups, catalogue, catalogue.getApiKeyFromUsername("costantino_perciante"));
// logger.debug("Hierarchy is " + uniqueGroups);
// }
//
// //@Test
// public void testAssociationThread() throws Exception{
// String name = "low-abundance";
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance("/gcube/devNext/NextNext");
// AssociationToGroupThread threadGroups = new AssociationToGroupThread(Arrays.asList(name), "another-test-test-please-ignore", "grsf", "costantino_perciante", catalogue, "apiKey");
// threadGroups.start();
// threadGroups.join();
// logger.info("Thread stopped!");
//
//
// }
//
// //@Test
// public void testCaches(){
//
// String context = "/gcube/devNext/NextNext";
// String token = "";
// for (int i = 0; i < 1000; i++) {
// logger.debug(HelperMethods.getUserEmail(context, token));
// }
//
// }
//@Test
public void testMatch(){

View File

@ -1,38 +0,0 @@
package org.gcube.data_catalogue.grsf_publish_ws;
import java.io.File;
import java.net.URL;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
* @author Luca Frosini (ISTI - CNR)
*/
public class TestJson {

	// BUG FIX: the logger was created with Test.class (org.junit.Test), so log
	// output was attributed to the wrong category; bind it to this class instead.
	private static final Logger logger = LoggerFactory.getLogger(TestJson.class);

	/**
	 * Locates the test-resources directory by resolving logback-test.xml from the
	 * classpath and taking its parent directory.
	 *
	 * @return the resources directory containing the test fixtures
	 * @throws Exception if the resource URL cannot be resolved to a file URI
	 */
	public File getResourcesDirectory() throws Exception {
		URL logbackFileURL = TestJson.class.getClassLoader().getResource("logback-test.xml");
		File logbackFile = new File(logbackFileURL.toURI());
		File resourcesDirectory = logbackFile.getParentFile();
		return resourcesDirectory;
	}

	/**
	 * Reads a sample GRSF stock JSON fixture, logs its raw tree and verifies it
	 * deserializes into a StockRecord.
	 */
	@Test
	public void testJsonDeserialization() throws Exception {
		File jsonQueryFile = new File(getResourcesDirectory(), "70ae6895-7d3d-4f4a-86f9-bcb17d41bff6.json");
		ObjectMapper objectMapper = new ObjectMapper();
		JsonNode jsonNode = objectMapper.readTree(jsonQueryFile);
		logger.debug("{}", jsonNode);
		StockRecord sr = objectMapper.readValue(jsonQueryFile, StockRecord.class);
		logger.debug("{}", sr);
	}
}

View File

@ -1,34 +0,0 @@
{
"stock_name" : "European hake - Southern Adriatic",
"license_id" : "CC-BY-SA-4.0",
"version" : 1.0,
"database_sources" : [ {
"name" : "FAO SDG 14.4.1 questionnaire",
"description" : "FAO SDG 14.4.1 questionnaire",
"url" : "https://www.fao.org/sustainable-development-goals/indicators/14.4.1/en/"
} ],
"stock_uri" : "https://github.com/grsf/resource/sdg_14_4_1/stock/70ae6895-7d3d-4f4a-86f9-bcb17d41bff6",
"grsf_uuid" : "70ae6895-7d3d-4f4a-86f9-bcb17d41bff6",
"short_name" : "European hake - Southern Adriatic",
"description" : "European hake - Southern Adriatic",
"grsf_type" : "assessment unit",
"species" : [ "Code: HKE, Classification System: ASFIS, Scientific Name: Merluccius merluccius" ],
"assessment_area" : [ "Code: 18, System: gfcm, Name: Southern Adriatic " ],
"source_of_information" : [ {
"name" : "https://www.fao.org/sustainable-development-goals/indicators/14.4.1/en/sdg-alb-1",
"description" : "",
"url" : "https://www.fao.org/sustainable-development-goals/indicators/14.4.1/en/sdg-alb-1"
} ],
"data_owner" : [ "Albania" ],
"assessment_methods" : [ "The official stock assessment concludes 'Overfished' with respect to abundance reference points. [Rep. Year or Assessment ID: 2019, Ref. Year: 2018]" ],
"connections_indicator" : "not connected",
"similarities_indicator" : "without similar records",
"landings" : [ {
"value" : "872",
"unit" : "Tonnes",
"reference_year" : 2018,
"reporting_year_or_assessment_id" : "2020",
"data_owner" : "Albania"
} ],
"citation" : "citation TBD"
}

View File

@ -1,19 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE xml>
<!-- Logback test configuration: everything goes to the console. -->
<configuration>
	<!-- Console appender with timestamp, thread, level and bare logger name. -->
	<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
		<encoder>
			<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{0}: %msg%n</pattern>
		</encoder>
	</appender>
	<!-- gCube libraries at INFO; this project's own packages at TRACE for test debugging. -->
	<logger name="org.gcube" level="INFO" />
	<logger name="org.gcube.data_catalogue.grsf_publish_ws" level="TRACE" />
	<!-- Everything else is quiet unless it is at least a WARN. -->
	<root level="WARN">
		<appender-ref ref="STDOUT" />
	</root>
</configuration>