Compare commits

..

7 Commits

55 changed files with 1282 additions and 2002 deletions

1
.gitignore vendored
View File

@ -1,4 +1,3 @@
target target
.classpath .classpath
.project .project
/.DS_Store

View File

@ -1 +0,0 @@
/org.eclipse.jdt.core.prefs

View File

@ -1,6 +1,4 @@
eclipse.preferences.version=1 eclipse.preferences.version=1
encoding//src/main/java=UTF-8 encoding//src/main/java=UTF-8
encoding//src/main/resources=UTF-8
encoding//src/test/java=UTF-8 encoding//src/test/java=UTF-8
encoding//src/test/resources=UTF-8
encoding/<project>=UTF-8 encoding/<project>=UTF-8

View File

@ -0,0 +1,2 @@
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning

View File

@ -1,23 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?><project-modules id="moduleCoreId" project-version="1.5.0"> <?xml version="1.0" encoding="UTF-8"?><project-modules id="moduleCoreId" project-version="1.5.0">
<wb-module deploy-name="grsf-publisher-ws">
<wb-module deploy-name="grsf-publisher-ws-1.12.1-SNAPSHOT">
<wb-resource deploy-path="/" source-path="/target/m2e-wtp/web-resources"/> <wb-resource deploy-path="/" source-path="/target/m2e-wtp/web-resources"/>
<wb-resource deploy-path="/" source-path="/src/main/webapp" tag="defaultRootSource"/> <wb-resource deploy-path="/" source-path="/src/main/webapp" tag="defaultRootSource"/>
<wb-resource deploy-path="/WEB-INF/classes" source-path="/src/main/java"/> <wb-resource deploy-path="/WEB-INF/classes" source-path="/src/main/java"/>
<dependent-module archiveName="storagehub-model-1.1.0.jar" deploy-path="/WEB-INF/lib" handle="module:/resource/storagehub-model/storagehub-model"> <dependent-module archiveName="grsf-common-library-1.0.3-SNAPSHOT.jar" deploy-path="/WEB-INF/lib" handle="module:/resource/grsf-common-library/grsf-common-library">
<dependency-type>uses</dependency-type> <dependency-type>uses</dependency-type>
</dependent-module> </dependent-module>
<dependent-module archiveName="common-gcube-calls-1.3.1-SNAPSHOT.jar" deploy-path="/WEB-INF/lib" handle="module:/resource/common-gcube-calls/common-gcube-calls">
<dependency-type>uses</dependency-type>
</dependent-module>
<property name="context-root" value="grsf-publisher-ws"/> <property name="context-root" value="grsf-publisher-ws"/>
<property name="java-output-path" value="/grsf-publisher-ws/target/classes"/> <property name="java-output-path" value="/grsf-publisher-ws/target/classes"/>
</wb-module> </wb-module>
</project-modules> </project-modules>

View File

@ -1,139 +0,0 @@
# Changelog
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [v1.13.3]
- Adding support for "FAO SDG 14.4.1 Questionnaire" source [#23670]
## [v1.13.2]
- Migrated request to social-service-client [#23679]
- Added "Assessment Methods" as Group [#23409]
## [v1.13.1]
- Aligned code and wiki to the new requirements [#23167]
- Changed group assign strategy [#23211] [#23215]
- Tag are added also to legacy records [#23216]
- Fixed code which generated groups id from name [#23215]
## [v1.13.0]
### Added
- Added support to not include in records time dependant metadata [#21995]
### Changed
- Switched dependency management to gcube-bom 2.0.0
- Migrated code to storagehub [#21432]
## [v1.12.0] - 2020-06-19
### Added
**Features**
- [#19166] Added support for GRSF_pre VRE with the behaviour of GRSF Admin
## [v1.11.0] - 2020-03-30
### Changed
- [#18293] Traceability flag is assigned only to Fishery records
## [v1.10.0] - 2019-11-11
### Changed
- [#17965] Tags longer than 100 characters are truncated instead of skipped
## [v1.9.0] - 2019-05-27
### Changed
- [#13347] refers_to can be null while publishing legacy records
- [#12421] Removed the non-ascii clean from extra fields
- [#12421] Properly supporting UTF-8 characters
- [#16395] Title is updated according to Stock/Fishery Name
## [v1.8.0] - 2019-02-26
### Changed
- [#12861] The sources in GRSF VRE are calculated using 'database_sources' field
## [v1.7.0] - 2018-10-10
### Changed
- [#12510] Fixed pom to exclude libraries already provided by the container
## [v1.6.0] - 2018-07-18
### Changed
- [#11464] Added biomass timeseries support to stock data
- [#11749] Added 'With Similarities'-'No Similarities' tag to GRSF Records
- [#11748] Added Tag for 'Fishing Gears' and 'Flag State' fields
- [#11766] Added 'Connected'-'Not Connected' tag to GRSF Records
- [#11767] Added group for SDG flag
- [#11811] Added citation field
- [#11832] Added sub-groups support for available time series related to GRSF Type "Assessment Unit"
- [#11967] Added Biomass group
- [#11968] Changed 'State and trend of Marine Resource' to 'State and Trend'
- [#11969] Changed 'Scientific advice' to 'Scientific Advice'
## [v1.5.0] - 2017-01-10
### Changed
- Model enhancements
## [v1.4.0] - 2017-11-02
### Changed
- Some fixes and improvements: added a common library between service and management widget
## [v1.3.0] - 2017-08-01
### Changed
- Model upgrade
## [v1.2.0] - 2017-07-01
### Changed
- [#8719] Model changed
## [v1.1.2] - 2017-05-15
- [#8719] Updates for ticket
- Minor fixes
## [v1.1.1] - 2017-04-02
- Minor fixes
## [v1.1.0] - 2017-02-28
- Model update
## [v1.0.1] - 2017-02-10
- Minor fixes
## [v1.0.0] - 2016-12-10
- First Release

View File

@ -1,312 +0,0 @@
# European Union Public Licence V. 1.1
EUPL © the European Community 2007
This European Union Public Licence (the “EUPL”) applies to the Work or Software
(as defined below) which is provided under the terms of this Licence. Any use of
the Work, other than as authorised under this Licence is prohibited (to the
extent such use is covered by a right of the copyright holder of the Work).
The Original Work is provided under the terms of this Licence when the Licensor
(as defined below) has placed the following notice immediately following the
copyright notice for the Original Work:
Licensed under the EUPL V.1.1
or has expressed by any other mean his willingness to license under the EUPL.
## 1. Definitions
In this Licence, the following terms have the following meaning:
- The Licence: this Licence.
- The Original Work or the Software: the software distributed and/or
communicated by the Licensor under this Licence, available as Source Code and
also as Executable Code as the case may be.
- Derivative Works: the works or software that could be created by the Licensee,
based upon the Original Work or modifications thereof. This Licence does not
define the extent of modification or dependence on the Original Work required
in order to classify a work as a Derivative Work; this extent is determined by
copyright law applicable in the country mentioned in Article 15.
- The Work: the Original Work and/or its Derivative Works.
- The Source Code: the human-readable form of the Work which is the most
convenient for people to study and modify.
- The Executable Code: any code which has generally been compiled and which is
meant to be interpreted by a computer as a program.
- The Licensor: the natural or legal person that distributes and/or communicates
the Work under the Licence.
- Contributor(s): any natural or legal person who modifies the Work under the
Licence, or otherwise contributes to the creation of a Derivative Work.
- The Licensee or “You”: any natural or legal person who makes any usage of the
Software under the terms of the Licence.
- Distribution and/or Communication: any act of selling, giving, lending,
renting, distributing, communicating, transmitting, or otherwise making
available, on-line or off-line, copies of the Work or providing access to its
essential functionalities at the disposal of any other natural or legal
person.
## 2. Scope of the rights granted by the Licence
The Licensor hereby grants You a world-wide, royalty-free, non-exclusive,
sub-licensable licence to do the following, for the duration of copyright vested
in the Original Work:
- use the Work in any circumstance and for all usage, reproduce the Work, modify
- the Original Work, and make Derivative Works based upon the Work, communicate
- to the public, including the right to make available or display the Work or
- copies thereof to the public and perform publicly, as the case may be, the
- Work, distribute the Work or copies thereof, lend and rent the Work or copies
- thereof, sub-license rights in the Work or copies thereof.
Those rights can be exercised on any media, supports and formats, whether now
known or later invented, as far as the applicable law permits so.
In the countries where moral rights apply, the Licensor waives his right to
exercise his moral right to the extent allowed by law in order to make effective
the licence of the economic rights here above listed.
The Licensor grants to the Licensee royalty-free, non exclusive usage rights to
any patents held by the Licensor, to the extent necessary to make use of the
rights granted on the Work under this Licence.
## 3. Communication of the Source Code
The Licensor may provide the Work either in its Source Code form, or as
Executable Code. If the Work is provided as Executable Code, the Licensor
provides in addition a machine-readable copy of the Source Code of the Work
along with each copy of the Work that the Licensor distributes or indicates, in
a notice following the copyright notice attached to the Work, a repository where
the Source Code is easily and freely accessible for as long as the Licensor
continues to distribute and/or communicate the Work.
## 4. Limitations on copyright
Nothing in this Licence is intended to deprive the Licensee of the benefits from
any exception or limitation to the exclusive rights of the rights owners in the
Original Work or Software, of the exhaustion of those rights or of other
applicable limitations thereto.
## 5. Obligations of the Licensee
The grant of the rights mentioned above is subject to some restrictions and
obligations imposed on the Licensee. Those obligations are the following:
Attribution right: the Licensee shall keep intact all copyright, patent or
trademarks notices and all notices that refer to the Licence and to the
disclaimer of warranties. The Licensee must include a copy of such notices and a
copy of the Licence with every copy of the Work he/she distributes and/or
communicates. The Licensee must cause any Derivative Work to carry prominent
notices stating that the Work has been modified and the date of modification.
Copyleft clause: If the Licensee distributes and/or communicates copies of the
Original Works or Derivative Works based upon the Original Work, this
Distribution and/or Communication will be done under the terms of this Licence
or of a later version of this Licence unless the Original Work is expressly
distributed only under this version of the Licence. The Licensee (becoming
Licensor) cannot offer or impose any additional terms or conditions on the Work
or Derivative Work that alter or restrict the terms of the Licence.
Compatibility clause: If the Licensee Distributes and/or Communicates Derivative
Works or copies thereof based upon both the Original Work and another work
licensed under a Compatible Licence, this Distribution and/or Communication can
be done under the terms of this Compatible Licence. For the sake of this clause,
“Compatible Licence” refers to the licences listed in the appendix attached to
this Licence. Should the Licensees obligations under the Compatible Licence
conflict with his/her obligations under this Licence, the obligations of the
Compatible Licence shall prevail.
Provision of Source Code: When distributing and/or communicating copies of the
Work, the Licensee will provide a machine-readable copy of the Source Code or
indicate a repository where this Source will be easily and freely available for
as long as the Licensee continues to distribute and/or communicate the Work.
Legal Protection: This Licence does not grant permission to use the trade names,
trademarks, service marks, or names of the Licensor, except as required for
reasonable and customary use in describing the origin of the Work and
reproducing the content of the copyright notice.
## 6. Chain of Authorship
The original Licensor warrants that the copyright in the Original Work granted
hereunder is owned by him/her or licensed to him/her and that he/she has the
power and authority to grant the Licence.
Each Contributor warrants that the copyright in the modifications he/she brings
to the Work are owned by him/her or licensed to him/her and that he/she has the
power and authority to grant the Licence.
Each time You accept the Licence, the original Licensor and subsequent
Contributors grant You a licence to their contributions to the Work, under the
terms of this Licence.
## 7. Disclaimer of Warranty
The Work is a work in progress, which is continuously improved by numerous
contributors. It is not a finished work and may therefore contain defects or
“bugs” inherent to this type of software development.
For the above reason, the Work is provided under the Licence on an “as is” basis
and without warranties of any kind concerning the Work, including without
limitation merchantability, fitness for a particular purpose, absence of defects
or errors, accuracy, non-infringement of intellectual property rights other than
copyright as stated in Article 6 of this Licence.
This disclaimer of warranty is an essential part of the Licence and a condition
for the grant of any rights to the Work.
## 8. Disclaimer of Liability
Except in the cases of wilful misconduct or damages directly caused to natural
persons, the Licensor will in no event be liable for any direct or indirect,
material or moral, damages of any kind, arising out of the Licence or of the use
of the Work, including without limitation, damages for loss of goodwill, work
stoppage, computer failure or malfunction, loss of data or any commercial
damage, even if the Licensor has been advised of the possibility of such
damage. However, the Licensor will be liable under statutory product liability
laws as far such laws apply to the Work.
## 9. Additional agreements
While distributing the Original Work or Derivative Works, You may choose to
conclude an additional agreement to offer, and charge a fee for, acceptance of
support, warranty, indemnity, or other liability obligations and/or services
consistent with this Licence. However, in accepting such obligations, You may
act only on your own behalf and on your sole responsibility, not on behalf of
the original Licensor or any other Contributor, and only if You agree to
indemnify, defend, and hold each Contributor harmless for any liability incurred
by, or claims asserted against such Contributor by the fact You have accepted
any such warranty or additional liability.
## 10. Acceptance of the Licence
The provisions of this Licence can be accepted by clicking on an icon “I agree”
placed under the bottom of a window displaying the text of this Licence or by
affirming consent in any other similar way, in accordance with the rules of
applicable law. Clicking on that icon indicates your clear and irrevocable
acceptance of this Licence and all of its terms and conditions.
Similarly, you irrevocably accept this Licence and all of its terms and
conditions by exercising any rights granted to You by Article 2 of this Licence,
such as the use of the Work, the creation by You of a Derivative Work or the
Distribution and/or Communication by You of the Work or copies thereof.
## 11. Information to the public
In case of any Distribution and/or Communication of the Work by means of
electronic communication by You (for example, by offering to download the Work
from a remote location) the distribution channel or media (for example, a
website) must at least provide to the public the information requested by the
applicable law regarding the Licensor, the Licence and the way it may be
accessible, concluded, stored and reproduced by the Licensee.
## 12. Termination of the Licence
The Licence and the rights granted hereunder will terminate automatically upon
any breach by the Licensee of the terms of the Licence.
Such a termination will not terminate the licences of any person who has
received the Work from the Licensee under the Licence, provided such persons
remain in full compliance with the Licence.
## 13. Miscellaneous
Without prejudice of Article 9 above, the Licence represents the complete
agreement between the Parties as to the Work licensed hereunder.
If any provision of the Licence is invalid or unenforceable under applicable
law, this will not affect the validity or enforceability of the Licence as a
whole. Such provision will be construed and/or reformed so as necessary to make
it valid and enforceable.
The European Commission may publish other linguistic versions and/or new
versions of this Licence, so far this is required and reasonable, without
reducing the scope of the rights granted by the Licence. New versions of the
Licence will be published with a unique version number.
All linguistic versions of this Licence, approved by the European Commission,
have identical value. Parties can take advantage of the linguistic version of
their choice.
## 14. Jurisdiction
Any litigation resulting from the interpretation of this License, arising
between the European Commission, as a Licensor, and any Licensee, will be
subject to the jurisdiction of the Court of Justice of the European Communities,
as laid down in article 238 of the Treaty establishing the European Community.
Any litigation arising between Parties, other than the European Commission, and
resulting from the interpretation of this License, will be subject to the
exclusive jurisdiction of the competent court where the Licensor resides or
conducts its primary business.
## 15. Applicable Law
This Licence shall be governed by the law of the European Union country where
the Licensor resides or has his registered office.
This licence shall be governed by the Belgian law if:
- a litigation arises between the European Commission, as a Licensor, and any
- Licensee; the Licensor, other than the European Commission, has no residence
- or registered office inside a European Union country.
## Appendix
“Compatible Licences” according to article 5 EUPL are:
- GNU General Public License (GNU GPL) v. 2
- Open Software License (OSL) v. 2.1, v. 3.0
- Common Public License v. 1.0
- Eclipse Public License v. 1.0
- Cecill v. 2.0

View File

@ -1,69 +0,0 @@
# GRSF Publisher Service
This service allows any client to publish on GRSF Catalogue.
## Built With
* [OpenJDK](https://openjdk.java.net/) - The JDK used
* [Maven](https://maven.apache.org/) - Dependency Management
## Documentation
[GRSF Publisher Service](https://wiki.gcube-system.org/gcube/GCube_Data_Catalogue_for_GRSF)
## Change log
See [Releases](https://code-repo.d4science.org/gCubeSystem/grsf-publisher-ws/releases).
## Authors
* **Luca Frosini** ([ORCID](https://orcid.org/0000-0003-3183-2291)) - [ISTI-CNR Infrascience Group](http://nemis.isti.cnr.it/groups/infrascience)
## How to Cite this Software
Tell people how to cite this software.
* Cite an associated paper?
* Use a specific BibTeX entry for the software?
@Manual{,
title = {GRSF Publisher Service},
author = {{Perciante, Costantino}, {Frosini, Luca}},
organization = {ISTI - CNR},
address = {Pisa, Italy},
year = 2019,
url = {http://www.gcube-system.org/}
}
## License
This project is licensed under the EUPL V.1.1 License - see the [LICENSE.md](LICENSE.md) file for details.
## About the gCube Framework
This software is part of the [gCubeFramework](https://www.gcube-system.org/ "gCubeFramework"): an
open-source software toolkit used for building and operating Hybrid Data
Infrastructures enabling the dynamic deployment of Virtual Research Environments
by favouring the realisation of reuse oriented policies.
The projects leading to this software have received funding from a series of European Union programmes including:
- the Sixth Framework Programme for Research and Technological Development
- DILIGENT (grant no. 004260).
- the Seventh Framework Programme for research, technological development and demonstration
- D4Science (grant no. 212488);
- D4Science-II (grant no.239019);
- ENVRI (grant no. 283465);
- iMarine(grant no. 283644);
- EUBrazilOpenBio (grant no. 288754).
- the H2020 research and innovation programme
- SoBigData (grant no. 654024);
- PARTHENOS (grant no. 654119);
- EGIEngage (grant no. 654142);
- ENVRIplus (grant no. 654182);
- BlueBRIDGE (grant no. 675680);
- PerformFish (grant no. 727610);
- AGINFRAplus (grant no. 731001);
- DESIRA (grant no. 818194);
- ARIADNEplus (grant no. 823914);
- RISIS2 (grant no. 824091);

1
distro/LICENSE Normal file
View File

@ -0,0 +1 @@
${gcube.license}

62
distro/README Normal file
View File

@ -0,0 +1,62 @@
The gCube System - ${name}
--------------------------------------------------
${description}
${gcube.description}
${gcube.funding}
Version
--------------------------------------------------
${version} (${buildDate})
Please see the file named "changelog.xml" in this directory for the release notes.
Authors
--------------------------------------------------
* Costantino Perciante (costantino.perciante-AT-isti.cnr.it), Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" - CNR, Pisa (Italy).
Maintainers
-----------
* Costantino Perciante (costantino.perciante-AT-isti.cnr.it), Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" - CNR, Pisa (Italy).
Download information
--------------------------------------------------
Source code is available from SVN:
${scm.url}
Binaries can be downloaded from the gCube website:
${gcube.website}
Installation
--------------------------------------------------
Installation documentation is available on-line in the gCube Wiki:
${gcube.wikiRoot}
Documentation
--------------------------------------------------
Documentation is available on-line in the gCube Wiki:
${gcube.wikiRoot}
Support
--------------------------------------------------
Bugs and support requests can be reported in the gCube issue tracking tool:
${gcube.issueTracking}
Licensing
--------------------------------------------------
This software is licensed under the terms you may find in the file named "LICENSE" in this directory.

View File

@ -1,10 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE xml> <!DOCTYPE xml>
<ReleaseNotes> <ReleaseNotes>
<Changeset component="org.gcube.data-catalogue.grsf-publisher-ws.1-12-0" date="${buildDate}"> <Changeset component="org.gcube.data-catalogue.grsf-publisher-ws.1-11-0" date="${buildDate}">
<Change>Added support for GRSF_pre VRE with the behaviour of GRSF Admin #19166</Change>
</Changeset>
<Changeset component="org.gcube.data-catalogue.grsf-publisher-ws.1-11-0" date="2020-03-30">
<Change>Traceability flag is assigned only to Fishery records refs #18293</Change> <Change>Traceability flag is assigned only to Fishery records refs #18293</Change>
</Changeset> </Changeset>
<Changeset component="org.gcube.data-catalogue.grsf-publisher-ws.1-10-0" date="2019-11-11"> <Changeset component="org.gcube.data-catalogue.grsf-publisher-ws.1-10-0" date="2019-11-11">

View File

@ -13,10 +13,10 @@
<outputDirectory>/</outputDirectory> <outputDirectory>/</outputDirectory>
<useDefaultExcludes>true</useDefaultExcludes> <useDefaultExcludes>true</useDefaultExcludes>
<includes> <includes>
<include>LICENSE.md</include> <include>README</include>
<include>README.md</include> <include>LICENSE</include>
<include>CHANGELOG.md</include> <include>changelog.xml</include>
<include>gcube-app.xml</include> <include>profile.xml</include>
</includes> </includes>
<fileMode>755</fileMode> <fileMode>755</fileMode>
<filtered>true</filtered> <filtered>true</filtered>

25
distro/profile.xml Normal file
View File

@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<Resource xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<ID></ID>
<Type>Library</Type>
<Profile>
<Description>${description}</Description>
<Class>DataCatalogue</Class>
<Name>${artifactId}</Name>
<Version>1.0.0</Version>
<Packages>
<Software>
<Name>${artifactId}</Name>
<Version>${version}</Version>
<MavenCoordinates>
<groupId>${groupId}</groupId>
<artifactId>${artifactId}</artifactId>
<version>${version}</version>
</MavenCoordinates>
<Files>
<File>${build.finalName}.war</File>
</Files>
</Software>
</Packages>
</Profile>
</Resource>

View File

@ -1,10 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE xml>
<application mode='online'>
<name>GRSFPublisher</name>
<group>Data-Catalogue</group>
<version>${project.version}</version>
<description>${project.description}</description>
<local-persistence location='target' />
</application>

127
pom.xml
View File

@ -11,7 +11,7 @@
<groupId>org.gcube.data-catalogue</groupId> <groupId>org.gcube.data-catalogue</groupId>
<artifactId>grsf-publisher-ws</artifactId> <artifactId>grsf-publisher-ws</artifactId>
<version>1.13.3</version> <version>1.12.0</version>
<packaging>war</packaging> <packaging>war</packaging>
<name>grsf-publisher-ws</name> <name>grsf-publisher-ws</name>
<description>Utility library to publish GRSF products on GRSF catalogue.</description> <description>Utility library to publish GRSF products on GRSF catalogue.</description>
@ -19,14 +19,12 @@
<properties> <properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<version.jersey>2.22.4</version.jersey> <version.jersey>2.22.4</version.jersey>
<version.jackson>2.8.11</version.jackson> <version.jackson>2.6.0</version.jackson>
<distroDirectory>${project.basedir}/distro</distroDirectory> <distroDirectory>${project.basedir}/distro</distroDirectory>
<webappDirectory>${project.build.directory}/${project.build.finalName}</webappDirectory> <webappDirectory>${project.build.directory}/${project.build.finalName}</webappDirectory>
<distroDirectory>distro</distroDirectory> <distroDirectory>distro</distroDirectory>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<jsoup.version>1.10.1</jsoup.version> <jsoup.version>1.10.1</jsoup.version>
<serviceClass>DataPublishing</serviceClass>
<maven.compiler.release>8</maven.compiler.release>
</properties> </properties>
<scm> <scm>
@ -36,11 +34,18 @@
</scm> </scm>
<dependencyManagement> <dependencyManagement>
<dependencies> <dependencies>
<dependency>
<groupId>org.gcube.distribution</groupId>
<artifactId>gcube-bom</artifactId>
<version>1.3.1</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency> <dependency>
<groupId>org.gcube.distribution</groupId> <groupId>org.gcube.distribution</groupId>
<artifactId>gcube-smartgears-bom</artifactId> <artifactId>gcube-smartgears-bom</artifactId>
<version>2.2.0</version> <version>1.0.2</version>
<type>pom</type> <type>pom</type>
<scope>import</scope> <scope>import</scope>
</dependency> </dependency>
@ -51,16 +56,7 @@
<dependency> <dependency>
<groupId>org.gcube.data-catalogue</groupId> <groupId>org.gcube.data-catalogue</groupId>
<artifactId>grsf-common-library</artifactId> <artifactId>grsf-common-library</artifactId>
<version>[2.0.0, 3.0.0-SNAPSHOT)</version> <version>[1-0-0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>storagehub-client-library</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.social-networking</groupId>
<artifactId>social-service-client</artifactId>
<version>[1.0.0, 2.0.0-SNAPSHOT)</version>
</dependency> </dependency>
<!-- jsoup HTML parser library @ http://jsoup.org/ --> <!-- jsoup HTML parser library @ http://jsoup.org/ -->
<dependency> <dependency>
@ -110,7 +106,12 @@
<artifactId>authorization-client</artifactId> <artifactId>authorization-client</artifactId>
<scope>provided</scope> <scope>provided</scope>
</dependency> </dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>home-library-jcr</artifactId>
<version>[2.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
<scope>compile</scope>
</dependency>
<dependency> <dependency>
<groupId>org.gcube.common</groupId> <groupId>org.gcube.common</groupId>
<artifactId>common-authorization</artifactId> <artifactId>common-authorization</artifactId>
@ -119,26 +120,32 @@
<dependency> <dependency>
<groupId>org.glassfish.jersey.containers</groupId> <groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet-core</artifactId> <artifactId>jersey-container-servlet-core</artifactId>
<version>${version.jersey}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.glassfish.jersey.media</groupId> <groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-jackson</artifactId> <artifactId>jersey-media-json-jackson</artifactId>
<version>${version.jersey}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.glassfish.jersey.media</groupId> <groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-processing</artifactId> <artifactId>jersey-media-json-processing</artifactId>
<version>${version.jersey}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.glassfish.jersey.media</groupId> <groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-multipart</artifactId> <artifactId>jersey-media-multipart</artifactId>
<version>${version.jersey}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.glassfish.jersey.media</groupId> <groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-sse</artifactId> <artifactId>jersey-media-sse</artifactId>
<version>${version.jersey}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.glassfish.jersey.ext</groupId> <groupId>org.glassfish.jersey.ext</groupId>
<artifactId>jersey-bean-validation</artifactId> <artifactId>jersey-bean-validation</artifactId>
<version>${version.jersey}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>javax.servlet</groupId> <groupId>javax.servlet</groupId>
@ -156,37 +163,99 @@
<artifactId>jackson-jaxrs-json-provider</artifactId> <artifactId>jackson-jaxrs-json-provider</artifactId>
<version>${version.jackson}</version> <version>${version.jackson}</version>
</dependency> </dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<scope>provided</scope>
</dependency>
<!-- SmartGears --> <!-- SmartGears -->
<dependency> <dependency>
<groupId>org.gcube.core</groupId> <groupId>org.gcube.core</groupId>
<artifactId>common-smartgears</artifactId> <artifactId>common-smartgears</artifactId>
<scope>provided</scope> <scope>provided</scope>
</dependency> </dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>home-library</artifactId>
<version>[2.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
<scope>compile</scope>
</dependency>
<dependency> <dependency>
<groupId>org.glassfish.jersey.test-framework.providers</groupId> <groupId>org.glassfish.jersey.test-framework.providers</groupId>
<artifactId>jersey-test-framework-provider-jetty</artifactId> <artifactId>jersey-test-framework-provider-jetty</artifactId>
<version>2.23.2</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<!-- Added to support Java 11 JDK -->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<scope>provided</scope>
</dependency>
<!-- END Added to support Java 11 JDK -->
</dependencies> </dependencies>
<build> <build>
<finalName>${name}</finalName>
<plugins> <plugins>
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId> <artifactId>maven-war-plugin</artifactId>
<version>2.1.1</version>
<executions> <executions>
<execution> <execution>
<id>make-servicearchive</id> <phase>compile</phase>
<phase>package</phase> </execution>
</executions>
<configuration>
<webappDirectory>${webappDirectory}</webappDirectory>
</configuration>
</plugin>
<!-- SA Plugin -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<descriptors>
<descriptor>${distroDirectory}/descriptor.xml</descriptor>
</descriptors>
</configuration>
<executions>
<execution>
<id>servicearchive</id>
<phase>install</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<version>2.5</version>
<executions>
<execution>
<id>copy-profile</id>
<phase>install</phase>
<goals>
<goal>copy-resources</goal>
</goals>
<configuration>
<outputDirectory>target</outputDirectory>
<resources>
<resource>
<directory>${distroDirectory}</directory>
<filtering>true</filtering>
<includes>
<include>profile.xml</include>
</includes>
</resource>
</resources>
</configuration>
</execution> </execution>
</executions> </executions>
</plugin> </plugin>

View File

@ -1,8 +1,8 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import javax.validation.constraints.Size; import javax.validation.constraints.Size;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.json.simple.JSONObject; import org.json.simple.JSONObject;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.record; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
@ -8,7 +8,7 @@ import java.util.Map;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods; import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean; import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@ -202,18 +202,12 @@ public abstract class Base {
@Override @Override
public String toString() { public String toString() {
return "Base [" return "Base [catalogId=" + catalogId + ", description=" + description
+ "catalogId=" + catalogId + ", license=" + license + ", author=" + author
+ ", description=" + description + ", authorContact=" + authorContact + ", version=" + version
+ ", license=" + license + ", maintainer=" + maintainer + ", maintainerContact="
+ ", author=" + author + maintainerContact + ", extrasFields=" + extrasFields
+ ", authorContact=" + authorContact + ", extrasResources=" + extrasResources + "]";
+ ", version=" + version
+ ", maintainer=" + maintainer
+ ", maintainerContact=" + maintainerContact
+ ", extrasFields=" + extrasFields
+ ", extrasResources=" + extrasResources
+ "]";
} }
} }

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType; import java.lang.annotation.ElementType;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.record; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.util.List; import java.util.List;
@ -6,19 +6,9 @@ import javax.validation.Valid;
import javax.validation.constraints.NotNull; import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size; import javax.validation.constraints.Size;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CkanResource; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField; import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group; import org.gcube.datacatalogue.common.AAA_PORTED.Status;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.TimeSeries;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.AnnotationBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.RefersToBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.SimilarRecordBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Sources;
import org.gcube.datacatalogue.common.enums.Status;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
@ -28,14 +18,13 @@ import com.fasterxml.jackson.annotation.JsonProperty;
* @author Luca Frosini (ISTI - CNR) * @author Luca Frosini (ISTI - CNR)
* *
*/ */
public abstract class Common extends Base { public abstract class Common extends Base{
@JsonProperty(Constants.DATA_OWNER_JSON_KEY) @JsonProperty(Constants.DATA_OWNER_JSON_KEY)
@CustomField(key=Constants.DATA_OWNER_CUSTOM_KEY) @CustomField(key=Constants.DATA_OWNER_CUSTOM_KEY)
private List<String> dataOwner; private List<String> dataOwner;
@JsonProperty(Constants.DATABASE_SOURCES_JSON_KEY) @JsonProperty(Constants.DATABASE_SOURCES_JSON_KEY)
@CustomField(key=Constants.DATABASE_SOURCES_CUSTOM_KEY)
@CkanResource @CkanResource
@Valid @Valid
private List<Resource<Sources>> databaseSources; private List<Resource<Sources>> databaseSources;
@ -55,7 +44,7 @@ public abstract class Common extends Base {
@JsonProperty(Constants.SDG_FLAG_JSON_KEY) @JsonProperty(Constants.SDG_FLAG_JSON_KEY)
@CustomField(key=Constants.SDG_FLAG_CUSTOM_KEY) @CustomField(key=Constants.SDG_FLAG_CUSTOM_KEY)
@Group(condition="true", groupNameOverValue=Constants.SDG_FLAG_GROUP_NAME, prependSourceToGroupName=false) // record is added to group grsf-sdg-flag if sdg Flag is true @Group(condition="true", groupNameOverValue=Constants.SDG_FLAG_GROUP_NAME) // record is added to group grsf-sdg-flag if sdg Flag is true
private Boolean sdgFlag; private Boolean sdgFlag;
@JsonProperty(Constants.STATUS_OF_THE_GRSF_RECORD_JSON_KEY) @JsonProperty(Constants.STATUS_OF_THE_GRSF_RECORD_JSON_KEY)
@ -89,6 +78,10 @@ public abstract class Common extends Base {
@CustomField(key=Constants.SIMILAR_GRSF_RECORDS_CUSTOM_KEY) @CustomField(key=Constants.SIMILAR_GRSF_RECORDS_CUSTOM_KEY)
private List<SimilarRecordBean> similarGRSFRecords; private List<SimilarRecordBean> similarGRSFRecords;
@JsonProperty(Constants.SIMILAR_SOURCE_RECORDS_JSON_KEY)
@CustomField(key=Constants.SIMILAR_SOURCE_RECORDS_CUSTOM_KEY)
private List<SimilarRecordBean> similarSourceRecords;
// automatically set // automatically set
@CustomField(key=Constants.DOMAIN_CUSTOM_KEY) @CustomField(key=Constants.DOMAIN_CUSTOM_KEY)
private String domain; private String domain;
@ -110,11 +103,7 @@ public abstract class Common extends Base {
@JsonProperty(Constants.CONNECTED_JSON_KEY) @JsonProperty(Constants.CONNECTED_JSON_KEY)
@CustomField(key=Constants.CONNECTED_CUSTOM_KEY) @CustomField(key=Constants.CONNECTED_CUSTOM_KEY)
private List<String> connectedBeans; private List<String> connectedBeans;
@JsonProperty(Constants.CONNECTIONS_INDICATOR)
@Tag
private String connectionsIndicator;
@JsonProperty(Constants.ANNOTATION_PUBLISHER_JSON_KEY) @JsonProperty(Constants.ANNOTATION_PUBLISHER_JSON_KEY)
@CustomField(key=Constants.ANNOTATION_PUBLISHER_CUSTOM_KEY) @CustomField(key=Constants.ANNOTATION_PUBLISHER_CUSTOM_KEY)
private List<AnnotationBean> annotations; private List<AnnotationBean> annotations;
@ -123,6 +112,10 @@ public abstract class Common extends Base {
@Tag @Tag
private String similaritiesIndicator; private String similaritiesIndicator;
@JsonProperty(Constants.CONNECTIONS_INDICATOR)
@Tag
private String connectionsIndicator;
@JsonProperty(Constants.CITATION_JSON_KEY) @JsonProperty(Constants.CITATION_JSON_KEY)
@CustomField(key=Constants.CITATION_CUSTOM_KEY) @CustomField(key=Constants.CITATION_CUSTOM_KEY)
private String citation; private String citation;
@ -177,6 +170,7 @@ public abstract class Common extends Base {
this.landings = landings; this.landings = landings;
this.species = species; this.species = species;
this.similarGRSFRecords = similarGRSFRecords; this.similarGRSFRecords = similarGRSFRecords;
this.similarSourceRecords = similarSourceRecords;
this.domain = domain; this.domain = domain;
this.uuid = uuid; this.uuid = uuid;
this.managementBodyAuthorities = managementBodyAuthorities; this.managementBodyAuthorities = managementBodyAuthorities;
@ -224,7 +218,6 @@ public abstract class Common extends Base {
public void setRefersTo(List<RefersToBean> refersTo) { public void setRefersTo(List<RefersToBean> refersTo) {
this.refersTo = refersTo; this.refersTo = refersTo;
} }
public List<Resource<Sources>> getDatabaseSources() { public List<Resource<Sources>> getDatabaseSources() {
return databaseSources; return databaseSources;
} }
@ -297,6 +290,14 @@ public abstract class Common extends Base {
this.similarGRSFRecords = similarGRSFRecords; this.similarGRSFRecords = similarGRSFRecords;
} }
public List<SimilarRecordBean> getSimilarSourceRecords() {
return similarSourceRecords;
}
public void setSimilarSourceRecords(List<SimilarRecordBean> similarSourceRecords) {
this.similarSourceRecords = similarSourceRecords;
}
public String getSpatial() { public String getSpatial() {
return spatial; return spatial;
} }
@ -356,28 +357,17 @@ public abstract class Common extends Base {
@Override @Override
public String toString() { public String toString() {
return super.toString() + " - Common [" return "Common [dataOwner=" + dataOwner + ", databaseSources="
+ " dataOwner=" + dataOwner + databaseSources + ", sourceOfInformation="
+ ", databaseSources=" + databaseSources + sourceOfInformation + ", refersTo=" + refersTo
+ ", sourceOfInformation=" + sourceOfInformation + ", shortName=" + shortName + ", sdgFlag=" + sdgFlag + ", status="
+ ", refersTo=" + refersTo + status + ", systemType=" + systemType + ", catches="
+ ", shortName=" + shortName + catches + ", landings=" + landings + ", species=" + species
+ ", sdgFlag=" + sdgFlag
+ ", status=" + status
+ ", systemType=" + systemType
+ ", catches=" + catches
+ ", landings=" + landings
+ ", species=" + species
+ ", similarGRSFRecords=" + similarGRSFRecords + ", similarGRSFRecords=" + similarGRSFRecords
+ ", domain=" + domain + ", similarSourceRecords=" + similarSourceRecords
+ ", uuid=" + uuid + ", domain=" + domain + ", uuid=" + uuid
+ ", managementBodyAuthorities=" + managementBodyAuthorities + ", managementBodyAuthorities=" + managementBodyAuthorities
+ ", spatial=" + spatial + ", spatial=" + spatial + ", connectedBeans=" + connectedBeans
+ ", connectedBeans=" + connectedBeans + ", annotations=" + annotations + "]";
+ ", annotations=" + annotations
+ ", similaritiesIndicator=" + similaritiesIndicator
+ ", connectionsIndicator=" + connectionsIndicator
+ ", citation=" + citation
+ "]";
} }
} }

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType; import java.lang.annotation.ElementType;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;

View File

@ -1,16 +1,13 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.record; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.util.List; import java.util.List;
import javax.validation.constraints.NotNull; import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size; import javax.validation.constraints.Size;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group; import org.gcube.datacatalogue.common.AAA_PORTED.Fishery_Type;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag; import org.gcube.datacatalogue.common.AAA_PORTED.Production_System_Type;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Fishery_Type;
import org.gcube.datacatalogue.common.enums.Production_System_Type;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
@ -21,35 +18,38 @@ import com.fasterxml.jackson.annotation.JsonProperty;
*/ */
public class FisheryRecord extends Common { public class FisheryRecord extends Common {
@JsonProperty(Constants.TRACEABILITY_FLAG_JSON_KEY)
@CustomField(key=Constants.TRACEABILITY_FLAG_CUSTOM_KEY)
@Group(condition="true", groupNameOverValue=Constants.TRACEABILITY_FLAG_GROUP_NAME) // record is added to group grsf-traceability-flag if Traceability Flag is true
private Boolean traceabilityFlag;
@JsonProperty(Constants.FISHERY_NAME_JSON_KEY) @JsonProperty(Constants.FISHERY_NAME_JSON_KEY)
@CustomField(key=Constants.FISHERY_NAME_CUSTOM_KEY)
@NotNull(message="fishery_name cannot be null") @NotNull(message="fishery_name cannot be null")
@Size(min=1, message="fishery_name cannot be empty") @Size(min=1, message="fishery_name cannot be empty")
@CustomField(key=Constants.FISHERY_NAME_CUSTOM_KEY)
private String fisheryName; private String fisheryName;
@JsonProperty(Constants.GRSF_SEMANTIC_IDENTIFIER_JSON_KEY) @JsonProperty(Constants.GRSF_SEMANTIC_IDENTIFIER_JSON_KEY)
@CustomField(key=Constants.GRSF_SEMANTIC_IDENTIFIER_CUSTOM_KEY) @CustomField(key=Constants.GRSF_SEMANTIC_IDENTIFIER_CUSTOM_KEY)
private String fisheryId; private String fisheryId;
@JsonProperty(Constants.TRACEABILITY_FLAG_JSON_KEY)
@CustomField(key=Constants.TRACEABILITY_FLAG_CUSTOM_KEY)
@Group(condition="true", groupNameOverValue=Constants.TRACEABILITY_FLAG_GROUP_NAME, prependSourceToGroupName=false) // record is added to group grsf-traceability-flag if Traceability Flag is true
private Boolean traceabilityFlag;
@JsonProperty(Constants.FISHING_AREA_JSON_KEY) @JsonProperty(Constants.FISHING_AREA_JSON_KEY)
@CustomField(key=Constants.FISHING_AREA_CUSTOM_KEY) @CustomField(key=Constants.FISHING_AREA_CUSTOM_KEY)
@Tag @Tag
private List<String> fishingArea; private List<String> fishingArea;
@JsonProperty(Constants.JURISDICTION_AREA_JSON_KEY)
@CustomField(key=Constants.JURISDICTION_AREA_CUSTOM_KEY)
@Tag
private List<String> jurisdictionArea;
@JsonProperty(Constants.RESOURCES_EXPLOITED_JSON_KEY) @JsonProperty(Constants.RESOURCES_EXPLOITED_JSON_KEY)
@CustomField(key=Constants.RESOURCES_EXPLOITED_CUSTOM_KEY) @CustomField(key=Constants.RESOURCES_EXPLOITED_CUSTOM_KEY)
private List<String> resourcesExploited; private List<String> resourcesExploited;
@JsonProperty(Constants.JURISDICTION_AREA_JSON_KEY)
@CustomField(key=Constants.JURISDICTION_AREA_CUSTOM_KEY)
private List<String> jurisdictionArea;
@JsonProperty(Constants.PRODUCTION_SYSTEM_TYPE_JSON_KEY)
@CustomField(key=Constants.PRODUCTION_SYSTEM_TYPE_CUSTOM_KEY)
private List<Production_System_Type> productionSystemType;
@JsonProperty(Constants.FLAG_STATE_JSON_KEY) @JsonProperty(Constants.FLAG_STATE_JSON_KEY)
@CustomField(key=Constants.FLAG_STATE_CUSTOM_KEY) @CustomField(key=Constants.FLAG_STATE_CUSTOM_KEY)
@Tag @Tag
@ -62,6 +62,7 @@ public class FisheryRecord extends Common {
@JsonProperty(Constants.GRSF_TYPE_JSON_KEY) @JsonProperty(Constants.GRSF_TYPE_JSON_KEY)
@CustomField(key=Constants.GRSF_TYPE_CUSTOM_KEY) @CustomField(key=Constants.GRSF_TYPE_CUSTOM_KEY)
@Group
@Tag @Tag
private Fishery_Type type; private Fishery_Type type;
@ -93,6 +94,7 @@ public class FisheryRecord extends Common {
this.fishingArea = fishingArea; this.fishingArea = fishingArea;
this.resourcesExploited = resourcesExploited; this.resourcesExploited = resourcesExploited;
this.jurisdictionArea = jurisdictionArea; this.jurisdictionArea = jurisdictionArea;
this.productionSystemType = productionSystemType;
this.flagState = flagState; this.flagState = flagState;
this.fishingGear = fishingGear; this.fishingGear = fishingGear;
this.type = type; this.type = type;
@ -160,6 +162,15 @@ public class FisheryRecord extends Common {
this.resourcesExploited = resourcesExploited; this.resourcesExploited = resourcesExploited;
} }
public List<Production_System_Type> getProductionSystemType() {
return productionSystemType;
}
public void setProductionSystemType(
List<Production_System_Type> productionSystemType) {
this.productionSystemType = productionSystemType;
}
public List<String> getFlagState() { public List<String> getFlagState() {
return flagState; return flagState;
} }
@ -183,6 +194,7 @@ public class FisheryRecord extends Common {
+ ", traceabilityFlag=" + traceabilityFlag + ", traceabilityFlag=" + traceabilityFlag
+ ", resourcesExploited=" + resourcesExploited + ", resourcesExploited=" + resourcesExploited
+ ", jurisdictionArea=" + jurisdictionArea + ", jurisdictionArea=" + jurisdictionArea
+ ", productionSystemType=" + productionSystemType
+ ", flagState=" + flagState + ", fishingGear=" + fishingGear + ", flagState=" + flagState + ", fishingGear=" + fishingGear
+ ", type=" + type + "]"; + ", type=" + type + "]";
} }

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType; import java.lang.annotation.ElementType;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;
@ -21,8 +21,8 @@ public @interface Group {
String condition() default ""; String condition() default "";
/** /**
* If this value is set, it is used as name of the group in place of the filed value. * If this value is set, it is the name of the group (apart the source, i.e. "grsf", "ram", "firms", "fishsource" that will be prepended) to which the
* (apart the source, i.e. "grsf", "ram", "firms", "fishsource" that will be prepended depending on prependSourceToGroupName) * record needs to be put.
* @return * @return
*/ */
String groupNameOverValue() default ""; String groupNameOverValue() default "";
@ -32,6 +32,6 @@ public @interface Group {
* Set to false to avoid source prepending * Set to false to avoid source prepending
* @return * @return
*/ */
boolean prependSourceToGroupName() default false; boolean prependSourceToGroupName() default true;
} }

View File

@ -1,8 +1,8 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import javax.validation.constraints.NotNull; import javax.validation.constraints.NotNull;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -1,10 +1,10 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import javax.validation.constraints.NotNull; import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size; import javax.validation.constraints.Size;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.enums.Sources; import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -1,6 +1,6 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.json.simple.JSONObject; import org.json.simple.JSONObject;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.record; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.util.List; import java.util.List;
@ -6,15 +6,10 @@ import javax.validation.Valid;
import javax.validation.constraints.NotNull; import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size; import javax.validation.constraints.Size;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField; import org.gcube.datacatalogue.common.AAA_PORTED.Abundance_Level;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag; import org.gcube.datacatalogue.common.AAA_PORTED.Fishing_Pressure;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.TimeSeries; import org.gcube.datacatalogue.common.AAA_PORTED.Stock_Type;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean;
import org.gcube.datacatalogue.common.Constants;
import org.gcube.datacatalogue.common.enums.Abundance_Level;
import org.gcube.datacatalogue.common.enums.Fishing_Pressure;
import org.gcube.datacatalogue.common.enums.Stock_Type;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
@ -26,9 +21,9 @@ import com.fasterxml.jackson.annotation.JsonProperty;
public class StockRecord extends Common{ public class StockRecord extends Common{
@JsonProperty(Constants.STOCK_NAME_JSON_KEY) @JsonProperty(Constants.STOCK_NAME_JSON_KEY)
@CustomField(key=Constants.STOCK_NAME_CUSTOM_KEY)
@NotNull(message=Constants.STOCK_NAME_JSON_KEY + " cannot be null") @NotNull(message=Constants.STOCK_NAME_JSON_KEY + " cannot be null")
@Size(min=2, message=Constants.STOCK_NAME_JSON_KEY + " cannot be empty") @Size(min=2, message=Constants.STOCK_NAME_JSON_KEY + " cannot be empty")
@CustomField(key=Constants.STOCK_NAME_CUSTOM_KEY)
private String stockName; private String stockName;
@JsonProperty(Constants.GRSF_SEMANTIC_IDENTIFIER_JSON_KEY) @JsonProperty(Constants.GRSF_SEMANTIC_IDENTIFIER_JSON_KEY)
@ -46,7 +41,6 @@ public class StockRecord extends Common{
@JsonProperty(Constants.ASSESSMENT_METHODS_JSON_KEY) @JsonProperty(Constants.ASSESSMENT_METHODS_JSON_KEY)
@CustomField(key=Constants.ASSESSMENT_METHODS_CUSTOM_KEY) @CustomField(key=Constants.ASSESSMENT_METHODS_CUSTOM_KEY)
@Group(groupNameOverValue=Constants.ASSESSMENT_METHODS_CUSTOM_KEY, prependSourceToGroupName=false)
private List<String> assessmentMethods; private List<String> assessmentMethods;
@JsonProperty(Constants.FIRMS_ABUNDANCE_LEVEL_JSON_KEY) @JsonProperty(Constants.FIRMS_ABUNDANCE_LEVEL_JSON_KEY)
@ -100,9 +94,8 @@ public class StockRecord extends Common{
@JsonProperty(Constants.SCIENTIFIC_ADVICE_JSON_KEY) @JsonProperty(Constants.SCIENTIFIC_ADVICE_JSON_KEY)
@CustomField(key=Constants.SCIENTIFIC_ADVICE_CUSTOM_KEY) @CustomField(key=Constants.SCIENTIFIC_ADVICE_CUSTOM_KEY)
@TimeSeries
@Group(groupNameOverValue=Constants.SCIENTIFIC_ADVICE_CUSTOM_KEY, prependSourceToGroupName=false) @Group(groupNameOverValue=Constants.SCIENTIFIC_ADVICE_CUSTOM_KEY, prependSourceToGroupName=false)
private List<TimeSeriesBean<String, Void>> scientificAdvice; private List<String> scientificAdvice;
@JsonProperty(Constants.ASSESSOR_JSON_KEY) @JsonProperty(Constants.ASSESSOR_JSON_KEY)
@CustomField(key=Constants.ASSESSOR_CUSTOM_KEY) @CustomField(key=Constants.ASSESSOR_CUSTOM_KEY)
@ -110,6 +103,7 @@ public class StockRecord extends Common{
@JsonProperty(Constants.GRSF_TYPE_JSON_KEY) @JsonProperty(Constants.GRSF_TYPE_JSON_KEY)
@CustomField(key=Constants.GRSF_TYPE_CUSTOM_KEY) @CustomField(key=Constants.GRSF_TYPE_CUSTOM_KEY)
@Group
@Tag @Tag
private Stock_Type type; private Stock_Type type;
@ -147,7 +141,7 @@ public class StockRecord extends Common{
List<TimeSeriesBean<String, String>> fishingPressure, List<TimeSeriesBean<String, String>> fishingPressure,
List<TimeSeriesBean<String, Void>> narrativeState, List<TimeSeriesBean<String, Void>> narrativeState,
List<TimeSeriesBean<String, Void>> faoState, List<TimeSeriesBean<String, Void>> faoState,
List<TimeSeriesBean<String, Void>> scientificAdvice, String assessor, Stock_Type type) { List<String> scientificAdvice, String assessor, Stock_Type type) {
super(); super();
this.stockName = stockName; this.stockName = stockName;
this.stockId = stockId; this.stockId = stockId;
@ -263,11 +257,11 @@ public class StockRecord extends Common{
this.narrativeState = narrativeState; this.narrativeState = narrativeState;
} }
public List<TimeSeriesBean<String, Void>> getScientificAdvice() { public List<String> getScientificAdvice() {
return scientificAdvice; return scientificAdvice;
} }
public void setScientificAdvice(List<TimeSeriesBean<String, Void>> scientificAdvice) { public void setScientificAdvice(List<String> scientificAdvice) {
this.scientificAdvice = scientificAdvice; this.scientificAdvice = scientificAdvice;
} }

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType; import java.lang.annotation.ElementType;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.custom_annotations; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import java.lang.annotation.ElementType; import java.lang.annotation.ElementType;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;

View File

@ -1,6 +1,6 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others; package org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -1,11 +1,10 @@
package org.gcube.data_catalogue.grsf_publish_ws.ex; package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper; import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider; import javax.ws.rs.ext.Provider;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseBean;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
/** /**

View File

@ -1,9 +1,9 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others; package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import javax.validation.constraints.NotNull; import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size; import javax.validation.constraints.Size;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.utils; package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import static org.gcube.resources.discovery.icclient.ICFactory.client; import static org.gcube.resources.discovery.icclient.ICFactory.client;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor; import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.output; package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
/** /**
* Response bean to be used by the service. * Response bean to be used by the service.

View File

@ -1,6 +1,6 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.output; package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -1,10 +1,10 @@
package org.gcube.data_catalogue.grsf_publish_ws.json.input.others; package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import javax.validation.constraints.NotNull; import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size; import javax.validation.constraints.Size;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.enums.Status; import org.gcube.datacatalogue.common.AAA_PORTED.Status;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.ex; package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import javax.validation.ConstraintViolation; import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException; import javax.validation.ConstraintViolationException;
@ -7,7 +7,6 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper; import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider; import javax.ws.rs.ext.Provider;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseBean;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
/** /**

View File

@ -1,4 +1,4 @@
package org.gcube.data_catalogue.grsf_publish_ws.utils.threads; package org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED;
import java.util.List; import java.util.List;

View File

@ -29,19 +29,19 @@ import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider; import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.authorization.library.utils.Caller; import org.gcube.common.authorization.library.utils.Caller;
import org.gcube.common.scope.api.ScopeProvider; import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.DeleteRecord; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.UpdateRecordStatus; import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.DeleteRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord; import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseBean; import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseCreationBean; import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.UpdateRecordStatus;
import org.gcube.data_catalogue.grsf_publish_ws.utils.CommonServiceUtils; import org.gcube.data_catalogue.grsf_publish_ws.utils.CommonServiceUtils;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods; import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue; import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean; import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean;
import org.gcube.datacatalogue.ckanutillibrary.shared.RolesCkanGroupOrOrg; import org.gcube.datacatalogue.ckanutillibrary.shared.RolesCkanGroupOrOrg;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.enums.Product_Type; import org.gcube.datacatalogue.common.AAA_PORTED.Product_Type;
import org.gcube.datacatalogue.common.enums.Sources; import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import eu.trentorise.opendata.jackan.model.CkanDataset; import eu.trentorise.opendata.jackan.model.CkanDataset;
@ -51,7 +51,7 @@ import eu.trentorise.opendata.jackan.model.CkanDataset;
* @author Costantino Perciante (ISTI - CNR) * @author Costantino Perciante (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR) * @author Luca Frosini (ISTI - CNR)
*/ */
@Path("{source:firms|FIRMS|grsf|GRSF|FishSource|fishsource|sdg|SDG}/fishery/") @Path("{source:firms|FIRMS|grsf|GRSF|FishSource|fishsource}/fishery/")
public class GrsfPublisherFisheryService { public class GrsfPublisherFisheryService {
// the context // the context
@ -61,33 +61,33 @@ public class GrsfPublisherFisheryService {
// Logger // Logger
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(GrsfPublisherFisheryService.class); private static final org.slf4j.Logger logger = LoggerFactory.getLogger(GrsfPublisherFisheryService.class);
@GET // @GET
@Path("hello") // @Path("hello")
@Produces(MediaType.TEXT_PLAIN) // @Produces(MediaType.TEXT_PLAIN)
public Response hello() { // public Response hello() {
return Response.ok("Hello.. Fishery service is here").build(); // return Response.ok("Hello.. Fishery service is here").build();
} // }
//
@GET // @GET
@Path("get-licenses") // @Path("get-licenses")
@Produces(MediaType.APPLICATION_JSON) // @Produces(MediaType.APPLICATION_JSON)
public Response getLicenses() { // public Response getLicenses() {
Status status = Status.OK; // Status status = Status.OK;
String context = ScopeProvider.instance.get(); // String context = ScopeProvider.instance.get();
DataCatalogue catalogue; // DataCatalogue catalogue;
try { // try {
catalogue = HelperMethods.getDataCatalogueRunningInstance(context); //// catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
Map<String,String> licenses = CommonServiceUtils.getLicenses(catalogue); // Map<String,String> licenses = CommonServiceUtils.getLicenses(catalogue);
if(licenses == null) // if(licenses == null)
status = Status.INTERNAL_SERVER_ERROR; // status = Status.INTERNAL_SERVER_ERROR;
return Response.status(status).entity(licenses).build(); // return Response.status(status).entity(licenses).build();
} catch(Exception e) { // } catch(Exception e) {
status = Status.INTERNAL_SERVER_ERROR; // status = Status.INTERNAL_SERVER_ERROR;
return Response.status(status) // return Response.status(status)
.entity(new ResponseBean(false, "Unable to retrieve license list " + e.getLocalizedMessage(), null)) // .entity(new ResponseBean(false, "Unable to retrieve license list " + e.getLocalizedMessage(), null))
.build(); // .build();
} // }
} // }
@POST @POST
@Path("publish-product") @Path("publish-product")
@ -113,36 +113,43 @@ public class GrsfPublisherFisheryService {
// Cast the source to the accepted ones // Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source); Sources sourceInPath = Sources.onDeserialize(source);
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context); DataCatalogue catalogue = null;
if(catalogue == null) { // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
throw new Exception("There was a problem while serving your request. No catalogue instance was found!"); // if(catalogue == null) {
} else { // throw new Exception("There was a problem while serving your request. No catalogue instance was found!");
// } else {
String apiKey = catalogue.getApiKeyFromUsername(username); String apiKey = "";
String organization = HelperMethods.retrieveOrgNameFromScope(context); String organization = "";
// String apiKey = catalogue.getApiKeyFromUsername(username);
// String organization = HelperMethods.retrieveOrgNameFromScope(context);
// CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
//
// // extend this role to the other organizations in this context
// CommonServiceUtils.extendRoleToOtherOrganizations(username, catalogue, organization,
// RolesCkanGroupOrOrg.ADMIN);
CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization); String authorMail = "";
String authorFullname = "";
// extend this role to the other organizations in this context
CommonServiceUtils.extendRoleToOtherOrganizations(username, catalogue, organization,
RolesCkanGroupOrOrg.ADMIN);
// retrieve the user's email and fullname // retrieve the user's email and fullname
String authorMail = HelperMethods.getUserEmail(context, token); // String authorMail = HelperMethods.getUserEmail(context, token);
String authorFullname = HelperMethods.getUserFullname(context, token); // String authorFullname = HelperMethods.getUserFullname(context, token);
//
if(authorMail == null || authorFullname == null) { // if(authorMail == null || authorFullname == null) {
throw new Exception("Sorry but it was not possible to retrieve your fullname/email!"); // throw new Exception("Sorry but it was not possible to retrieve your fullname/email!");
} // }
// The name of the product will be the uuid of the kb. The title will be the fishery's fishery_name. Fishery has also the constraint that // The name of the product will be the uuid of the kb. The title will be the fishery's fishery_name. Fishery has also the constraint that
// fishing area and jurisdiction area cannot be empty at the same time // fishing area and jurisdiction area cannot be empty at the same time
String futureName = record.getUuid(); String futureName = "";
String futureTitle = record.getFisheryName(); String futureTitle = "";
// String futureName = record.getUuid();
// String futureTitle = record.getFisheryName();
// check name // check name
CommonServiceUtils.checkName(futureName, catalogue); // CommonServiceUtils.checkName(futureName, catalogue);
Map<String,List<String>> customFields = record.getExtrasFields(); Map<String,List<String>> customFields = record.getExtrasFields();
Set<String> tags = new HashSet<String>(); Set<String> tags = new HashSet<String>();
@ -154,26 +161,31 @@ public class GrsfPublisherFisheryService {
Product_Type.FISHERY, tags, customFields, groups, resources, username, futureTitle); Product_Type.FISHERY, tags, customFields, groups, resources, username, futureTitle);
// check the license id // check the license id
String license = null; String license = "";
if(record.getLicense() == null || record.getLicense().isEmpty()) // String license = null;
license = Constants.DEFAULT_LICENSE; // if(record.getLicense() == null || record.getLicense().isEmpty())
else if(HelperMethods.existsLicenseId(record.getLicense(), catalogue)) // license = Constants.DEFAULT_LICENSE;
license = record.getLicense(); // else if(HelperMethods.existsLicenseId(record.getLicense(), catalogue))
else // license = record.getLicense();
throw new Exception("Please check the license id!"); // else
// throw new Exception("Please check the license id!");
//
long version = record.getVersion() == null ? 1 : record.getVersion(); long version = record.getVersion() == null ? 1 : record.getVersion();
// set the visibility of the datatest according the context // set the visibility of the datatest according the context
boolean publicDataset = context boolean publicDataset = context
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY)); .equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY));
// convert extras' keys to keys with namespace
Map<String,String> namespaces = HelperMethods
.getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_FISHERY);
if(namespaces == null)
throw new Exception("Failed to retrieve the namespaces for the key fields!"); // convert extras' keys to keys with namespace
Map<String,String> namespaces = new HashMap<>();
// Map<String,String> namespaces = HelperMethods
// .getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_FISHERY);
// if(namespaces == null)
// throw new Exception("Failed to retrieve the namespaces for the key fields!");
customFields = HelperMethods.replaceFieldsKey(customFields, namespaces, customFields = HelperMethods.replaceFieldsKey(customFields, namespaces,
!sourceInPath.equals(Sources.GRSF)); !sourceInPath.equals(Sources.GRSF));
@ -182,6 +194,7 @@ public class GrsfPublisherFisheryService {
logger.info("Invoking create method.."); logger.info("Invoking create method..");
// create the product // create the product
id = catalogue.createCKanDatasetMultipleCustomFields(apiKey, futureTitle, futureName, id = catalogue.createCKanDatasetMultipleCustomFields(apiKey, futureTitle, futureName,
publishInOrganization, authorFullname, authorMail, publishInOrganization, authorFullname, authorMail,
@ -190,7 +203,7 @@ public class GrsfPublisherFisheryService {
null, license, new ArrayList<String>(tags), customFields, resources, publicDataset); null, license, new ArrayList<String>(tags), customFields, resources, publicDataset);
// post actions // post actions
if(id != null) { // if(id != null) {
logger.info("Created record with identifier " + id); logger.info("Created record with identifier " + id);
String description = Constants.SHORT_NAME_CUSTOM_KEY + ": " + record.getShortName() + "\n"; String description = Constants.SHORT_NAME_CUSTOM_KEY + ": " + record.getShortName() + "\n";
@ -205,10 +218,10 @@ public class GrsfPublisherFisheryService {
status = Status.CREATED; status = Status.CREATED;
} else { // } else {
throw new Exception("There was an error during the record generation, sorry"); // throw new Exception("There was an error during the record generation, sorry");
} // }
} // }
} catch(Exception e) { } catch(Exception e) {
logger.error("Failed to create fishery record" + e); logger.error("Failed to create fishery record" + e);
status = Status.INTERNAL_SERVER_ERROR; status = Status.INTERNAL_SERVER_ERROR;
@ -218,68 +231,69 @@ public class GrsfPublisherFisheryService {
return Response.status(status).entity(responseBean).build(); return Response.status(status).entity(responseBean).build();
} }
@DELETE // @DELETE
@Path("delete-product") // @Path("delete-product")
@Consumes(MediaType.APPLICATION_JSON) // @Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON) // @Produces(MediaType.APPLICATION_JSON)
public Response deleteFishery(@NotNull(message = "input value is missing") @Valid DeleteRecord recordToDelete, // public Response deleteFishery(@NotNull(message = "input value is missing") @Valid DeleteRecord recordToDelete,
@PathParam("source") String source) throws ValidationException { // @PathParam("source") String source) throws ValidationException {
//
// retrieve context and username // // retrieve context and username
Caller caller = AuthorizationProvider.instance.get(); // Caller caller = AuthorizationProvider.instance.get();
String username = caller.getClient().getId(); // String username = caller.getClient().getId();
String context = ScopeProvider.instance.get(); // String context = ScopeProvider.instance.get();
//
ResponseCreationBean responseBean = new ResponseCreationBean(); // ResponseCreationBean responseBean = new ResponseCreationBean();
Status status = Status.INTERNAL_SERVER_ERROR; // Status status = Status.INTERNAL_SERVER_ERROR;
//
// check it is a fishery ... // // check it is a fishery ...
logger.info( // logger.info(
"Received call to delete product with id " + recordToDelete.getId() + ", checking if it is a fishery"); // "Received call to delete product with id " + recordToDelete.getId() + ", checking if it is a fishery");
try { // try {
//
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context); // DataCatalogue catalogue = null;
//// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
// Cast the source to the accepted ones //
Sources sourceInPath = Sources.onDeserialize(source); // // Cast the source to the accepted ones
logger.debug("The request is to delete a fishery object of source " + sourceInPath); // Sources sourceInPath = Sources.onDeserialize(source);
// logger.debug("The request is to delete a fishery object of source " + sourceInPath);
// retrieve the catalogue instance //
String apiKey = catalogue.getApiKeyFromUsername(username); // // retrieve the catalogue instance
CkanDataset fisheryInCkan = catalogue.getDataset(recordToDelete.getId(), apiKey); // String apiKey = catalogue.getApiKeyFromUsername(username);
// CkanDataset fisheryInCkan = catalogue.getDataset(recordToDelete.getId(), apiKey);
if(fisheryInCkan == null) { //
status = Status.NOT_FOUND; // if(fisheryInCkan == null) {
throw new Exception("There was a problem while serving your request. This item was not found"); // status = Status.NOT_FOUND;
} // throw new Exception("There was a problem while serving your request. This item was not found");
// }
// check it is in the right source and it is a fishery //
String type = fisheryInCkan.getExtrasAsHashMap().get(Constants.DOMAIN_CUSTOM_KEY); // // check it is in the right source and it is a fishery
// String type = fisheryInCkan.getExtrasAsHashMap().get(Constants.DOMAIN_CUSTOM_KEY);
if((fisheryInCkan.getOrganization().getName().equalsIgnoreCase(source) //
|| fisheryInCkan.getOrganization().getName().toLowerCase().startsWith(source.toLowerCase())) // if((fisheryInCkan.getOrganization().getName().equalsIgnoreCase(source)
&& Product_Type.FISHERY.getOrigName().equals(type)) { // || fisheryInCkan.getOrganization().getName().toLowerCase().contains(source))
// && Product_Type.FISHERY.getOrigName().equals(type)) {
logger.debug("Ok, this is a fishery of the right source, removing it"); //
boolean deleted = catalogue.deleteProduct(fisheryInCkan.getId(), apiKey, true); // logger.debug("Ok, this is a fishery of the right source, removing it");
// boolean deleted = catalogue.deleteProduct(fisheryInCkan.getId(), apiKey, true);
if(deleted) { //
logger.info("Fishery DELETED AND PURGED!"); // if(deleted) {
status = Status.OK; // logger.info("Fishery DELETED AND PURGED!");
responseBean.setId(fisheryInCkan.getId()); // status = Status.OK;
} // responseBean.setId(fisheryInCkan.getId());
} else { // }
status = Status.BAD_REQUEST; // } else {
throw new Exception( // status = Status.BAD_REQUEST;
"The id you are using doesn't belong to a Fishery item having source " + source + "!"); // throw new Exception(
} // "The id you are using doesn't belong to a Fishery item having source " + source + "!");
} catch(Exception e) { // }
logger.error("Failed to delete this", e); // } catch(Exception e) {
responseBean.setError(e.getMessage()); // logger.error("Failed to delete this", e);
} // responseBean.setError(e.getMessage());
// }
return Response.status(status).entity(responseBean).build(); //
} // return Response.status(status).entity(responseBean).build();
// }
@GET @GET
@Path("get-fisheries-ids") @Path("get-fisheries-ids")
@ -300,11 +314,12 @@ public class GrsfPublisherFisheryService {
// Cast the source to the accepted ones // Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source); Sources sourceInPath = Sources.onDeserialize(source);
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context); DataCatalogue catalogue = null;
if(catalogue == null) { // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
throw new Exception("There was a problem while serving your request"); // if(catalogue == null) {
} // throw new Exception("There was a problem while serving your request");
// }
//
// if it is a request for GRSF records, we have Fishery - Stock groups, so it is easy. // if it is a request for GRSF records, we have Fishery - Stock groups, so it is easy.
// For other cases, records needs to be parsed // For other cases, records needs to be parsed
if(sourceInPath.equals(Sources.GRSF)) if(sourceInPath.equals(Sources.GRSF))
@ -347,10 +362,13 @@ public class GrsfPublisherFisheryService {
logger.info("Received call to get the catalogue identifier for the product with name " + name); logger.info("Received call to get the catalogue identifier for the product with name " + name);
try { try {
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context); // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) { // if(catalogue == null) {
throw new Exception("There was a problem while serving your request"); // throw new Exception("There was a problem while serving your request");
} // }
//
DataCatalogue catalogue = null;
CkanDataset dataset = catalogue.getDataset(name, catalogue.getApiKeyFromUsername(username)); CkanDataset dataset = catalogue.getDataset(name, catalogue.getApiKeyFromUsername(username));
if(dataset != null) { if(dataset != null) {
Map<String,String> result = new HashMap<String,String>(); Map<String,String> result = new HashMap<String,String>();
@ -397,33 +415,40 @@ public class GrsfPublisherFisheryService {
throw new Exception("Please specify the '" + Constants.CATALOG_ID + "' property"); throw new Exception("Please specify the '" + Constants.CATALOG_ID + "' property");
} }
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) { DataCatalogue catalogue = null;
throw new Exception( // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
"There was a problem while serving your request. No catalogue instance was found in this context!");
} else { // if(catalogue == null) {
// throw new Exception(
// "There was a problem while serving your request. No catalogue instance was found in this context!");
// } else {
// get already published record and modify it // get already published record and modify it
String apiKey = catalogue.getApiKeyFromUsername(username); String apiKey = "";
// String apiKey = catalogue.getApiKeyFromUsername(username);
CkanDataset recordPublished = catalogue.getDataset(catalogId, apiKey); CkanDataset recordPublished = catalogue.getDataset(catalogId, apiKey);
if(recordPublished == null) if(recordPublished == null)
throw new Exception("A record with catalogue id " + catalogId + " does not exist!"); throw new Exception("A record with catalogue id " + catalogId + " does not exist!");
// retrieve the user's email and fullname String authorMail = "";
String authorMail = HelperMethods.getUserEmail(context, token); String authorFullname = "";
String authorFullname = HelperMethods.getUserFullname(context, token);
if(authorMail == null || authorFullname == null) { // // retrieve the user's email and fullname
logger.debug("Author fullname or mail missing, cannot continue"); // String authorMail = HelperMethods.getUserEmail(context, token);
throw new Exception("Sorry but there was not possible to retrieve your fullname/email!"); // String authorFullname = HelperMethods.getUserFullname(context, token);
} //
// if(authorMail == null || authorFullname == null) {
// logger.debug("Author fullname or mail missing, cannot continue");
// throw new Exception("Sorry but there was not possible to retrieve your fullname/email!");
// }
String organization = HelperMethods.retrieveOrgNameFromScope(context); String organization = "";
// String organization = HelperMethods.retrieveOrgNameFromScope(context);
// check he/she has admin role // check he/she has admin role
CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization); // CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// name, product url and are going to remain unchanged (so we keep them from the already published record); // name, product url and are going to remain unchanged (so we keep them from the already published record);
String name = recordPublished.getName(); String name = recordPublished.getName();
@ -461,11 +486,12 @@ public class GrsfPublisherFisheryService {
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY)); .equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY));
// convert extras' keys to keys with namespace // convert extras' keys to keys with namespace
Map<String,String> namespaces = HelperMethods Map<String,String> namespaces = new HashMap<>();
.getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_FISHERY); // Map<String,String> namespaces = HelperMethods
// .getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_FISHERY);
if(namespaces == null) //
throw new Exception("Failed to retrieve the namespaces for the key fields!"); // if(namespaces == null)
// throw new Exception("Failed to retrieve the namespaces for the key fields!");
// retrieve the already generated url // retrieve the already generated url
String modifiedUUIDKey = namespaces.containsKey(Constants.ITEM_URL_FIELD) String modifiedUUIDKey = namespaces.containsKey(Constants.ITEM_URL_FIELD)
@ -504,7 +530,7 @@ public class GrsfPublisherFisheryService {
} else { } else {
throw new Exception("There was an error during the item updated, sorry"); throw new Exception("There was an error during the item updated, sorry");
} }
} // }
} catch(Exception e) { } catch(Exception e) {
logger.error("Failed to update fishery record" + e); logger.error("Failed to update fishery record" + e);
responseBean.setError(e.getMessage()); responseBean.setError(e.getMessage());
@ -529,14 +555,15 @@ public class GrsfPublisherFisheryService {
ResponseCreationBean responseBean = new ResponseCreationBean(); ResponseCreationBean responseBean = new ResponseCreationBean();
Status status = Status.INTERNAL_SERVER_ERROR; Status status = Status.INTERNAL_SERVER_ERROR;
try { // try {
DataCatalogue catalogue = null;
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context); // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) { // if(catalogue == null) {
throw new Exception( // throw new Exception(
"There was a problem while serving your request. No catalogue instance was found in this context!"); // "There was a problem while serving your request. No catalogue instance was found in this context!");
} else { // } else {
// catalog id must be reported // catalog id must be reported
String uuid = bean.getUuid(); String uuid = bean.getUuid();
@ -546,21 +573,24 @@ public class GrsfPublisherFisheryService {
String apiKeyUser = catalogue.getApiKeyFromUsername(username); String apiKeyUser = catalogue.getApiKeyFromUsername(username);
CkanDataset record = catalogue.getDataset(uuid, apiKeyUser); CkanDataset record = catalogue.getDataset(uuid, apiKeyUser);
if(record == null) if(record == null){
throw new Exception("A record with knowledge_base_id id " + uuid + " does not exist!"); // throw new Exception("A record with knowledge_base_id id " + uuid + " does not exist!");
}
// check system type // check system type
boolean isGRSF = !record.getExtrasAsHashMap().get(Constants.SYSTEM_TYPE_CUSTOM_KEY) boolean isGRSF = !record.getExtrasAsHashMap().get(Constants.SYSTEM_TYPE_CUSTOM_KEY)
.equals(Constants.SYSTEM_TYPE_LEGACY_RECORD); .equals(Constants.SYSTEM_TYPE_FOR_SOURCES_VALUE);
if(!isGRSF) if(!isGRSF) {
throw new Exception("You are trying to modify a Legacy record!"); // throw new Exception("You are trying to modify a Legacy record!");
}
boolean rightDomain = record.getExtrasAsHashMap().get(Constants.DOMAIN_CUSTOM_KEY) boolean rightDomain = record.getExtrasAsHashMap().get(Constants.DOMAIN_CUSTOM_KEY)
.equalsIgnoreCase(Product_Type.FISHERY.getOrigName()); .equalsIgnoreCase(Product_Type.FISHERY.getOrigName());
if(!rightDomain) if(!rightDomain) {
throw new Exception("This is not a Fishery record!"); // throw new Exception("This is not a Fishery record!");
}
// update it // update it
Map<String,List<String>> updateStatus = new HashMap<String,List<String>>(1); Map<String,List<String>> updateStatus = new HashMap<String,List<String>>(1);
@ -572,11 +602,11 @@ public class GrsfPublisherFisheryService {
responseBean.setId(record.getId()); responseBean.setId(record.getId());
responseBean.setItemUrl(record.getExtrasAsHashMap().get(Constants.ITEM_URL_FIELD)); responseBean.setItemUrl(record.getExtrasAsHashMap().get(Constants.ITEM_URL_FIELD));
} // }
} catch(Exception e) { // } catch(Exception e) {
logger.error("Failed to update fishery record's status", e); // logger.error("Failed to update fishery record's status", e);
responseBean.setError(e.getMessage()); // responseBean.setError(e.getMessage());
} // }
return Response.status(status).entity(responseBean).build(); return Response.status(status).entity(responseBean).build();

View File

@ -29,19 +29,19 @@ import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider; import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.authorization.library.utils.Caller; import org.gcube.common.authorization.library.utils.Caller;
import org.gcube.common.scope.api.ScopeProvider; import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.DeleteRecord; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.UpdateRecordStatus; import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.DeleteRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord; import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseBean; import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseCreationBean; import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.UpdateRecordStatus;
import org.gcube.data_catalogue.grsf_publish_ws.utils.CommonServiceUtils; import org.gcube.data_catalogue.grsf_publish_ws.utils.CommonServiceUtils;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods; import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue; import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean; import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean;
import org.gcube.datacatalogue.ckanutillibrary.shared.RolesCkanGroupOrOrg; import org.gcube.datacatalogue.ckanutillibrary.shared.RolesCkanGroupOrOrg;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.enums.Product_Type; import org.gcube.datacatalogue.common.AAA_PORTED.Product_Type;
import org.gcube.datacatalogue.common.enums.Sources; import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import eu.trentorise.opendata.jackan.model.CkanDataset; import eu.trentorise.opendata.jackan.model.CkanDataset;
@ -51,7 +51,7 @@ import eu.trentorise.opendata.jackan.model.CkanDataset;
* @author Costantino Perciante (ISTI - CNR) * @author Costantino Perciante (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR) * @author Luca Frosini (ISTI - CNR)
*/ */
@Path("{source:firms|FIRMS|ram|RAM|grsf|GRSF|FishSource|fishsource|sdg|SDG}/stock/") @Path("{source:firms|FIRMS|ram|RAM|grsf|GRSF|FishSource|fishsource}/stock/")
public class GrsfPublisherStockService { public class GrsfPublisherStockService {
// the context // the context
@ -61,33 +61,33 @@ public class GrsfPublisherStockService {
// Logger // Logger
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(GrsfPublisherStockService.class); private static final org.slf4j.Logger logger = LoggerFactory.getLogger(GrsfPublisherStockService.class);
@GET // @GET
@Path("hello") // @Path("hello")
@Produces(MediaType.TEXT_PLAIN) // @Produces(MediaType.TEXT_PLAIN)
public Response hello() { // public Response hello() {
return Response.ok("Hello.. Stock service is here").build(); // return Response.ok("Hello.. Stock service is here").build();
} // }
//
@GET // @GET
@Path("get-licenses") // @Path("get-licenses")
@Produces(MediaType.APPLICATION_JSON) // @Produces(MediaType.APPLICATION_JSON)
public Response getLicenses() { // public Response getLicenses() {
Status status = Status.OK; // Status status = Status.OK;
String context = ScopeProvider.instance.get(); // String context = ScopeProvider.instance.get();
DataCatalogue catalogue; // DataCatalogue catalogue;
try { // try {
catalogue = HelperMethods.getDataCatalogueRunningInstance(context); // catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
Map<String,String> licenses = CommonServiceUtils.getLicenses(catalogue); // Map<String,String> licenses = CommonServiceUtils.getLicenses(catalogue);
if(licenses == null) // if(licenses == null)
status = Status.INTERNAL_SERVER_ERROR; // status = Status.INTERNAL_SERVER_ERROR;
return Response.status(status).entity(licenses).build(); // return Response.status(status).entity(licenses).build();
} catch(Exception e) { // } catch(Exception e) {
status = Status.INTERNAL_SERVER_ERROR; // status = Status.INTERNAL_SERVER_ERROR;
return Response.status(status) // return Response.status(status)
.entity(new ResponseBean(false, "Unable to retrieve license list " + e.getLocalizedMessage(), null)) // .entity(new ResponseBean(false, "Unable to retrieve license list " + e.getLocalizedMessage(), null))
.build(); // .build();
} // }
} // }
@POST @POST
@Path("publish-product") @Path("publish-product")
@ -113,35 +113,46 @@ public class GrsfPublisherStockService {
// Cast the source to the accepted ones // Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source); Sources sourceInPath = Sources.onDeserialize(source);
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) { DataCatalogue catalogue = null;
throw new Exception("There was a problem while serving your request. No catalogue instance was found!"); // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
} else { // if(catalogue == null) {
// throw new Exception("There was a problem while serving your request. No catalogue instance was found!");
// } else {
//
String apiKey = catalogue.getApiKeyFromUsername(username); String apiKey = "";
String organization = HelperMethods.retrieveOrgNameFromScope(context); //"grsf_admin"; String organization = "";
// String apiKey = catalogue.getApiKeyFromUsername(username);
// String organization = HelperMethods.retrieveOrgNameFromScope(context); //"grsf_admin";
// check it has admin role or throw exception // check it has admin role or throw exception
CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization); // CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// extend this role to the other organizations in this context // extend this role to the other organizations in this context
CommonServiceUtils.extendRoleToOtherOrganizations(username, catalogue, organization, // To support this gCat must be modified according to the following ticket
RolesCkanGroupOrOrg.ADMIN); // https://support.d4science.org/issues/19365
// CommonServiceUtils.extendRoleToOtherOrganizations(username, catalogue, organization,
// RolesCkanGroupOrOrg.ADMIN);
String authorMail = "";
String authorFullname = "";
// retrieve the user's email and fullname // retrieve the user's email and fullname
String authorMail = HelperMethods.getUserEmail(context, token); // String authorMail = HelperMethods.getUserEmail(context, token);
String authorFullname = HelperMethods.getUserFullname(context, token); // String authorFullname = HelperMethods.getUserFullname(context, token);
//
if(authorMail == null || authorFullname == null) { // if(authorMail == null || authorFullname == null) {
throw new Exception("Sorry but it was not possible to retrieve your fullname/email!"); // throw new Exception("Sorry but it was not possible to retrieve your fullname/email!");
} // }
// check the record has a name, at least // check the record has a name, at least
String futureName = record.getUuid(); String futureName = record.getUuid();
String futureTitle = record.getStockName(); String futureTitle = record.getStockName();
// check name and throws exception // check name and throws exception
CommonServiceUtils.checkName(futureName, catalogue); // CommonServiceUtils.checkName(futureName, catalogue);
// load other information // load other information
Map<String,List<String>> customFields = record.getExtrasFields(); Map<String,List<String>> customFields = record.getExtrasFields();
@ -170,11 +181,12 @@ public class GrsfPublisherStockService {
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY)); .equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY));
// convert extras' keys to keys with namespace // convert extras' keys to keys with namespace
Map<String,String> namespaces = HelperMethods Map<String,String> namespaces = new HashMap<>();
.getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_STOCK); // Map<String,String> namespaces = HelperMethods
// .getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_STOCK);
if(namespaces == null) //
throw new Exception("Failed to retrieve the namespaces for the key fields!"); // if(namespaces == null)
// throw new Exception("Failed to retrieve the namespaces for the key fields!");
customFields = HelperMethods.replaceFieldsKey(customFields, namespaces, customFields = HelperMethods.replaceFieldsKey(customFields, namespaces,
!sourceInPath.equals(Sources.GRSF)); !sourceInPath.equals(Sources.GRSF));
@ -209,7 +221,7 @@ public class GrsfPublisherStockService {
} else } else
throw new Exception( throw new Exception(
"There was an error during the product generation, sorry! Unable to create the dataset"); "There was an error during the product generation, sorry! Unable to create the dataset");
} // }
} catch(Exception e) { } catch(Exception e) {
logger.error("Failed to create stock record", e); logger.error("Failed to create stock record", e);
status = Status.INTERNAL_SERVER_ERROR; status = Status.INTERNAL_SERVER_ERROR;
@ -238,11 +250,13 @@ public class GrsfPublisherStockService {
"Received call to delete product with id " + recordToDelete.getId() + ", checking if it is a stock"); "Received call to delete product with id " + recordToDelete.getId() + ", checking if it is a stock");
try { try {
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context); DataCatalogue catalogue = null;
if(catalogue == null) {
status = Status.INTERNAL_SERVER_ERROR; // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
throw new Exception("There was a problem while serving your request"); // if(catalogue == null) {
} // status = Status.INTERNAL_SERVER_ERROR;
// throw new Exception("There was a problem while serving your request");
// }
// Cast the source to the accepted ones // Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source); Sources sourceInPath = Sources.onDeserialize(source);
@ -260,7 +274,7 @@ public class GrsfPublisherStockService {
// check it is in the right source and it is a stock // check it is in the right source and it is a stock
String type = stockInCkan.getExtrasAsHashMap().get(Constants.DOMAIN_CUSTOM_KEY); String type = stockInCkan.getExtrasAsHashMap().get(Constants.DOMAIN_CUSTOM_KEY);
if((stockInCkan.getOrganization().getName().equalsIgnoreCase(source) if((stockInCkan.getOrganization().getName().equalsIgnoreCase(source)
|| stockInCkan.getOrganization().getName().toLowerCase().startsWith(source.toLowerCase())) || stockInCkan.getOrganization().getName().toLowerCase().contains(source))
&& Product_Type.STOCK.getOrigName().equals(type)) { && Product_Type.STOCK.getOrigName().equals(type)) {
logger.debug("Ok, this is a stock of the right type, removing it"); logger.debug("Ok, this is a stock of the right type, removing it");
@ -304,11 +318,13 @@ public class GrsfPublisherStockService {
// Cast the source to the accepted ones // Cast the source to the accepted ones
Sources sourceInPath = Sources.onDeserialize(source); Sources sourceInPath = Sources.onDeserialize(source);
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) { DataCatalogue catalogue = null;
status = Status.INTERNAL_SERVER_ERROR; // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
throw new Exception("There was a problem while serving your request"); // if(catalogue == null) {
} // status = Status.INTERNAL_SERVER_ERROR;
// throw new Exception("There was a problem while serving your request");
// }
// if it is a request for GRSF records, we have Fishery - Stock groups, so it is easy. // if it is a request for GRSF records, we have Fishery - Stock groups, so it is easy.
// For other cases, records needs to be parsed // For other cases, records needs to be parsed
@ -348,10 +364,13 @@ public class GrsfPublisherStockService {
Status status = Status.INTERNAL_SERVER_ERROR; Status status = Status.INTERNAL_SERVER_ERROR;
logger.info("Received call to get the catalogue identifier for the product with name " + name); logger.info("Received call to get the catalogue identifier for the product with name " + name);
try { try {
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) { DataCatalogue catalogue = null;
throw new Exception("There was a problem while serving your request"); // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
} // if(catalogue == null) {
// throw new Exception("There was a problem while serving your request");
// }
CkanDataset dataset = catalogue.getDataset(name, catalogue.getApiKeyFromUsername(username)); CkanDataset dataset = catalogue.getDataset(name, catalogue.getApiKeyFromUsername(username));
if(dataset != null) { if(dataset != null) {
Map<String,String> result = new HashMap<String,String>(); Map<String,String> result = new HashMap<String,String>();
@ -398,32 +417,40 @@ public class GrsfPublisherStockService {
throw new Exception("Please specify the 'catalog_id' property"); throw new Exception("Please specify the 'catalog_id' property");
} }
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context); DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) { // if(catalogue == null) {
throw new Exception( // throw new Exception(
"There was a problem while serving your request. No catalogue instance was found in this context!"); // "There was a problem while serving your request. No catalogue instance was found in this context!");
} else { // } else {
// get already published record and modify it // get already published record and modify it
String apiKey = catalogue.getApiKeyFromUsername(username); String apiKey = "";
// String apiKey = catalogue.getApiKeyFromUsername(username);
CkanDataset recordPublished = catalogue.getDataset(catalogId, apiKey); CkanDataset recordPublished = catalogue.getDataset(catalogId, apiKey);
if(recordPublished == null) if(recordPublished == null)
throw new Exception("A record with id " + catalogId + " does not exist!"); throw new Exception("A record with id " + catalogId + " does not exist!");
// retrieve the user's email and fullname String authorMail = "";
String authorMail = HelperMethods.getUserEmail(context, token); String authorFullname = "";
String authorFullname = HelperMethods.getUserFullname(context, token);
if(authorMail == null || authorFullname == null) { // retrieve the user's email and fullname
logger.debug("Author fullname or mail missing, cannot continue"); // String authorMail = HelperMethods.getUserEmail(context, token);
throw new Exception("Sorry but there was not possible to retrieve your fullname/email!"); // String authorFullname = HelperMethods.getUserFullname(context, token);
} //
String organization = HelperMethods.retrieveOrgNameFromScope(context); //"grsf_admin"; // if(authorMail == null || authorFullname == null) {
// logger.debug("Author fullname or mail missing, cannot continue");
// throw new Exception("Sorry but there was not possible to retrieve your fullname/email!");
// }
String organization = "";
// String organization = HelperMethods.retrieveOrgNameFromScope(context); //"grsf_admin";
// check he/she has admin role // check he/she has admin role
CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization); // CommonServiceUtils.hasAdminRole(username, catalogue, apiKey, organization);
// name, product url and are going to remain unchanged (so we keep them from the publisher record); // name, product url and are going to remain unchanged (so we keep them from the publisher record);
String name = recordPublished.getName(); String name = recordPublished.getName();
@ -462,11 +489,12 @@ public class GrsfPublisherStockService {
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY)); .equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY));
// convert extras' keys to keys with namespace // convert extras' keys to keys with namespace
Map<String,String> namespaces = HelperMethods Map<String,String> namespaces = new HashMap<>();
.getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_STOCK); // Map<String,String> namespaces = HelperMethods
// .getFieldToFieldNameSpaceMapping(Constants.GENERIC_RESOURCE_NAME_MAP_KEY_NAMESPACES_STOCK);
if(namespaces == null) //
throw new Exception("Failed to retrieve the namespaces for the key fields!"); // if(namespaces == null)
// throw new Exception("Failed to retrieve the namespaces for the key fields!");
// retrieve the url // retrieve the url
String modifiedUUIDKey = namespaces.containsKey(Constants.ITEM_URL_FIELD) String modifiedUUIDKey = namespaces.containsKey(Constants.ITEM_URL_FIELD)
@ -506,7 +534,7 @@ public class GrsfPublisherStockService {
} else { } else {
throw new Exception("There was an error during the item updated, sorry"); throw new Exception("There was an error during the item updated, sorry");
} }
} // }
} catch(Exception e) { } catch(Exception e) {
logger.error("Failed to update stock record", e); logger.error("Failed to update stock record", e);
responseBean.setError(e.getMessage()); responseBean.setError(e.getMessage());
@ -534,12 +562,13 @@ public class GrsfPublisherStockService {
try { try {
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context); DataCatalogue catalogue = null;
// DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance(context);
if(catalogue == null) { //
throw new Exception( // if(catalogue == null) {
"There was a problem while serving your request. No catalogue instance was found in this context!"); // throw new Exception(
} else { // "There was a problem while serving your request. No catalogue instance was found in this context!");
// } else {
// catalog id must be reported // catalog id must be reported
String uuid = bean.getUuid(); String uuid = bean.getUuid();
@ -554,7 +583,7 @@ public class GrsfPublisherStockService {
// check system type // check system type
boolean isGRSF = !record.getExtrasAsHashMap().get(Constants.SYSTEM_TYPE_CUSTOM_KEY) boolean isGRSF = !record.getExtrasAsHashMap().get(Constants.SYSTEM_TYPE_CUSTOM_KEY)
.equals(Constants.SYSTEM_TYPE_LEGACY_RECORD); .equals(Constants.SYSTEM_TYPE_FOR_SOURCES_VALUE);
if(!isGRSF) if(!isGRSF)
throw new Exception("You are trying to modify a Legacy record!"); throw new Exception("You are trying to modify a Legacy record!");
@ -574,7 +603,7 @@ public class GrsfPublisherStockService {
responseBean.setKbUuid(uuid); responseBean.setKbUuid(uuid);
responseBean.setId(record.getId()); responseBean.setId(record.getId());
responseBean.setItemUrl(record.getExtrasAsHashMap().get(Constants.ITEM_URL_FIELD)); responseBean.setItemUrl(record.getExtrasAsHashMap().get(Constants.ITEM_URL_FIELD));
} // }
} catch(Exception e) { } catch(Exception e) {
logger.error("Failed to update stock record's status", e); logger.error("Failed to update stock record's status", e);
responseBean.setError(e.getMessage()); responseBean.setError(e.getMessage());

View File

@ -4,6 +4,7 @@ import java.beans.PropertyDescriptor;
import java.lang.reflect.Field; import java.lang.reflect.Field;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
@ -13,34 +14,31 @@ import java.util.concurrent.ConcurrentHashMap;
import javax.servlet.ServletContext; import javax.servlet.ServletContext;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CkanResource; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Base;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.CkanResource;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Common;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.RefersToBean; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.Resource; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Group;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.RefersToBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.Base; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.Common; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Tag;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.TimeSeriesBean;
import org.gcube.data_catalogue.grsf_publish_ws.json.output.ResponseCreationBean; import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.ResponseCreationBean;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.WritePostCatalogueManagerThread;
import org.gcube.data_catalogue.grsf_publish_ws.utils.csv.ManageTimeSeriesThread; import org.gcube.data_catalogue.grsf_publish_ws.utils.csv.ManageTimeSeriesThread;
import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.AssociationToGroupThread; import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.AssociationToGroupThread;
import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.WritePostCatalogueManagerThread;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue; import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean; import org.gcube.datacatalogue.ckanutillibrary.shared.ResourceBean;
import org.gcube.datacatalogue.ckanutillibrary.shared.RolesCkanGroupOrOrg; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Product_Type;
import org.gcube.datacatalogue.common.enums.Product_Type; import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.gcube.datacatalogue.common.enums.Sources; import org.gcube.datacatalogue.common.AAA_PORTED.Status;
import org.gcube.datacatalogue.common.enums.Status; import org.gcube.datacatalogue.common.AAA_PORTED.Stock_Type;
import org.json.simple.JSONObject; import org.json.simple.JSONObject;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import eu.trentorise.opendata.jackan.model.CkanDataset;
import eu.trentorise.opendata.jackan.model.CkanLicense;
/** /**
* Services common utils. * Services common utils.
* @author Costantino Perciante (ISTI - CNR) * @author Costantino Perciante (ISTI - CNR)
@ -53,20 +51,20 @@ public class CommonServiceUtils {
private static final int TAG_MAX_SIZE = 100; private static final int TAG_MAX_SIZE = 100;
private static Map<String,Boolean> extensionsCheck = new ConcurrentHashMap<>(); private static Map<String,Boolean> extensionsCheck = new ConcurrentHashMap<>();
/** // /**
* Retrieve the list of licenses for stocks and fisheries // * Retrieve the list of licenses for stocks and fisheries
* @return // * @return
*/ // */
public static Map<String,String> getLicenses(DataCatalogue catalogue) { // public static Map<String,String> getLicenses(DataCatalogue catalogue) {
logger.info("Requested licenses..."); // logger.info("Requested licenses...");
Map<String,String> toReturn = new HashMap<String,String>(); // Map<String,String> toReturn = new HashMap<String,String>();
List<CkanLicense> licenses = catalogue.getLicenses(); // List<CkanLicense> licenses = catalogue.getLicenses();
//
for(CkanLicense ckanLicense : licenses) { // for(CkanLicense ckanLicense : licenses) {
toReturn.put(ckanLicense.getId(), ckanLicense.getTitle()); // toReturn.put(ckanLicense.getId(), ckanLicense.getTitle());
} // }
return toReturn; // return toReturn;
} // }
/** /**
* Validate an aggregated GRSF record. TODO use @Valid tags * Validate an aggregated GRSF record. TODO use @Valid tags
@ -217,90 +215,6 @@ public class CommonServiceUtils {
} }
} }
public static final String GROUP_SUFFIX = "-group";
/**
* Convert a group name to its id on ckan
* @param origName
* @return
*/
private static String getGroupIDOnCkan(String origName){
if(origName == null) {
throw new IllegalArgumentException("origName cannot be null");
}
String modified = origName.replaceAll("\\(", "");
modified = modified.replaceAll("\\)", "");
modified = modified.trim().toLowerCase().replaceAll("[^A-Za-z0-9-]", "-");
if(modified.startsWith("-")) {
modified = modified.substring(1);
}
if(modified.endsWith("-")) {
modified = modified.substring(0, modified.length() -1);
}
return modified;
}
public static String getGroupId(String groupName) {
StringBuffer stringBuffer = new StringBuffer();
stringBuffer.append(groupName);
/*
* The "_group" suffix is added to all groups to
* avoid issues on groups and organizations having the same name
* e.g. RAM organization (id=ram) and RAM group (id=ram_group)
*/
if(!groupName.endsWith(GROUP_SUFFIX)) {
stringBuffer.append(GROUP_SUFFIX);
}
return getGroupIDOnCkan(stringBuffer.toString());
}
private static void addGroup(Group group, Sources source, String value, Set<String> groups) {
String conditionToCheck = group.condition();
String groupNameOverValue = group.groupNameOverValue();
boolean prependSource = group.prependSourceToGroupName();
boolean match = conditionToCheck.isEmpty() ? true
: value.matches(conditionToCheck);
if(match) {
StringBuffer stringBuffer = new StringBuffer();
if(prependSource) {
stringBuffer.append(source.getURLPath());
stringBuffer.append(" ");
}
if(groupNameOverValue.isEmpty()) {
stringBuffer.append(value);
}else {
stringBuffer.append(groupNameOverValue);
}
String groupId = getGroupId(stringBuffer.toString());
groups.add(groupId);
}
}
/**
* Add the record to the group of sources
* @param groups
* @param sourcesList
* @param productType
* @param sourceInPath
*/
private static void addRecordToGroups(Set<String> groups, Set<String> sourcesList, Product_Type productType, Sources sourceInPath) {
if(sourceInPath == Sources.GRSF) {
groups.add(getGroupId(Sources.GRSF.getURLPath())); // i.e. grsf_group
}else {
groups.add(getGroupId(Constants.SYSTEM_TYPE_LEGACY_RECORD)); // i.e. legacy_group
}
// evaluate the custom fields/tags, resources and groups
groups.add(getGroupId(productType.getOrigName())); //i.e. stock_group or fishery_group
for(String source : sourcesList) {
groups.add(getGroupId(source)); // i.e. firms_group, fishsource_group, ram_group
}
}
/** /**
* Retrieve the list of groups' names for this object * Retrieve the list of groups' names for this object
*/ */
@ -308,22 +222,74 @@ public class CommonServiceUtils {
Sources source) { Sources source) {
if(field.isAnnotationPresent(Group.class)) { if(field.isAnnotationPresent(Group.class)) {
Group group = field.getAnnotation(Group.class); Group group = field.getAnnotation(Group.class);
String conditionToCheck = group.condition();
String groupNameOverValue = group.groupNameOverValue();
// See https://support.d4science.org/issues/11832
boolean assessmentUnit = false;
boolean prependSource = group.prependSourceToGroupName();
if(record instanceof StockRecord) {
StockRecord stockRecord = (StockRecord) record;
Stock_Type stock_Type = stockRecord.getType();
if(stock_Type != Stock_Type.Assessment_Unit) {
prependSource = false;
}else {
assessmentUnit = true;
}
}
// end patch for https://support.d4science.org/issues/11832
try { try {
Object f = new PropertyDescriptor(field.getName(), current).getReadMethod().invoke(record); Object f = new PropertyDescriptor(field.getName(), current).getReadMethod().invoke(record);
if(f != null) { if(f != null) {
if(f instanceof List<?>) { if(f instanceof List<?>) {
List asList = ((List) f); List asList = ((List) f);
if(!asList.isEmpty()) { if(!asList.isEmpty()) {
logger.debug("The object annotated with @Group is a list. Adding ... "); logger.debug("The object annotated with @Group is a list. Adding ... ");
// add all the available elements
// else add all the available elements
for(int i = 0; i < asList.size(); i++) { for(int i = 0; i < asList.size(); i++) {
String value = asList.get(i).toString().trim(); boolean match = conditionToCheck.isEmpty() ? true
addGroup(group, source, value, groups); : asList.get(i).toString().trim().matches(conditionToCheck);
if(match) {
String groupName = groupNameOverValue.isEmpty()
? HelperMethods.getGroupNameOnCkan(source.toString().toLowerCase() + "-"
+ asList.get(i).toString().trim())
: source.toString().toLowerCase() + "-" + groupNameOverValue;
if(assessmentUnit && !prependSource) {
groups.add(groupNameOverValue);
}else {
groups.add(groupName);
}
}
} }
} }
} else { } else {
String value = f.toString().trim();
addGroup(group, source, value, groups); // also convert to the group name that should be on ckan
boolean match = conditionToCheck.isEmpty() ? true
: f.toString().trim().matches(conditionToCheck);
if(match) {
String groupName = groupNameOverValue.isEmpty()
? HelperMethods.getGroupNameOnCkan(
source.toString().toLowerCase() + "-" + f.toString().trim())
: source.toString().toLowerCase() + "-" + groupNameOverValue;
if(assessmentUnit && !prependSource) {
groups.add(groupNameOverValue);
}else {
groups.add(groupName);
}
}
} }
} }
@ -331,6 +297,7 @@ public class CommonServiceUtils {
logger.error("Failed to read value for field " + field.getName() + " skipping", e); logger.error("Failed to read value for field " + field.getName() + " skipping", e);
} }
} }
} }
/** /**
@ -450,45 +417,46 @@ public class CommonServiceUtils {
} }
} }
/**
* Evaluate if the user has the admin role
* Throws exception if he/she doesn't
*/
public static void hasAdminRole(String username, DataCatalogue catalogue, String apiKey, String organization)
throws Exception {
String role = catalogue.getRoleOfUserInOrganization(username, organization, apiKey);
logger.info("Role of the user " + username + " is " + role + " in " + organization);
if(role == null || role.isEmpty() || !role.equalsIgnoreCase(RolesCkanGroupOrOrg.ADMIN.toString()))
throw new Exception(
"You are not authorized to create a product. Please check you have the Catalogue-Administrator role!");
}
/** // /**
* Check this record's name // * Evaluate if the user has the admin role
* @param futureName // * Throws exception if he/she doesn't
* @param catalogue // */
* @throws Exception on name check // public static void hasAdminRole(String username, DataCatalogue catalogue, String apiKey, String organization)
*/ // throws Exception {
public static void checkName(String futureName, DataCatalogue catalogue) throws Exception { //
// String role = catalogue.getRoleOfUserInOrganization(username, organization, apiKey);
if(!HelperMethods.isNameValid(futureName)) { // logger.info("Role of the user " + username + " is " + role + " in " + organization);
throw new Exception( //
"The 'uuid_knowledge_base' must contain only alphanumeric characters, and symbols like '.' or '_', '-'"); // if(role == null || role.isEmpty() || !role.equalsIgnoreCase(RolesCkanGroupOrOrg.ADMIN.toString()))
} else { // throw new Exception(
// "You are not authorized to create a product. Please check you have the Catalogue-Administrator role!");
logger.debug("Checking if such name [" + futureName + "] doesn't exist ..."); //
boolean alreadyExists = catalogue.existProductWithNameOrId(futureName); // }
if(alreadyExists) { // /**
logger.debug("A product with 'uuid_knowledge_base' " + futureName + " already exists"); // * Check this record's name
throw new Exception("A product with 'uuid_knowledge_base' " + futureName + " already exists"); // * @param futureName
// * @param catalogue
} // * @throws Exception on name check
} // */
} // public static void checkName(String futureName, DataCatalogue catalogue) throws Exception {
//
// if(!HelperMethods.isNameValid(futureName)) {
// throw new Exception(
// "The 'uuid_knowledge_base' must contain only alphanumeric characters, and symbols like '.' or '_', '-'");
// } else {
//
// logger.debug("Checking if such name [" + futureName + "] doesn't exist ...");
// boolean alreadyExists = catalogue.existProductWithNameOrId(futureName);
//
// if(alreadyExists) {
// logger.debug("A product with 'uuid_knowledge_base' " + futureName + " already exists");
// throw new Exception("A product with 'uuid_knowledge_base' " + futureName + " already exists");
//
// }
// }
// }
/** /**
* Validate and check sources * Validate and check sources
@ -514,20 +482,39 @@ public class CommonServiceUtils {
// validate the record if it is a GRSF one and set the record type and in manage context // validate the record if it is a GRSF one and set the record type and in manage context
// Status field is needed only in the Manage context for GRSF records // Status field is needed only in the Manage context for GRSF records
if( (context.equals((String) contextServlet.getInitParameter(HelperMethods.MANAGE_CONTEX_KEY))) || (context.equals((String) contextServlet.getInitParameter(HelperMethods.PREVALIDATE_CONTEX_KEY)))) {
// In web.xml a parameter indicates the Admin VRE as full path.
if(context.equals((String) contextServlet.getInitParameter(HelperMethods.MANAGE_CONTEX_KEY))) {
// If we are in Admin VRE and the source is GRSF
if(sourceInPath.equals(Sources.GRSF)) { if(sourceInPath.equals(Sources.GRSF)) {
// RefersTo cannot be empty or null in GRSF_Admin for a GRSF record
List<RefersToBean> refersTo = record.getRefersTo(); List<RefersToBean> refersTo = record.getRefersTo();
if(refersTo == null || refersTo.isEmpty()) if(refersTo == null || refersTo.isEmpty())
throw new Exception("refers_to is empty for a GRSF record"); throw new Exception("refers_to is empty for a GRSF record");
// For each RefersTo a Resource is created in the record. The resource point to the referred record.
// We have also to set database sources
String databaseSource = ""; String databaseSource = "";
// we have the id within the catalog of this record. This means that we can retrieve the record and its system:type // we have the id within the catalog of this record. This means that we can retrieve the record and its system:type
for(RefersToBean refersToBean : refersTo) { for(RefersToBean refersToBean : refersTo) {
String sourceOrganization = getRecordOrganization(refersToBean.getId(), apiKey, context);
String sourceOrganization = "";
// Here there is a lookup to the referred records.
// getRecordOrganization read the record and get the organization
// String sourceOrganization = getRecordOrganization(refersToBean.getId(), apiKey, context);
resources.add(new ResourceBean(refersToBean.getUrl(), sourceOrganization, "", null, username, null, resources.add(new ResourceBean(refersToBean.getUrl(), sourceOrganization, "", null, username, null,
null)); null));
sourcesList.add(sourceOrganization.toLowerCase()); sourcesList.add(sourceOrganization.toLowerCase());
// concatenating the organization in a string which provide the databaseSource value :O
// Better using a list
databaseSource += sourceOrganization + " "; databaseSource += sourceOrganization + " ";
} }
@ -542,13 +529,13 @@ public class CommonServiceUtils {
if(databaseSources!=null) { if(databaseSources!=null) {
for(Resource<Sources> source : databaseSources) { for(Resource<Sources> source : databaseSources) {
Sources sourceName = source.getName(); Sources sourceName = source.getName();
sourcesList.add(sourceName.getURLPath()); sourcesList.add(sourceName.getOrigName().toLowerCase());
} }
} }
} }
addRecordToGroups(groups, sourcesList, productType, sourceInPath); // append to groups: we need to add this record to the correspondent group of the sources
addRecordToGroupSources(groups, new ArrayList(sourcesList), productType, sourceInPath);
// validate // validate
CommonServiceUtils.validateAggregatedRecord(record, sourceInPath); CommonServiceUtils.validateAggregatedRecord(record, sourceInPath);
@ -561,20 +548,37 @@ public class CommonServiceUtils {
sourceInPath.equals(Sources.GRSF) sourceInPath.equals(Sources.GRSF)
? productType.equals(Product_Type.FISHERY) ? ((FisheryRecord) record).getType().getOrigName() ? productType.equals(Product_Type.FISHERY) ? ((FisheryRecord) record).getType().getOrigName()
: ((StockRecord) record).getType().getOrigName() : ((StockRecord) record).getType().getOrigName()
: Constants.SYSTEM_TYPE_LEGACY_RECORD); : Constants.SYSTEM_TYPE_FOR_SOURCES_VALUE);
logger.debug("Domain is " + productType.getOrigName() + " and system type " + record.getSystemType()); logger.debug("Domain is " + productType.getOrigName() + " and system type " + record.getSystemType());
// evaluate the custom fields/tags, resources and groups
/* groups.add(sourceInPath.getOrigName().toLowerCase() + "-" + productType.getOrigName().toLowerCase()); //e.g. grsf-fishery
* It has been decided to add tags also for legacy records see #23216 boolean skipTags = !sourceInPath.equals(Sources.GRSF); // no tags for the Original records
* boolean skipTags = !sourceInPath.equals(Sources.GRSF); // no tags for the Original records CommonServiceUtils.getTagsGroupsResourcesExtrasByRecord(tags, skipTags, groups, false, resources, false,
*/
CommonServiceUtils.getTagsGroupsResourcesExtrasByRecord(tags, false, groups, false, resources, false,
customFields, record, username, sourceInPath); customFields, record, username, sourceInPath);
} }
/**
* Add the record to the group of sources
* @param groups
* @param sourcesList
* @param productType
* @param sourceInPath
*/
private static void addRecordToGroupSources(Set<String> groups, List<String> sourcesList, Product_Type productType,
Sources sourceInPath) {
Collections.sort(sourcesList); // be sure the name are sorted because the groups have been generated this way
String groupName = sourceInPath.getOrigName().toLowerCase() + "-" + productType.getOrigName().toLowerCase();
for(String source : sourcesList) {
groupName += "-" + source;
}
groups.add(groupName);
}
// /** // /**
// * Fetch the system:type property from a record // * Fetch the system:type property from a record
// * @param itemIdOrName // * @param itemIdOrName
@ -583,28 +587,27 @@ public class CommonServiceUtils {
// * @throws Exception // * @throws Exception
// */ // */
// public static String getSystemTypeValue(String itemIdOrName, String apiKey, String context) throws Exception { // public static String getSystemTypeValue(String itemIdOrName, String apiKey, String context) throws Exception {
//
// DataCatalogue catalog = HelperMethods.getDataCatalogueRunningInstance(context); // DataCatalogue catalog = HelperMethods.getDataCatalogueRunningInstance(context);
// CkanDataset dataset = catalog.getDataset(itemIdOrName, apiKey); // CkanDataset dataset = catalog.getDataset(itemIdOrName, apiKey);
// if(dataset == null) { // if(dataset == null)
// throw new Exception("Unable to find record with id or name " + itemIdOrName); // throw new Exception("Unable to find record with id or name " + itemIdOrName);
// }
// String systemTypeValue = dataset.getExtrasAsHashMap().get(Constants.SYSTEM_TYPE_CUSTOM_KEY); // String systemTypeValue = dataset.getExtrasAsHashMap().get(Constants.SYSTEM_TYPE_CUSTOM_KEY);
// if(systemTypeValue == null || systemTypeValue.isEmpty()) { // if(systemTypeValue == null || systemTypeValue.isEmpty())
// throw new Exception(Constants.SYSTEM_TYPE_CUSTOM_KEY + " property not set in record " + itemIdOrName); // throw new Exception(Constants.SYSTEM_TYPE_CUSTOM_KEY + " property not set in record " + itemIdOrName);
// }else { // else
// return systemTypeValue; // return systemTypeValue;
// }
// //
// } // }
//
public static String getRecordOrganization(String itemIdOrName, String apiKey, String context) throws Exception { // public static String getRecordOrganization(String itemIdOrName, String apiKey, String context) throws Exception {
DataCatalogue catalog = HelperMethods.getDataCatalogueRunningInstance(context); // DataCatalogue catalog = HelperMethods.getDataCatalogueRunningInstance(context);
CkanDataset dataset = catalog.getDataset(itemIdOrName, apiKey); // CkanDataset dataset = catalog.getDataset(itemIdOrName, apiKey);
if(dataset == null) // if(dataset == null)
throw new Exception("Unable to find record with id or name " + itemIdOrName); // throw new Exception("Unable to find record with id or name " + itemIdOrName);
else // else
return dataset.getOrganization().getTitle(); // return dataset.getOrganization().getTitle();
} // }
/** /**
* Actions to execute once the dataset has been updated or created. * Actions to execute once the dataset has been updated or created.
@ -670,12 +673,12 @@ public class CommonServiceUtils {
new ManageTimeSeriesThread(record, futureName, username, catalogue, context, token).start(); new ManageTimeSeriesThread(record, futureName, username, catalogue, context, token).start();
// write a post if the product has been published in grsf context // write a post if the product has been published in grsf context
if(catalogue.isSocialPostEnabled() && !isUpdated && context // if(catalogue.isSocialPostEnabled() && !isUpdated && context
.equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY))) { // .equals((String) contextServlet.getInitParameter(HelperMethods.PUBLIC_CONTEX_KEY))) {
new WritePostCatalogueManagerThread(context, token, futureTitle, itemUrlForThread, true, // new WritePostCatalogueManagerThread(context, token, futureTitle, itemUrlForThread, true,
new ArrayList<String>(), authorFullname).start(); // new ArrayList<String>(), authorFullname).start();
logger.info("Thread to write a post about the new product has been launched"); // logger.info("Thread to write a post about the new product has been launched");
} // }
} catch(InterruptedException e) { } catch(InterruptedException e) {
logger.error("Error", e); logger.error("Error", e);
} }
@ -683,25 +686,25 @@ public class CommonServiceUtils {
}).start(); }).start();
} }
/** // /**
* Extend roles to other organization // * Extend roles to other organization
* @param username // * @param username
* @param catalogue // * @param catalogue
* @param organization // * @param organization
* @param admin // * @param admin
*/ // */
public static void extendRoleToOtherOrganizations(String username, DataCatalogue catalogue, String organization, // public static void extendRoleToOtherOrganizations(String username, DataCatalogue catalogue, String organization,
RolesCkanGroupOrOrg admin) { // RolesCkanGroupOrOrg admin) {
//
logger.debug("Checking if role extension is needed here"); // logger.debug("Checking if role extension is needed here");
if(extensionsCheck.containsKey(username) && extensionsCheck.get(username)) // if(extensionsCheck.containsKey(username) && extensionsCheck.get(username))
return; // return;
else { // else {
catalogue.assignRolesOtherOrganization(username, organization, admin); // catalogue.assignRolesOtherOrganization(username, organization, admin);
extensionsCheck.put(username, true); // extensionsCheck.put(username, true);
} // }
//
} // }
/** /**
* Evaluate in which organization a record has to be published. The only exception is when grsf_admin is involved. * Evaluate in which organization a record has to be published. The only exception is when grsf_admin is involved.
@ -710,14 +713,9 @@ public class CommonServiceUtils {
* @return * @return
*/ */
public static String evaluateOrganization(String organization, Sources sourceInPath) { public static String evaluateOrganization(String organization, Sources sourceInPath) {
if(sourceInPath.equals(Sources.GRSF) && if(sourceInPath.equals(Sources.GRSF) && organization.equals(Constants.GRSF_ADMIN_ORGANIZATION_NAME))
(organization.compareTo(Constants.GRSF_ADMIN_ORGANIZATION_NAME)==0 || organization.compareTo(Constants.GRSF_PRE_ORGANIZATION_NAME)==0)){ return Constants.GRSF_ADMIN_ORGANIZATION_NAME;
if(organization.compareTo(Constants.GRSF_ADMIN_ORGANIZATION_NAME)==0) { else
return Constants.GRSF_ADMIN_ORGANIZATION_NAME; return sourceInPath.getOrigName().toLowerCase();
}else {
return Constants.GRSF_PRE_ORGANIZATION_NAME;
}
}else
return sourceInPath.getURLPath();
} }
} }

View File

@ -5,6 +5,7 @@ import static org.gcube.resources.discovery.icclient.ICFactory.client;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.File; import java.io.File;
import java.io.FileInputStream; import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream; import java.io.InputStream;
import java.io.InputStreamReader; import java.io.InputStreamReader;
import java.io.StringReader; import java.io.StringReader;
@ -19,19 +20,23 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.DocumentBuilderFactory;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider; import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.homelibrary.home.exceptions.InternalErrorException;
import org.gcube.common.homelibrary.home.workspace.WorkspaceFolder;
import org.gcube.common.homelibrary.home.workspace.WorkspaceItem;
import org.gcube.common.homelibrary.home.workspace.exceptions.InsufficientPrivilegesException;
import org.gcube.common.homelibrary.home.workspace.exceptions.ItemAlreadyExistException;
import org.gcube.common.homelibrary.home.workspace.exceptions.ItemNotFoundException;
import org.gcube.common.homelibrary.home.workspace.folder.items.ExternalFile;
import org.gcube.common.resources.gcore.utils.XPathHelper; import org.gcube.common.resources.gcore.utils.XPathHelper;
import org.gcube.common.scope.api.ScopeProvider; import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.storagehub.client.dsl.FileContainer; import org.gcube.data_catalogue.grsf_publish_ws.AAA_UNNEEDED.GcoreEndPointReaderSocial;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue; import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueFactory; import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueFactory;
import org.gcube.datacatalogue.common.caches.CacheImpl; import org.gcube.datacatalogue.common.AAA_UNNEEDED.CacheImpl;
import org.gcube.datacatalogue.common.caches.CacheInterface; import org.gcube.datacatalogue.common.AAA_UNNEEDED.CacheInterface;
import org.gcube.resources.discovery.client.api.DiscoveryClient; import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.Query; import org.gcube.resources.discovery.client.queries.api.Query;
import org.gcube.resources.discovery.client.queries.impl.QueryBox; import org.gcube.resources.discovery.client.queries.impl.QueryBox;
import org.gcube.social_networking.social_networking_client_library.UserClient;
import org.jsoup.Jsoup; import org.jsoup.Jsoup;
import org.jsoup.safety.Whitelist; import org.jsoup.safety.Whitelist;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@ -68,12 +73,9 @@ public abstract class HelperMethods {
// to be retrieved from the web.xml // to be retrieved from the web.xml
public static final String MANAGE_CONTEX_KEY = "ManageVRE"; public static final String MANAGE_CONTEX_KEY = "ManageVRE";
public static final String PREVALIDATE_CONTEX_KEY = "PreValidateVRE";
public static final String PUBLIC_CONTEX_KEY = "PublicVRE"; public static final String PUBLIC_CONTEX_KEY = "PublicVRE";
private static final String CSV_MIME = "text/csv";
private static final String PATH_SEPARATOR = "/"; private static final String PATH_SEPARATOR = "/";
// This key is used in replaceFieldsKey() function to indicate to remove the metadata field
private static final String NONE_KEY = "none:none";
// caches // caches
private static CacheInterface<String, String> userEmailCache = new CacheImpl<String, String>(1000 * 60 * 60 * 24); private static CacheInterface<String, String> userEmailCache = new CacheImpl<String, String>(1000 * 60 * 60 * 24);
@ -82,38 +84,59 @@ public abstract class HelperMethods {
private static CacheInterface<String, DataCatalogue> catalogueCache = new CacheImpl<String, DataCatalogue>(1000 * 60 * 60 * 24); private static CacheInterface<String, DataCatalogue> catalogueCache = new CacheImpl<String, DataCatalogue>(1000 * 60 * 60 * 24);
/** /**
* Retrieve the running instance of the data catalogue for this scope * Convert a group name to its id on ckan
* @return * @param origName
* @throws Exception
*/
public static DataCatalogue getDataCatalogueRunningInstance(String scope) throws Exception{
if(catalogueCache.get(scope) != null)
return catalogueCache.get(scope);
else{
try{
DataCatalogue instance = DataCatalogueFactory.getFactory().getUtilsPerScope(scope);
catalogueCache.insert(scope, instance);
return instance;
}catch(Exception e){
logger.error("Failed to instantiate data catalogue lib", e);
throw new Exception("Failed to retrieve catalogue information");
}
}
}
/**
* Retrieve the organization name in which the user wants to publish starting from the scope
* @param contextInWhichPublish
* @return * @return
*/ */
public static String retrieveOrgNameFromScope(String scope) { public static String getGroupNameOnCkan(String origName){
String[] splittedScope = scope.split("/"); if(origName == null)
return splittedScope[splittedScope.length - 1].toLowerCase(); throw new IllegalArgumentException("origName cannot be null");
String modified = origName.trim().toLowerCase().replaceAll("[^A-Za-z0-9-]", "-");
if(modified.startsWith("-"))
modified = modified.substring(1);
if(modified.endsWith("-"))
modified = modified.substring(0, modified.length() -1);
logger.info("Group name generated is " + modified);
return modified;
} }
// /**
// * Retrieve the running instance of the data catalogue for this scope
// * @return
// * @throws Exception
// */
// public static DataCatalogue getDataCatalogueRunningInstance(String scope) throws Exception{
//
// if(catalogueCache.get(scope) != null)
// return catalogueCache.get(scope);
// else{
// try{
// DataCatalogue instance = DataCatalogueFactory.getFactory().getUtilsPerScope(scope);
// catalogueCache.insert(scope, instance);
// return instance;
// }catch(Exception e){
// logger.error("Failed to instantiate data catalogue lib", e);
// throw new Exception("Failed to retrieve catalogue information");
// }
// }
// }
// /**
// * Retrieve the organization name in which the user wants to publish starting from the scope
// * @param contextInWhichPublish
// * @return
// */
// public static String retrieveOrgNameFromScope(String scope) {
//
// String[] splittedScope = scope.split("/");
// return splittedScope[splittedScope.length - 1].toLowerCase();
//
// }
/** /**
* Validate the name the product will have * Validate the name the product will have
* @param futureName * @param futureName
@ -128,52 +151,59 @@ public abstract class HelperMethods {
} }
} }
/**
/* *//**
* Retrieve the user's email given his/her username * Retrieve the user's email given his/her username
* @param context * @param context
* @param token * @param token
* @return * @return
* @throws Exception * @throws Exception
*/ *//*
public static String getUserEmail(String context, String token) throws Exception{ public static String getUserEmail(String context, String token){
// check in cache // check in cache
String email = null; String result = null;
if((email = (String) userEmailCache.get(token)) != null){ if((result = (String) userEmailCache.get(token)) != null){
return email; return result;
}else{ }else{
UserClient userClient = new UserClient(); String baseUrl = new GcoreEndPointReaderSocial(context).getBasePath();
email = userClient.getEmail(); String url = baseUrl.endsWith("/") ? baseUrl + "users/getUserEmail?gcube-token=" + token :
userEmailCache.insert(token, email); baseUrl + "/users/getUserEmail?gcube-token=" + token;
logger.debug("Request url is " + url);
result = executGETHttpRequest(url, 200);
userEmailCache.insert(token, result);
} }
return email; return result;
} }
/** *//**
* Retrieve the user's fullname given his/her username * Retrieve the user's fullname given his/her username
* @param context * @param context
* @param token * @param token
* @return * @return
* @throws Exception * @throws Exception
*/ *//*
public static String getUserFullname(String context, String token) throws Exception{ public static String getUserFullname(String context, String token){
// check in cache // check in cache
String fullName = null; String result = null;
if((fullName = (String) userFullnameCache.get(token)) != null){ if((result = (String) userFullnameCache.get(token)) != null){
return fullName; return result;
}else{ }else{
UserClient userClient = new UserClient(); String baseUrl = new GcoreEndPointReaderSocial(context).getBasePath();
fullName = userClient.getFullName(); String url = baseUrl.endsWith("/") ? baseUrl + "users/getUserFullname?gcube-token=" + token :
userFullnameCache.insert(token, fullName); baseUrl + "/users/getUserFullname?gcube-token=" + token;
logger.debug("Request url is " + url);
result = executGETHttpRequest(url, 200);
userFullnameCache.insert(token, result);
} }
return fullName; return result;
} }
/** *//**
* Execute the GET http request at this url, and return the result as string * Execute the GET http request at this url, and return the result as string
* @return * @return
*/ *//*
private static String executGETHttpRequest(String url, int expectedCodeOnSuccess){ private static String executGETHttpRequest(String url, int expectedCodeOnSuccess){
try(CloseableHttpClient client = HttpClientBuilder.create().build();){ try(CloseableHttpClient client = HttpClientBuilder.create().build();){
@ -203,7 +233,7 @@ public abstract class HelperMethods {
return null; return null;
} }*/
/** /**
* Check that the given license id is in CKAN * Check that the given license id is in CKAN
@ -229,20 +259,29 @@ public abstract class HelperMethods {
* @param csvFile * @param csvFile
* @return * @return
*/ */
public static FileContainer uploadExternalFile(FolderContainer resourceFormatFolder, String resourceToAttachName, String description, File csvFile) { public static ExternalFile uploadExternalFile(WorkspaceFolder resourceFormatFolder, String resourceToAttachName, String description, File csvFile) {
try { try {
try (InputStream is= new FileInputStream(csvFile)) {
FileContainer fileContainer = resourceFormatFolder.uploadFile(is , resourceToAttachName, description);
return fileContainer;
}
} catch (StorageHubException she) { WorkspaceItem existsFile = resourceFormatFolder.find(resourceToAttachName);
logger.error("Failed to upload the file into the workspace shared folder for " + resourceToAttachName, she);
} catch (Exception e) { if(existsFile == null)
return resourceFormatFolder.createExternalFileItem(resourceToAttachName, description, CSV_MIME, csvFile);
else{
InputStream targetStream = new FileInputStream(csvFile);
existsFile.updateItem(targetStream);
return (ExternalFile)existsFile;
}
} catch (InsufficientPrivilegesException | ItemAlreadyExistException
| InternalErrorException e) {
logger.error("Failed to upload the file into the workspace shared folder for " + resourceToAttachName, e);
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block // TODO Auto-generated catch block
e.printStackTrace(); e.printStackTrace();
} } catch (ItemNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null; return null;
} }
@ -253,10 +292,9 @@ public abstract class HelperMethods {
* @param subPath * @param subPath
* @return null if an error occurred * @return null if an error occurred
*/ */
public static FolderContainer createOrGetSubFoldersByPath(FolderContainer folder, String subPath){ public static WorkspaceFolder createOrGetSubFoldersByPath(WorkspaceFolder folder, String subPath){
FolderContainer parentFolder = folder; WorkspaceFolder parentFolder = folder;
if(folder == null) if(folder == null)
throw new IllegalArgumentException("Root folder is null!"); throw new IllegalArgumentException("Root folder is null!");
@ -275,9 +313,8 @@ public abstract class HelperMethods {
String[] splittedPaths = subPath.split(PATH_SEPARATOR); String[] splittedPaths = subPath.split(PATH_SEPARATOR);
for (String path : splittedPaths) { for (String path : splittedPaths) {
WorkspaceFolder createdFolder = getFolderOrCreate(parentFolder, path, "");
FolderContainer createdFolder = getFolderOrCreate(parentFolder, path, ""); logger.debug("Created subfolder with path " + createdFolder.getPath());
logger.debug("Created subfolder with path " + createdFolder.get().getPath());
parentFolder = createdFolder; parentFolder = createdFolder;
} }
@ -293,18 +330,23 @@ public abstract class HelperMethods {
* Get a folder within the catalogue folder or create it if it doesn't exist. * Get a folder within the catalogue folder or create it if it doesn't exist.
* @return * @return
*/ */
public static FolderContainer getFolderOrCreate(FolderContainer folder, String relativePath, String descriptionFolder){ public static WorkspaceFolder getFolderOrCreate(WorkspaceFolder folder, String relativePath, String descriptionFolder){
WorkspaceFolder result = null;
FolderContainer result = null;
try { try {
result = folder.openByRelativePath(relativePath).asFolder(); WorkspaceItem foundFolder = folder.find(relativePath);
if(foundFolder != null && foundFolder.isFolder())
result = (WorkspaceFolder)foundFolder;
if(result != null)
logger.debug("Folder found with name " + result.getName() + ", it has id " + result.getId());
else
throw new Exception("There is no folder with name " + relativePath + " under folder " + folder.getName());
} catch (Exception e) { } catch (Exception e) {
logger.debug("Probably the folder doesn't exist"); logger.debug("Probably the folder doesn't exist");
try{ try{
result = folder.newFolder(relativePath, descriptionFolder); result = folder.createFolder(relativePath, descriptionFolder);
} catch (StorageHubException se) { } catch (InsufficientPrivilegesException | InternalErrorException | ItemAlreadyExistException e2) {
logger.error("Failed to get or generate this folder", se); logger.error("Failed to get or generate this folder", e2);
} }
} }
return result; return result;
@ -476,56 +518,56 @@ public abstract class HelperMethods {
} }
/** // /**
* Return a map for converting a key to a namespace:key format by reading a generic resource. // * Return a map for converting a key to a namespace:key format by reading a generic resource.
* @return a map // * @return a map
*/ // */
public static Map<String, String> getFieldToFieldNameSpaceMapping(String resourceName){ // public static Map<String, String> getFieldToFieldNameSpaceMapping(String resourceName){
Map<String, String> toReturn = new HashMap<String, String>(); // Map<String, String> toReturn = new HashMap<String, String>();
//
// check if data are in cache // // check if data are in cache
if(namespacesCache.get(resourceName) != null){ // if(namespacesCache.get(resourceName) != null){
return namespacesCache.get(resourceName); // return namespacesCache.get(resourceName);
} // }
else{ // else{
try { // try {
Query q = new QueryBox("for $profile in collection('/db/Profiles/GenericResource')//Resource " + // Query q = new QueryBox("for $profile in collection('/db/Profiles/GenericResource')//Resource " +
"where $profile/Profile/SecondaryType/string() eq '"+ "ApplicationProfile" + "' and $profile/Profile/Name/string() " + // "where $profile/Profile/SecondaryType/string() eq '"+ "ApplicationProfile" + "' and $profile/Profile/Name/string() " +
" eq '" + resourceName + "'" + // " eq '" + resourceName + "'" +
"return $profile"); // "return $profile");
//
DiscoveryClient<String> client = client(); // DiscoveryClient<String> client = client();
List<String> appProfile = client.submit(q); // List<String> appProfile = client.submit(q);
//
if (appProfile == null || appProfile.size() == 0) // if (appProfile == null || appProfile.size() == 0)
throw new Exception("Your applicationProfile is not registered in the infrastructure"); // throw new Exception("Your applicationProfile is not registered in the infrastructure");
else { // else {
//
String elem = appProfile.get(0); // String elem = appProfile.get(0);
DocumentBuilder docBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); // DocumentBuilder docBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
Node node = docBuilder.parse(new InputSource(new StringReader(elem))).getDocumentElement(); // Node node = docBuilder.parse(new InputSource(new StringReader(elem))).getDocumentElement();
XPathHelper helper = new XPathHelper(node); // XPathHelper helper = new XPathHelper(node);
//
NodeList nodeListKeys = helper.evaluateForNodes("//originalKey"); // NodeList nodeListKeys = helper.evaluateForNodes("//originalKey");
NodeList nodeListModifiedKeys = helper.evaluateForNodes("//modifiedKey"); // NodeList nodeListModifiedKeys = helper.evaluateForNodes("//modifiedKey");
int sizeKeys = nodeListKeys != null ? nodeListKeys.getLength() : 0; // int sizeKeys = nodeListKeys != null ? nodeListKeys.getLength() : 0;
int sizeKeysModifed = nodeListModifiedKeys != null ? nodeListModifiedKeys.getLength() : 0; // int sizeKeysModifed = nodeListModifiedKeys != null ? nodeListModifiedKeys.getLength() : 0;
if(sizeKeys != sizeKeysModifed) // if(sizeKeys != sizeKeysModifed)
throw new Exception("Malformed XML"); // throw new Exception("Malformed XML");
logger.debug("Size is " + sizeKeys); // logger.debug("Size is " + sizeKeys);
for (int i = 0; i < sizeKeys; i++) { // for (int i = 0; i < sizeKeys; i++) {
toReturn.put(nodeListKeys.item(i).getTextContent(), nodeListModifiedKeys.item(i).getTextContent()); // toReturn.put(nodeListKeys.item(i).getTextContent(), nodeListModifiedKeys.item(i).getTextContent());
} // }
} // }
logger.debug("Map is " + toReturn); // logger.debug("Map is " + toReturn);
namespacesCache.insert(resourceName, toReturn); // namespacesCache.insert(resourceName, toReturn);
return toReturn; // return toReturn;
} catch (Exception e) { // } catch (Exception e) {
logger.error("Error while trying to fetch applicationProfile profile from the infrastructure", e); // logger.error("Error while trying to fetch applicationProfile profile from the infrastructure", e);
return null; // return null;
} // }
} // }
} // }
/** /**
* Replace the extras' keys if needed * Replace the extras' keys if needed
@ -548,18 +590,16 @@ public abstract class HelperMethods {
if(namespaces.containsKey(entry.getKey())){ if(namespaces.containsKey(entry.getKey())){
usedKey = namespaces.get(entry.getKey()); usedKey = namespaces.get(entry.getKey());
} else{ }
else{
usedKey = entry.getKey(); usedKey = entry.getKey();
} }
if(isSourceRecord) { if(isSourceRecord)
usedKey = usedKey.replace("GRSF", "").trim(); usedKey = usedKey.replace("GRSF", "").trim();
}
// When the replaced key is none the metadata field must be removed toReturn.put(usedKey, entry.getValue());
if(usedKey.compareTo(NONE_KEY)!=0) {
toReturn.put(usedKey, entry.getValue());
}
} }
return toReturn; return toReturn;

View File

@ -8,8 +8,8 @@ import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.TimeSeriesBean;
import org.gcube.datacatalogue.common.Constants; import org.gcube.datacatalogue.common.AAA_PORTED.Constants;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;

View File

@ -1,5 +1,6 @@
package org.gcube.data_catalogue.grsf_publish_ws.utils.csv; package org.gcube.data_catalogue.grsf_publish_ws.utils.csv;
import java.beans.IntrospectionException;
import java.beans.PropertyDescriptor; import java.beans.PropertyDescriptor;
import java.io.File; import java.io.File;
import java.lang.reflect.Field; import java.lang.reflect.Field;
@ -7,20 +8,27 @@ import java.lang.reflect.InvocationTargetException;
import java.util.List; import java.util.List;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider; import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.homelibrary.home.HomeLibrary;
import org.gcube.common.homelibrary.home.exceptions.HomeNotFoundException;
import org.gcube.common.homelibrary.home.exceptions.InternalErrorException;
import org.gcube.common.homelibrary.home.exceptions.UserNotFoundException;
import org.gcube.common.homelibrary.home.workspace.Workspace;
import org.gcube.common.homelibrary.home.workspace.WorkspaceFolder;
import org.gcube.common.homelibrary.home.workspace.WorkspaceSharedFolder;
import org.gcube.common.homelibrary.home.workspace.catalogue.WorkspaceCatalogue;
import org.gcube.common.homelibrary.home.workspace.exceptions.ItemNotFoundException;
import org.gcube.common.homelibrary.home.workspace.exceptions.WorkspaceFolderNotFoundException;
import org.gcube.common.homelibrary.home.workspace.folder.items.ExternalFile;
import org.gcube.common.scope.api.ScopeProvider; import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.storagehub.client.dsl.FileContainer; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Common;
import org.gcube.common.storagehub.client.dsl.FolderContainer; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.CustomField;
import org.gcube.common.storagehub.client.dsl.StorageHubClient; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
import org.gcube.common.storagehub.model.exceptions.StorageHubException; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.TimeSeries;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.TimeSeries;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.Common;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods; import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue; import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.common.caches.CacheImpl; import org.gcube.datacatalogue.common.AAA_UNNEEDED.CacheImpl;
import org.gcube.datacatalogue.common.caches.CacheInterface; import org.gcube.datacatalogue.common.AAA_UNNEEDED.CacheInterface;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import eu.trentorise.opendata.jackan.model.CkanResourceBase; import eu.trentorise.opendata.jackan.model.CkanResourceBase;
@ -34,15 +42,13 @@ public class ManageTimeSeriesThread extends Thread{
private static final String PATH_SEPARATOR = "/"; private static final String PATH_SEPARATOR = "/";
private static final String CATALOGUE_FOLDER = ".catalogue";
// Logger // Logger
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(ManageTimeSeriesThread.class); private static final org.slf4j.Logger logger = LoggerFactory.getLogger(ManageTimeSeriesThread.class);
// try to attach the source at most CHANCES times .. // try to attach the source at most CHANCES times ..
private static final int CHANCES = 10; private static final int CHANCES = 10;
private static CacheInterface<String, FolderContainer> vreFolderCache = new CacheImpl<String, FolderContainer>(1000 * 60 * 60 * 24); private static CacheInterface<String, WorkspaceCatalogue> vreFolderCache = new CacheImpl<String, WorkspaceCatalogue>(1000 * 60 * 60 * 24);
private static final int MAX_NAME_CSV_FILE_WITHOUT_MEASURE = 50; private static final int MAX_NAME_CSV_FILE_WITHOUT_MEASURE = 50;
@ -90,7 +96,17 @@ public class ManageTimeSeriesThread extends Thread{
logger.error("Error was " + e.getMessage()); logger.error("Error was " + e.getMessage());
} catch (InvocationTargetException e) { } catch (InvocationTargetException e) {
logger.error("Error was " + e.getMessage()); logger.error("Error was " + e.getMessage());
} catch (StorageHubException e) { } catch (WorkspaceFolderNotFoundException e) {
logger.error("Error was " + e.getMessage());
} catch (ItemNotFoundException e) {
logger.error("Error was " + e.getMessage());
} catch (IntrospectionException e) {
logger.error("Error was " + e.getMessage());
} catch (InternalErrorException e) {
logger.error("Error was " + e.getMessage());
} catch (HomeNotFoundException e) {
logger.error("Error was " + e.getMessage());
} catch (UserNotFoundException e) {
logger.error("Error was " + e.getMessage()); logger.error("Error was " + e.getMessage());
} catch (Exception e) { } catch (Exception e) {
logger.error("Error was " + e.getMessage()); logger.error("Error was " + e.getMessage());
@ -114,26 +130,17 @@ public class ManageTimeSeriesThread extends Thread{
if(record == null) if(record == null)
throw new IllegalArgumentException("The given record is null!!"); throw new IllegalArgumentException("The given record is null!!");
StorageHubClient shClient = new StorageHubClient();
String token = SecurityTokenProvider.instance.get(); String token = SecurityTokenProvider.instance.get();
FolderContainer catalogueFolder = null; WorkspaceCatalogue catalogueFolder = null;
//WorkspaceCatalogue catalogueFolder = null;
if((catalogueFolder = vreFolderCache.get(token)) == null){ if((catalogueFolder = vreFolderCache.get(token)) == null){
//Workspace ws = HomeLibrary.getHomeManagerFactory().getHomeManager().getHome().getWorkspace(); Workspace ws = HomeLibrary.getHomeManagerFactory().getHomeManager().getHome().getWorkspace();
FolderContainer vreFolder = shClient.openVREFolder(); WorkspaceSharedFolder vreFolder = ws.getVREFolderByScope(ScopeProvider.instance.get());
try { catalogueFolder = vreFolder.getVRECatalogue();
catalogueFolder = vreFolder.openByRelativePath(CATALOGUE_FOLDER).asFolder();
}catch (StorageHubException e) {
catalogueFolder = vreFolder.newHiddenFolder(CATALOGUE_FOLDER, "catalogue folder");
}
vreFolderCache.insert(token, catalogueFolder); vreFolderCache.insert(token, catalogueFolder);
} }
logger.debug("Catalogue folder in vre has path " + catalogueFolder.get().getPath()); logger.debug("Catalogue folder in vre has path " + catalogueFolder.getPath());
// the structure under the .catalogue will be as follows: // the structure under the .catalogue will be as follows:
// .catalogue: // .catalogue:
@ -160,7 +167,7 @@ public class ManageTimeSeriesThread extends Thread{
// the whole path of the directory is going to be... // the whole path of the directory is going to be...
String csvDirectoryForThisProduct = recordTypeFolderName + PATH_SEPARATOR + firstLetter + PATH_SEPARATOR + replaceIllegalChars(uuidKB, "_") + PATH_SEPARATOR + CSVUtils.CSV_EXTENSION.replace(".", ""); String csvDirectoryForThisProduct = recordTypeFolderName + PATH_SEPARATOR + firstLetter + PATH_SEPARATOR + replaceIllegalChars(uuidKB, "_") + PATH_SEPARATOR + CSVUtils.CSV_EXTENSION.replace(".", "");
logger.debug("The path under which the time series are going to be saved is " + csvDirectoryForThisProduct); logger.debug("The path under which the time series are going to be saved is " + csvDirectoryForThisProduct);
FolderContainer csvFolder = HelperMethods.createOrGetSubFoldersByPath(catalogueFolder, csvDirectoryForThisProduct); WorkspaceFolder csvFolder = HelperMethods.createOrGetSubFoldersByPath(catalogueFolder, csvDirectoryForThisProduct);
if(csvFolder == null) if(csvFolder == null)
logger.error("Failed to create directory where csv files will be deployed in the workspace!!"); logger.error("Failed to create directory where csv files will be deployed in the workspace!!");
@ -185,7 +192,7 @@ public class ManageTimeSeriesThread extends Thread{
String resourceToAttachOnCkanDescription = productName; String resourceToAttachOnCkanDescription = productName;
CkanResourceBase ckanResource = null; CkanResourceBase ckanResource = null;
FileContainer createdFileOnWorkspace = null; ExternalFile createdFileOnWorkspace = null;
String[] relevantSources = new String[1]; String[] relevantSources = new String[1];
File csvFile = CSVUtils.listToCSV(asList, relevantSources); File csvFile = CSVUtils.listToCSV(asList, relevantSources);
if(csvFile != null){ if(csvFile != null){
@ -205,7 +212,7 @@ public class ManageTimeSeriesThread extends Thread{
+ customAnnotation.key() + CSVUtils.CSV_EXTENSION, resourceToAttachOnCkanDescription, csvFile); + customAnnotation.key() + CSVUtils.CSV_EXTENSION, resourceToAttachOnCkanDescription, csvFile);
if(createdFileOnWorkspace != null){ if(createdFileOnWorkspace != null){
String publicUrlToSetOnCkan = createdFileOnWorkspace.getPublicLink().toString(); String publicUrlToSetOnCkan = createdFileOnWorkspace.getPublicLink(true);
// wait for patching.. // wait for patching..
Thread.sleep(1500); Thread.sleep(1500);

View File

@ -1,94 +0,0 @@
<Resource version="0.4.x">
<ID>85480b75-62f5-4708-acd9-382b22cffc90</ID>
<Type>GenericResource</Type>
<Scopes>
<Scope>/d4science.research-infrastructures.eu/FARM/GRSF</Scope>
</Scopes>
<Profile>
<SecondaryType>ApplicationProfile</SecondaryType>
<Name>GRSF Fishery</Name>
<Description>GRSF mapping between fields and namespaces for Fishery
records</Description>
<Body>
<fields>
<field>
<originalKey>Data Owner</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Database Source</originalKey>
<modifiedKey>fishery_identity:Database Source</modifiedKey>
</field>
<field>
<originalKey>Short Name</originalKey>
<modifiedKey>fishery_identity:Short Name</modifiedKey>
</field>
<field>
<originalKey>Catch</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Landing</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Species</originalKey>
<modifiedKey>fishery_identity:Species</modifiedKey>
</field>
<field>
<originalKey>Similar GRSF Record</originalKey>
<modifiedKey>fishery_identity:Similar GRSF Record</modifiedKey>
</field>
<field>
<originalKey>Management Body/Authority</originalKey>
<modifiedKey>fishery_identity:Management Body/Authority</modifiedKey>
</field>
<field>
<originalKey>Connected Record</originalKey>
<modifiedKey>fishery_identity:Connected Record</modifiedKey>
</field>
<field>
<originalKey>Annotation</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>GRSF Fishery Name</originalKey>
<modifiedKey>fishery_identity:GRSF Fishery Name</modifiedKey>
</field>
<field>
<originalKey>GRSF Semantic Identifier</originalKey>
<modifiedKey>fishery_identity:GRSF Semantic Identifier
</modifiedKey>
</field>
<field>
<originalKey>Traceability Flag</originalKey>
<modifiedKey>fishery_identity:Traceability Flag</modifiedKey>
</field>
<field>
<originalKey>Fishing Area</originalKey>
<modifiedKey>fishery_identity:Fishing Area</modifiedKey>
</field>
<field>
<originalKey>Jurisdiction Area</originalKey>
<modifiedKey>fishery_identity:Jurisdiction Area</modifiedKey>
</field>
<field>
<originalKey>Resources Exploited</originalKey>
<modifiedKey>fishery_identity:Resources Exploited</modifiedKey>
</field>
<field>
<originalKey>Flag State</originalKey>
<modifiedKey>fishery_identity:Flag State</modifiedKey>
</field>
<field>
<originalKey>Fishing Gear</originalKey>
<modifiedKey>fishery_identity:Fishing Gear</modifiedKey>
</field>
<field>
<originalKey>GRSF Type</originalKey>
<modifiedKey>fishery_identity:GRSF Type</modifiedKey>
</field>
</fields>
</Body>
</Profile>
</Resource>

View File

@ -1,91 +0,0 @@
<Resource version="0.4.x">
<ID>c3326373-c620-45e8-a7e3-a25a1a2a970b</ID>
<Type>GenericResource</Type>
<Scopes>
<Scope>/d4science.research-infrastructures.eu/FARM/GRSF_Admin</Scope>
<Scope>/d4science.research-infrastructures.eu/FARM</Scope>
</Scopes>
<Profile>
<SecondaryType>ApplicationProfile</SecondaryType>
<Name>GRSF Fishery</Name>
<Description>GRSF mapping between fields and namespaces for Fishery
records</Description>
<Body>
<fields>
<field>
<originalKey>Data Owner</originalKey>
<modifiedKey>fishery_data:Data Owner</modifiedKey>
</field>
<field>
<originalKey>Database Source</originalKey>
<modifiedKey>fishery_identity:Database Source</modifiedKey>
</field>
<field>
<originalKey>Short Name</originalKey>
<modifiedKey>fishery_identity:Short Name</modifiedKey>
</field>
<field>
<originalKey>Catch</originalKey>
<modifiedKey>fishery_data:Catch</modifiedKey>
</field>
<field>
<originalKey>Landing</originalKey>
<modifiedKey>fishery_data:Landing</modifiedKey>
</field>
<field>
<originalKey>Species</originalKey>
<modifiedKey>fishery_identity:Species</modifiedKey>
</field>
<field>
<originalKey>Similar GRSF Record</originalKey>
<modifiedKey>fishery_identity:Similar GRSF Record</modifiedKey>
</field>
<field>
<originalKey>Management Body/Authority</originalKey>
<modifiedKey>fishery_identity:Management Body/Authority</modifiedKey>
</field>
<field>
<originalKey>Connected Record</originalKey>
<modifiedKey>fishery_identity:Connected Record</modifiedKey>
</field>
<field>
<originalKey>GRSF Fishery Name</originalKey>
<modifiedKey>fishery_identity:GRSF Fishery Name</modifiedKey>
</field>
<field>
<originalKey>GRSF Semantic Identifier</originalKey>
<modifiedKey>fishery_identity:GRSF Semantic Identifier
</modifiedKey>
</field>
<field>
<originalKey>Traceability Flag</originalKey>
<modifiedKey>fishery_identity:Traceability Flag</modifiedKey>
</field>
<field>
<originalKey>Fishing Area</originalKey>
<modifiedKey>fishery_identity:Fishing Area</modifiedKey>
</field>
<field>
<originalKey>Jurisdiction Area</originalKey>
<modifiedKey>fishery_identity:Jurisdiction Area</modifiedKey>
</field>
<field>
<originalKey>Resources Exploited</originalKey>
<modifiedKey>fishery_identity:Resources Exploited</modifiedKey>
</field>
<field>
<originalKey>Flag State</originalKey>
<modifiedKey>fishery_identity:Flag State</modifiedKey>
</field>
<field>
<originalKey>Fishing Gear</originalKey>
<modifiedKey>fishery_identity:Fishing Gear</modifiedKey>
</field>
<field>
<originalKey>GRSF Type</originalKey>
<modifiedKey>fishery_identity:GRSF Type</modifiedKey>
</field>
</fields>
</Body>
</Profile>
</Resource>

View File

@ -1,118 +0,0 @@
<Resource version="0.4.x">
<ID>512cca02-e178-420c-a766-b47171e154e2</ID>
<Type>GenericResource</Type>
<Scopes>
<Scope>/d4science.research-infrastructures.eu/FARM/GRSF</Scope>
</Scopes>
<Profile>
<SecondaryType>ApplicationProfile</SecondaryType>
<Name>GRSF Stock</Name>
<Description>GRSF mapping between fields and namespaces for Stock
records
</Description>
<Body>
<fields>
<field>
<originalKey>Data Owner</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Database Source</originalKey>
<modifiedKey>stock_identity:Database Source</modifiedKey>
</field>
<field>
<originalKey>Short Name</originalKey>
<modifiedKey>stock_identity:Short Name</modifiedKey>
</field>
<field>
<originalKey>Catch</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Landing</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Species</originalKey>
<modifiedKey>stock_identity:Species</modifiedKey>
</field>
<field>
<originalKey>Similar GRSF Record</originalKey>
<modifiedKey>stock_identity:Similar GRSF Record</modifiedKey>
</field>
<field>
<originalKey>Management Body/Authority</originalKey>
<modifiedKey>stock_identity:Management Body/Authority</modifiedKey>
</field>
<field>
<originalKey>Connected Record</originalKey>
<modifiedKey>stock_identity:Connected Record</modifiedKey>
</field>
<field>
<originalKey>Annotation</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>GRSF Stock Name</originalKey>
<modifiedKey>stock_identity:GRSF Stock Name</modifiedKey>
</field>
<field>
<originalKey>GRSF Semantic Identifier</originalKey>
<modifiedKey>stock_identity:GRSF Semantic Identifier</modifiedKey>
</field>
<field>
<originalKey>Assessment Area</originalKey>
<modifiedKey>stock_identity:Assessment Area</modifiedKey>
</field>
<field>
<originalKey>Exploiting Fishery</originalKey>
<modifiedKey>stock_identity:Exploiting Fishery</modifiedKey>
</field>
<field>
<originalKey>Assessment Method</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Abundance Level (FIRMS Standard)</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Abundance Level</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Biomass</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Fishing Pressure (FIRMS Standard)</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Fishing Pressure</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>State and Trend</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>FAO Stock Status Category</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Scientific Advice</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>Assessor</originalKey>
<modifiedKey>none:none</modifiedKey>
</field>
<field>
<originalKey>GRSF Type</originalKey>
<modifiedKey>stock_identity:GRSF Type</modifiedKey>
</field>
</fields>
</Body>
</Profile>
</Resource>

View File

@ -1,115 +0,0 @@
<Resource version="0.4.x">
<ID>c9cad3f8-5773-4ca7-95a5-4692db5eae2e</ID>
<Type>GenericResource</Type>
<Scopes>
<Scope>/d4science.research-infrastructures.eu/FARM</Scope>
<Scope>/d4science.research-infrastructures.eu/FARM/GRSF_Admin</Scope>
</Scopes>
<Profile>
<SecondaryType>ApplicationProfile</SecondaryType>
<Name>GRSF Stock</Name>
<Description>GRSF mapping between fields and namespaces for Stock
records</Description>
<Body>
<fields>
<field>
<originalKey>Data Owner</originalKey>
<modifiedKey>stock_data:Data Owner</modifiedKey>
</field>
<field>
<originalKey>Database Source</originalKey>
<modifiedKey>stock_identity:Database Source</modifiedKey>
</field>
<field>
<originalKey>Short Name</originalKey>
<modifiedKey>stock_identity:Short Name</modifiedKey>
</field>
<field>
<originalKey>Catch</originalKey>
<modifiedKey>stock_data:Catch</modifiedKey>
</field>
<field>
<originalKey>Landing</originalKey>
<modifiedKey>stock_data:Landing</modifiedKey>
</field>
<field>
<originalKey>Species</originalKey>
<modifiedKey>stock_identity:Species</modifiedKey>
</field>
<field>
<originalKey>Similar GRSF Record</originalKey>
<modifiedKey>stock_identity:Similar GRSF Record</modifiedKey>
</field>
<field>
<originalKey>Management Body/Authority</originalKey>
<modifiedKey>stock_identity:Management Body/Authority</modifiedKey>
</field>
<field>
<originalKey>Connected Record</originalKey>
<modifiedKey>stock_identity:Connected Record</modifiedKey>
</field>
<field>
<originalKey>GRSF Stock Name</originalKey>
<modifiedKey>stock_identity:GRSF Stock Name</modifiedKey>
</field>
<field>
<originalKey>GRSF Semantic Identifier</originalKey>
<modifiedKey>stock_identity:GRSF Semantic Identifier</modifiedKey>
</field>
<field>
<originalKey>Assessment Area</originalKey>
<modifiedKey>stock_identity:Assessment Area</modifiedKey>
</field>
<field>
<originalKey>Exploiting Fishery</originalKey>
<modifiedKey>stock_identity:Exploiting Fishery</modifiedKey>
</field>
<field>
<originalKey>Assessment Method</originalKey>
<modifiedKey>stock_data:Assessment Method</modifiedKey>
</field>
<field>
<originalKey>Abundance Level (FIRMS Standard)</originalKey>
<modifiedKey>stock_data:Abundance Level (FIRMS Standard)
</modifiedKey>
</field>
<field>
<originalKey>Abundance Level</originalKey>
<modifiedKey>stock_data:Abundance Level</modifiedKey>
</field>
<field>
<originalKey>Biomass</originalKey>
<modifiedKey>stock_data:Biomass</modifiedKey>
</field>
<field>
<originalKey>Fishing Pressure (FIRMS Standard)</originalKey>
<modifiedKey>stock_data:Fishing Pressure (FIRMS Standard)</modifiedKey>
</field>
<field>
<originalKey>Fishing Pressure</originalKey>
<modifiedKey>stock_data:Fishing Pressure</modifiedKey>
</field>
<field>
<originalKey>State and Trend</originalKey>
<modifiedKey>stock_data:State and Trend</modifiedKey>
</field>
<field>
<originalKey>FAO Stock Status Category</originalKey>
<modifiedKey>stock_data:FAO Stock Status Category</modifiedKey>
</field>
<field>
<originalKey>Scientific Advice</originalKey>
<modifiedKey>stock_data:Scientific Advice</modifiedKey>
</field>
<field>
<originalKey>Assessor</originalKey>
<modifiedKey>stock_data:Assessor</modifiedKey>
</field>
<field>
<originalKey>GRSF Type</originalKey>
<modifiedKey>stock_identity:GRSF Type</modifiedKey>
</field>
</fields>
</Body>
</Profile>
</Resource>

View File

@ -1,40 +0,0 @@
<Resource version="0.4.x">
<ID>7ee9c6e9-ff73-4428-88e4-185aeb4b3742</ID>
<Type>GenericResource</Type>
<Scopes>
<Scope>/d4science.research-infrastructures.eu/FARM/GRSF_Pre</Scope>
<Scope>/d4science.research-infrastructures.eu/FARM/GRSF_Admin</Scope>
</Scopes>
<Profile>
<SecondaryType>ApplicationProfile</SecondaryType>
<Name>GRSFManageEntries</Name>
<Description>A list of extras metadata to be looked up and prompted by the GRSF Manage widget. The body reports the key values.</Description>
<Body>
fishery_identity:GRSF Type,
fishery_identity:Short Name,
fishery_identity:Database Source,
fishery_identity:GRSF Semantic Identifier,
fishery_data:Catch,
fishery_data:Landing,
stock_identity:GRSF Type,
stock_identity:Short Name,
stock_identity:Database Source,
stock_identity:GRSF Semantic Identifier,
stock_data:Catch,
stock_data:Landing
</Body>
</Profile>
</Resource>

View File

@ -1,7 +1,7 @@
<application mode='online'> <application mode='online'>
<name>GRSFPublisher</name> <name>GRSFPublisher</name>
<group>Data-Catalogue</group> <group>Data-Catalogue</group>
<version>1.13.0</version> <version>1.5.0</version>
<description>Data Catalogue Service</description> <description>Data Catalogue Service</description>
<local-persistence location='target' /> <local-persistence location='target' />
<exclude>/rest/</exclude> <exclude>/rest/</exclude>

View File

@ -33,12 +33,6 @@
<!-- <param-value>/gcube/devsec/devVRE</param-value> --> <!-- <param-value>/gcube/devsec/devVRE</param-value> -->
<param-value>/d4science.research-infrastructures.eu/FARM/GRSF</param-value> <param-value>/d4science.research-infrastructures.eu/FARM/GRSF</param-value>
</context-param> </context-param>
<context-param>
<description>Context of pending products under manage activities for prevalidation VRE</description>
<param-name>PreValidateVRE</param-name>
<!-- <param-value>/gcube/devNext/NextNext</param-value> -->
<param-value>/d4science.research-infrastructures.eu/FARM/GRSF_Pre</param-value>
</context-param>
<welcome-file-list> <welcome-file-list>
<welcome-file>index.jsp</welcome-file> <welcome-file>index.jsp</welcome-file>
</welcome-file-list> </welcome-file-list>

View File

@ -8,15 +8,15 @@ import javax.ws.rs.core.Application;
import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.Resource; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord; import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.services.GrsfPublisherFisheryService; import org.gcube.data_catalogue.grsf_publish_ws.services.GrsfPublisherFisheryService;
import org.gcube.data_catalogue.grsf_publish_ws.services.GrsfPublisherStockService; import org.gcube.data_catalogue.grsf_publish_ws.services.GrsfPublisherStockService;
import org.gcube.datacatalogue.common.enums.Fishery_Type; import org.gcube.datacatalogue.common.AAA_PORTED.Fishery_Type;
import org.gcube.datacatalogue.common.enums.Sources; import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.gcube.datacatalogue.common.enums.Status; import org.gcube.datacatalogue.common.AAA_PORTED.Status;
import org.gcube.datacatalogue.common.enums.Stock_Type; import org.gcube.datacatalogue.common.AAA_PORTED.Stock_Type;
import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.test.JerseyTest; import org.glassfish.jersey.test.JerseyTest;
import org.glassfish.jersey.test.TestProperties; import org.glassfish.jersey.test.TestProperties;

View File

@ -13,22 +13,33 @@ import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.CustomField; import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Group; import org.gcube.common.homelibrary.home.HomeLibrary;
import org.gcube.data_catalogue.grsf_publish_ws.custom_annotations.Tag; import org.gcube.common.homelibrary.home.exceptions.InternalErrorException;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.Resource; import org.gcube.common.homelibrary.home.workspace.Workspace;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.others.TimeSeriesBean; import org.gcube.common.homelibrary.home.workspace.WorkspaceFolder;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.FisheryRecord; import org.gcube.common.homelibrary.home.workspace.WorkspaceItem;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord; import org.gcube.common.homelibrary.home.workspace.WorkspaceSharedFolder;
import org.gcube.common.homelibrary.home.workspace.catalogue.WorkspaceCatalogue;
import org.gcube.common.homelibrary.home.workspace.exceptions.InsufficientPrivilegesException;
import org.gcube.common.homelibrary.home.workspace.exceptions.ItemAlreadyExistException;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.CustomField;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.FisheryRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Group;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Resource;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.StockRecord;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.Tag;
import org.gcube.data_catalogue.grsf_publish_ws.AAA_PORTED.TimeSeriesBean;
import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods; import org.gcube.data_catalogue.grsf_publish_ws.utils.HelperMethods;
import org.gcube.data_catalogue.grsf_publish_ws.utils.csv.CSVUtils; import org.gcube.data_catalogue.grsf_publish_ws.utils.csv.CSVUtils;
import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.AssociationToGroupThread; import org.gcube.data_catalogue.grsf_publish_ws.utils.threads.AssociationToGroupThread;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue; import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogue;
import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueFactory; import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueFactory;
import org.gcube.datacatalogue.common.enums.Abundance_Level; import org.gcube.datacatalogue.common.AAA_PORTED.Abundance_Level;
import org.gcube.datacatalogue.common.enums.Fishery_Type; import org.gcube.datacatalogue.common.AAA_PORTED.Fishery_Type;
import org.gcube.datacatalogue.common.enums.Sources; import org.gcube.datacatalogue.common.AAA_PORTED.Sources;
import org.gcube.datacatalogue.common.enums.Status; import org.gcube.datacatalogue.common.AAA_PORTED.Status;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
@ -227,8 +238,129 @@ public class JTests {
instance.uploadResourceFile(csvFile, datasetName, instance.getApiKeyFromUsername("costantino.perciante"), "random_name.csv", null, null, null); instance.uploadResourceFile(csvFile, datasetName, instance.getApiKeyFromUsername("costantino.perciante"), "random_name.csv", null, null, null);
} }
// @Test
public void sharedVREFolderWriteTest() throws Exception{
String token = "";
String context = "/gcube/devNext/NextNext";
ScopeProvider.instance.set(context);
SecurityTokenProvider.instance.set(token);
Workspace ws = HomeLibrary.getHomeManagerFactory().getHomeManager().getHome().getWorkspace();
// Get a VRE folder by scope
WorkspaceSharedFolder vreFolder = ws.getVREFolderByScope(context);
//Get the VRE Folder catalogue
WorkspaceCatalogue catalogueFolder = vreFolder.getVRECatalogue();
logger.debug("Catalogue folder retrieved " + catalogueFolder.getName());
// WorkspaceItem stockFolder = catalogueFolder.find("stock");
// vreFolder.removeChild(stockFolder);
/**
* Test is
* .catalogue:
* -test
* - a
* -aproductwiththisname
* - csv
* - testfile.csv
*/
String allSubPath = "/test/a/aproductwiththisname/";
//WorkspaceFolder lastFolder = createGetSubFoldersByPath(catalogueFolder, allSubPath);
// WorkspaceFolder recordFolder = (WorkspaceFolder)getFolderOrCreate(catalogueFolder, "test", "");
// String firstLetter = "a";
// WorkspaceFolder firstLetterFolder = (WorkspaceFolder)getFolderOrCreate(recordFolder, firstLetter, "");
// String folderPath = "aproductwiththisname";
// WorkspaceFolder productFolder = (WorkspaceFolder)getFolderOrCreate(firstLetterFolder, folderPath, "");
//logger.debug("Test folder created/get..its path is " + lastFolder.getPath());
// String ccsvUnderProductFolderName = productFolderName + "/" + "csv";
// WorkspaceFolder csvUnderProductFolder = (WorkspaceFolder)getFolderOrCreate(catalogueFolder, ccsvUnderProductFolderName, "");
//
// logger.debug("FOLDERS created " + csvUnderProductFolder.getPath());
// treeCheck(catalogueFolder);
}
public void treeCheck(WorkspaceFolder rootFolder) throws InternalErrorException{
List<WorkspaceItem> children = rootFolder.getChildren();
for (WorkspaceItem workspaceItem : children) {
if(workspaceItem.isFolder()){
logger.debug("children folder is " + workspaceItem.getName());
treeCheck((WorkspaceFolder)workspaceItem);
}
}
}
/**
* Create subfolders in cascade, returning the last created ones
* It could be also used for getting them if they already exists
* @param folder
* @param subPath
* @return
*/
private static WorkspaceFolder createGetSubFoldersByPath(WorkspaceFolder folder, String subPath){
String pathSeparator = "/";
WorkspaceFolder parentFolder = folder;
if(folder == null)
throw new IllegalArgumentException("Root folder is null!");
if(subPath == null || subPath.isEmpty())
throw new IllegalArgumentException("subPath is null/empty!");
try{
if(subPath.startsWith(pathSeparator))
subPath = subPath.replaceFirst(pathSeparator, "");
if(subPath.endsWith(subPath))
subPath = subPath.substring(0, subPath.length() - 1);
logger.debug("Splitting path " + subPath);
String[] splittedPaths = subPath.split(pathSeparator);
for (String path : splittedPaths) {
WorkspaceFolder createdFolder = getFolderOrCreate(parentFolder, path, "");
logger.debug("Created subfolder with path " + createdFolder.getPath());
parentFolder = createdFolder;
}
}catch(Exception e){
logger.error("Failed to create the subfolders by path " + subPath);
}
return parentFolder;
}
/**
* Get a folder within the catalogue folder or create it if it doesn't exist.
* @return
*/
private static WorkspaceFolder getFolderOrCreate(WorkspaceFolder folder, String relativePath, String descriptionFolder){
WorkspaceFolder result = null;
try {
if(folder.exists(relativePath) && folder.find(relativePath).isFolder())
result = (WorkspaceFolder) folder.find(relativePath);
if(result != null)
logger.debug("Folder found with name " + result.getName() + ", it has id " + result.getId());
else
throw new Exception("There is no folder with name " + relativePath + " under foler " + folder.getName());
} catch (Exception e) {
logger.debug("Probably the folder doesn't exist", e);
try{
result = folder.createFolder(relativePath, descriptionFolder);
} catch (InsufficientPrivilegesException | InternalErrorException | ItemAlreadyExistException e2) {
logger.error("Failed to get or generate this folder", e2);
}
}
return result;
}
//@Test //@Test
public void GRSFServiceUrl() throws Exception{ public void GRSFServiceUrl() throws Exception{
@ -246,39 +378,39 @@ public class JTests {
} }
//@Test // //@Test
public void testHierarchy() throws Exception{ // public void testHierarchy() throws Exception{
String name = "low-abundance"; // String name = "low-abundance";
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance("/gcube/devNext/NextNext"); // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance("/gcube/devNext/NextNext");
List<String> uniqueGroups = new ArrayList<String>(); // List<String> uniqueGroups = new ArrayList<String>();
uniqueGroups.add(name); // uniqueGroups.add(name);
uniqueGroups.add(name); // uniqueGroups.add(name);
AssociationToGroupThread.findHierarchy(uniqueGroups, catalogue, catalogue.getApiKeyFromUsername("costantino_perciante")); // AssociationToGroupThread.findHierarchy(uniqueGroups, catalogue, catalogue.getApiKeyFromUsername("costantino_perciante"));
logger.debug("Hierarchy is " + uniqueGroups); // logger.debug("Hierarchy is " + uniqueGroups);
} // }
//
//@Test // //@Test
public void testAssociationThread() throws Exception{ // public void testAssociationThread() throws Exception{
String name = "low-abundance"; // String name = "low-abundance";
DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance("/gcube/devNext/NextNext"); // DataCatalogue catalogue = HelperMethods.getDataCatalogueRunningInstance("/gcube/devNext/NextNext");
AssociationToGroupThread threadGroups = new AssociationToGroupThread(Arrays.asList(name), "another-test-test-please-ignore", "grsf", "costantino_perciante", catalogue, "apiKey"); // AssociationToGroupThread threadGroups = new AssociationToGroupThread(Arrays.asList(name), "another-test-test-please-ignore", "grsf", "costantino_perciante", catalogue, "apiKey");
threadGroups.start(); // threadGroups.start();
threadGroups.join(); // threadGroups.join();
logger.info("Thread stopped!"); // logger.info("Thread stopped!");
//
//
} // }
//
//@Test // //@Test
public void testCaches() throws Exception{ // public void testCaches(){
//
String context = "/gcube/devNext/NextNext"; // String context = "/gcube/devNext/NextNext";
String token = ""; // String token = "";
for (int i = 0; i < 1000; i++) { // for (int i = 0; i < 1000; i++) {
logger.debug(HelperMethods.getUserEmail(context, token)); // logger.debug(HelperMethods.getUserEmail(context, token));
} // }
//
} // }
//@Test //@Test
public void testMatch(){ public void testMatch(){

View File

@ -1,38 +0,0 @@
package org.gcube.data_catalogue.grsf_publish_ws;
import java.io.File;
import java.net.URL;
import org.gcube.data_catalogue.grsf_publish_ws.json.input.record.StockRecord;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
* @author Luca Frosini (ISTI - CNR)
*/
public class TestJson {

	// Bug fix: the logger was created with Test.class (org.junit.Test),
	// which mis-attributed all log output to the "org.junit.Test" logger.
	// It must reference the declaring class.
	private static final Logger logger = LoggerFactory.getLogger(TestJson.class);

	/**
	 * Resolves the test resources directory by locating
	 * {@code logback-test.xml} on the classpath and returning its parent
	 * directory.
	 *
	 * @return the directory containing the test resources
	 * @throws Exception if the resource is missing or its URL cannot be
	 *             converted to a URI
	 */
	public File getResourcesDirectory() throws Exception {
		URL logbackFileURL = TestJson.class.getClassLoader().getResource("logback-test.xml");
		if(logbackFileURL == null) {
			// Fail with a clear message instead of an opaque NullPointerException
			throw new IllegalStateException("logback-test.xml not found on the test classpath");
		}
		File logbackFile = new File(logbackFileURL.toURI());
		File resourcesDirectory = logbackFile.getParentFile();
		return resourcesDirectory;
	}

	/**
	 * Verifies that the sample GRSF stock JSON file can be deserialized
	 * both as a raw Jackson tree ({@link JsonNode}) and as a typed
	 * {@link StockRecord}.
	 *
	 * @throws Exception if the file cannot be read or deserialized
	 */
	@Test
	public void testJsonDeserialization() throws Exception {
		File jsonQueryFile = new File(getResourcesDirectory(), "70ae6895-7d3d-4f4a-86f9-bcb17d41bff6.json");
		ObjectMapper objectMapper = new ObjectMapper();
		JsonNode jsonNode = objectMapper.readTree(jsonQueryFile);
		logger.debug("{}", jsonNode);
		StockRecord sr = objectMapper.readValue(jsonQueryFile, StockRecord.class);
		logger.debug("{}", sr);
	}

}

View File

@ -1,34 +0,0 @@
{
"stock_name" : "European hake - Southern Adriatic",
"license_id" : "CC-BY-SA-4.0",
"version" : 1.0,
"database_sources" : [ {
"name" : "FAO SDG 14.4.1 questionnaire",
"description" : "FAO SDG 14.4.1 questionnaire",
"url" : "https://www.fao.org/sustainable-development-goals/indicators/14.4.1/en/"
} ],
"stock_uri" : "https://github.com/grsf/resource/sdg_14_4_1/stock/70ae6895-7d3d-4f4a-86f9-bcb17d41bff6",
"grsf_uuid" : "70ae6895-7d3d-4f4a-86f9-bcb17d41bff6",
"short_name" : "European hake - Southern Adriatic",
"description" : "European hake - Southern Adriatic",
"grsf_type" : "assessment unit",
"species" : [ "Code: HKE, Classification System: ASFIS, Scientific Name: Merluccius merluccius" ],
"assessment_area" : [ "Code: 18, System: gfcm, Name: Southern Adriatic " ],
"source_of_information" : [ {
"name" : "https://www.fao.org/sustainable-development-goals/indicators/14.4.1/en/sdg-alb-1",
"description" : "",
"url" : "https://www.fao.org/sustainable-development-goals/indicators/14.4.1/en/sdg-alb-1"
} ],
"data_owner" : [ "Albania" ],
"assessment_methods" : [ "The official stock assessment concludes 'Overfished' with respect to abundance reference points. [Rep. Year or Assessment ID: 2019, Ref. Year: 2018]" ],
"connections_indicator" : "not connected",
"similarities_indicator" : "without similar records",
"landings" : [ {
"value" : "872",
"unit" : "Tonnes",
"reference_year" : 2018,
"reporting_year_or_assessment_id" : "2020",
"data_owner" : "Albania"
} ],
"citation" : "citation TBD"
}

View File

@ -1,19 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE configuration>
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{0}: %msg%n</pattern>
</encoder>
</appender>
<logger name="org.gcube" level="INFO" />
<logger name="org.gcube.data_catalogue.grsf_publish_ws" level="TRACE" />
<root level="WARN">
<appender-ref ref="STDOUT" />
</root>
</configuration>