Compare commits
349 Commits
| Author | SHA1 | Date |
|---|---|---|
Michele Artini | 140e14e48d | |
Michele Artini | 180aedd918 | |
Michele Artini | 8e82868559 | |
Michele Artini | 1415976a0f | |
Michele Artini | 40db13d562 | |
Michele Artini | 550baf79e4 | |
Michele Artini | 3cbce1024c | |
Michele Artini | 760791578b | |
Michele Artini | 19bc482244 | |
Michele Artini | db18cb601c | |
Michele Artini | 4ea0d8e0f3 | |
Michele Artini | d750fc7180 | |
Michele Artini | 90effab182 | |
Michele Artini | e023f3774e | |
Michele Artini | 1975d26052 | |
Michele Artini | 6620f8463b | |
Michele Artini | 675b233470 | |
Michele Artini | d392f10e4b | |
Michele Artini | ed73d524d8 | |
Michele Artini | d2da1a1270 | |
Michele Artini | cbe02e457c | |
Michele Artini | 5a8e8f2ade | |
Michele Artini | 7c672594c1 | |
Michele Artini | 07cd4dda85 | |
Michele Artini | 4c3a1af2d2 | |
Michele Artini | 2887e1c9b1 | |
Michele Artini | b79677475b | |
Michele Artini | a623c7be39 | |
Michele Artini | a9458520e1 | |
Michele Artini | 90bb2effc9 | |
Michele Artini | 55ca95a109 | |
Michele Artini | 817864942d | |
Michele Artini | 383fc49908 | |
Michele Artini | e5a9ee0b21 | |
Michele Artini | fc750633b9 | |
Michele Artini | 3eed737bad | |
Michele Artini | e5901a0181 | |
Michele Artini | b030f1a747 | |
Michele Artini | e5ed620707 | |
Michele Artini | 8faf3ae71e | |
Michele Artini | d7c220e88d | |
Michele Artini | a88d0e59f6 | |
Michele Artini | a0e7425379 | |
Michele Artini | e4cbfe863b | |
Michele Artini | 3638a0ea84 | |
Michele Artini | 31e0ed6e1c | |
Michele Artini | e64da1e38d | |
Michele Artini | 94425e276d | |
Michele Artini | b86a249da2 | |
Michele Artini | 23995486be | |
Alessia Bardi | 9404779389 | |
Alessia Bardi | 185f1722ff | |
Michele Artini | 500218ceda | |
Michele Artini | 4c6134bc83 | |
Alessia Bardi | e3e08268f5 | |
Michele Artini | 7d4e8976e9 | |
Michele Artini | 2de5d6e7fb | |
Michele Artini | 05795a3604 | |
Michele Artini | 32f09af2a1 | |
Michele Artini | eeef1df437 | |
Michele Artini | b5ada60da3 | |
Michele Artini | b3e3d676ba | |
Michele Artini | a81f007dcb | |
Michele Artini | c5b23207b2 | |
Michele Artini | 89af989d1a | |
Michele Artini | 80924b651c | |
Claudio Atzori | eece9b04f8 | |
Claudio Atzori | 00fa25cefb | |
Michele Artini | 6179426ca6 | |
Michele Artini | 807d2e493b | |
Michele Artini | d5fd29c3d0 | |
Michele Artini | e87267e9e5 | |
Michele Artini | a98c799c8b | |
Michele Artini | 15f0d8cea7 | |
Michele Artini | 970eff3926 | |
Michele Artini | 1a4dcddbe3 | |
Michele Artini | cda62bd5e4 | |
Michele Artini | e618e2a733 | |
Michele Artini | d8834eb84f | |
Michele Artini | 096b229c5a | |
Michele Artini | 14e54a5140 | |
Michele Artini | 51cbe18abf | |
Michele Artini | a8f3bb6961 | |
Michele Artini | 043cb4497a | |
Michele Artini | 15118e2f18 | |
Michele Artini | dde8e1c564 | |
Michele Artini | 891b49a9d6 | |
Michele Artini | aebac32457 | |
Michele Artini | c40eeee1d1 | |
Michele Artini | 7d91da1099 | |
Michele Artini | 653e925cff | |
Michele Artini | 09606452d4 | |
Michele Artini | 31bc085093 | |
Michele Artini | 3b087ef7ed | |
Michele Artini | 0596a9666c | |
Michele Artini | 3a340bd737 | |
Michele Artini | 6f873b1e45 | |
Michele Artini | 60b7d82d47 | |
Michele Artini | 29af123e06 | |
Michele Artini | 95a522d8d0 | |
Michele Artini | 3ae2207d38 | |
Michele Artini | 7a7d2f08ac | |
Michele Artini | 18e553f3e7 | |
Michele Artini | 5b4597a4ae | |
Michele Artini | d9288c7657 | |
Michele Artini | 375d6c2513 | |
Michele Artini | 8ac49d1dab | |
Michele Artini | 340a577430 | |
Michele Artini | 835de9c1c6 | |
Michele Artini | b509e13560 | |
Michele Artini | de36ad03e6 | |
Michele Artini | 566e94d67b | |
Michele Artini | 81404e3ee9 | |
Michele Artini | 1d199f008a | |
Michele Artini | 174a0ea9f2 | |
Michele Artini | 71ba41a94c | |
Michele Artini | f071321b3d | |
Michele Artini | 5a874b2379 | |
Michele Artini | 0e6de52307 | |
Michele Artini | f3ac95b54f | |
Michele Artini | 6e8f461bec | |
Michele Artini | 98249be293 | |
Michele Artini | 840e83aada | |
Michele Artini | ce55185bcf | |
Michele Artini | 01cea6fe12 | |
Michele Artini | fa5b0ef7c5 | |
Michele Artini | 3edd995e9d | |
Michele Artini | 9e2638c9e4 | |
Michele Artini | c622c4cd53 | |
Michele Artini | 71d3ed7cee | |
Michele Artini | 9ef99f4785 | |
Michele Artini | e604d406bd | |
Michele Artini | 552d8ae566 | |
Michele Artini | f4c053c0e0 | |
Michele Artini | cb7f5eb52c | |
Michele Artini | cdc8084cb6 | |
Michele Artini | 827dec7a29 | |
Michele Artini | f8b3643408 | |
Miriam Baglioni | cfd40e15c1 | |
Miriam Baglioni | 2279f3df31 | |
Michele Artini | b64c5dadd7 | |
Michele Artini | 41aaa9a72b | |
Miriam Baglioni | 17c1ad7de1 | |
Miriam Baglioni | f025c47214 | |
Miriam Baglioni | 092927a933 | |
Michele Artini | 09437abee6 | |
Michele Artini | 740e6e68e4 | |
Miriam Baglioni | 74af40afd0 | |
Miriam Baglioni | 96d253cfb0 | |
Michele Artini | 48a6953af3 | |
Michele Artini | da011a9228 | |
Michele Artini | dfca500b31 | |
Michele Artini | 34d0780545 | |
Michele Artini | f74626b715 | |
Michele Artini | 785746a501 | |
Michele Artini | ca382e6b6c | |
Michele Artini | a04e719a57 | |
Michele Artini | 57d5496855 | |
Michele Artini | ecb5ffca5a | |
Michele Artini | fabcbaf511 | |
Miriam Baglioni | f65a204f43 | |
Michele Artini | 7dd9baf7d0 | |
Michele Artini | 3dc121045c | |
Michele Artini | ff313b5bd1 | |
Claudio Atzori | bea45412c3 | |
Sandro La Bruzzo | bd18376339 | |
Sandro La Bruzzo | f47fbe9de9 | |
Sandro La Bruzzo | c376ef9b6c | |
Miriam Baglioni | 36f858cd5b | |
Miriam Baglioni | 50b2468a13 | |
Michele Artini | 39d335439f | |
Michele Artini | 91d341c7be | |
Michele Artini | f4a803eab1 | |
Michele Artini | 362ab547cb | |
Michele Artini | 13e9821940 | |
Michele Artini | 92bd92f1b9 | |
Michele Artini | 7542dcc511 | |
Michele Artini | 047ee7b89a | |
Michele Artini | 5f4a8cd291 | |
Michele Artini | 963451dc67 | |
Michele Artini | 1ed57060e9 | |
Michele Artini | 4573b41b30 | |
Michele Artini | 96ec14722c | |
Michele Artini | 7716f199ad | |
Michele Artini | c4c7179009 | |
Michele Artini | bf11721159 | |
Michele Artini | 7a2ce6b2ad | |
Miriam Baglioni | 2a937cc4ae | |
Michele Artini | c990fc945b | |
Miriam Baglioni | 3549fdebf9 | |
Michele Artini | 5523ab8657 | |
Michele Artini | 98b59cd0d5 | |
Miriam Baglioni | e751759de9 | |
Michele Artini | 1ccb1c266f | |
Michele Artini | 75d002c8ba | |
Michele Artini | 1271f9cf43 | |
Michele Artini | 02b74616b2 | |
Michele Artini | a4a4586127 | |
Michele Artini | 05b408f4af | |
Michele Artini | 20f7a1da77 | |
Michele Artini | e0143ab27e | |
Michele Artini | ee8e84b316 | |
Michele Artini | 098537e067 | |
Michele Artini | 4c7f4ca4c7 | |
Michele Artini | 373275063a | |
Michele Artini | d5a754ffee | |
Michele Artini | 8e7901afdd | |
Michele Artini | be85f682a0 | |
Michele Artini | c572334b91 | |
Michele Artini | 7355ca175d | |
Michele Artini | 0a5c4a2826 | |
Michele Artini | 6c3b7d6e7c | |
Michele Artini | 7a83fb8c67 | |
Michele Artini | faaf1a6755 | |
Michele Artini | 236ac912c6 | |
Michele Artini | 1c29c9a8d4 | |
Michele Artini | 4b09f75930 | |
Michele Artini | d46f52aa5f | |
Michele Artini | b8ce011c20 | |
Michele Artini | 828488047a | |
Michele Artini | 3b1d3d8d19 | |
Michele Artini | fd0eeb170b | |
Michele Artini | 9890f71491 | |
Enrico Ottonello | 15558ff29a | |
Sandro La Bruzzo | 44e7aaa694 | |
Michele Artini | ebedff0955 | |
Michele Artini | 5aa2f013d4 | |
Michele Artini | 2db58b9e04 | |
Michele Artini | fb08b4a4c2 | |
Michele Artini | c3d2187257 | |
Michele Artini | 9b9d37624c | |
Michele Artini | e037cd0b41 | |
Michele Artini | 834c21ffc8 | |
Michele Artini | fe3a9f7b4e | |
Michele Artini | ec666bf88e | |
Claudio Atzori | db400dbfe0 | |
Michele Artini | 98d1e17761 | |
Michele Artini | c948ed63f3 | |
Michele Artini | b748d847bb | |
Enrico Ottonello | 3f61e6fce2 | |
Enrico Ottonello | a70a327281 | |
Sandro La Bruzzo | 7511ef94c2 | |
Alessia Bardi | 98df910e1c | |
Michele Artini | 55736c5c60 | |
Michele Artini | 181e5cfbc5 | |
Michele Artini | dbec69c77a | |
Michele Artini | 3440383904 | |
Enrico Ottonello | 9edc663c92 | |
Enrico Ottonello | 5e8ecab58d | |
Enrico Ottonello | 852ff05881 | |
Enrico Ottonello | 8a18fe11ec | |
Enrico Ottonello | 079b2506e6 | |
Enrico Ottonello | 7375534764 | |
Michele Artini | 4d066e3d77 | |
Michele Artini | 19010a9624 | |
Michele Artini | 75a27e1fbd | |
Michele Artini | 51be640db9 | |
Michele Artini | 5d9be72fe6 | |
Michele Artini | b1f115dfc1 | |
Michele Artini | f113bcbfdd | |
Michele Artini | c86fc72253 | |
Michele Artini | 62ff066a5d | |
Sandro La Bruzzo | 762af8d0b1 | |
Michele Artini | 3e405075c9 | |
Michele Artini | a15ececddd | |
Michele Artini | 892963fb68 | |
Michele Artini | b47f14aa9b | |
Michele Artini | b5b130e0ab | |
Michele Artini | 6b109d3f15 | |
Michele Artini | aa8c467253 | |
Michele Artini | 004cb02cec | |
Michele Artini | 035d1da34f | |
Michele Artini | f143d1f0c9 | |
Michele Artini | 2f36d8ff47 | |
Michele Artini | 44e170946f | |
Michele Artini | dcc5174f4d | |
Michele Artini | aa294bfc2d | |
Michele Artini | ee8ec5ecc9 | |
Michele Artini | 7078467e97 | |
Michele Artini | a6709a3878 | |
Michele Artini | 9308ab35f0 | |
Sandro La Bruzzo | d7824d5071 | |
Sandro La Bruzzo | c0277c4938 | |
Michele Artini | 3db226bea6 | |
Michele Artini | a6909dc253 | |
Michele Artini | 00ed2922da | |
Michele Artini | 485c865896 | |
Michele Artini | 6e653e1258 | |
Michele Artini | 1311afa993 | |
Michele Artini | 3b898f6a3e | |
Sandro La Bruzzo | 7abf6af997 | |
Sandro La Bruzzo | 786449a43c | |
Michele Artini | 90792c490c | |
Michele Artini | 31afa26b2c | |
Michele Artini | f30eeac80a | |
Michele Artini | 5ea73251e3 | |
Michele Artini | 13c4ef23b9 | |
Michele Artini | e7625b44e0 | |
Michele Artini | 41d4846027 | |
Sandro La Bruzzo | d5a089869c | |
Sandro La Bruzzo | fc74c86ba9 | |
Sandro La Bruzzo | b776a3c7cd | |
Sandro La Bruzzo | 68eed5d523 | |
Michele Artini | 37b662ac20 | |
Michele Artini | 33bd9c396f | |
Michele Artini | 9f2d5c6c24 | |
Michele Artini | cf612424c9 | |
Sandro La Bruzzo | f505f379af | |
Michele Artini | 98a33dd9d8 | |
Sandro La Bruzzo | 04ea22b244 | |
Sandro La Bruzzo | 353258ad68 | |
Michele Artini | 98b8c67f52 | |
Michele Artini | ff97a8c955 | |
Michele Artini | ae7b483c90 | |
Michele Artini | 3a3f24135d | |
Michele Artini | 52a53e6484 | |
Michele Artini | 2f99954a4f | |
Michele Artini | 6f914d52b0 | |
Michele Artini | ef4b379210 | |
Michele Artini | 5833884fb3 | |
Michele Artini | 3e84eac3cb | |
Michele Artini | e127a7e73c | |
Michele Artini | e0a3e6a14f | |
Michele Artini | 6c06ad6b9a | |
Michele Artini | 01fd913c1d | |
Michele Artini | 7ba3c61768 | |
Michele Artini | 6ec96e2c82 | |
Michele Artini | caeb3c8ddb | |
Michele Artini | e774487903 | |
Michele Artini | 5eb49d2b33 | |
Michele Artini | 60629198bd | |
Michele Artini | a7f8f8f061 | |
Michele Artini | dd4c634295 | |
Michele Artini | 7c75386488 | |
Michele Artini | b10bd1f385 | |
Michele Artini | 51bfb5866b | |
Michele Artini | b736851035 | |
Michele Artini | a95f4944f7 | |
Michele Artini | c1873f27fb | |
Michele Artini | dc7dddfb37 | |
Michele Artini | 6ee176ff89 | |
Michele Artini | e25e90819b | |
Michele Artini | 654d3fc397 | |
Michele Artini | 92ae3fb533 | |
Michele Artini | 0a5c711629 | |
Michele Artini | 91fccbae7f | |
Michele Artini | 53750ff818 | |
Michele Artini | 853d4b66e5 | |
Michele Artini | 54445748c0 |
|
@@ -5,6 +5,7 @@
|
|||
*.iml
|
||||
*.ipr
|
||||
*.iws
|
||||
*.java-version
|
||||
*~
|
||||
/**/*.sh
|
||||
/**/my_application.properties
|
||||
|
|
|
@@ -0,0 +1,661 @@
|
|||
GNU AFFERO GENERAL PUBLIC LICENSE
|
||||
Version 3, 19 November 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU Affero General Public License is a free, copyleft license for
|
||||
software and other kinds of works, specifically designed to ensure
|
||||
cooperation with the community in the case of network server software.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
our General Public Licenses are intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
Developers that use our General Public Licenses protect your rights
|
||||
with two steps: (1) assert copyright on the software, and (2) offer
|
||||
you this License which gives you legal permission to copy, distribute
|
||||
and/or modify the software.
|
||||
|
||||
A secondary benefit of defending all users' freedom is that
|
||||
improvements made in alternate versions of the program, if they
|
||||
receive widespread use, become available for other developers to
|
||||
incorporate. Many developers of free software are heartened and
|
||||
encouraged by the resulting cooperation. However, in the case of
|
||||
software used on network servers, this result may fail to come about.
|
||||
The GNU General Public License permits making a modified version and
|
||||
letting the public access it on a server without ever releasing its
|
||||
source code to the public.
|
||||
|
||||
The GNU Affero General Public License is designed specifically to
|
||||
ensure that, in such cases, the modified source code becomes available
|
||||
to the community. It requires the operator of a network server to
|
||||
provide the source code of the modified version running there to the
|
||||
users of that server. Therefore, public use of a modified version, on
|
||||
a publicly accessible server, gives the public access to the source
|
||||
code of the modified version.
|
||||
|
||||
An older license, called the Affero General Public License and
|
||||
published by Affero, was designed to accomplish similar goals. This is
|
||||
a different license, not a version of the Affero GPL, but Affero has
|
||||
released a new version of the Affero GPL which permits relicensing under
|
||||
this license.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU Affero General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Remote Network Interaction; Use with the GNU General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, if you modify the
|
||||
Program, your modified version must prominently offer all users
|
||||
interacting with it remotely through a computer network (if your version
|
||||
supports such interaction) an opportunity to receive the Corresponding
|
||||
Source of your version by providing access to the Corresponding Source
|
||||
from a network server at no charge, through some standard or customary
|
||||
means of facilitating copying of software. This Corresponding Source
|
||||
shall include the Corresponding Source for any work covered by version 3
|
||||
of the GNU General Public License that is incorporated pursuant to the
|
||||
following paragraph.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the work with which it is combined will remain governed by version
|
||||
3 of the GNU General Public License.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU Affero General Public License from time to time. Such new versions
|
||||
will be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU Affero General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU Affero General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU Affero General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU Affero General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU Affero General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Affero General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If your software can interact with users remotely through a computer
|
||||
network, you should also make sure that it provides a way for users to
|
||||
get its source. For example, if your program is a web application, its
|
||||
interface could display a "Source" link that leads users to an archive
|
||||
of the code. There are many ways you could offer source, and different
|
||||
solutions will be better for different programs; see section 13 for the
|
||||
specific requirements.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU AGPL, see
|
||||
<http://www.gnu.org/licenses/>.
|
|
@@ -0,0 +1,10 @@
|
|||
{
|
||||
"type_source": "SVN",
|
||||
"goal": "package -U source:jar",
|
||||
"url": "http://svn-public.driver.research-infrastructures.eu/driver/dnet50/modules/dnet-bioschemas-api/trunk/",
|
||||
"deploy_repository": "dnet5-snapshots",
|
||||
"version": "5",
|
||||
"mail": "sandro.labruzzo@isti.cnr.it,michele.artini@isti.cnr.it, claudio.atzori@isti.cnr.it, alessia.bardi@isti.cnr.it, enrico.ottonello@isti.cnr.it",
|
||||
"deploy_repository_url": "http://maven.research-infrastructures.eu/nexus/content/repositories/dnet5-snapshots",
|
||||
"name": "dnet-bioschemas-api"
|
||||
}
|
|
@@ -0,0 +1,62 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<parent>
|
||||
<groupId>eu.dnetlib.dhp</groupId>
|
||||
<artifactId>apps</artifactId>
|
||||
<version>3.5.5-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<packaging>jar</packaging>
|
||||
<artifactId>bioschemas-api</artifactId>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>hwu.elixir</groupId>
|
||||
<artifactId>bmuse-core</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.any23</groupId>
|
||||
<artifactId>apache-any23-core</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.eclipse.rdf4j</groupId>
|
||||
<artifactId>rdf4j-rio-rdfxml</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.eclipse.rdf4j</groupId>
|
||||
<artifactId>rdf4j-model</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.jsoup</groupId>
|
||||
<artifactId>jsoup</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.seleniumhq.selenium</groupId>
|
||||
<artifactId>selenium-java</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-io</groupId>
|
||||
<artifactId>commons-io</artifactId>
|
||||
<version>${bioschemas-commons-io-version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-validator</groupId>
|
||||
<artifactId>commons-validator</artifactId>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-help-plugin</artifactId>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-maven-plugin</artifactId>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
|
||||
</project>
|
|
@@ -0,0 +1,8 @@
|
|||
https://mobidb.org/sitemap2.xml.gz
|
||||
scrape?datasourceKey=mobidb&sitemapUrl=https%3A%2F%2Fmobidb.org%2Fsitemap2.xml.gz
|
||||
|
||||
https://proteinensemble.org/sitemap2.xml.gz
|
||||
scrape?datasourceKey=ped&sitemapUrl=https%3A%2F%2Fproteinensemble.org%2Fsitemap2.xml.gz
|
||||
|
||||
https://disprot.org/sitemap2.xml.gz
|
||||
scrape?datasourceKey=disprot&sitemapUrl=https%3A%2F%2Fdisprot.org%2Fsitemap2.xml.gz
|
|
@@ -0,0 +1,14 @@
|
|||
package eu.dnetlib.bioschemas.api;
|
||||
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Profile;
|
||||
|
||||
/**
 * Empty Spring {@code @Configuration} class that is activated only when the
 * {@code "garr"} profile is enabled. It currently declares no beans; it serves
 * as an anchor for GARR-deployment-specific configuration to be added later.
 *
 * @author enrico.ottonello
 *
 */
@Profile("garr")
@Configuration
public class AppConfigGarr {

}
|
|
@@ -0,0 +1,52 @@
|
|||
package eu.dnetlib.bioschemas.api;
|
||||
|
||||
import io.swagger.v3.oas.models.tags.Tag;
|
||||
import org.springdoc.core.GroupedOpenApi;
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
import org.springframework.cache.annotation.EnableCaching;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.ComponentScan;
|
||||
import org.springframework.scheduling.annotation.EnableScheduling;
|
||||
|
||||
import eu.dnetlib.common.app.AbstractDnetApp;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
@SpringBootApplication
|
||||
@EnableCaching
|
||||
@EnableScheduling
|
||||
@ComponentScan(basePackages = "eu.dnetlib")
|
||||
public class MainApplication extends AbstractDnetApp {
|
||||
|
||||
public static final String BIOSCHEMAS_APIS = "D-Net Bioschemas Service APIs";
|
||||
|
||||
public static void main(final String[] args) {
|
||||
SpringApplication.run(MainApplication.class, args);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public GroupedOpenApi publicApi() {
|
||||
return GroupedOpenApi.builder()
|
||||
.group(BIOSCHEMAS_APIS)
|
||||
.pathsToMatch("/api/**")
|
||||
.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String swaggerTitle() {
|
||||
return BIOSCHEMAS_APIS;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<Tag> swaggerTags() {
|
||||
return Arrays.asList(new Tag().name(BIOSCHEMAS_APIS).description(BIOSCHEMAS_APIS));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String swaggerDesc() {
|
||||
return BIOSCHEMAS_APIS;
|
||||
}
|
||||
|
||||
}
|
|
@@ -0,0 +1,206 @@
|
|||
package eu.dnetlib.bioschemas.api;
|
||||
|
||||
import eu.dnetlib.bioschemas.api.crawl.CrawlRecord;
|
||||
import eu.dnetlib.bioschemas.api.scraper.BMUSEScraper;
|
||||
import eu.dnetlib.bioschemas.api.scraper.ScrapeState;
|
||||
import eu.dnetlib.bioschemas.api.scraper.ScrapeThread;
|
||||
import eu.dnetlib.bioschemas.api.utils.UrlParser;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.jsoup.nodes.Element;
|
||||
import org.jsoup.select.Elements;
|
||||
|
||||
import java.io.*;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Properties;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
|
||||
/**
|
||||
* Runs the scrape. Collect a list of URLs (in the form of CrawlRecords) to scrape.
|
||||
*
|
||||
*/
|
||||
public class ServiceScrapeDriver {
|
||||
|
||||
// Name of the classpath resource with scraper settings; presumably read by
// processProperties() (body not visible here) — TODO confirm.
private static final String propertiesFile = "application.properties";

// Wait passed to each ScrapeThread between pages; unit (seconds/ms) not visible here — TODO confirm.
private int waitTime = 1;
// Pages scraped per loop iteration; presumably set from the properties file — TODO confirm.
private int numberOfPagesToCrawlInALoop;
// Hard cap on pages for one session; the crawl loop in runScrape() stops at this count.
private int totalNumberOfPagesToCrawlInASession;
// Destination folder for scrape output; usage not visible in this excerpt.
private String outputFolder;
// Running count of pages scraped so far in the current session.
private int pagesCounter = 0;
// Version tag forwarded to ScrapeThread; semantics not visible here — TODO confirm.
private int scrapeVersion = 1;

// URL of the sitemap listing the pages to scrape.
private String sitemapUrl;
// Key passed to UrlParser.getSitemapList() to extract URLs from the sitemap — TODO confirm exact meaning.
private String sitemapURLKey;
// Optional page limit; runScrape() treats null as "no limit" and otherwise parses it as a long.
private String maxScrapedPages;
// Name of the output file; usage not visible in this excerpt.
private String outputFilename;

// Timestamp format for crawl start/end log lines.
// NOTE(review): SimpleDateFormat is not thread-safe; this static instance is shared —
// fine while the crawl is single-threaded, but revisit if threading is re-enabled.
private static SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd 'at' HH:mm:ss z");

private static final Log logger = LogFactory.getLog(ServiceScrapeDriver.class);
|
||||
|
||||
/**
 * Creates a driver for one scrape session.
 *
 * @param sitemapUrl      URL of the sitemap listing the pages to scrape
 * @param sitemapURLKey   key used by UrlParser.getSitemapList() to extract page URLs
 *                        from the sitemap — TODO confirm exact semantics
 * @param maxScrapedPages upper bound on pages to scrape (parsed as a long), or
 *                        {@code null} for no limit
 * @param outputFilename  name of the file the scraped output is written to
 * @param outputFolder    folder the output file is stored in
 */
public ServiceScrapeDriver(String sitemapUrl, String sitemapURLKey, String maxScrapedPages, String outputFilename, String outputFolder) {
	this.sitemapUrl = sitemapUrl;
	this.sitemapURLKey = sitemapURLKey;
	this.maxScrapedPages = maxScrapedPages;
	this.outputFilename = outputFilename;
	this.outputFolder = outputFolder;
}
|
||||
|
||||
/**
|
||||
* Fires off threads
|
||||
* Originally designed as a multi-threaded process; now reduced to a single thread as
|
||||
* the selenium webdriver is too expensive to run multi-threaded. However, the threading
|
||||
* as been left in situ in case it is useful in the future.
|
||||
*
|
||||
*/
|
||||
public void runScrape() throws IOException {
|
||||
processProperties();
|
||||
String url = sitemapUrl.toLowerCase();
|
||||
Elements urls = UrlParser.getSitemapList(getSitemapUrl(), getSitemapURLKey());
|
||||
Stream<Element> urlStream = null;
|
||||
if (Objects.nonNull(maxScrapedPages)) {
|
||||
urlStream = urls.stream().limit(Long.parseLong(maxScrapedPages));
|
||||
} else {
|
||||
urlStream = urls.stream();
|
||||
}
|
||||
List<Element> sites = urlStream.collect(Collectors.toList());
|
||||
logger.info("Pages available for scraping: " + sites.size());
|
||||
|
||||
List<CrawlRecord> pagesToPull = generatePagesToPull(sites);
|
||||
if (pagesToPull.isEmpty()) {
|
||||
logger.error("Cannot retrieve URLs");
|
||||
throw new RuntimeException("No pages found from sitemap");
|
||||
}
|
||||
|
||||
ScrapeState scrapeState = new ScrapeState(pagesToPull);
|
||||
|
||||
logger.info("STARTING CRAWL: " + formatter.format(new Date(System.currentTimeMillis())));
|
||||
while (pagesCounter < totalNumberOfPagesToCrawlInASession) {
|
||||
logger.debug(pagesCounter + " scraped of " + totalNumberOfPagesToCrawlInASession);
|
||||
|
||||
ScrapeThread scrape1 = new ScrapeThread(new BMUSEScraper(), scrapeState, waitTime, scrapeVersion);
|
||||
scrape1.setName("S1");
|
||||
scrape1.start();
|
||||
long startTime = System.nanoTime();
|
||||
|
||||
try {
|
||||
scrape1.join();
|
||||
} catch (InterruptedException e) {
|
||||
logger.error("Exception waiting on thread");
|
||||
e.printStackTrace();
|
||||
return;
|
||||
}
|
||||
|
||||
if(!scrape1.isFileWritten()) {
|
||||
logger.error("Could not write output file so shutting down!");
|
||||
Date date = new Date(System.currentTimeMillis());
|
||||
logger.info("ENDING CRAWL after failure at: " + formatter.format(date));
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug("Value of isFileWritten: " + scrape1.isFileWritten());
|
||||
long endTime = System.nanoTime();
|
||||
long timeElapsed = endTime - startTime;
|
||||
logger.debug("Time in s to complete: " + timeElapsed / 1e+9);
|
||||
pagesCounter += numberOfPagesToCrawlInALoop;
|
||||
logger.debug("ENDED loop");
|
||||
}
|
||||
|
||||
logger.info("ENDING CRAWL: " + formatter.format(new Date(System.currentTimeMillis())));
|
||||
|
||||
File output = new File(outputFolder.concat("/").concat(outputFilename));
|
||||
if (output.exists()) {
|
||||
output.delete();
|
||||
output.createNewFile();
|
||||
}
|
||||
FileWriter fileWriter;
|
||||
BufferedWriter bufferedWriter;
|
||||
fileWriter = new FileWriter(output.getAbsoluteFile(), true); // true to append
|
||||
bufferedWriter = new BufferedWriter(fileWriter);
|
||||
|
||||
List<CrawlRecord> processed = scrapeState.getPagesProcessed();
|
||||
for (int i=0;i<processed.size();i++) {
|
||||
try {
|
||||
bufferedWriter.write(processed.get(i).getNquads());
|
||||
bufferedWriter.newLine();
|
||||
bufferedWriter.flush();
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
bufferedWriter.close();
|
||||
logger.info(" Data stored into "+output.getAbsolutePath());
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a list of URLs (in the form of CrawlRecords) that need to be scraped
|
||||
*
|
||||
* @return List of URLs to be scraped
|
||||
* @see CrawlRecord
|
||||
*/
|
||||
private List<CrawlRecord> generatePagesToPull(List<Element> sites) {
|
||||
List<CrawlRecord> crawls = sites
|
||||
.stream()
|
||||
.map(s -> {
|
||||
CrawlRecord crawlRecord = new CrawlRecord(s.text());
|
||||
String[] urlSplitted = crawlRecord.getUrl().split("/");
|
||||
String name = urlSplitted[urlSplitted.length - 1];
|
||||
crawlRecord.setName(name);
|
||||
return crawlRecord;
|
||||
})
|
||||
.collect(Collectors.toList());
|
||||
return crawls;
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates properties based on properties file in src > main > resources
|
||||
*
|
||||
*/
|
||||
private void processProperties() {
|
||||
ClassLoader classLoader = ServiceScrapeDriver.class.getClassLoader();
|
||||
|
||||
InputStream is = classLoader.getResourceAsStream(propertiesFile);
|
||||
if(is == null) {
|
||||
logger.error(" Cannot find " + propertiesFile + " file");
|
||||
throw new IllegalArgumentException(propertiesFile + "file is not found!");
|
||||
}
|
||||
|
||||
Properties prop = new Properties();
|
||||
|
||||
try {
|
||||
prop.load(is);
|
||||
} catch (IOException e) {
|
||||
logger.error(" Cannot load application.properties", e);
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
waitTime = Integer.parseInt(prop.getProperty("waitTime").trim());
|
||||
logger.info(" waitTime: " + waitTime);
|
||||
numberOfPagesToCrawlInALoop = Integer.parseInt(prop.getProperty("numberOfPagesToCrawlInALoop").trim());
|
||||
logger.info(" numberOfPagesToCrawl: " + numberOfPagesToCrawlInALoop);
|
||||
totalNumberOfPagesToCrawlInASession = Integer.parseInt(prop.getProperty("totalNumberOfPagesToCrawlInASession").trim());
|
||||
logger.info(" totalNumberOfPagesToCrawlInASession: " + totalNumberOfPagesToCrawlInASession);
|
||||
scrapeVersion = Integer.parseInt(prop.getProperty("scrapeVersion").trim());
|
||||
logger.info(" scrapeVersion: " + scrapeVersion);
|
||||
logger.info("\n\n\n");
|
||||
}
|
||||
|
||||
public String getSitemapUrl() {
|
||||
return sitemapUrl;
|
||||
}
|
||||
|
||||
public String getSitemapURLKey() {
|
||||
return sitemapURLKey;
|
||||
}
|
||||
|
||||
private String getId(String pageUrl) {
|
||||
String[] parts = pageUrl.split("/");
|
||||
return parts[parts.length - 1];
|
||||
}
|
||||
}
|
|
@ -0,0 +1,98 @@
|
|||
package eu.dnetlib.bioschemas.api.controller;
|
||||
|
||||
import eu.dnetlib.bioschemas.api.MainApplication;
|
||||
import eu.dnetlib.bioschemas.api.scraper.ScrapingExecution;
|
||||
import eu.dnetlib.bioschemas.api.scraper.ScrapingExecutor;
|
||||
import eu.dnetlib.bioschemas.api.utils.BioschemasException;
|
||||
import eu.dnetlib.common.controller.AbstractDnetController;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.Parameter;
|
||||
import io.swagger.v3.oas.annotations.enums.ParameterIn;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.io.LineIterator;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
/**
|
||||
* @author enrico.ottonello
|
||||
*
|
||||
*/
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/api")
|
||||
@Tag(name = MainApplication.BIOSCHEMAS_APIS)
|
||||
public class BioschemasAPIController extends AbstractDnetController {
|
||||
|
||||
@Value("${outputFolder}")
|
||||
private String outputFolder;
|
||||
@Value("${outputDataPattern}")
|
||||
private String outputDataPattern;
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(BioschemasAPIController.class);
|
||||
|
||||
@Autowired
|
||||
private ScrapingExecutor scrapingExecutor;
|
||||
|
||||
private static final Log log = LogFactory.getLog(BioschemasAPIController.class);
|
||||
|
||||
@Operation(summary = "start the scraping operation", description = "<H1>Working input values are in the following table</H1><BR><TABLE><TR><TH>datasourceKey</TH><TH>sitemapUrl</TH></TR><TR><TD>ped</TD><TD>https://proteinensemble.org/sitemap2.xml.gz</TD></TR><TR><TD>disprot</TD><TD>https://disprot.org/sitemap2.xml.gz</TD></TR><TR><TD>mobidb</TD><TD>https://mobidb.org/sitemap2.xml.gz</TD></TR></TABLE>")
|
||||
@GetMapping("/startScraping")
|
||||
public ScrapingExecution startScraping(@Parameter(name = "datasourceKey") @RequestParam final String datasourceKey,
|
||||
@Parameter(name = "sitemapUrl") @RequestParam final String sitemapUrl,
|
||||
final HttpServletRequest req) {
|
||||
logger.info("<STARTSCRAPING> datasourceKey: "+datasourceKey+" sitemapUrl:"+sitemapUrl);
|
||||
return scrapingExecutor.startScraping(datasourceKey, sitemapUrl, getOutputDataPattern(), req.getRemoteAddr(), getOutputFolder());
|
||||
}
|
||||
|
||||
@Operation(summary = "check the status of last scraping operation")
|
||||
@GetMapping("/startScraping/status")
|
||||
public final ScrapingExecution statusScraping() {
|
||||
return scrapingExecutor.getLastScrapingExecution();
|
||||
}
|
||||
|
||||
@Operation(summary = "retrieve the nquads downloaded for one specific provider")
|
||||
@RequestMapping(value = "/getNQuads", method = RequestMethod.GET)
|
||||
public String getNQuads(@Parameter(name = "datasourceKey") @RequestParam final String datasourceKey, HttpServletResponse response) throws BioschemasException, IOException {
|
||||
|
||||
logger.info("<GETNQUADS> datasourceKey: "+datasourceKey);
|
||||
|
||||
LineIterator it = FileUtils.lineIterator(new File(getOutputFolder().concat("/").concat(datasourceKey).concat(getOutputDataPattern())), "UTF-8");
|
||||
try {
|
||||
while (it.hasNext()) {
|
||||
String line = it.nextLine();
|
||||
response.getOutputStream().write(line.getBytes(StandardCharsets.UTF_8));
|
||||
response.getOutputStream().println();
|
||||
}
|
||||
} finally {
|
||||
}
|
||||
return "";
|
||||
}
|
||||
|
||||
public String getOutputFolder() {
|
||||
return outputFolder;
|
||||
}
|
||||
|
||||
public String getOutputDataPattern() {
|
||||
return outputDataPattern;
|
||||
}
|
||||
|
||||
public void setOutputFolder(String outputFolder) {
|
||||
this.outputFolder = outputFolder;
|
||||
}
|
||||
|
||||
public void setOutputDataPattern(String outputDataPattern) {
|
||||
this.outputDataPattern = outputDataPattern;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,17 @@
|
|||
package eu.dnetlib.bioschemas.api.controller;
|
||||
|
||||
import eu.dnetlib.common.controller.AbstractDnetController;
|
||||
import org.springframework.stereotype.Controller;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
|
||||
@Controller
|
||||
public class HomeController extends AbstractDnetController {
|
||||
|
||||
@GetMapping({
|
||||
"/doc", "/swagger"
|
||||
})
|
||||
public String apiDoc() {
|
||||
return "redirect:swagger-ui/index.html";
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,136 @@
|
|||
package eu.dnetlib.bioschemas.api.crawl;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
import hwu.elixir.utils.Validation;
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
* Store the current status of a single URL in the scrape service.
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
public class CrawlRecord {
|
||||
|
||||
private Long id;
|
||||
|
||||
private String context = "";
|
||||
|
||||
private String url;
|
||||
|
||||
private Date dateScraped;
|
||||
|
||||
private StatusOfScrape status;
|
||||
|
||||
private boolean beingScraped;
|
||||
|
||||
private String name;
|
||||
|
||||
private String nquads;
|
||||
|
||||
public CrawlRecord() {
|
||||
status = StatusOfScrape.UNTRIED;
|
||||
}
|
||||
|
||||
public CrawlRecord(String url) {
|
||||
Validation validation = new Validation();
|
||||
if(validation.validateURI(url)) {
|
||||
this.url = url;
|
||||
context = "";
|
||||
status = StatusOfScrape.UNTRIED;
|
||||
dateScraped = null;
|
||||
} else {
|
||||
throw new IllegalArgumentException(url +" is not a valid url");
|
||||
}
|
||||
this.setId(System.currentTimeMillis());
|
||||
}
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(Long id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getUrl() {
|
||||
return url;
|
||||
}
|
||||
|
||||
public Date getDateScraped() {
|
||||
return dateScraped;
|
||||
}
|
||||
|
||||
public void setDateScraped(Date dateScraped) {
|
||||
this.dateScraped = dateScraped;
|
||||
}
|
||||
|
||||
public StatusOfScrape getStatus() {
|
||||
return status;
|
||||
}
|
||||
|
||||
public void setStatus(StatusOfScrape status) {
|
||||
this.status = status;
|
||||
}
|
||||
|
||||
public String getContext() {
|
||||
return context;
|
||||
}
|
||||
|
||||
public void setContext(String context) {
|
||||
this.context = context;
|
||||
}
|
||||
|
||||
public boolean isBeingScraped() {
|
||||
return beingScraped;
|
||||
}
|
||||
|
||||
public void setBeingScraped(boolean beingScraped) {
|
||||
this.beingScraped = beingScraped;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getNquads() {
|
||||
return nquads;
|
||||
}
|
||||
|
||||
public void setNquads(String nquads) {
|
||||
this.nquads = nquads;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o)
|
||||
return true;
|
||||
if (!(o instanceof CrawlRecord))
|
||||
return false;
|
||||
|
||||
CrawlRecord otherCrawl = (CrawlRecord) o;
|
||||
|
||||
if(this.url.equals(otherCrawl.getUrl())) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = getId() != null ? getId().hashCode() : 0;
|
||||
result = 31 * result + (getUrl() != null ? getUrl().hashCode() : 0);
|
||||
result = 31 * result + (getContext() != null ? getContext().hashCode() : 0);
|
||||
result = 31 * result + (getDateScraped() != null ? getDateScraped().hashCode() : 0);
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,19 @@
|
|||
package eu.dnetlib.bioschemas.api.crawl;
|
||||
|
||||
/**
 * {@link StatusOfScrape} describes the possible status levels of the scrape
 * for each URL/CrawlRecord:
 *
 * <ul>
 * <li>{@code DOES_NOT_EXIST} — 404.</li>
 * <li>{@code HUMAN_INSPECTION} — cannot parse for some reason; a human should see what is happening.</li>
 * <li>{@code UNTRIED} — not scraped yet.</li>
 * <li>{@code FAILED} — one failed attempt at scraping; will try again.</li>
 * <li>{@code GIVEN_UP} — two failed attempts at scraping; will not try again.</li>
 * <li>{@code SUCCESS} — successfully scraped.</li>
 * </ul>
 */
public enum StatusOfScrape {
	DOES_NOT_EXIST,
	HUMAN_INSPECTION,
	UNTRIED,
	FAILED,
	GIVEN_UP,
	SUCCESS;
}
|
|
@ -0,0 +1,87 @@
|
|||
|
||||
package eu.dnetlib.bioschemas.api.scraper;
|
||||
|
||||
import hwu.elixir.scrape.exceptions.MissingMarkupException;
|
||||
import hwu.elixir.scrape.scraper.ScraperFilteredCore;
|
||||
import org.apache.any23.Any23;
|
||||
import org.apache.any23.extractor.ExtractionException;
|
||||
import org.apache.any23.source.DocumentSource;
|
||||
import org.apache.any23.source.StringDocumentSource;
|
||||
import org.apache.any23.writer.NTriplesWriter;
|
||||
import org.apache.any23.writer.TripleHandler;
|
||||
import org.apache.any23.writer.TripleHandlerException;
|
||||
import org.apache.commons.io.output.ByteArrayOutputStream;
|
||||
import org.eclipse.rdf4j.model.IRI;
|
||||
import org.eclipse.rdf4j.model.Model;
|
||||
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
|
||||
import org.eclipse.rdf4j.rio.RDFFormat;
|
||||
import org.eclipse.rdf4j.rio.Rio;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.StringWriter;
|
||||
|
||||
public class BMUSEScraper extends ScraperFilteredCore {
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(BMUSEScraper.class);
|
||||
|
||||
public String getNQUADSFromUrl(String url, Boolean dynamic) throws Exception {
|
||||
logger.debug(url + " > scraping");
|
||||
url = fixURL(url);
|
||||
|
||||
String html = "";
|
||||
// The dynamic boolean determines if the scraper should start using selenium or JSOUP to scrape the information
|
||||
// (dynamic and static respectively)
|
||||
|
||||
if (dynamic) {
|
||||
html = wrapHTMLExtraction(url);
|
||||
} else {
|
||||
html = wrapHTMLExtractionStatic(url);
|
||||
}
|
||||
|
||||
if (html == null || html.contentEquals(""))
|
||||
throw new Exception("empty html");
|
||||
|
||||
html = injectId(html, url);
|
||||
|
||||
logger.debug(url + " > html scraped from " + url);
|
||||
DocumentSource source = new StringDocumentSource(html, url);
|
||||
String n3 = html2Triples(source, url);
|
||||
if (n3 == null) {
|
||||
throw new MissingMarkupException(url);
|
||||
}
|
||||
|
||||
logger.debug(url + " > processing triples");
|
||||
IRI sourceIRI = SimpleValueFactory.getInstance().createIRI(source.getDocumentIRI());
|
||||
Model updatedModel = updatedModel = processTriples(n3, sourceIRI, 0l);
|
||||
if (updatedModel == null) {
|
||||
throw new Exception("rdf model null");
|
||||
}
|
||||
|
||||
logger.debug(url + " > generating nquads");
|
||||
try (StringWriter jsonLDWriter = new StringWriter()) {
|
||||
Rio.write(updatedModel, jsonLDWriter, RDFFormat.NQUADS);
|
||||
logger.debug(url + " > nquads generated");
|
||||
return jsonLDWriter.toString();
|
||||
} catch (Exception e) {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
private String html2Triples(DocumentSource source, String url) throws Exception {
|
||||
Any23 runner = new Any23();
|
||||
try (ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
TripleHandler handler = new NTriplesWriter(out);) {
|
||||
runner.extract(source, handler);
|
||||
return out.toString("UTF-8");
|
||||
} catch (ExtractionException e) {
|
||||
logger.error("Cannot extract triples", e);
|
||||
} catch (IOException e1) {
|
||||
logger.error(" IO error whilst extracting triples", e1);
|
||||
} catch (TripleHandlerException e2) {
|
||||
logger.error("TripleHanderException", e2);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,157 @@
|
|||
package eu.dnetlib.bioschemas.api.scraper;
|
||||
|
||||
import eu.dnetlib.bioschemas.api.crawl.StatusOfScrape;
|
||||
import eu.dnetlib.bioschemas.api.crawl.CrawlRecord;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
|
||||
/**
|
||||
|
||||
*/
|
||||
public class ScrapeState {
|
||||
|
||||
private List<CrawlRecord> urlsToScrape = Collections.synchronizedList(new ArrayList<CrawlRecord>());
|
||||
private List<CrawlRecord> urlsProcessed = Collections.synchronizedList(new ArrayList<CrawlRecord>()); // should this be a set?
|
||||
private Map<String, Object> nquadsConcurrentHashMap = new ConcurrentHashMap<>();
|
||||
|
||||
/**
|
||||
*
|
||||
* @param pagesToBeScraped The list of sites to be scraped
|
||||
* @see ScrapeThread
|
||||
* @see CrawlRecord
|
||||
*/
|
||||
public ScrapeState(List<CrawlRecord> pagesToBeScraped) {
|
||||
urlsToScrape.addAll(pagesToBeScraped);
|
||||
}
|
||||
|
||||
/**
|
||||
* Any pages/URLs left to scrape?
|
||||
* @return True for yes & false for no
|
||||
* @see CrawlRecord
|
||||
*/
|
||||
public synchronized boolean pagesLeftToScrape() {
|
||||
return !urlsToScrape.isEmpty();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the next URL/CrawlRecord to be scraped
|
||||
*
|
||||
* @return First page/URL that needs to be scraped next
|
||||
* @see CrawlRecord
|
||||
*/
|
||||
public synchronized CrawlRecord getURLToProcess() {
|
||||
if (urlsToScrape.isEmpty())
|
||||
return null;
|
||||
|
||||
return urlsToScrape.remove(0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the given CrawlRecord to the list of CrawlRecords successfully scraped.
|
||||
* Updates the status of the CrawlRecord to SUCCESS.
|
||||
*
|
||||
* @param url The latest URL/page that has been successfully scraped
|
||||
* @see CrawlRecord
|
||||
*/
|
||||
public synchronized void addSuccessfulScrapedURL(CrawlRecord record) {
|
||||
record.setStatus(StatusOfScrape.SUCCESS);
|
||||
urlsProcessed.add(record);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the given CrawlRecord to the list of CrawlRecords NOT successfully scraped.
|
||||
* Updates the status of the CrawlRecord; if first failure the status is FAILED.
|
||||
* If status is already FAILED it is changed to GIVEN_UP.
|
||||
*
|
||||
* If the status is FAILED, another try will be made in a future run.
|
||||
*
|
||||
*
|
||||
* @param url The latest URL/page that has been unsuccessfully scraped
|
||||
* @see CrawlRecord
|
||||
*/
|
||||
public synchronized void addFailedToScrapeURL(CrawlRecord record) {
|
||||
if (record.getStatus().equals(StatusOfScrape.FAILED)) {
|
||||
record.setStatus(StatusOfScrape.GIVEN_UP);
|
||||
} else {
|
||||
record.setStatus(StatusOfScrape.FAILED);
|
||||
}
|
||||
urlsProcessed.add(record);
|
||||
}
|
||||
|
||||
/**
|
||||
* Changes the status of the CrawlRecord to DOES_NOT_EXIST.
|
||||
* As Selenium does not return the HTTP codes, it is questionable
|
||||
* how useful this is.
|
||||
*
|
||||
*
|
||||
* @param url The latest URL/page that has been 404'd
|
||||
* @see CrawlRecord
|
||||
*/
|
||||
public synchronized void setStatusTo404(CrawlRecord record) {
|
||||
record.setStatus(StatusOfScrape.DOES_NOT_EXIST);
|
||||
urlsProcessed.add(record);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
* Changes the status of the CrawlRecord to HUMAN_INSPECTION.
|
||||
* This captures the idea that the URLs may contain unexpected markup that needs a human to
|
||||
* review and possibly update the scraper.
|
||||
*
|
||||
* @param url The latest URL/page that needs human inspection
|
||||
* @see CrawlRecord
|
||||
*/
|
||||
public synchronized void setStatusToHumanInspection(CrawlRecord record) {
|
||||
record.setStatus(StatusOfScrape.HUMAN_INSPECTION);
|
||||
urlsProcessed.add(record);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the number of URLs that are still to be scraped in this cycle.
|
||||
* This does not return the number of URLs left to scrape in the DBMS, just in the current cycle.
|
||||
*
|
||||
* @return Number of URLs left to scrape in this cycle
|
||||
* @see CrawlRecord
|
||||
*/
|
||||
public synchronized int getNumberPagesLeftToScrape() {
|
||||
return urlsToScrape.size();
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the full list of URLs that have been processed in this cycle.
|
||||
* This does not return the number of URLs that have been scraped in total across all cycles.
|
||||
*
|
||||
* @return
|
||||
* @see CrawlRecord
|
||||
*/
|
||||
public synchronized List<CrawlRecord> getPagesProcessed() {
|
||||
return urlsProcessed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the full list of URLs/CrawlRecords regardless of whether scraped or not in the current cycle.
|
||||
*
|
||||
* @return List of all CrawlRecords in this cycle.
|
||||
* @see CrawlRecord
|
||||
*/
|
||||
public synchronized List<CrawlRecord> getPagesProcessedAndUnprocessed() {
|
||||
List<CrawlRecord> urlsCombined = Collections.synchronizedList(new ArrayList<CrawlRecord>());
|
||||
urlsCombined.addAll(urlsProcessed);
|
||||
urlsCombined.addAll(urlsToScrape);
|
||||
return urlsCombined;
|
||||
}
|
||||
|
||||
public void addNquads(String key, String nquads) {
|
||||
nquadsConcurrentHashMap.putIfAbsent(key, nquads);
|
||||
}
|
||||
|
||||
public Map<String, Object> getNquadsConcurrentHashMap() {
|
||||
return nquadsConcurrentHashMap;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,103 @@
|
|||
package eu.dnetlib.bioschemas.api.scraper;
|
||||
|
||||
import eu.dnetlib.bioschemas.api.crawl.CrawlRecord;
|
||||
import eu.dnetlib.bioschemas.api.utils.CompressorUtil;
|
||||
import hwu.elixir.scrape.exceptions.CannotWriteException;
|
||||
import hwu.elixir.scrape.exceptions.FourZeroFourException;
|
||||
import hwu.elixir.scrape.exceptions.JsonLDInspectionException;
|
||||
import hwu.elixir.scrape.exceptions.MissingMarkupException;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import java.util.Date;
|
||||
|
||||
/**
|
||||
*
|
||||
* @see BMUSEScraper
|
||||
* @see ScrapeState
|
||||
*
|
||||
*/
|
||||
public class ScrapeThread extends Thread {
|
||||
private ScrapeState scrapeState;
|
||||
private BMUSEScraper process;
|
||||
private int waitTime;
|
||||
private boolean fileWritten = true;
|
||||
private int scrapeVersion = 1;
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(ScrapeThread.class);
|
||||
|
||||
/**
|
||||
* Sets up a thread for actually scrapping.
|
||||
*
|
||||
* @param scraper Scraper that will actually do the scraping.
|
||||
* @param scrapeState Object that maintains state across threads.
|
||||
* @param waitTime How long (in seconds) thread should wait after scraping
|
||||
* page before attempting new page.
|
||||
* @param contextVersion The context URL used is 'https://bioschemas.org/crawl/CONTEXTVERSION/ID' Where ID is the id of the CrawlRecord pulled.
|
||||
*
|
||||
*/
|
||||
public ScrapeThread(BMUSEScraper scraper, ScrapeState scrapeState, int waitTime, int contextVersion) {
|
||||
this.scrapeState = scrapeState;
|
||||
process = scraper;
|
||||
this.waitTime = waitTime;
|
||||
this.scrapeVersion = contextVersion;
|
||||
}
|
||||
|
||||
@Override
|
||||
/**
|
||||
* Defines high-level process of scraping. Actual scraping done by an
|
||||
* implementation of Scraper. If page scrape successful will add url to
|
||||
* Scrape.sitesScraped
|
||||
*
|
||||
* @see Scraper
|
||||
* @see SimpleScraper
|
||||
*/
|
||||
public void run() {
|
||||
while (scrapeState.pagesLeftToScrape()) {
|
||||
CrawlRecord record = scrapeState.getURLToProcess();
|
||||
|
||||
if (record == null)
|
||||
break;
|
||||
|
||||
record.setContext("https://bioschemas.org/crawl/" + scrapeVersion +"/" + record.getId());
|
||||
record.setDateScraped(new Date());
|
||||
|
||||
try {
|
||||
String nquads = process.getNQUADSFromUrl(record.getUrl(), true);
|
||||
logger.info("downloaded "+record.getUrl() + " leftToScrape:" + scrapeState.getNumberPagesLeftToScrape());
|
||||
record.setNquads(CompressorUtil.compressValue(nquads));
|
||||
if (!nquads.isEmpty()) {
|
||||
scrapeState.addSuccessfulScrapedURL(record);
|
||||
} else {
|
||||
scrapeState.addFailedToScrapeURL(record);
|
||||
}
|
||||
} catch(FourZeroFourException fourZeroFourException) {
|
||||
scrapeState.setStatusTo404(record);
|
||||
fileWritten = false;
|
||||
} catch (JsonLDInspectionException je) {
|
||||
scrapeState.setStatusToHumanInspection(record);
|
||||
fileWritten = false;
|
||||
} catch (CannotWriteException cannotWrite) {
|
||||
logger.error("Caught cannot read file, setting worked to false!");
|
||||
fileWritten = false;
|
||||
scrapeState.addFailedToScrapeURL(record);
|
||||
return; // no point in continuing
|
||||
} catch (MissingMarkupException e) {
|
||||
logger.error("Cannot obtain markup from " + record.getUrl() +".");
|
||||
fileWritten = false;
|
||||
scrapeState.addFailedToScrapeURL(record);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
try {
|
||||
ScrapeThread.sleep(100 * waitTime);
|
||||
} catch (InterruptedException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
process.shutdown();
|
||||
}
|
||||
|
||||
public boolean isFileWritten() {
|
||||
return fileWritten;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,99 @@
|
|||
package eu.dnetlib.bioschemas.api.scraper;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
public class ScrapingExecution {
|
||||
|
||||
private String id;
|
||||
private Long dateStart;
|
||||
private Long dateEnd;
|
||||
private ScrapingStatus status = ScrapingStatus.NOT_YET_STARTED;
|
||||
private String message;
|
||||
|
||||
private static final Log log = LogFactory.getLog(ScrapingExecution.class);
|
||||
|
||||
public ScrapingExecution() {}
|
||||
|
||||
public ScrapingExecution(final String id, final Long dateStart, final Long dateEnd, final ScrapingStatus status, final String message) {
|
||||
this.id = id;
|
||||
this.dateStart = dateStart;
|
||||
this.dateEnd = dateEnd;
|
||||
this.status = status;
|
||||
this.message = message;
|
||||
}
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(final String id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public Long getDateStart() {
|
||||
return dateStart;
|
||||
}
|
||||
|
||||
public void setDateStart(final Long dateStart) {
|
||||
this.dateStart = dateStart;
|
||||
}
|
||||
|
||||
public Long getDateEnd() {
|
||||
return dateEnd;
|
||||
}
|
||||
|
||||
public void setDateEnd(final Long dateEnd) {
|
||||
this.dateEnd = dateEnd;
|
||||
}
|
||||
|
||||
public ScrapingStatus getStatus() {
|
||||
return status;
|
||||
}
|
||||
|
||||
public void setStatus(final ScrapingStatus status) {
|
||||
this.status = status;
|
||||
}
|
||||
|
||||
public String getMessage() {
|
||||
return message;
|
||||
}
|
||||
|
||||
public void setMessage(final String message) {
|
||||
this.message = message;
|
||||
}
|
||||
|
||||
public void startNew(final String message) {
|
||||
setId("scraping-" + UUID.randomUUID());
|
||||
setDateStart(System.currentTimeMillis());
|
||||
setDateEnd(null);
|
||||
setStatus(ScrapingStatus.RUNNING);
|
||||
setMessage(message);
|
||||
log.info(message);
|
||||
}
|
||||
|
||||
public void complete() {
|
||||
setDateEnd(System.currentTimeMillis());
|
||||
setStatus(ScrapingStatus.SUCCESS);
|
||||
|
||||
final long millis = getDateEnd() - getDateStart();
|
||||
setMessage(String
|
||||
.format("Scraping completed in %d min, %d sec", TimeUnit.MILLISECONDS.toMinutes(millis), TimeUnit.MILLISECONDS.toSeconds(millis) -
|
||||
TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(millis))));
|
||||
|
||||
log.info(getMessage());
|
||||
|
||||
}
|
||||
|
||||
public void fail(final Throwable e) {
|
||||
setDateEnd(new Date().getTime());
|
||||
setStatus(ScrapingStatus.FAILED);
|
||||
setMessage(e.getMessage());
|
||||
log.error("Error scraping", e);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,40 @@
|
|||
package eu.dnetlib.bioschemas.api.scraper;
|
||||
|
||||
import eu.dnetlib.bioschemas.api.ServiceScrapeDriver;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
|
||||
@Component
public class ScrapingExecutor {

	// Single shared status object; it also serves as the lock that guarantees
	// at most one scraping run at a time.
	private final ScrapingExecution lastScrapingExecution = new ScrapingExecution();

	// NOTE(review): returns the live mutable object; callers observe status
	// changes made by the background thread — confirm this is intended.
	public ScrapingExecution getLastScrapingExecution() {
		return lastScrapingExecution;
	}

	/**
	 * Starts a scraping run in a background thread, unless one is already
	 * RUNNING — in that case a throw-away NOT_LAUNCHED execution is returned
	 * and no new run begins.
	 *
	 * @param datasourceKey     key of the datasource (also prefixes the output file name)
	 * @param sitemapUrl        sitemap listing the pages to scrape
	 * @param outputDataPattern suffix appended to datasourceKey to form the output file name
	 * @param remoteAddr        requester address, recorded in the run message
	 * @param outputFolder      folder for the output file
	 * @return the shared live execution when a run is started, or a detached
	 *         NOT_LAUNCHED execution when one is already running
	 */
	public ScrapingExecution startScraping(final String datasourceKey, final String sitemapUrl, final String outputDataPattern, final String remoteAddr, final String outputFolder) {
		synchronized (lastScrapingExecution) {
			if (lastScrapingExecution.getStatus() != ScrapingStatus.RUNNING) {
				// Mark RUNNING while still holding the lock, so a concurrent
				// call cannot start a second run.
				lastScrapingExecution.startNew("Scraping for " + datasourceKey + " " + sitemapUrl + " - request from " + remoteAddr);
				new Thread(() -> {
					try {
						String sitemapUrlKey = "loc"; // sitemap element that holds each page URL
						String outputFilename = datasourceKey.concat(outputDataPattern);
						// null maxScrapedPages: scrape every page in the sitemap
						ServiceScrapeDriver service = new ServiceScrapeDriver(sitemapUrl, sitemapUrlKey, null, outputFilename, outputFolder);
						service.runScrape();
						lastScrapingExecution.complete();
					} catch (final Throwable e) {
						lastScrapingExecution.fail(e);
					}
				}).start();
			} else {
				final long now = System.currentTimeMillis();
				return new ScrapingExecution(null, now, now, ScrapingStatus.NOT_LAUNCHED, "An other scraping is running");
			}

		}
		return lastScrapingExecution;
	}
}
|
|
@ -0,0 +1,9 @@
|
|||
package eu.dnetlib.bioschemas.api.scraper;
|
||||
|
||||
/**
 * Lifecycle states of a scraping execution.
 *
 * <ul>
 * <li>{@code SUCCESS} — the run finished without error.</li>
 * <li>{@code FAILED} — the run terminated with an error.</li>
 * <li>{@code RUNNING} — a run is currently in progress.</li>
 * <li>{@code NOT_LAUNCHED} — a start request was rejected because another run was in progress.</li>
 * <li>{@code NOT_YET_STARTED} — no run has been requested yet.</li>
 * </ul>
 */
public enum ScrapingStatus {
	SUCCESS,
	FAILED,
	RUNNING,
	NOT_LAUNCHED,
	NOT_YET_STARTED
}
|
|
@ -0,0 +1,71 @@
|
|||
package eu.dnetlib.bioschemas.api.scraper;
|
||||
|
||||
import eu.dnetlib.bioschemas.api.crawl.StatusOfScrape;
|
||||
import hwu.elixir.scrape.exceptions.*;
|
||||
import hwu.elixir.scrape.scraper.ScraperFilteredCore;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* Provides the
|
||||
* actual scraping functionality.
|
||||
*
|
||||
* Scrapes a given URL, converts into NQuads and writes to a file (name derived
|
||||
* from URL). If the file already exists it will be overwritten.
|
||||
*
|
||||
*
|
||||
* @see ScraperFilteredCore
|
||||
*
|
||||
*/
|
||||
public class ServiceScraper extends ScraperFilteredCore {
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(ServiceScraper.class);
|
||||
|
||||
private StatusOfScrape status= null;
|
||||
|
||||
|
||||
/**
|
||||
* Orchestrates the process of scraping a site before converting the extracted
|
||||
* triples to NQuads and writing to a file.
|
||||
*
|
||||
* @param url Site to be scraped
|
||||
* @param contextCounter Number used to generate the named graph/context and
|
||||
* the URLs used to replace blank nodes.
|
||||
* @param outputFolderName Location to which the NQuads will be written
|
||||
* @return True if success; false otherwise
|
||||
* @throws FourZeroFourException
|
||||
* @throws JsonLDInspectionException
|
||||
* @throws CannotWriteException
|
||||
* @throws MissingMarkupException
|
||||
*
|
||||
*/
|
||||
public boolean scrape(String url, Long contextCounter, String outputFolderName, String fileName, StatusOfScrape status) throws FourZeroFourException, JsonLDInspectionException, CannotWriteException, MissingMarkupException {
|
||||
this.status = status;
|
||||
logger.info("scraping "+url + " to "+fileName);
|
||||
return scrape(url, outputFolderName, fileName, contextCounter, true);
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
/* Now takes account of StateOfCrawl
|
||||
*/
|
||||
protected String wrapHTMLExtraction(String url) throws FourZeroFourException {
|
||||
String html = "";
|
||||
if (status.equals(StatusOfScrape.UNTRIED) || status.equals(StatusOfScrape.FAILED)) {
|
||||
try {
|
||||
html = getHtmlViaSelenium(url);
|
||||
} catch (SeleniumException e) {
|
||||
// try again
|
||||
try {
|
||||
html = getHtmlViaSelenium(url);
|
||||
} catch (SeleniumException e2) {
|
||||
return "";
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return "";
|
||||
}
|
||||
return html;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,28 @@
|
|||
package eu.dnetlib.bioschemas.api.utils;
|
||||
|
||||
/**
 * Application-level checked exception for the Bioschemas API.
 *
 * @author enrico.ottonello
 */
public class BioschemasException extends Exception {

	// Exception is Serializable; pin the serial form explicitly.
	private static final long serialVersionUID = 1L;

	/** Creates an exception with no detail message. */
	public BioschemasException() {
	}

	/** @param message the detail message */
	public BioschemasException(final String message) {
		super(message);
	}

	/**
	 * @param message the detail message
	 * @param cause   the underlying cause
	 */
	public BioschemasException(final String message, final Throwable cause) {
		super(message, cause);
	}

	/** @param cause the underlying cause */
	public BioschemasException(final Throwable cause) {
		super(cause);
	}

	/**
	 * @param message            the detail message
	 * @param cause              the underlying cause
	 * @param enableSuppression  whether suppression is enabled
	 * @param writableStackTrace whether the stack trace should be writable
	 */
	public BioschemasException(final String message, final Throwable cause, final boolean enableSuppression, final boolean writableStackTrace) {
		super(message, cause, enableSuppression, writableStackTrace);
	}
}
|
|
@ -0,0 +1,34 @@
|
|||
package eu.dnetlib.bioschemas.api.utils;
|
||||
|
||||
import org.apache.commons.codec.binary.Base64;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.StringWriter;
|
||||
import java.util.zip.GZIPInputStream;
|
||||
import java.util.zip.GZIPOutputStream;
|
||||
|
||||
/**
 * Helpers to gzip-compress strings to Base64 and back.
 *
 * Uses {@code java.util.Base64} consistently for both directions (the MIME decoder
 * is lenient towards line separators, like the commons-codec decoder it replaces)
 * and an explicit UTF-8 charset so round-trips do not depend on the platform default.
 */
public class CompressorUtil {

	/**
	 * Decodes a Base64 string and gunzips it back to text.
	 *
	 * @param abstractCompressed Base64-encoded gzipped payload
	 * @return the decompressed UTF-8 text
	 * @throws IllegalArgumentException if the payload is not valid Base64/gzip
	 */
	public static String decompressValue(final String abstractCompressed) {
		try {
			// MIME decoder: ignores line separators/non-alphabet chars, matching
			// the leniency of commons-codec's decodeBase64.
			final byte[] compressed = java.util.Base64.getMimeDecoder().decode(abstractCompressed);
			// try-with-resources: the previous version leaked the GZIPInputStream.
			try (final GZIPInputStream gis = new GZIPInputStream(new ByteArrayInputStream(compressed))) {
				final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
				final byte[] chunk = new byte[8192];
				int read;
				while ((read = gis.read(chunk)) != -1) {
					buffer.write(chunk, 0, read);
				}
				return new String(buffer.toByteArray(), java.nio.charset.StandardCharsets.UTF_8);
			}
		} catch (IOException e) {
			throw new IllegalArgumentException(e);
		}
	}

	/**
	 * Gzips a string and encodes the result as Base64.
	 *
	 * @param value text to compress
	 * @return Base64-encoded gzipped payload
	 * @throws IOException if compression fails
	 */
	public static String compressValue(final String value) throws IOException {
		final ByteArrayOutputStream out = new ByteArrayOutputStream();
		// close() also finishes the gzip stream; try-with-resources guarantees it.
		try (final GZIPOutputStream gzip = new GZIPOutputStream(out)) {
			gzip.write(value.getBytes(java.nio.charset.StandardCharsets.UTF_8));
		}
		return java.util.Base64.getEncoder().encodeToString(out.toByteArray());
	}
}
|
|
@ -0,0 +1,64 @@
|
|||
|
||||
package eu.dnetlib.bioschemas.api.utils;
|
||||
|
||||
import hwu.elixir.utils.Helpers;
|
||||
import org.jsoup.Jsoup;
|
||||
import org.jsoup.nodes.Document;
|
||||
import org.jsoup.select.Elements;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class UrlParser {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(UrlParser.class.getName());
|
||||
|
||||
public static Elements getSitemapList(String url, String sitemapURLKey) throws IOException {
|
||||
|
||||
Document doc = new Document(url);
|
||||
Document urlSitemapListsNested;
|
||||
Elements elements = new Elements();
|
||||
Elements sitemaps = new Elements();
|
||||
boolean sitemapindex = false;
|
||||
boolean urlset = false;
|
||||
|
||||
try {
|
||||
int urlLength = url.length();
|
||||
logger.info("parse sitemap list");
|
||||
String sitemapExt = url.substring(urlLength - 3, urlLength);
|
||||
if (sitemapExt.equalsIgnoreCase(".gz")) { // this checks only the extension at the ending
|
||||
logger.info("compressed sitemap");
|
||||
byte[] bytes = Jsoup.connect(url).ignoreContentType(true).execute().bodyAsBytes();
|
||||
doc = Helpers.gzipFileDecompression(bytes);
|
||||
} else {
|
||||
doc = Jsoup.connect(url).maxBodySize(0).get();
|
||||
}
|
||||
|
||||
} catch (IOException e) {
|
||||
logger.error("Jsoup parsing exception: " + e.getMessage());
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
elements = doc.select(sitemapURLKey);
|
||||
|
||||
// check the html if it is a sitemapindex or a urlset
|
||||
sitemapindex = doc.outerHtml().contains("sitemapindex");
|
||||
urlset = doc.outerHtml().contains("urlset");
|
||||
} catch (NullPointerException e) {
|
||||
logger.error(e.getMessage());
|
||||
}
|
||||
|
||||
if (sitemapindex) {
|
||||
// if sitemapindex get the loc of all the sitemaps
|
||||
// added warning for sitemap index files
|
||||
logger
|
||||
.warn(
|
||||
"please note this is a sitemapindex file which is not currently supported, please use the content (url) of the urlset instead");
|
||||
sitemaps = doc.select(sitemapURLKey);
|
||||
}
|
||||
|
||||
return elements;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,27 @@
|
|||
server.servlet.context-path=/bioschemas-api
|
||||
server.port=8281
|
||||
|
||||
server.public_url = http://localhost:8281/bioschemas-api
|
||||
server.public_desc = API Base URL
|
||||
|
||||
spring.profiles.active=garr
|
||||
|
||||
logging.file.name = /var/log/bioschemas-api/bioschemas.log
|
||||
|
||||
maven.pom.path = /META-INF/maven/eu.dnetlib.dhp/bioschemas-api/effective-pom.xml
|
||||
|
||||
spring.main.banner-mode = off
|
||||
|
||||
logging.level.root = INFO
|
||||
|
||||
management.endpoints.web.exposure.include = prometheus,health
|
||||
management.endpoints.web.base-path = /
|
||||
management.endpoints.web.path-mapping.prometheus = metrics
|
||||
management.endpoints.web.path-mapping.health = health
|
||||
|
||||
waitTime=5
|
||||
outputFolder=/data/bioschemas-harvest
|
||||
outputDataPattern=_base64_gzipped_nquads.txt
|
||||
numberOfPagesToCrawlInALoop=8
|
||||
totalNumberOfPagesToCrawlInASession=32
|
||||
scrapeVersion=1
|
|
@ -4,8 +4,8 @@
|
|||
<parent>
|
||||
<groupId>eu.dnetlib.dhp</groupId>
|
||||
<artifactId>apps</artifactId>
|
||||
<version>3.2.0</version>
|
||||
<relativePath>../</relativePath>
|
||||
<version>3.5.5-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
@ -18,14 +18,6 @@
|
|||
|
||||
<dependencies>
|
||||
|
||||
<!-- Mail -->
|
||||
<dependency>
|
||||
<groupId>javax.mail</groupId>
|
||||
<artifactId>mail</artifactId>
|
||||
<version>1.4.7</version>
|
||||
</dependency>
|
||||
|
||||
|
||||
<!-- Openaire -->
|
||||
<dependency>
|
||||
<groupId>eu.dnetlib.dhp</groupId>
|
||||
|
@ -33,6 +25,31 @@
|
|||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Tests -->
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-junit-jupiter</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
|
|
|
@ -1,14 +1,15 @@
|
|||
package eu.dnetlib.broker;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.springdoc.core.GroupedOpenApi;
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
|
||||
import eu.dnetlib.common.app.AbstractDnetApp;
|
||||
import springfox.documentation.builders.ApiInfoBuilder;
|
||||
import springfox.documentation.builders.RequestHandlerSelectors;
|
||||
import springfox.documentation.service.ApiInfo;
|
||||
import springfox.documentation.service.Tag;
|
||||
import springfox.documentation.spring.web.plugins.Docket;
|
||||
import io.swagger.v3.oas.models.tags.Tag;
|
||||
|
||||
@SpringBootApplication
|
||||
public class LiteratureBrokerServiceApplication extends AbstractDnetApp {
|
||||
|
@ -24,22 +25,28 @@ public class LiteratureBrokerServiceApplication extends AbstractDnetApp {
|
|||
SpringApplication.run(LiteratureBrokerServiceApplication.class, args);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void configSwagger(final Docket docket) {
|
||||
docket.select()
|
||||
.apis(RequestHandlerSelectors.any())
|
||||
.paths(p -> p.startsWith("/api/"))
|
||||
.build()
|
||||
.tags(new Tag(TAG_EVENTS, "Events management"), new Tag(TAG_SUBSCRIPTIONS, "Subscriptions management"), new Tag(TAG_NOTIFICATIONS,
|
||||
"Notifications management"), new Tag(TAG_TOPIC_TYPES, "Topic types management"), new Tag(TAG_OPENAIRE, "OpenAIRE use case"))
|
||||
.apiInfo(new ApiInfoBuilder()
|
||||
.title("Literature Broker Service")
|
||||
.description("APIs documentation")
|
||||
.version("1.1")
|
||||
.contact(ApiInfo.DEFAULT_CONTACT)
|
||||
.license("Apache 2.0")
|
||||
.licenseUrl("http://www.apache.org/licenses/LICENSE-2.0")
|
||||
.build());
|
||||
|
||||
@Bean
|
||||
public GroupedOpenApi publicApi() {
|
||||
return GroupedOpenApi.builder()
|
||||
.group("Broker APIs")
|
||||
.pathsToMatch("/api/**")
|
||||
.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String swaggerTitle() {
|
||||
return "OpenAIRE Broker API";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<Tag> swaggerTags() {
|
||||
final List<Tag> tags = new ArrayList<>();
|
||||
tags.add(new Tag().name(TAG_EVENTS).description("Events management"));
|
||||
tags.add(new Tag().name(TAG_SUBSCRIPTIONS).description("Subscriptions management"));
|
||||
tags.add(new Tag().name(TAG_NOTIFICATIONS).description("Notifications management"));
|
||||
tags.add(new Tag().name(TAG_TOPIC_TYPES).description("Topic types management"));
|
||||
tags.add(new Tag().name(TAG_OPENAIRE).description("OpenAIRE use case"));
|
||||
return tags;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -2,7 +2,9 @@ package eu.dnetlib.broker;
|
|||
|
||||
import org.elasticsearch.client.RestHighLevelClient;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.cache.annotation.EnableCaching;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.ComponentScan;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.elasticsearch.client.ClientConfiguration;
|
||||
import org.springframework.data.elasticsearch.client.RestClients;
|
||||
|
@ -14,15 +16,15 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
|
|||
import eu.dnetlib.broker.common.elasticsearch.Event;
|
||||
import eu.dnetlib.broker.common.elasticsearch.Notification;
|
||||
import eu.dnetlib.broker.common.properties.ElasticSearchProperties;
|
||||
import springfox.documentation.swagger2.annotations.EnableSwagger2;
|
||||
|
||||
@Configuration
|
||||
@EnableSwagger2
|
||||
@EnableCaching
|
||||
@EnableScheduling
|
||||
@EnableTransactionManagement
|
||||
@EnableElasticsearchRepositories(basePackageClasses = {
|
||||
Event.class, Notification.class
|
||||
})
|
||||
@ComponentScan(basePackages = "eu.dnetlib")
|
||||
public class LiteratureBrokerServiceConfiguration extends AbstractElasticsearchConfiguration {
|
||||
|
||||
@Autowired
|
||||
|
|
|
@ -4,16 +4,15 @@ import java.util.ArrayList;
|
|||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.jdbc.core.JdbcTemplate;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
import eu.dnetlib.broker.common.elasticsearch.EventRepository;
|
||||
import eu.dnetlib.broker.common.elasticsearch.NotificationRepository;
|
||||
import eu.dnetlib.broker.common.stats.OpenaireDsStatRepository;
|
||||
import eu.dnetlib.broker.common.subscriptions.SubscriptionRepository;
|
||||
import eu.dnetlib.broker.common.topics.TopicTypeRepository;
|
||||
import eu.dnetlib.broker.controllers.objects.BufferStatus;
|
||||
|
@ -57,7 +56,7 @@ public class AjaxController extends AbstractDnetController {
|
|||
private ThreadManager threadManager;
|
||||
|
||||
@Autowired
|
||||
private JdbcTemplate jdbcTemplate;
|
||||
private OpenaireDsStatRepository openaireDsStatRepository;
|
||||
|
||||
@Value("${lbs.elastic.homepage}")
|
||||
private String elasticSearchUiUrl;
|
||||
|
@ -98,7 +97,7 @@ public class AjaxController extends AbstractDnetController {
|
|||
currentStatus.setThreads(threads);
|
||||
currentStatus.getTotals().put("topics", topicTypeRepo.count());
|
||||
currentStatus.getTotals().put("events_es", eventRepository.count());
|
||||
currentStatus.getTotals().put("events_db", countEventsInTheDb());
|
||||
currentStatus.getTotals().put("events_db", openaireDsStatRepository.totalEvents());
|
||||
currentStatus.getTotals().put("subscriptions", subscriptionRepo.count());
|
||||
currentStatus.getTotals().put("notifications_es", notificationRepository.count());
|
||||
|
||||
|
@ -106,15 +105,6 @@ public class AjaxController extends AbstractDnetController {
|
|||
}
|
||||
}
|
||||
|
||||
private Long countEventsInTheDb() {
|
||||
try {
|
||||
final String sql = IOUtils.toString(getClass().getResourceAsStream("/sql/totalEvents.sql"));
|
||||
return jdbcTemplate.queryForObject(sql, Long.class);
|
||||
} catch (final Exception e) {
|
||||
return 0l;
|
||||
}
|
||||
}
|
||||
|
||||
@GetMapping("/resetCounters")
|
||||
public CurrentStatus resetCounters() {
|
||||
dispatcherManager.getDispatchers().forEach(NotificationDispatcher::resetCount);
|
||||
|
|
|
@ -8,6 +8,6 @@ public class ApiDocController {
|
|||
|
||||
@GetMapping({ "/apidoc", "/api-doc", "/doc", "/swagger" })
|
||||
public String apiDoc() {
|
||||
return "redirect:swagger-ui/";
|
||||
return "redirect:swagger-ui/index.html";
|
||||
}
|
||||
}
|
||||
|
|
|
@ -31,12 +31,12 @@ import eu.dnetlib.broker.common.elasticsearch.EventStatsManager.BrowseEntry;
|
|||
import eu.dnetlib.broker.common.subscriptions.Subscription;
|
||||
import eu.dnetlib.broker.common.subscriptions.SubscriptionRepository;
|
||||
import eu.dnetlib.common.controller.AbstractDnetController;
|
||||
import io.swagger.annotations.Api;
|
||||
import io.swagger.annotations.ApiOperation;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/api/events")
|
||||
@Api(tags = LiteratureBrokerServiceApplication.TAG_EVENTS)
|
||||
@Tag(name = LiteratureBrokerServiceApplication.TAG_EVENTS)
|
||||
public class EventsController extends AbstractDnetController {
|
||||
|
||||
private static final Log log = LogFactory.getLog(AbstractDnetController.class);
|
||||
|
@ -50,25 +50,25 @@ public class EventsController extends AbstractDnetController {
|
|||
@Autowired
|
||||
private EventStatsManager eventStatsManager;
|
||||
|
||||
@ApiOperation("Return an event by ID")
|
||||
@Operation(summary = "Return an event by ID")
|
||||
@GetMapping("/{id}")
|
||||
public Event getEvent(@PathVariable final String id) {
|
||||
return eventRepository.findById(id).get();
|
||||
}
|
||||
|
||||
@ApiOperation("Delete an event by ID")
|
||||
@Operation(summary = "Delete an event by ID")
|
||||
@DeleteMapping("/{id}")
|
||||
public void deleteEvent(@PathVariable final String id) {
|
||||
eventRepository.deleteById(id);
|
||||
}
|
||||
|
||||
@ApiOperation("Save an event by ID")
|
||||
@Operation(summary = "Save an event by ID")
|
||||
@PostMapping("/{id}")
|
||||
public Event saveEvent(@RequestBody final Event event) {
|
||||
return eventRepository.save(event);
|
||||
}
|
||||
|
||||
@ApiOperation("Return a page of events")
|
||||
@Operation(summary = "Return a page of events")
|
||||
@GetMapping("/list/{page}/{pageSize}")
|
||||
public List<Event> events(
|
||||
@PathVariable final int page,
|
||||
|
@ -76,7 +76,7 @@ public class EventsController extends AbstractDnetController {
|
|||
return Lists.newArrayList(eventRepository.findAll(PageRequest.of(page, pageSize)));
|
||||
}
|
||||
|
||||
@ApiOperation("Return a page of events by topic")
|
||||
@Operation(summary = "Return a page of events by topic")
|
||||
@GetMapping("/byTopic/{page}/{pageSize}")
|
||||
public List<Event> eventsByTopic(
|
||||
@PathVariable final int page,
|
||||
|
@ -85,7 +85,7 @@ public class EventsController extends AbstractDnetController {
|
|||
return Lists.newArrayList(eventRepository.findByTopic(topic, PageRequest.of(page, pageSize)));
|
||||
}
|
||||
|
||||
@ApiOperation("Delete all the events")
|
||||
@Operation(summary = "Delete all the events")
|
||||
@DeleteMapping("/all")
|
||||
public Map<String, Object> clearEvents() {
|
||||
eventRepository.deleteAll();
|
||||
|
@ -94,13 +94,13 @@ public class EventsController extends AbstractDnetController {
|
|||
return res;
|
||||
}
|
||||
|
||||
@ApiOperation("Delete the expired events")
|
||||
@Operation(summary = "Delete the expired events")
|
||||
@DeleteMapping("/expired")
|
||||
public Map<String, Object> deleteExpiredEvents() {
|
||||
return deleteEventsByExpiryDate(0, new Date().getTime());
|
||||
}
|
||||
|
||||
@ApiOperation("Delete the events with the creationDate in a range")
|
||||
@Operation(summary = "Delete the events with the creationDate in a range")
|
||||
@DeleteMapping("/byCreationDate/{from}/{to}")
|
||||
public Map<String, Long> deleteEventsByCreationDate(@PathVariable final long from, @PathVariable final long to) {
|
||||
final Map<String, Long> res = new HashMap<>();
|
||||
|
@ -113,7 +113,7 @@ public class EventsController extends AbstractDnetController {
|
|||
return res;
|
||||
}
|
||||
|
||||
@ApiOperation("Delete the events with the expiryDate in a range")
|
||||
@Operation(summary = "Delete the events with the expiryDate in a range")
|
||||
@DeleteMapping("/byExpiryDate/{from}/{to}")
|
||||
public Map<String, Object> deleteEventsByExpiryDate(@PathVariable final long from, @PathVariable final long to) {
|
||||
new Thread(() -> {
|
||||
|
@ -128,13 +128,13 @@ public class EventsController extends AbstractDnetController {
|
|||
return res;
|
||||
}
|
||||
|
||||
@ApiOperation("Return the topics of the indexed events (all)")
|
||||
@Operation(summary = "Return the topics of the indexed events (all)")
|
||||
@GetMapping("/topics/all")
|
||||
public List<BrowseEntry> browseTopics() {
|
||||
return eventStatsManager.browseTopics();
|
||||
}
|
||||
|
||||
@ApiOperation("Return the topics of the indexed events (only with subscriptions)")
|
||||
@Operation(summary = "Return the topics of the indexed events (only with subscriptions)")
|
||||
@GetMapping("/topics/withSubscriptions")
|
||||
public List<BrowseEntry> browseTopicsWithSubscriptions() {
|
||||
|
||||
|
|
|
@ -14,16 +14,15 @@ import org.springframework.web.bind.annotation.RequestBody;
|
|||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
import eu.dnetlib.broker.LiteratureBrokerServiceApplication;
|
||||
import eu.dnetlib.broker.common.elasticsearch.Notification;
|
||||
import eu.dnetlib.broker.common.elasticsearch.NotificationRepository;
|
||||
import eu.dnetlib.common.controller.AbstractDnetController;
|
||||
import io.swagger.annotations.Api;
|
||||
import io.swagger.annotations.ApiOperation;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/api/notifications")
|
||||
@Api(tags = LiteratureBrokerServiceApplication.TAG_NOTIFICATIONS)
|
||||
@Tag(name = "LiteratureBrokerServiceApplication.TAG_NOTIFICATIONS")
|
||||
public class NotificationsController extends AbstractDnetController {
|
||||
|
||||
private static final Log log = LogFactory.getLog(NotificationsController.class);
|
||||
|
@ -31,31 +30,31 @@ public class NotificationsController extends AbstractDnetController {
|
|||
@Autowired
|
||||
private NotificationRepository notificationRepository;
|
||||
|
||||
@ApiOperation("Return a notification by ID")
|
||||
@Operation(summary = "Return a notification by ID")
|
||||
@GetMapping("/{id}")
|
||||
public Notification getNotification(@PathVariable final String id) {
|
||||
return notificationRepository.findById(id).get();
|
||||
}
|
||||
|
||||
@ApiOperation("Delete a notification by ID")
|
||||
@Operation(summary = "Delete a notification by ID")
|
||||
@DeleteMapping("/{id}")
|
||||
public void deleteNotification(@PathVariable final String id) {
|
||||
notificationRepository.deleteById(id);
|
||||
}
|
||||
|
||||
@ApiOperation("Save a notification by ID")
|
||||
@Operation(summary = "Save a notification by ID")
|
||||
@PostMapping("/{id}")
|
||||
public Notification saveNotification(@RequestBody final Notification notification) {
|
||||
return notificationRepository.save(notification);
|
||||
}
|
||||
|
||||
@ApiOperation("Delete all notifications")
|
||||
@Operation(summary = "Delete all notifications")
|
||||
@DeleteMapping("")
|
||||
public void deleteAllNotifications() {
|
||||
notificationRepository.deleteAll();
|
||||
}
|
||||
|
||||
@ApiOperation("Delete the notifications with the date in a range")
|
||||
@Operation(summary = "Delete the notifications with the date in a range")
|
||||
@DeleteMapping("/byDate/{from}/{to}")
|
||||
public Map<String, Object> deleteNotificationsByDate(@PathVariable final long from, @PathVariable final long to) {
|
||||
new Thread(() -> {
|
||||
|
|
|
@ -16,13 +16,13 @@ import eu.dnetlib.broker.common.subscriptions.Subscription;
|
|||
import eu.dnetlib.broker.common.subscriptions.SubscriptionRepository;
|
||||
import eu.dnetlib.broker.matchers.SubscriptionEventMatcher;
|
||||
import eu.dnetlib.common.controller.AbstractDnetController;
|
||||
import io.swagger.annotations.Api;
|
||||
import io.swagger.annotations.ApiOperation;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
@Profile("!openaire")
|
||||
@RestController
|
||||
@RequestMapping("/api/matching")
|
||||
@Api(tags = LiteratureBrokerServiceApplication.TAG_MATCHING)
|
||||
@Tag(name = LiteratureBrokerServiceApplication.TAG_MATCHING)
|
||||
public class StartMatchingController extends AbstractDnetController {
|
||||
|
||||
@Autowired
|
||||
|
@ -31,7 +31,7 @@ public class StartMatchingController extends AbstractDnetController {
|
|||
@Autowired(required = false)
|
||||
private SubscriptionEventMatcher subscriptionEventMatcher;
|
||||
|
||||
@ApiOperation("Launch the thread that produces new notifications")
|
||||
@Operation(summary = "Launch the thread that produces new notifications")
|
||||
@GetMapping("/start")
|
||||
public List<String> startMatching() {
|
||||
if (subscriptionEventMatcher != null) {
|
||||
|
@ -42,7 +42,7 @@ public class StartMatchingController extends AbstractDnetController {
|
|||
}
|
||||
}
|
||||
|
||||
@ApiOperation("Launch the thread that produces new notifications by subscriptuion id")
|
||||
@Operation(summary = "Launch the thread that produces new notifications by subscriptuion id")
|
||||
@GetMapping("/start/{subscriptionId}")
|
||||
public List<String> startMatching(@PathVariable final String subscriptionId) {
|
||||
final Optional<Subscription> s = subscriptionRepo.findById(subscriptionId);
|
||||
|
|
|
@ -29,12 +29,12 @@ import eu.dnetlib.broker.common.subscriptions.NotificationMode;
|
|||
import eu.dnetlib.broker.common.subscriptions.Subscription;
|
||||
import eu.dnetlib.broker.common.subscriptions.SubscriptionRepository;
|
||||
import eu.dnetlib.common.controller.AbstractDnetController;
|
||||
import io.swagger.annotations.Api;
|
||||
import io.swagger.annotations.ApiOperation;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/api/subscriptions")
|
||||
@Api(tags = LiteratureBrokerServiceApplication.TAG_SUBSCRIPTIONS)
|
||||
@Tag(name = LiteratureBrokerServiceApplication.TAG_SUBSCRIPTIONS)
|
||||
public class SubscriptionsController extends AbstractDnetController {
|
||||
|
||||
@Autowired
|
||||
|
@ -53,26 +53,26 @@ public class SubscriptionsController extends AbstractDnetController {
|
|||
}
|
||||
};
|
||||
|
||||
@ApiOperation("Return the list of subscriptions")
|
||||
@Operation(summary = "Return the list of subscriptions")
|
||||
@GetMapping("")
|
||||
public Iterable<Subscription> listSubscriptions() {
|
||||
return subscriptionRepo.findAll();
|
||||
}
|
||||
|
||||
@ApiOperation("Return a subscription by ID")
|
||||
@Operation(summary = "Return a subscription by ID")
|
||||
@GetMapping("/{id}")
|
||||
public Subscription getSubscription(@PathVariable final String id) {
|
||||
return subscriptionRepo.findById(id).get();
|
||||
}
|
||||
|
||||
@ApiOperation("Delete a subscription by ID and its notifications")
|
||||
@Operation(summary = "Delete a subscription by ID and its notifications")
|
||||
@DeleteMapping("/{id}")
|
||||
public void deleteSubscription(@PathVariable final String id) {
|
||||
subscriptionRepo.deleteById(id);
|
||||
notificationRepo.deleteBySubscriptionId(id);
|
||||
}
|
||||
|
||||
@ApiOperation("Perform a new subscription")
|
||||
@Operation(summary = "Perform a new subscription")
|
||||
@PostMapping("")
|
||||
public Subscription registerSubscription(@RequestBody final InSubscription inSub) {
|
||||
final Subscription sub = inSub.asSubscription();
|
||||
|
@ -80,7 +80,7 @@ public class SubscriptionsController extends AbstractDnetController {
|
|||
return sub;
|
||||
}
|
||||
|
||||
@ApiOperation("Delete all subscriptions and notifications")
|
||||
@Operation(summary = "Delete all subscriptions and notifications")
|
||||
@DeleteMapping("")
|
||||
public Map<String, Object> clearSubscriptions() {
|
||||
final Map<String, Object> res = new HashMap<>();
|
||||
|
@ -90,7 +90,7 @@ public class SubscriptionsController extends AbstractDnetController {
|
|||
return res;
|
||||
}
|
||||
|
||||
@ApiOperation("Reset the last notification date")
|
||||
@Operation(summary = "Reset the last notification date")
|
||||
@DeleteMapping("/{id}/date")
|
||||
public void deleteNotificationDate(@PathVariable final String id) {
|
||||
final Subscription s = subscriptionRepo.findById(id).get();
|
||||
|
@ -98,7 +98,7 @@ public class SubscriptionsController extends AbstractDnetController {
|
|||
subscriptionRepo.save(s);
|
||||
}
|
||||
|
||||
@ApiOperation("Reset all the last notification dates")
|
||||
@Operation(summary = "Reset all the last notification dates")
|
||||
@GetMapping("/resetLastNotificationDates")
|
||||
public void deleteAllNotificationDates() {
|
||||
for (final Subscription s : subscriptionRepo.findAll()) {
|
||||
|
|
|
@ -22,12 +22,12 @@ import eu.dnetlib.broker.LiteratureBrokerServiceApplication;
|
|||
import eu.dnetlib.broker.common.topics.TopicType;
|
||||
import eu.dnetlib.broker.common.topics.TopicTypeRepository;
|
||||
import eu.dnetlib.common.controller.AbstractDnetController;
|
||||
import io.swagger.annotations.Api;
|
||||
import io.swagger.annotations.ApiOperation;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/api/topic-types")
|
||||
@Api(tags = LiteratureBrokerServiceApplication.TAG_TOPIC_TYPES)
|
||||
@Tag(name = LiteratureBrokerServiceApplication.TAG_TOPIC_TYPES)
|
||||
public class TopicsController extends AbstractDnetController {
|
||||
|
||||
@Autowired
|
||||
|
@ -36,13 +36,13 @@ public class TopicsController extends AbstractDnetController {
|
|||
private final Predicate<String> verifyExpression =
|
||||
Pattern.compile("^([a-zA-Z0-9._-]+|<[a-zA-Z0-9._-]+>)(\\/([a-zA-Z0-9._-]+|<[a-zA-Z0-9._-]+>))+$").asPredicate();
|
||||
|
||||
@ApiOperation("Return the list of topic types")
|
||||
@Operation(summary = "Return the list of topic types")
|
||||
@GetMapping("")
|
||||
public Iterable<TopicType> listTopicTypes() {
|
||||
return topicTypeRepo.findAll();
|
||||
}
|
||||
|
||||
@ApiOperation("Register a new topic type")
|
||||
@Operation(summary = "Register a new topic type")
|
||||
@PostMapping("/add")
|
||||
public TopicType registerTopicType(@RequestParam final String name,
|
||||
@RequestParam final String expression,
|
||||
|
@ -61,20 +61,20 @@ public class TopicsController extends AbstractDnetController {
|
|||
return type;
|
||||
}
|
||||
|
||||
@ApiOperation("Return a topic type by ID")
|
||||
@Operation(summary = "Return a topic type by ID")
|
||||
@GetMapping("/{id}")
|
||||
public TopicType getTopicType(@PathVariable final String id) {
|
||||
return topicTypeRepo.findById(id).get();
|
||||
}
|
||||
|
||||
@ApiOperation("Delete a topic type by ID")
|
||||
@Operation(summary = "Delete a topic type by ID")
|
||||
@DeleteMapping("/{id}")
|
||||
public List<String> deleteTopicType(@PathVariable final String id) {
|
||||
topicTypeRepo.deleteById(id);
|
||||
return Arrays.asList("Done.");
|
||||
}
|
||||
|
||||
@ApiOperation("Delete all topic types")
|
||||
@Operation(summary = "Delete all topic types")
|
||||
@DeleteMapping("")
|
||||
public Map<String, Object> clearTopicTypes() {
|
||||
final Map<String, Object> res = new HashMap<>();
|
||||
|
|
|
@ -56,13 +56,13 @@ import eu.dnetlib.broker.common.subscriptions.SubscriptionRepository;
|
|||
import eu.dnetlib.broker.events.output.DispatcherManager;
|
||||
import eu.dnetlib.broker.objects.OaBrokerEventPayload;
|
||||
import eu.dnetlib.common.controller.AbstractDnetController;
|
||||
import io.swagger.annotations.Api;
|
||||
import io.swagger.annotations.ApiOperation;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
@Profile("openaire")
|
||||
@RestController
|
||||
@RequestMapping("/api/openaireBroker")
|
||||
@Api(tags = LiteratureBrokerServiceApplication.TAG_OPENAIRE)
|
||||
@Tag(name = LiteratureBrokerServiceApplication.TAG_OPENAIRE)
|
||||
public class OpenaireBrokerController extends AbstractDnetController {
|
||||
|
||||
@Autowired
|
||||
|
@ -85,7 +85,7 @@ public class OpenaireBrokerController extends AbstractDnetController {
|
|||
|
||||
private static final Log log = LogFactory.getLog(OpenaireBrokerController.class);
|
||||
|
||||
@ApiOperation("Return the datasources having events")
|
||||
@Operation(summary = "Return the datasources having events")
|
||||
@GetMapping("/datasources")
|
||||
public List<BrowseEntry> findDatasourcesWithEvents(@RequestParam(defaultValue = "false", required = false) final boolean useIndex) {
|
||||
return useIndex ? findDatasourcesWithEventsUsingIndex() : findDatasourcesWithEventsUsingDb();
|
||||
|
@ -123,7 +123,7 @@ public class OpenaireBrokerController extends AbstractDnetController {
|
|||
}
|
||||
}
|
||||
|
||||
@ApiOperation("Return the topics of the events of a datasource")
|
||||
@Operation(summary = "Return the topics of the events of a datasource")
|
||||
@GetMapping("/topicsForDatasource")
|
||||
public List<BrowseEntry> findTopicsForDatasource(@RequestParam final String ds,
|
||||
@RequestParam(defaultValue = "false", required = false) final boolean useIndex) {
|
||||
|
@ -163,7 +163,7 @@ public class OpenaireBrokerController extends AbstractDnetController {
|
|||
}
|
||||
}
|
||||
|
||||
@ApiOperation("Return a page of events of a datasource (by topic)")
|
||||
@Operation(summary = "Return a page of events of a datasource (by topic)")
|
||||
@GetMapping("/events/{nPage}/{size}")
|
||||
public EventsPage showEvents(@RequestParam final String ds, @RequestParam final String topic, @PathVariable final int nPage, @PathVariable final int size) {
|
||||
|
||||
|
@ -191,7 +191,7 @@ public class OpenaireBrokerController extends AbstractDnetController {
|
|||
return new EventsPage(ds, topic, nPage, overrideGetTotalPage(page, size), page.getTotalHits(), list);
|
||||
}
|
||||
|
||||
@ApiOperation("Return a page of events of a datasource (by query)")
|
||||
@Operation(summary = "Return a page of events of a datasource (by query)")
|
||||
@PostMapping("/events/{nPage}/{size}")
|
||||
public EventsPage advancedShowEvents(@PathVariable final int nPage, @PathVariable final int size, @RequestBody final AdvQueryObject qObj) {
|
||||
|
||||
|
@ -227,7 +227,7 @@ public class OpenaireBrokerController extends AbstractDnetController {
|
|||
return new EventsPage(qObj.getDatasource(), qObj.getTopic(), nPage, overrideGetTotalPage(page, size), page.getTotalHits(), list);
|
||||
}
|
||||
|
||||
@ApiOperation("Perform a subscription")
|
||||
@Operation(summary = "Perform a subscription")
|
||||
@PostMapping("/subscribe")
|
||||
public Subscription registerSubscription(@RequestBody final OpenaireSubscription oSub) {
|
||||
final Subscription sub = oSub.asSubscription();
|
||||
|
@ -237,7 +237,7 @@ public class OpenaireBrokerController extends AbstractDnetController {
|
|||
return sub;
|
||||
}
|
||||
|
||||
@ApiOperation("Return the subscriptions of an user (by email and datasource (optional))")
|
||||
@Operation(summary = "Return the subscriptions of an user (by email and datasource (optional))")
|
||||
@GetMapping("/subscriptions")
|
||||
public Map<String, List<SimpleSubscriptionDesc>> subscriptions(@RequestParam final String email, @RequestParam(required = false) final String ds) {
|
||||
final Iterable<Subscription> iter = subscriptionRepo.findBySubscriber(email);
|
||||
|
@ -247,7 +247,7 @@ public class OpenaireBrokerController extends AbstractDnetController {
|
|||
.collect(Collectors.groupingBy(SimpleSubscriptionDesc::getDatasource));
|
||||
}
|
||||
|
||||
@ApiOperation("Return a page of notifications")
|
||||
@Operation(summary = "Return a page of notifications")
|
||||
@GetMapping("/notifications/{subscrId}/{nPage}/{size}")
|
||||
public EventsPage notifications(@PathVariable final String subscrId, @PathVariable final int nPage, @PathVariable final int size) {
|
||||
|
||||
|
@ -279,14 +279,14 @@ public class OpenaireBrokerController extends AbstractDnetController {
|
|||
|
||||
}
|
||||
|
||||
@ApiOperation("Send notifications")
|
||||
@Operation(summary = "Send notifications")
|
||||
@GetMapping("/notifications/send/{date}")
|
||||
private List<String> sendMailForNotifications(@PathVariable final long date) {
|
||||
new Thread(() -> innerSendMailForNotifications(date)).start();
|
||||
return Arrays.asList("Sending ...");
|
||||
}
|
||||
|
||||
@ApiOperation("Update stats")
|
||||
@Operation(summary = "Update stats")
|
||||
@GetMapping("/stats/update")
|
||||
private List<String> updateStats() {
|
||||
new Thread(() -> {
|
||||
|
|
|
@ -1,5 +1,8 @@
|
|||
spring.profiles.active = dev,openaire
|
||||
|
||||
server.public_url =
|
||||
server.public_desc = API Base URL
|
||||
|
||||
#logging.level.root=DEBUG
|
||||
|
||||
maven.pom.path = /META-INF/maven/eu.dnetlib.dhp/dhp-broker-application/effective-pom.xml
|
||||
|
@ -48,10 +51,9 @@ lbs.mail.smtpUser = smtp-dnet
|
|||
lbs.mail.smtpPassword = hhr*7932
|
||||
lbs.mail.message.template = classpath:/templates/dhp_openaire_mail.st
|
||||
lbs.mail.message.template.dashboard.baseUrl = https://provide.openaire.eu/content/notifications/
|
||||
lbs.mail.message.template.dashboard.helpdesk = https://www.openaire.eu/support/helpdesk
|
||||
lbs.mail.message.template.dashboard.helpdesk = https://www.openaire.eu/helpdesk
|
||||
lbs.mail.message.template.publicApi = https://api.openaire.eu/broker
|
||||
|
||||
|
||||
lbs.queues.maxReturnedValues = 1000
|
||||
|
||||
lbs.task.deleteOldEvents.cron = 0 0 4 1/1 * ?
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
select sum(size) from oa_datasource_stats;
|
|
@ -27,7 +27,7 @@
|
|||
<a class="nav-link dropdown-toggle" href="javascript:void(0)" data-toggle="dropdown">Tools <span class="caret"></span></a>
|
||||
<div class="dropdown-menu dropdown-menu-right">
|
||||
<a class="dropdown-item" href="{{t.url}}" target="_blank" ng-repeat="t in tools">{{t.name}}</a>
|
||||
<a class="dropdown-item" href="/swagger-ui/" target="_blank">API documentation</a>
|
||||
<a class="dropdown-item" href="/apidoc" target="_blank">API documentation</a>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
|
|
|
@ -4,8 +4,8 @@
|
|||
<parent>
|
||||
<groupId>eu.dnetlib.dhp</groupId>
|
||||
<artifactId>apps</artifactId>
|
||||
<version>3.2.0</version>
|
||||
<relativePath>../</relativePath>
|
||||
<version>3.5.5-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
@ -22,10 +22,37 @@
|
|||
<artifactId>dnet-broker-apps-common</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-client</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- Tests -->
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-junit-jupiter</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
|
|
|
@ -2,7 +2,9 @@ package eu.dnetlib.broker;
|
|||
|
||||
import org.elasticsearch.client.RestHighLevelClient;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.cache.annotation.EnableCaching;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.ComponentScan;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.elasticsearch.client.ClientConfiguration;
|
||||
import org.springframework.data.elasticsearch.client.RestClients;
|
||||
|
@ -14,15 +16,15 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
|
|||
import eu.dnetlib.broker.common.elasticsearch.Event;
|
||||
import eu.dnetlib.broker.common.elasticsearch.Notification;
|
||||
import eu.dnetlib.broker.common.properties.ElasticSearchProperties;
|
||||
import springfox.documentation.swagger2.annotations.EnableSwagger2;
|
||||
|
||||
@Configuration
|
||||
@EnableSwagger2
|
||||
@EnableCaching
|
||||
@EnableScheduling
|
||||
@EnableTransactionManagement
|
||||
@EnableElasticsearchRepositories(basePackageClasses = {
|
||||
Event.class, Notification.class
|
||||
})
|
||||
@ComponentScan(basePackages = "eu.dnetlib")
|
||||
public class BrokerConfiguration extends AbstractElasticsearchConfiguration {
|
||||
|
||||
@Autowired
|
||||
|
|
|
@ -1,14 +1,15 @@
|
|||
package eu.dnetlib.broker;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.springdoc.core.GroupedOpenApi;
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
|
||||
import eu.dnetlib.common.app.AbstractDnetApp;
|
||||
import springfox.documentation.builders.ApiInfoBuilder;
|
||||
import springfox.documentation.builders.RequestHandlerSelectors;
|
||||
import springfox.documentation.service.ApiInfo;
|
||||
import springfox.documentation.service.Tag;
|
||||
import springfox.documentation.spring.web.plugins.Docket;
|
||||
import io.swagger.v3.oas.models.tags.Tag;
|
||||
|
||||
@SpringBootApplication
|
||||
public class BrokerPublicApplication extends AbstractDnetApp {
|
||||
|
@ -19,22 +20,22 @@ public class BrokerPublicApplication extends AbstractDnetApp {
|
|||
SpringApplication.run(BrokerPublicApplication.class, args);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void configSwagger(final Docket docket) {
|
||||
|
||||
docket.select()
|
||||
.apis(RequestHandlerSelectors.any())
|
||||
.paths(p -> p.startsWith("/"))
|
||||
.build()
|
||||
.tags(new Tag(OA_PUBLIC_APIS, OA_PUBLIC_APIS))
|
||||
.apiInfo(new ApiInfoBuilder()
|
||||
.title("OpenAIRE Public Broker API")
|
||||
.description("APIs documentation")
|
||||
.version("1.1")
|
||||
.contact(ApiInfo.DEFAULT_CONTACT)
|
||||
.license("Apache 2.0")
|
||||
.licenseUrl("http://www.apache.org/licenses/LICENSE-2.0")
|
||||
.build());
|
||||
|
||||
@Bean
|
||||
public GroupedOpenApi publicApi() {
|
||||
return GroupedOpenApi.builder()
|
||||
.group("Broker Public APIs")
|
||||
.pathsToMatch("/**")
|
||||
.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String swaggerTitle() {
|
||||
return "OpenAIRE Public Broker API";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<Tag> swaggerTags() {
|
||||
return Arrays.asList(new Tag().name(OA_PUBLIC_APIS).description(OA_PUBLIC_APIS));
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -8,6 +8,6 @@ public class ApiDocController {
|
|||
|
||||
@GetMapping({ "/apidoc", "/api-doc", "/doc", "/swagger" })
|
||||
public String apiDoc() {
|
||||
return "redirect:swagger-ui/";
|
||||
return "redirect:swagger-ui/index.html";
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,6 +2,8 @@ package eu.dnetlib.broker.oa.controllers;
|
|||
|
||||
import java.io.Serializable;
|
||||
|
||||
import eu.dnetlib.broker.common.feedbacks.FeedbackStatus;
|
||||
|
||||
public class EventFeedback implements Serializable {
|
||||
|
||||
/**
|
||||
|
@ -10,27 +12,22 @@ public class EventFeedback implements Serializable {
|
|||
private static final long serialVersionUID = -6967719685282712195L;
|
||||
|
||||
private String eventId;
|
||||
private String status;
|
||||
|
||||
// TOOD status should be an enum having the following values:
|
||||
// * DISCARDED: the event was not processable by the system. OpenAIRE should not interpret such status in a negative or positive sense
|
||||
// with regard to the accuracy of the notification
|
||||
// * REJECTED: a human takes the decision to reject the suggestion as it was wrong
|
||||
// * ACCEPTED: a human takes the decision to apply the suggested enrichment to the local record
|
||||
private FeedbackStatus status;
|
||||
|
||||
protected String getEventId() {
|
||||
public String getEventId() {
|
||||
return eventId;
|
||||
}
|
||||
|
||||
protected void setEventId(final String eventId) {
|
||||
public void setEventId(final String eventId) {
|
||||
this.eventId = eventId;
|
||||
}
|
||||
|
||||
protected String getStatus() {
|
||||
public FeedbackStatus getStatus() {
|
||||
return status;
|
||||
}
|
||||
|
||||
protected void setStatus(final String status) {
|
||||
public void setStatus(final FeedbackStatus status) {
|
||||
this.status = status;
|
||||
}
|
||||
|
||||
|
|
|
@ -4,6 +4,7 @@ import java.io.BufferedReader;
|
|||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
|
@ -56,18 +57,21 @@ import eu.dnetlib.broker.api.ShortEventMessage;
|
|||
import eu.dnetlib.broker.common.elasticsearch.EventRepository;
|
||||
import eu.dnetlib.broker.common.elasticsearch.Notification;
|
||||
import eu.dnetlib.broker.common.elasticsearch.NotificationRepository;
|
||||
import eu.dnetlib.broker.common.feedbacks.DbEventFeedback;
|
||||
import eu.dnetlib.broker.common.feedbacks.DbEventFeedbackRepository;
|
||||
import eu.dnetlib.broker.common.properties.ElasticSearchProperties;
|
||||
import eu.dnetlib.broker.common.stats.OpenaireDsStatRepository;
|
||||
import eu.dnetlib.broker.common.subscriptions.Subscription;
|
||||
import eu.dnetlib.broker.common.subscriptions.SubscriptionRepository;
|
||||
import eu.dnetlib.broker.objects.OaBrokerEventPayload;
|
||||
import eu.dnetlib.common.controller.AbstractDnetController;
|
||||
import io.swagger.annotations.Api;
|
||||
import io.swagger.annotations.ApiOperation;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
@Profile("openaire")
|
||||
@RestController
|
||||
@RequestMapping("/")
|
||||
@Api(tags = BrokerPublicApplication.OA_PUBLIC_APIS)
|
||||
@Tag(name = BrokerPublicApplication.OA_PUBLIC_APIS)
|
||||
public class OpenairePublicController extends AbstractDnetController {
|
||||
|
||||
@Autowired
|
||||
|
@ -82,6 +86,12 @@ public class OpenairePublicController extends AbstractDnetController {
|
|||
@Autowired
|
||||
private SubscriptionRepository subscriptionRepo;
|
||||
|
||||
@Autowired
|
||||
private OpenaireDsStatRepository openaireDsStatRepository;
|
||||
|
||||
@Autowired
|
||||
private DbEventFeedbackRepository feedbackRepository;
|
||||
|
||||
@Autowired
|
||||
private ElasticSearchProperties props;
|
||||
|
||||
|
@ -92,57 +102,52 @@ public class OpenairePublicController extends AbstractDnetController {
|
|||
|
||||
private static final Log log = LogFactory.getLog(OpenairePublicController.class);
|
||||
|
||||
@ApiOperation("Returns notifications by subscription using scrolls (first page)")
|
||||
@Operation(summary = "Returns notifications by subscription using scrolls (first page)")
|
||||
@GetMapping("/scroll/notifications/bySubscriptionId/{subscrId}")
|
||||
public ScrollPage<ShortEventMessage> prepareScrollNotificationsBySubscrId(@PathVariable final String subscrId) {
|
||||
|
||||
final Optional<Subscription> optSub = subscriptionRepo.findById(subscrId);
|
||||
|
||||
if (optSub.isPresent()) {
|
||||
if (!optSub.isPresent()) {
|
||||
log.warn("Invalid subscription: " + subscrId);
|
||||
return new ScrollPage<>(null, true, new ArrayList<>());
|
||||
}
|
||||
final ElasticsearchRestTemplate esTemplate = (ElasticsearchRestTemplate) esOperations;
|
||||
|
||||
final ElasticsearchRestTemplate esTemplate = (ElasticsearchRestTemplate) esOperations;
|
||||
|
||||
final NativeSearchQuery searchQuery = new NativeSearchQueryBuilder()
|
||||
final NativeSearchQuery searchQuery = new NativeSearchQueryBuilder()
|
||||
.withQuery(QueryBuilders.termQuery("subscriptionId.keyword", subscrId))
|
||||
.withSearchType(SearchType.DEFAULT)
|
||||
.withFields("topic", "payload", "eventId")
|
||||
.withPageable(PageRequest.of(0, 100))
|
||||
.build();
|
||||
|
||||
final SearchScrollHits<Notification> scroll =
|
||||
final SearchScrollHits<Notification> scroll =
|
||||
esTemplate.searchScrollStart(SCROLL_TIMEOUT_IN_MILLIS, searchQuery, Notification.class, IndexCoordinates.of(props.getNotificationsIndexName()));
|
||||
if (scroll.hasSearchHits()) {
|
||||
final List<ShortEventMessage> values = calculateNotificationMessages(scroll);
|
||||
return new ScrollPage<>(scroll.getScrollId(), values.isEmpty() || scroll.getScrollId() == null, values);
|
||||
} else {
|
||||
esTemplate.searchScrollClear(Arrays.asList(scroll.getScrollId()));
|
||||
return new ScrollPage<>(null, true, new ArrayList<>());
|
||||
}
|
||||
|
||||
} else {
|
||||
log.warn("Invalid subscription: " + subscrId);
|
||||
return new ScrollPage<>(null, true, new ArrayList<>());
|
||||
if (scroll.hasSearchHits()) {
|
||||
final List<ShortEventMessage> values = calculateNotificationMessages(scroll);
|
||||
return new ScrollPage<>(scroll.getScrollId(), values.isEmpty() || scroll.getScrollId() == null, values);
|
||||
}
|
||||
esTemplate.searchScrollClear(Arrays.asList(scroll.getScrollId()));
|
||||
return new ScrollPage<>(null, true, new ArrayList<>());
|
||||
}
|
||||
|
||||
@ApiOperation("Returns notifications using scrolls (other pages)")
|
||||
@Operation(summary = "Returns notifications using scrolls (other pages)")
|
||||
@GetMapping("/scroll/notifications/{scrollId}")
|
||||
public ScrollPage<ShortEventMessage> scrollNotifications(@PathVariable final String scrollId) {
|
||||
|
||||
final ElasticsearchRestTemplate esTemplate = (ElasticsearchRestTemplate) esOperations;
|
||||
|
||||
final SearchScrollHits<Notification> scroll =
|
||||
esTemplate.searchScrollContinue(scrollId, SCROLL_TIMEOUT_IN_MILLIS, Notification.class, IndexCoordinates.of(props.getNotificationsIndexName()));
|
||||
esTemplate.searchScrollContinue(scrollId, SCROLL_TIMEOUT_IN_MILLIS, Notification.class, IndexCoordinates.of(props.getNotificationsIndexName()));
|
||||
if (scroll.hasSearchHits()) {
|
||||
final List<ShortEventMessage> values = calculateNotificationMessages(scroll);
|
||||
return new ScrollPage<>(scroll.getScrollId(), values.isEmpty() || scroll.getScrollId() == null, values);
|
||||
} else {
|
||||
esTemplate.searchScrollClear(Arrays.asList(scroll.getScrollId()));
|
||||
return new ScrollPage<>(null, true, new ArrayList<>());
|
||||
}
|
||||
esTemplate.searchScrollClear(Arrays.asList(scroll.getScrollId()));
|
||||
return new ScrollPage<>(null, true, new ArrayList<>());
|
||||
}
|
||||
|
||||
@ApiOperation("Returns notifications as file")
|
||||
@Operation(summary = "Returns notifications as file")
|
||||
@GetMapping(value = "/file/notifications/bySubscriptionId/{subscrId}", produces = "application/gzip")
|
||||
public void notificationsAsFile(final HttpServletResponse res, @PathVariable final String subscrId) throws Exception {
|
||||
|
||||
|
@ -154,7 +159,7 @@ public class OpenairePublicController extends AbstractDnetController {
|
|||
|
||||
boolean first = true;
|
||||
|
||||
IOUtils.write("[\n", gzOut);
|
||||
IOUtils.write("[\n", gzOut, StandardCharsets.UTF_8);
|
||||
|
||||
ScrollPage<ShortEventMessage> page = null;
|
||||
|
||||
|
@ -165,13 +170,13 @@ public class OpenairePublicController extends AbstractDnetController {
|
|||
if (first) {
|
||||
first = false;
|
||||
} else {
|
||||
IOUtils.write(",\n", gzOut);
|
||||
IOUtils.write(",\n", gzOut, StandardCharsets.UTF_8);
|
||||
}
|
||||
IOUtils.write(gson.toJson(msg), gzOut);
|
||||
IOUtils.write(gson.toJson(msg), gzOut, StandardCharsets.UTF_8);
|
||||
}
|
||||
} while (!page.isCompleted());
|
||||
|
||||
IOUtils.write("\n]\n", gzOut);
|
||||
IOUtils.write("\n]\n", gzOut, StandardCharsets.UTF_8);
|
||||
|
||||
gzOut.flush();
|
||||
|
||||
|
@ -179,7 +184,7 @@ public class OpenairePublicController extends AbstractDnetController {
|
|||
|
||||
}
|
||||
|
||||
@ApiOperation("Returns events as file by opendoarId")
|
||||
@Operation(summary = "Returns events as file by opendoarId")
|
||||
@GetMapping(value = "/file/events/opendoar/{id}", produces = "application/gzip")
|
||||
public void opendoarEventsAsFile(final HttpServletResponse res, @PathVariable final String id) {
|
||||
|
||||
|
@ -192,11 +197,11 @@ public class OpenairePublicController extends AbstractDnetController {
|
|||
final Path pathDir = new Path(opendoarEventsPath + "/" + DigestUtils.md5Hex(id));
|
||||
|
||||
try (final FileSystem fs = FileSystem.get(conf);
|
||||
final ServletOutputStream out = res.getOutputStream();
|
||||
final GZIPOutputStream gzOut = new GZIPOutputStream(out)) {
|
||||
final ServletOutputStream out = res.getOutputStream();
|
||||
final GZIPOutputStream gzOut = new GZIPOutputStream(out)) {
|
||||
boolean first = true;
|
||||
|
||||
IOUtils.write("[\n", gzOut);
|
||||
IOUtils.write("[\n", gzOut, StandardCharsets.UTF_8);
|
||||
|
||||
try {
|
||||
for (final FileStatus fileStatus : fs.listStatus(pathDir)) {
|
||||
|
@ -204,16 +209,16 @@ public class OpenairePublicController extends AbstractDnetController {
|
|||
final Path path = fileStatus.getPath();
|
||||
if (path.getName().endsWith(".json")) {
|
||||
try (final FSDataInputStream fis = fs.open(path);
|
||||
final InputStreamReader isr = new InputStreamReader(fis);
|
||||
final BufferedReader br = new BufferedReader(isr)) {
|
||||
final InputStreamReader isr = new InputStreamReader(fis);
|
||||
final BufferedReader br = new BufferedReader(isr)) {
|
||||
|
||||
first = processLine(gzOut, first, br);
|
||||
}
|
||||
} else if (path.getName().endsWith(".json.gz")) {
|
||||
try (final FSDataInputStream fis = fs.open(path);
|
||||
final GZIPInputStream gzIn = new GZIPInputStream(fis);
|
||||
final InputStreamReader isr = new InputStreamReader(gzIn);
|
||||
final BufferedReader br = new BufferedReader(isr)) {
|
||||
final GZIPInputStream gzIn = new GZIPInputStream(fis);
|
||||
final InputStreamReader isr = new InputStreamReader(gzIn);
|
||||
final BufferedReader br = new BufferedReader(isr)) {
|
||||
|
||||
first = processLine(gzOut, first, br);
|
||||
}
|
||||
|
@ -223,7 +228,7 @@ public class OpenairePublicController extends AbstractDnetController {
|
|||
} catch (final FileNotFoundException e) {
|
||||
log.warn("File not found - " + e.getMessage());
|
||||
}
|
||||
IOUtils.write("\n]\n", gzOut);
|
||||
IOUtils.write("\n]\n", gzOut, StandardCharsets.UTF_8);
|
||||
gzOut.flush();
|
||||
} catch (final Throwable e) {
|
||||
log.error("Error accessing hdfs file", e);
|
||||
|
@ -237,52 +242,59 @@ public class OpenairePublicController extends AbstractDnetController {
|
|||
if (first) {
|
||||
first = false;
|
||||
} else {
|
||||
IOUtils.write(",\n", gzOut);
|
||||
IOUtils.write(",\n", gzOut, StandardCharsets.UTF_8);
|
||||
}
|
||||
|
||||
IOUtils.write(line, gzOut);
|
||||
IOUtils.write(line, gzOut, StandardCharsets.UTF_8);
|
||||
|
||||
line = br.readLine();
|
||||
}
|
||||
return first;
|
||||
}
|
||||
|
||||
@ApiOperation("Returns the list of subscriptions by user email")
|
||||
@Operation(summary = "Returns the list of subscriptions by user email")
|
||||
@GetMapping(value = "/subscriptions")
|
||||
private Iterable<Subscription> listSubscriptionsByUser(@RequestParam final String email) {
|
||||
return subscriptionRepo.findBySubscriber(email);
|
||||
}
|
||||
|
||||
@ApiOperation("Returns the status of the application")
|
||||
@Operation(summary = "Returns the status of the application")
|
||||
@GetMapping(value = "/status")
|
||||
private Map<String, Long> status() {
|
||||
final Map<String, Long> res = new LinkedHashMap<>();
|
||||
res.put("n_subscriptions", subscriptionRepo.count());
|
||||
res.put("n_events", eventRepository.count());
|
||||
res.put("n_events_es", eventRepository.count());
|
||||
res.put("n_events_db", openaireDsStatRepository.totalEvents());
|
||||
res.put("n_notifications", notificationRepository.count());
|
||||
return res;
|
||||
}
|
||||
|
||||
@ApiOperation("Store the feedback of an event (MOCK)")
|
||||
@Operation(summary = "Store the feedback of an event (MOCK)")
|
||||
@RequestMapping(value = "/feedback/events", method = {
|
||||
RequestMethod.POST, RequestMethod.PATCH
|
||||
RequestMethod.POST, RequestMethod.PATCH
|
||||
})
|
||||
private Map<String, String> feedbackEvent(@RequestBody final EventFeedback feedback) {
|
||||
// TOOD
|
||||
final Map<String, String> res = new HashMap<>();
|
||||
private Map<String, Object> feedbackEvent(@RequestBody final EventFeedback feedback) {
|
||||
|
||||
final DbEventFeedback dbEntry = new DbEventFeedback();
|
||||
dbEntry.setEventId(feedback.getEventId());
|
||||
dbEntry.setStatus(feedback.getStatus());
|
||||
|
||||
feedbackRepository.save(dbEntry);
|
||||
|
||||
final Map<String, Object> res = new HashMap<>();
|
||||
res.put("status", "done");
|
||||
res.put("feedback", dbEntry);
|
||||
return res;
|
||||
}
|
||||
|
||||
private List<ShortEventMessage> calculateNotificationMessages(final SearchScrollHits<Notification> scroll) {
|
||||
if (scroll.getSearchHits().size() > 0) {
|
||||
return scroll.stream()
|
||||
.map(SearchHit::getContent)
|
||||
.map(this::messageFromNotification)
|
||||
.collect(Collectors.toList());
|
||||
} else {
|
||||
return new ArrayList<>();
|
||||
.map(SearchHit::getContent)
|
||||
.map(this::messageFromNotification)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
return new ArrayList<>();
|
||||
}
|
||||
|
||||
private ShortEventMessage messageFromNotification(final Notification n) {
|
||||
|
|
|
@ -1,5 +1,8 @@
|
|||
spring.profiles.active = dev,openaire
|
||||
|
||||
server.public_url =
|
||||
server.public_desc = API Base URL
|
||||
|
||||
#logging.level.root=DEBUG
|
||||
|
||||
maven.pom.path = /META-INF/maven/eu.dnetlib.dhp/dhp-broker-public-application/effective-pom.xml
|
||||
|
|
|
@ -2,6 +2,6 @@
|
|||
<html>
|
||||
<head>
|
||||
<title>OpenAIRE Broker Public API</title>
|
||||
<meta http-equiv="refresh" content="2; url = ./swagger" />
|
||||
<meta http-equiv="refresh" content="2; url = ./apidoc" />
|
||||
</head>
|
||||
</html>
|
||||
|
|
|
@ -1,3 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
ssh -vNL 5432:10.19.65.40:5432 michele.artini@iis-cdh5-test-gw.ocean.icm.edu.pl
|
|
@ -4,8 +4,8 @@
|
|||
<parent>
|
||||
<groupId>eu.dnetlib.dhp</groupId>
|
||||
<artifactId>apps</artifactId>
|
||||
<version>3.2.0</version>
|
||||
<relativePath>../</relativePath>
|
||||
<version>3.5.5-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
@ -60,12 +60,31 @@
|
|||
<artifactId>dhp-schemas</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- JUnit -->
|
||||
<!-- Tests -->
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-junit-jupiter</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
|
||||
|
|
|
@ -1,51 +1,36 @@
|
|||
package eu.dnetlib.data.mdstore.manager;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springdoc.core.GroupedOpenApi;
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
import org.springframework.boot.autoconfigure.domain.EntityScan;
|
||||
import org.springframework.cache.annotation.EnableCaching;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.scheduling.annotation.EnableScheduling;
|
||||
|
||||
import eu.dnetlib.common.app.AbstractDnetApp;
|
||||
import springfox.documentation.builders.ApiInfoBuilder;
|
||||
import springfox.documentation.builders.RequestHandlerSelectors;
|
||||
import springfox.documentation.service.ApiInfo;
|
||||
import springfox.documentation.spring.web.plugins.Docket;
|
||||
import springfox.documentation.swagger2.annotations.EnableSwagger2;
|
||||
|
||||
@SpringBootApplication
|
||||
@EnableSwagger2
|
||||
@EnableCaching
|
||||
@EnableScheduling
|
||||
@EntityScan("eu.dnetlib.dhp.schema.mdstore")
|
||||
public class MainApplication extends AbstractDnetApp {
|
||||
|
||||
@Value("${dhp.swagger.api.host}")
|
||||
private String swaggetHost;
|
||||
|
||||
@Value("${dhp.swagger.api.basePath}")
|
||||
private String swaggerPath;
|
||||
|
||||
public static void main(final String[] args) {
|
||||
SpringApplication.run(MainApplication.class, args);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public GroupedOpenApi publicApi() {
|
||||
return GroupedOpenApi.builder()
|
||||
.group("MDStore APIs")
|
||||
.pathsToMatch("/mdstores/**")
|
||||
.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void configSwagger(final Docket docket) {
|
||||
docket
|
||||
.host(swaggetHost)
|
||||
.pathMapping(swaggerPath)
|
||||
.select()
|
||||
.apis(RequestHandlerSelectors.any())
|
||||
.paths(p -> p.startsWith("/mdstores"))
|
||||
.build()
|
||||
.apiInfo(new ApiInfoBuilder()
|
||||
.title("MDStore Manager APIs")
|
||||
.description("APIs documentation")
|
||||
.version("1.1")
|
||||
.contact(ApiInfo.DEFAULT_CONTACT)
|
||||
.license("Apache 2.0")
|
||||
.licenseUrl("http://www.apache.org/licenses/LICENSE-2.0")
|
||||
.build());
|
||||
protected String swaggerTitle() {
|
||||
return "MDStore Manager APIs";
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -18,20 +18,18 @@ import org.springframework.web.bind.annotation.RestController;
|
|||
import com.google.common.collect.Sets;
|
||||
|
||||
import eu.dnetlib.common.controller.AbstractDnetController;
|
||||
import eu.dnetlib.dhp.schema.mdstore.MDStoreVersion;
|
||||
import eu.dnetlib.dhp.schema.mdstore.MDStoreWithInfo;
|
||||
import eu.dnetlib.data.mdstore.manager.exceptions.MDStoreManagerException;
|
||||
import eu.dnetlib.data.mdstore.manager.utils.DatabaseUtils;
|
||||
import eu.dnetlib.data.mdstore.manager.utils.HdfsClient;
|
||||
import io.swagger.annotations.Api;
|
||||
import io.swagger.annotations.ApiOperation;
|
||||
import io.swagger.annotations.ApiParam;
|
||||
import eu.dnetlib.dhp.schema.mdstore.MDStoreVersion;
|
||||
import eu.dnetlib.dhp.schema.mdstore.MDStoreWithInfo;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.Parameter;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/mdstores")
|
||||
@Api(tags = {
|
||||
"Metadata Stores"
|
||||
})
|
||||
@Tag(name = "Metadata Stores")
|
||||
public class MDStoreController extends AbstractDnetController {
|
||||
|
||||
@Autowired
|
||||
|
@ -42,67 +40,67 @@ public class MDStoreController extends AbstractDnetController {
|
|||
|
||||
private static final Logger log = LoggerFactory.getLogger(DatabaseUtils.class);
|
||||
|
||||
@ApiOperation("Return all the mdstores")
|
||||
@Operation(summary = "Return all the mdstores")
|
||||
@GetMapping("/")
|
||||
public Iterable<MDStoreWithInfo> find() {
|
||||
return databaseUtils.listMdStores();
|
||||
}
|
||||
|
||||
@ApiOperation("Return all the mdstore identifiers")
|
||||
@Operation(summary = "Return all the mdstore identifiers")
|
||||
@GetMapping("/ids")
|
||||
public List<String> findIdentifiers() {
|
||||
return databaseUtils.listMdStoreIDs();
|
||||
}
|
||||
|
||||
@ApiOperation("Return a mdstores by id")
|
||||
@Operation(summary = "Return a mdstores by id")
|
||||
@GetMapping("/mdstore/{mdId}")
|
||||
public MDStoreWithInfo getMdStore(@ApiParam("the mdstore identifier") @PathVariable final String mdId) throws MDStoreManagerException {
|
||||
public MDStoreWithInfo getMdStore(@Parameter(name = "the mdstore identifier") @PathVariable final String mdId) throws MDStoreManagerException {
|
||||
return databaseUtils.findMdStore(mdId);
|
||||
}
|
||||
|
||||
@ApiOperation("Increase the read count of the current mdstore")
|
||||
@Operation(summary = "Increase the read count of the current mdstore")
|
||||
@GetMapping("/mdstore/{mdId}/startReading")
|
||||
public MDStoreVersion startReading(@ApiParam("the mdstore identifier") @PathVariable final String mdId) throws MDStoreManagerException {
|
||||
public MDStoreVersion startReading(@Parameter(name = "the mdstore identifier") @PathVariable final String mdId) throws MDStoreManagerException {
|
||||
return databaseUtils.startReading(mdId);
|
||||
}
|
||||
|
||||
@ApiOperation("Create a new mdstore")
|
||||
@Operation(summary = "Create a new mdstore")
|
||||
@GetMapping("/new/{format}/{layout}/{interpretation}")
|
||||
public MDStoreWithInfo createMDStore(
|
||||
@ApiParam("mdstore format") @PathVariable final String format,
|
||||
@ApiParam("mdstore layout") @PathVariable final String layout,
|
||||
@ApiParam("mdstore interpretation") @PathVariable final String interpretation,
|
||||
@ApiParam("datasource name") @RequestParam(required = false) final String dsName,
|
||||
@ApiParam("datasource id") @RequestParam(required = false) final String dsId,
|
||||
@ApiParam("api id") @RequestParam(required = false) final String apiId) throws MDStoreManagerException {
|
||||
@Parameter(name = "mdstore format") @PathVariable final String format,
|
||||
@Parameter(name = "mdstore layout") @PathVariable final String layout,
|
||||
@Parameter(name = "mdstore interpretation") @PathVariable final String interpretation,
|
||||
@Parameter(name = "datasource name") @RequestParam(required = true) final String dsName,
|
||||
@Parameter(name = "datasource id") @RequestParam(required = true) final String dsId,
|
||||
@Parameter(name = "api id") @RequestParam(required = true) final String apiId) throws MDStoreManagerException {
|
||||
final String id = databaseUtils.createMDStore(format, layout, interpretation, dsName, dsId, apiId);
|
||||
return databaseUtils.findMdStore(id);
|
||||
}
|
||||
|
||||
@ApiOperation("Delete a mdstore by id")
|
||||
@Operation(summary = "Delete a mdstore by id")
|
||||
@DeleteMapping("/mdstore/{mdId}")
|
||||
public StatusResponse delete(@ApiParam("the id of the mdstore that will be deleted") @PathVariable final String mdId) throws MDStoreManagerException {
|
||||
public StatusResponse delete(@Parameter(name = "the id of the mdstore that will be deleted") @PathVariable final String mdId) throws MDStoreManagerException {
|
||||
final String hdfsPath = databaseUtils.deleteMdStore(mdId);
|
||||
hdfsClient.deletePath(hdfsPath);
|
||||
return StatusResponse.DELETED;
|
||||
}
|
||||
|
||||
@ApiOperation("Return all the versions of a mdstore")
|
||||
@Operation(summary = "Return all the versions of a mdstore")
|
||||
@GetMapping("/mdstore/{mdId}/versions")
|
||||
public Iterable<MDStoreVersion> listVersions(@PathVariable final String mdId) throws MDStoreManagerException {
|
||||
return databaseUtils.listVersions(mdId);
|
||||
}
|
||||
|
||||
@ApiOperation("Create a new preliminary version of a mdstore")
|
||||
@Operation(summary = "Create a new preliminary version of a mdstore")
|
||||
@GetMapping("/mdstore/{mdId}/newVersion")
|
||||
public MDStoreVersion prepareNewVersion(@ApiParam("the id of the mdstore for which will be created a new version") @PathVariable final String mdId) {
|
||||
public MDStoreVersion prepareNewVersion(@Parameter(name = "the id of the mdstore for which will be created a new version") @PathVariable final String mdId) {
|
||||
return databaseUtils.prepareMdStoreVersion(mdId);
|
||||
}
|
||||
|
||||
@ApiOperation("Promote a preliminary version to current")
|
||||
@Operation(summary = "Promote a preliminary version to current")
|
||||
@GetMapping("/version/{versionId}/commit/{size}")
|
||||
public MDStoreVersion commitVersion(@ApiParam("the id of the version that will be promoted to the current version") @PathVariable final String versionId,
|
||||
@ApiParam("the size of the new current mdstore") @PathVariable final long size) throws MDStoreManagerException {
|
||||
public MDStoreVersion commitVersion(@Parameter(name = "the id of the version that will be promoted to the current version") @PathVariable final String versionId,
|
||||
@Parameter(name = "the size of the new current mdstore") @PathVariable final long size) throws MDStoreManagerException {
|
||||
try {
|
||||
return databaseUtils.commitMdStoreVersion(versionId, size);
|
||||
} finally {
|
||||
|
@ -110,46 +108,46 @@ public class MDStoreController extends AbstractDnetController {
|
|||
}
|
||||
}
|
||||
|
||||
@ApiOperation("Abort a preliminary version")
|
||||
@Operation(summary = "Abort a preliminary version")
|
||||
@GetMapping("/version/{versionId}/abort")
|
||||
public StatusResponse commitVersion(@ApiParam("the id of the version to abort") @PathVariable final String versionId) throws MDStoreManagerException {
|
||||
public StatusResponse commitVersion(@Parameter(name = "the id of the version to abort") @PathVariable final String versionId) throws MDStoreManagerException {
|
||||
final String hdfsPath = databaseUtils.deleteMdStoreVersion(versionId, true);
|
||||
hdfsClient.deletePath(hdfsPath);
|
||||
return StatusResponse.ABORTED;
|
||||
}
|
||||
|
||||
@ApiOperation("Return an existing mdstore version")
|
||||
@Operation(summary = "Return an existing mdstore version")
|
||||
@GetMapping("/version/{versionId}")
|
||||
public MDStoreVersion getVersion(@ApiParam("the id of the version that has to be deleted") @PathVariable final String versionId)
|
||||
public MDStoreVersion getVersion(@Parameter(name = "the id of the version that has to be deleted") @PathVariable final String versionId)
|
||||
throws MDStoreManagerException {
|
||||
return databaseUtils.findVersion(versionId);
|
||||
}
|
||||
|
||||
@ApiOperation("Delete a mdstore version")
|
||||
@Operation(summary = "Delete a mdstore version")
|
||||
@DeleteMapping("/version/{versionId}")
|
||||
public StatusResponse deleteVersion(@ApiParam("the id of the version that has to be deleted") @PathVariable final String versionId,
|
||||
@ApiParam("if true, the controls on writing and readcount values will be skipped") @RequestParam(required = false, defaultValue = "false") final boolean force)
|
||||
public StatusResponse deleteVersion(@Parameter(name = "the id of the version that has to be deleted") @PathVariable final String versionId,
|
||||
@Parameter(name = "if true, the controls on writing and readcount values will be skipped") @RequestParam(required = false, defaultValue = "false") final boolean force)
|
||||
throws MDStoreManagerException {
|
||||
final String hdfsPath = databaseUtils.deleteMdStoreVersion(versionId, force);
|
||||
hdfsClient.deletePath(hdfsPath);
|
||||
return StatusResponse.DELETED;
|
||||
}
|
||||
|
||||
@ApiOperation("Decrease the read count of a mdstore version")
|
||||
@Operation(summary = "Decrease the read count of a mdstore version")
|
||||
@GetMapping("/version/{versionId}/endReading")
|
||||
public MDStoreVersion endReading(@ApiParam("the id of the version that has been completely read") @PathVariable final String versionId)
|
||||
public MDStoreVersion endReading(@Parameter(name = "the id of the version that has been completely read") @PathVariable final String versionId)
|
||||
throws MDStoreManagerException {
|
||||
return databaseUtils.endReading(versionId);
|
||||
}
|
||||
|
||||
@ApiOperation("Reset the read count of a mdstore version")
|
||||
@Operation(summary = "Reset the read count of a mdstore version")
|
||||
@GetMapping("/version/{versionId}/resetReading")
|
||||
public MDStoreVersion resetReading(@ApiParam("the id of the version") @PathVariable final String versionId)
|
||||
public MDStoreVersion resetReading(@Parameter(name = "the id of the version") @PathVariable final String versionId)
|
||||
throws MDStoreManagerException {
|
||||
return databaseUtils.resetReading(versionId);
|
||||
}
|
||||
|
||||
@ApiOperation("Delete expired versions")
|
||||
@Operation(summary = "Delete expired versions")
|
||||
@DeleteMapping("/versions/expired")
|
||||
public StatusResponse deleteExpiredVersions() {
|
||||
new Thread(this::performDeleteOfExpiredVersions).start();
|
||||
|
@ -169,10 +167,10 @@ public class MDStoreController extends AbstractDnetController {
|
|||
log.info("Done.");
|
||||
}
|
||||
|
||||
@ApiOperation("Fix the inconsistencies on HDFS")
|
||||
@Operation(summary = "Fix the inconsistencies on HDFS")
|
||||
@GetMapping("/hdfs/inconsistencies")
|
||||
public Set<String> fixHdfsInconsistencies(
|
||||
@ApiParam("force the deletion of hdfs paths") @RequestParam(required = false, defaultValue = "false") final boolean delete)
|
||||
@Parameter(name = "force the deletion of hdfs paths") @RequestParam(required = false, defaultValue = "false") final boolean delete)
|
||||
throws MDStoreManagerException {
|
||||
|
||||
final Set<String> hdfsDirs = hdfsClient.listHadoopDirs();
|
||||
|
@ -189,7 +187,7 @@ public class MDStoreController extends AbstractDnetController {
|
|||
return toDelete;
|
||||
}
|
||||
|
||||
@ApiOperation("Show informations")
|
||||
@Operation(summary = "Show informations")
|
||||
@GetMapping("/info")
|
||||
public Map<String, Object> info() {
|
||||
final Map<String, Object> info = new LinkedHashMap<>();
|
||||
|
@ -201,21 +199,21 @@ public class MDStoreController extends AbstractDnetController {
|
|||
return info;
|
||||
}
|
||||
|
||||
@ApiOperation("list the file inside the path of a mdstore version")
|
||||
@Operation(summary = "list the file inside the path of a mdstore version")
|
||||
@GetMapping("/version/{versionId}/parquet/files")
|
||||
public Set<String> listVersionFiles(@PathVariable final String versionId) throws MDStoreManagerException {
|
||||
final String path = databaseUtils.findVersion(versionId).getHdfsPath();
|
||||
return hdfsClient.listContent(path + "/store", HdfsClient::isParquetFile);
|
||||
}
|
||||
|
||||
@ApiOperation("read the parquet file of a mdstore version")
|
||||
@Operation(summary = "read the parquet file of a mdstore version")
|
||||
@GetMapping("/version/{versionId}/parquet/content/{limit}")
|
||||
public List<Map<String, String>> listVersionParquet(@PathVariable final String versionId, @PathVariable final long limit) throws MDStoreManagerException {
|
||||
final String path = databaseUtils.findVersion(versionId).getHdfsPath();
|
||||
return hdfsClient.readParquetFiles(path + "/store", limit);
|
||||
}
|
||||
|
||||
@ApiOperation("read the parquet file of a mdstore (current version)")
|
||||
@Operation(summary = "read the parquet file of a mdstore (current version)")
|
||||
@GetMapping("/mdstore/{mdId}/parquet/content/{limit}")
|
||||
public List<Map<String, String>> listMdstoreParquet(@PathVariable final String mdId, @PathVariable final long limit) throws MDStoreManagerException {
|
||||
final String versionId = databaseUtils.findMdStore(mdId).getCurrentVersion();
|
||||
|
|
|
@ -11,6 +11,6 @@ public class SwaggerController {
|
|||
"/apidoc", "/api-doc", "/doc", "/swagger"
|
||||
}, method = RequestMethod.GET)
|
||||
public String apiDoc() {
|
||||
return "redirect:swagger-ui/";
|
||||
return "redirect:swagger-ui/index.html";
|
||||
}
|
||||
}
|
||||
|
|
|
@ -13,9 +13,9 @@ import eu.dnetlib.data.mdstore.manager.exceptions.MDStoreManagerException;
|
|||
import eu.dnetlib.data.mdstore.manager.utils.ControllerUtils;
|
||||
import eu.dnetlib.data.mdstore.manager.utils.DatabaseUtils;
|
||||
import eu.dnetlib.data.mdstore.manager.utils.ZeppelinClient;
|
||||
import eu.dnetlib.dhp.schema.mdstore.MDStoreWithInfo;
|
||||
|
||||
@Controller
|
||||
@RequestMapping("/zeppelin")
|
||||
public class ZeppelinController {
|
||||
|
||||
@Autowired
|
||||
|
@ -24,11 +24,12 @@ public class ZeppelinController {
|
|||
@Autowired
|
||||
private DatabaseUtils databaseUtils;
|
||||
|
||||
@RequestMapping("/{mdId}/{note}")
|
||||
@RequestMapping("/zeppelin/{mdId}/{note}")
|
||||
public String goToZeppelin(@PathVariable final String mdId, final @PathVariable String note) throws MDStoreManagerException {
|
||||
final String currentVersion = databaseUtils.findMdStore(mdId).getCurrentVersion();
|
||||
final MDStoreWithInfo mdstore = databaseUtils.findMdStore(mdId);
|
||||
final String currentVersion = mdstore.getCurrentVersion();
|
||||
final String path = databaseUtils.findVersion(currentVersion).getHdfsPath() + "/store";
|
||||
return "redirect:" + zeppelinClient.zeppelinNote(note, mdId, currentVersion, path);
|
||||
return "redirect:" + zeppelinClient.zeppelinNote(note, mdstore, path);
|
||||
}
|
||||
|
||||
@ExceptionHandler(Exception.class)
|
||||
|
|
|
@ -0,0 +1,29 @@
|
|||
package eu.dnetlib.data.mdstore.manager.controller;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
import eu.dnetlib.common.controller.AbstractDnetController;
|
||||
import eu.dnetlib.data.mdstore.manager.exceptions.MDStoreManagerException;
|
||||
import eu.dnetlib.data.mdstore.manager.utils.ZeppelinClient;
|
||||
|
||||
@RestController
|
||||
public class ZeppelinRestController extends AbstractDnetController {
|
||||
|
||||
@Autowired
|
||||
private ZeppelinClient zeppelinClient;
|
||||
|
||||
@GetMapping("/zeppelin/templates")
|
||||
public List<String> getTemplates() throws MDStoreManagerException {
|
||||
try {
|
||||
// if (zeppelinClient.get)
|
||||
return zeppelinClient.listTemplates();
|
||||
} catch (final Throwable e) {
|
||||
throw new MDStoreManagerException("Zeppelin is unreachable", e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -16,15 +16,15 @@ import org.springframework.beans.factory.annotation.Value;
|
|||
import org.springframework.jdbc.core.JdbcTemplate;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import eu.dnetlib.dhp.schema.mdstore.MDStore;
|
||||
import eu.dnetlib.dhp.schema.mdstore.MDStoreCurrentVersion;
|
||||
import eu.dnetlib.dhp.schema.mdstore.MDStoreVersion;
|
||||
import eu.dnetlib.dhp.schema.mdstore.MDStoreWithInfo;
|
||||
import eu.dnetlib.data.mdstore.manager.exceptions.MDStoreManagerException;
|
||||
import eu.dnetlib.data.mdstore.manager.repository.MDStoreCurrentVersionRepository;
|
||||
import eu.dnetlib.data.mdstore.manager.repository.MDStoreRepository;
|
||||
import eu.dnetlib.data.mdstore.manager.repository.MDStoreVersionRepository;
|
||||
import eu.dnetlib.data.mdstore.manager.repository.MDStoreWithInfoRepository;
|
||||
import eu.dnetlib.dhp.schema.mdstore.MDStore;
|
||||
import eu.dnetlib.dhp.schema.mdstore.MDStoreCurrentVersion;
|
||||
import eu.dnetlib.dhp.schema.mdstore.MDStoreVersion;
|
||||
import eu.dnetlib.dhp.schema.mdstore.MDStoreWithInfo;
|
||||
|
||||
@Service
|
||||
public class DatabaseUtils {
|
||||
|
|
|
@ -1,34 +1,46 @@
|
|||
package eu.dnetlib.data.mdstore.manager.utils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import javax.annotation.PostConstruct;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.lang.StringEscapeUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.http.HttpEntity;
|
||||
import org.springframework.http.HttpHeaders;
|
||||
import org.springframework.http.HttpMethod;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.http.MediaType;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.scheduling.annotation.Scheduled;
|
||||
import org.springframework.stereotype.Component;
|
||||
import org.springframework.util.LinkedMultiValueMap;
|
||||
import org.springframework.util.MultiValueMap;
|
||||
import org.springframework.web.client.RestTemplate;
|
||||
|
||||
import eu.dnetlib.data.mdstore.manager.exceptions.MDStoreManagerException;
|
||||
import eu.dnetlib.data.mdstore.manager.utils.zeppelin.HasStatus;
|
||||
import eu.dnetlib.data.mdstore.manager.utils.zeppelin.ListResponse;
|
||||
import eu.dnetlib.data.mdstore.manager.utils.zeppelin.Note;
|
||||
import eu.dnetlib.data.mdstore.manager.utils.zeppelin.Paragraph;
|
||||
import eu.dnetlib.data.mdstore.manager.utils.zeppelin.SimpleResponse;
|
||||
import eu.dnetlib.data.mdstore.manager.utils.zeppelin.StringResponse;
|
||||
import eu.dnetlib.dhp.schema.mdstore.MDStoreWithInfo;
|
||||
|
||||
@Component
|
||||
public class ZeppelinClient {
|
||||
|
@ -47,46 +59,126 @@ public class ZeppelinClient {
|
|||
|
||||
private static final Log log = LogFactory.getLog(ZeppelinClient.class);
|
||||
|
||||
public String zeppelinNote(final String note, final String mdId, final String currentVersion, final String currentVersionPath)
|
||||
throws MDStoreManagerException {
|
||||
final String jsessionid = obtainJsessionID();
|
||||
private static final Map<String, List<String>> DEFAULT_RIGHTS = new LinkedHashMap<>();
|
||||
|
||||
final String newName = zeppelinNamePrefix + "/notes/" + note + "/" + currentVersion;
|
||||
private static final Integer MAX_NUMBER_OF_MD_NOTES = 2;
|
||||
|
||||
final Optional<String> oldNoteId = listNotes(jsessionid).stream()
|
||||
@PostConstruct
|
||||
public void init() {
|
||||
DEFAULT_RIGHTS.put("owners", Arrays.asList(zeppelinLogin));
|
||||
DEFAULT_RIGHTS.put("readers", new ArrayList<>()); // ALL
|
||||
DEFAULT_RIGHTS.put("runners", new ArrayList<>()); // ALL
|
||||
DEFAULT_RIGHTS.put("writers", new ArrayList<>()); // ALL
|
||||
}
|
||||
|
||||
private String jsessionid;
|
||||
|
||||
public String zeppelinNote(final String note, final MDStoreWithInfo mdstore, final String currentVersionPath) throws MDStoreManagerException {
|
||||
|
||||
if (notConfigured()) { throw new MDStoreManagerException("A zeppelin property is empty"); }
|
||||
|
||||
final String newName =
|
||||
StringUtils.join(Arrays.asList(zeppelinNamePrefix, "notes", mdstore.getDatasourceName().replaceAll("/", "-"), mdstore.getApiId()
|
||||
.replaceAll("/", "-"), note.replaceAll("/", "-"), mdstore.getCurrentVersion().replaceAll("/", "-")), "/");
|
||||
|
||||
final List<Map<String, String>> notes = listNotes();
|
||||
|
||||
final Optional<String> oldNoteId = notes.stream()
|
||||
.filter(Objects::nonNull)
|
||||
.filter(map -> newName.equals(map.get("name")))
|
||||
.map(map -> map.get("id"))
|
||||
.findFirst();
|
||||
|
||||
if (oldNoteId.isPresent()) {
|
||||
log.info("Returning existing note: " + oldNoteId.get());
|
||||
log.debug("Returning existing note: " + oldNoteId.get());
|
||||
return zeppelinBaseUrl + "/#/notebook/" + oldNoteId.get();
|
||||
}
|
||||
|
||||
final String templateNoteId = findTemplateNoteId(note, jsessionid);
|
||||
final String templateName = zeppelinNamePrefix + "/templates/" + note;
|
||||
final String templateNoteId = notes.stream()
|
||||
.filter(map -> map.get("name").equals(templateName))
|
||||
.map(map -> map.get("id"))
|
||||
.findFirst()
|
||||
.orElseThrow(() -> new MDStoreManagerException("Template Note not found: " + templateName));
|
||||
|
||||
final String newId = cloneNote(templateNoteId, newName, jsessionid);
|
||||
|
||||
log.info("New note created, id: " + newId + ", name: " + newName);
|
||||
|
||||
addParagraph(newId, confParagraph(mdId, currentVersion, currentVersionPath), jsessionid);
|
||||
|
||||
reassignRights(newId, jsessionid);
|
||||
final String newId = cloneNote(templateNoteId, newName, mdstore, currentVersionPath);
|
||||
|
||||
return zeppelinBaseUrl + "/#/notebook/" + newId;
|
||||
|
||||
}
|
||||
|
||||
// TODO: prepare the cron job
|
||||
public void cleanExpiredNotes() {
|
||||
try {
|
||||
final String jsessionid = obtainJsessionID();
|
||||
public List<String> listTemplates() {
|
||||
final String prefix = zeppelinNamePrefix + "/templates/";
|
||||
|
||||
for (final Map<String, String> n : listNotes(jsessionid)) {
|
||||
final String id = n.get("id");
|
||||
if (n.get("name").startsWith(zeppelinNamePrefix + "/notes/") && isExpired(id, jsessionid)) {
|
||||
deleteNote(id, jsessionid);
|
||||
if (notConfigured()) {
|
||||
return new ArrayList<>();
|
||||
} else {
|
||||
return listNotes().stream()
|
||||
.map(map -> map.get("name"))
|
||||
.filter(s -> s.startsWith(prefix))
|
||||
.map(s -> StringUtils.substringAfter(s, prefix))
|
||||
.sorted()
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
}
|
||||
|
||||
private List<Map<String, String>> listNotes() {
|
||||
return callApi(HttpMethod.GET, "notebook", ListResponse.class, null).getBody();
|
||||
}
|
||||
|
||||
private String cloneNote(final String noteId, final String newName, final MDStoreWithInfo mdstore, final String currentVersionPath)
|
||||
throws MDStoreManagerException {
|
||||
final String newId = callApi(HttpMethod.POST, "notebook/" + noteId, StringResponse.class, new Note(newName)).getBody();
|
||||
callApi(HttpMethod.POST, "notebook/" + newId + "/paragraph", StringResponse.class, confParagraph(mdstore, currentVersionPath)).getBody();
|
||||
callApi(HttpMethod.PUT, "notebook/" + newId + "/permissions", SimpleResponse.class, DEFAULT_RIGHTS);
|
||||
|
||||
log.info("New note created, id: " + newId + ", name: " + newName);
|
||||
|
||||
return newId;
|
||||
|
||||
}
|
||||
|
||||
private Paragraph confParagraph(final MDStoreWithInfo mdstore, final String currentVersionPath) throws MDStoreManagerException {
|
||||
try {
|
||||
final String code = IOUtils.toString(getClass().getResourceAsStream("/zeppelin/paragraph_conf.tmpl"), StandardCharsets.UTF_8)
|
||||
.replaceAll("__DS_NAME__", StringEscapeUtils.escapeJava(mdstore.getDatasourceName()))
|
||||
.replaceAll("__DS_ID__", StringEscapeUtils.escapeJava(mdstore.getDatasourceId()))
|
||||
.replaceAll("__API_ID__", StringEscapeUtils.escapeJava(mdstore.getApiId()))
|
||||
.replaceAll("__MDSTORE_ID__", mdstore.getId())
|
||||
.replaceAll("__VERSION__", mdstore.getCurrentVersion())
|
||||
.replaceAll("__PATH__", currentVersionPath);
|
||||
return new Paragraph("Configuration", code, 0);
|
||||
} catch (final IOException e) {
|
||||
log.error("Error preparing configuration paragraph", e);
|
||||
throw new MDStoreManagerException("Error preparing configuration paragraph", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Scheduled(fixedRate = 12 * 60 * 60 * 1000) // 12 hours
|
||||
public void cleanExpiredNotes() {
|
||||
if (notConfigured()) { return; }
|
||||
|
||||
try {
|
||||
// I sort the notes according to the version datestamp (more recent first)
|
||||
final List<Map<String, String>> notes = listNotes()
|
||||
.stream()
|
||||
.filter(n -> n.get("name").startsWith(zeppelinNamePrefix + "/notes/"))
|
||||
.sorted((o1, o2) -> StringUtils.compare(o2.get("name"), o1.get("name")))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
final Map<String, Integer> map = new HashMap<>();
|
||||
for (final Map<String, String> n : notes) {
|
||||
|
||||
final String firstPart = StringUtils.substringBeforeLast(n.get("name"), "-");
|
||||
if (!map.containsKey(firstPart)) {
|
||||
log.debug("Evaluating note " + n.get("name") + " for deletion: CONFIRMED");
|
||||
map.put(firstPart, 1);
|
||||
} else if (map.get(firstPart) < MAX_NUMBER_OF_MD_NOTES) {
|
||||
log.debug("Evaluating note " + n.get("name") + " for deletion: CONFIRMED");
|
||||
map.put(firstPart, map.get(firstPart) + 1);
|
||||
} else {
|
||||
log.debug("Evaluating note " + n.get("name") + " for deletion: TO_DELETE");
|
||||
callApi(HttpMethod.DELETE, "notebook/" + n.get("id"), SimpleResponse.class, null);
|
||||
}
|
||||
}
|
||||
} catch (final Exception e) {
|
||||
|
@ -94,7 +186,90 @@ public class ZeppelinClient {
|
|||
}
|
||||
}
|
||||
|
||||
private String obtainJsessionID() throws MDStoreManagerException {
|
||||
private <T extends HasStatus> T callApi(final HttpMethod method, final String api, final Class<T> resClazz, final Object objRequest) {
|
||||
|
||||
if (jsessionid == null) {
|
||||
final T res = findNewJsessionId(method, api, resClazz, objRequest);
|
||||
if (res != null) { return res; }
|
||||
} else {
|
||||
try {
|
||||
return callApi(method, api, resClazz, objRequest, jsessionid);
|
||||
} catch (final MDStoreManagerException e) {
|
||||
final T res = findNewJsessionId(method, api, resClazz, objRequest);
|
||||
if (res != null) { return res; }
|
||||
}
|
||||
}
|
||||
throw new RuntimeException("All attempted calls are failed");
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private <T extends HasStatus> T callApi(final HttpMethod method,
|
||||
final String api,
|
||||
final Class<T> resClazz,
|
||||
final Object objRequest,
|
||||
final String jsessionid)
|
||||
throws MDStoreManagerException {
|
||||
final String url = String.format("%s/api/%s;JSESSIONID=%s", zeppelinBaseUrl, api, jsessionid);
|
||||
|
||||
final RestTemplate restTemplate = new RestTemplate();
|
||||
|
||||
ResponseEntity<T> res = null;
|
||||
|
||||
switch (method) {
|
||||
case GET:
|
||||
log.debug("Performing GET: " + url);
|
||||
res = restTemplate.getForEntity(url, resClazz);
|
||||
break;
|
||||
case POST:
|
||||
log.debug("Performing POST: " + url);
|
||||
res = restTemplate.postForEntity(url, objRequest, resClazz);
|
||||
break;
|
||||
case PUT:
|
||||
log.debug("Performing PUT: " + url);
|
||||
restTemplate.put(url, objRequest);
|
||||
break;
|
||||
case DELETE:
|
||||
log.debug("Performing DELETE: " + url);
|
||||
restTemplate.delete(url);
|
||||
break;
|
||||
default:
|
||||
throw new RuntimeException("Unsupported method: " + method);
|
||||
}
|
||||
|
||||
if (method == HttpMethod.PUT || method == HttpMethod.DELETE) {
|
||||
return (T) new SimpleResponse("OK");
|
||||
} else if (res == null) {
|
||||
log.error("NULL response from the API");
|
||||
throw new MDStoreManagerException("NULL response from the API");
|
||||
} else if (res.getStatusCode() != HttpStatus.OK) {
|
||||
log.error("Zeppelin API failed with HTTP error: " + res);
|
||||
throw new MDStoreManagerException("Zeppelin API failed with HTTP error: " + res);
|
||||
} else if (res.getBody() == null) {
|
||||
log.error("Zeppelin API returned a null response");
|
||||
throw new MDStoreManagerException("Zeppelin API returned a null response");
|
||||
} else if (!res.getBody().getStatus().equals("OK")) {
|
||||
log.error("Zeppelin API Operation failed: " + res.getBody());
|
||||
throw new MDStoreManagerException("Registration of zeppelin note failed: " + res.getBody());
|
||||
} else {
|
||||
return res.getBody();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private <T extends HasStatus> T findNewJsessionId(final HttpMethod method, final String api, final Class<T> resClazz, final Object objRequest) {
|
||||
for (final String id : obtainJsessionIDs()) {
|
||||
try {
|
||||
final T res = callApi(method, api, resClazz, objRequest, id);
|
||||
setJsessionid(id);
|
||||
return res;
|
||||
} catch (final MDStoreManagerException e) {
|
||||
log.warn("Skipping invalid jsessionid: " + id);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private Set<String> obtainJsessionIDs() {
|
||||
|
||||
final HttpHeaders headers = new HttpHeaders();
|
||||
headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED);
|
||||
|
@ -109,10 +284,10 @@ public class ZeppelinClient {
|
|||
|
||||
if (res.getStatusCode() != HttpStatus.OK) {
|
||||
log.error("Zeppelin API: login failed with HTTP error: " + res);
|
||||
throw new MDStoreManagerException("Zeppelin API: login failed with HTTP error: " + res);
|
||||
throw new RuntimeException("Zeppelin API: login failed with HTTP error: " + res);
|
||||
} else if (!res.getHeaders().containsKey(HttpHeaders.SET_COOKIE)) {
|
||||
log.error("Zeppelin API: login failed (missing SET_COOKIE header)");
|
||||
throw new MDStoreManagerException("Zeppelin API: login failed (missing SET_COOKIE header)");
|
||||
throw new RuntimeException("Zeppelin API: login failed (missing SET_COOKIE header)");
|
||||
} else {
|
||||
return res.getHeaders()
|
||||
.get(HttpHeaders.SET_COOKIE)
|
||||
|
@ -123,138 +298,20 @@ public class ZeppelinClient {
|
|||
.filter(s -> s.startsWith("JSESSIONID="))
|
||||
.map(s -> StringUtils.removeStart(s, "JSESSIONID="))
|
||||
.filter(s -> !s.equalsIgnoreCase("deleteMe"))
|
||||
.distinct()
|
||||
.filter(this::testConnection)
|
||||
.findFirst()
|
||||
.orElseThrow(() -> new MDStoreManagerException("Zeppelin API: login failed (invalid jsessionid)"));
|
||||
.collect(Collectors.toSet());
|
||||
}
|
||||
}
|
||||
|
||||
private boolean testConnection(final String jsessionid) {
|
||||
|
||||
final String url = zeppelinBaseUrl + "/api/notebook;JSESSIONID=" + jsessionid;
|
||||
log.info("Performing GET: " + url);
|
||||
|
||||
final ResponseEntity<ListResponse> res = new RestTemplate().getForEntity(url, ListResponse.class);
|
||||
|
||||
if (res.getStatusCode() != HttpStatus.OK) {
|
||||
return false;
|
||||
} else if (res.getBody() == null) {
|
||||
return false;
|
||||
} else if (!res.getBody().getStatus().equals("OK")) {
|
||||
return false;
|
||||
} else {
|
||||
log.info("Connected to zeppelin: " + res.getBody());
|
||||
log.info("Found JSESSIONID: " + jsessionid);
|
||||
return true;
|
||||
}
|
||||
public String getJsessionid() {
|
||||
return jsessionid;
|
||||
}
|
||||
|
||||
private List<Map<String, String>> listNotes(final String jsessionid) throws MDStoreManagerException {
|
||||
final String url = zeppelinBaseUrl + "/api/notebook;JSESSIONID=" + jsessionid;
|
||||
log.info("Performing GET: " + url);
|
||||
|
||||
final ResponseEntity<ListResponse> res = new RestTemplate().getForEntity(url, ListResponse.class);
|
||||
|
||||
if (res.getStatusCode() != HttpStatus.OK) {
|
||||
log.error("Zeppelin API failed with HTTP error: " + res);
|
||||
throw new MDStoreManagerException("Zeppelin API failed with HTTP error: " + res);
|
||||
} else if (res.getBody() == null) {
|
||||
log.error("Zeppelin API returned a null response");
|
||||
throw new MDStoreManagerException("Zeppelin API returned a null response");
|
||||
} else if (!res.getBody().getStatus().equals("OK")) {
|
||||
log.error("Registration of zeppelin note failed: " + res.getBody());
|
||||
throw new MDStoreManagerException("Registration of zeppelin note failed: " + res.getBody());
|
||||
} else {
|
||||
return res.getBody().getBody();
|
||||
}
|
||||
public void setJsessionid(final String jsessionid) {
|
||||
this.jsessionid = jsessionid;
|
||||
}
|
||||
|
||||
private String findTemplateNoteId(final String noteTemplate, final String jsessionid) throws MDStoreManagerException {
|
||||
final String templateName = zeppelinNamePrefix + "/templates/" + noteTemplate;
|
||||
|
||||
return listNotes(jsessionid).stream()
|
||||
.filter(map -> map.get("name").equals(templateName))
|
||||
.map(map -> map.get("id"))
|
||||
.findFirst()
|
||||
.orElseThrow(() -> new MDStoreManagerException("Template Note not found: " + templateName));
|
||||
}
|
||||
|
||||
private String cloneNote(final String noteId, final String newName, final String jsessionid) throws MDStoreManagerException {
|
||||
final String url = zeppelinBaseUrl + "/api/notebook/" + noteId + ";JSESSIONID=" + jsessionid;
|
||||
log.debug("Performing POST: " + url);
|
||||
|
||||
final ResponseEntity<StringResponse> res = new RestTemplate().postForEntity(url, new Note(newName), StringResponse.class);
|
||||
|
||||
if (res.getStatusCode() != HttpStatus.OK) {
|
||||
log.error("Zeppelin API failed with HTTP error: " + res);
|
||||
throw new MDStoreManagerException("Zeppelin API failed with HTTP error: " + res);
|
||||
} else if (res.getBody() == null) {
|
||||
log.error("Zeppelin API returned a null response");
|
||||
throw new MDStoreManagerException("Zeppelin API returned a null response");
|
||||
} else if (!res.getBody().getStatus().equals("OK")) {
|
||||
log.error("Registration of zeppelin note failed: " + res.getBody());
|
||||
throw new MDStoreManagerException("Registration of zeppelin note failed: " + res.getBody());
|
||||
} else {
|
||||
return res.getBody().getBody();
|
||||
}
|
||||
}
|
||||
|
||||
private Paragraph confParagraph(final String mdId, final String currentVersion, final String currentVersionPath) throws MDStoreManagerException {
|
||||
try {
|
||||
final String code = IOUtils.toString(getClass().getResourceAsStream("/zeppelin/conf.tmpl.py"))
|
||||
.replaceAll("__MDSTORE_ID__", mdId)
|
||||
.replaceAll("__VERSION__", currentVersion)
|
||||
.replaceAll("__PATH__", currentVersionPath);
|
||||
return new Paragraph("Configuration", code, 0);
|
||||
} catch (final IOException e) {
|
||||
log.error("Error preparing configuration paragraph", e);
|
||||
throw new MDStoreManagerException("Error preparing configuration paragraph", e);
|
||||
}
|
||||
}
|
||||
|
||||
private String addParagraph(final String noteId, final Paragraph paragraph, final String jsessionid) throws MDStoreManagerException {
|
||||
final String url = zeppelinBaseUrl + "/api/notebook/" + noteId + "/paragraph;JSESSIONID=" + jsessionid;
|
||||
log.debug("Performing POST: " + url);
|
||||
|
||||
final ResponseEntity<StringResponse> res = new RestTemplate().postForEntity(url, paragraph, StringResponse.class);
|
||||
|
||||
if (res.getStatusCode() != HttpStatus.OK) {
|
||||
log.error("Zeppelin API failed with HTTP error: " + res);
|
||||
throw new MDStoreManagerException("Zeppelin API failed with HTTP error: " + res);
|
||||
} else if (res.getBody() == null) {
|
||||
log.error("Zeppelin API returned a null response");
|
||||
throw new MDStoreManagerException("Zeppelin API returned a null response");
|
||||
} else if (!res.getBody().getStatus().equals("OK")) {
|
||||
log.error("Registration of zeppelin note failed: " + res.getBody());
|
||||
throw new MDStoreManagerException("Registration of zeppelin note failed: " + res.getBody());
|
||||
} else {
|
||||
return res.getBody().getBody();
|
||||
}
|
||||
}
|
||||
|
||||
private void reassignRights(final String noteId, final String jsessionid) {
|
||||
final String url = zeppelinBaseUrl + "/api/notebook/" + noteId + "/permissions;JSESSIONID=" + jsessionid;
|
||||
log.info("Performing PUT: " + url);
|
||||
|
||||
final Map<String, List<String>> rights = new LinkedHashMap<>();
|
||||
rights.put("owners", Arrays.asList(zeppelinLogin));
|
||||
rights.put("readers", new ArrayList<>()); // ALL
|
||||
rights.put("runners", new ArrayList<>()); // ALL
|
||||
rights.put("writers", new ArrayList<>()); // ALL
|
||||
|
||||
new RestTemplate().put(url, rights);
|
||||
}
|
||||
|
||||
private void deleteNote(final String id, final String jsessionid) {
|
||||
final String url = zeppelinBaseUrl + "/api/notebook/" + id + ";JSESSIONID=" + jsessionid;
|
||||
log.debug("Performing DELETE: " + url);
|
||||
new RestTemplate().delete(url);
|
||||
}
|
||||
|
||||
private boolean isExpired(final String id, final String jsessionid) {
|
||||
// TODO Auto-generated method stub
|
||||
return false;
|
||||
private boolean notConfigured() {
|
||||
return StringUtils.isAnyBlank(zeppelinBaseUrl, zeppelinLogin, zeppelinPassword, zeppelinNamePrefix);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
package eu.dnetlib.data.mdstore.manager.utils.zeppelin;
|
||||
|
||||
public interface HasStatus {
|
||||
|
||||
String getStatus();
|
||||
}
|
|
@ -3,12 +3,13 @@ package eu.dnetlib.data.mdstore.manager.utils.zeppelin;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class ListResponse {
|
||||
public class ListResponse implements HasStatus {
|
||||
|
||||
private String status;
|
||||
private String message;
|
||||
private List<Map<String, String>> body;
|
||||
|
||||
@Override
|
||||
public String getStatus() {
|
||||
return status;
|
||||
}
|
||||
|
|
|
@ -0,0 +1,16 @@
|
|||
package eu.dnetlib.data.mdstore.manager.utils.zeppelin;
|
||||
|
||||
public class SimpleResponse implements HasStatus {
|
||||
|
||||
private final String status;
|
||||
|
||||
public SimpleResponse(final String status) {
|
||||
this.status = status;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getStatus() {
|
||||
return status;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,11 +1,12 @@
|
|||
package eu.dnetlib.data.mdstore.manager.utils.zeppelin;
|
||||
|
||||
public class StringResponse {
|
||||
public class StringResponse implements HasStatus {
|
||||
|
||||
private String status;
|
||||
private String message;
|
||||
private String body;
|
||||
|
||||
@Override
|
||||
public String getStatus() {
|
||||
return status;
|
||||
}
|
||||
|
|
|
@ -1,5 +1,8 @@
|
|||
spring.main.banner-mode = console
|
||||
|
||||
server.public_url =
|
||||
server.public_desc = API Base URL
|
||||
|
||||
logging.level.root = INFO
|
||||
|
||||
maven.pom.path = /META-INF/maven/eu.dnetlib.dhp/dhp-mdstore-manager/effective-pom.xml
|
||||
|
@ -12,8 +15,8 @@ management.endpoints.web.path-mapping.prometheus = metrics
|
|||
management.endpoints.web.path-mapping.health = health
|
||||
|
||||
spring.datasource.url=jdbc:postgresql://localhost:5432/mdstoremanager
|
||||
spring.datasource.username=dnet
|
||||
spring.datasource.password=dnetPwd
|
||||
spring.datasource.username=
|
||||
spring.datasource.password=
|
||||
|
||||
spring.jpa.properties.hibernate.dialect = org.hibernate.dialect.PostgreSQLDialect
|
||||
|
||||
|
@ -31,12 +34,16 @@ dhp.mdstore-manager.hadoop.cluster = GARR
|
|||
dhp.mdstore-manager.hdfs.base-path = /data/dnet.dev/mdstore
|
||||
dhp.mdstore-manager.hadoop.user = dnet.dev
|
||||
|
||||
dhp.mdstore-manager.hadoop.zeppelin.base-url = https://iis-cdh5-test-gw.ocean.icm.edu.pl/zeppelin
|
||||
#dhp.mdstore-manager.hadoop.zeppelin.base-url = https://iis-cdh5-test-gw.ocean.icm.edu.pl/zeppelin
|
||||
#dhp.mdstore-manager.hadoop.zeppelin.login =
|
||||
#dhp.mdstore-manager.hadoop.zeppelin.password =
|
||||
dhp.mdstore-manager.hadoop.zeppelin.base-url = https://hadoop-zeppelin.garr-pa1.d4science.org
|
||||
dhp.mdstore-manager.hadoop.zeppelin.login =
|
||||
dhp.mdstore-manager.hadoop.zeppelin.password =
|
||||
|
||||
dhp.mdstore-manager.hadoop.zeppelin.name-prefix = mdstoreManager
|
||||
|
||||
dhp.mdstore-manager.inspector.records.max = 1000
|
||||
|
||||
dhp.swagger.api.host = localhost
|
||||
dhp.swagger.api.basePath = /
|
||||
# dhp.swagger.api.host = localhost
|
||||
dhp.swagger.api.basePath = /**
|
||||
|
|
|
@ -20,7 +20,7 @@
|
|||
<h1>Metadata Store Manager</h1>
|
||||
|
||||
<hr />
|
||||
<a href="./swagger-ui/" target="_blank">API documentation</a>
|
||||
<a href="./apidoc" target="_blank">API documentation</a>
|
||||
<hr />
|
||||
<a href="javascript:void(0)" data-toggle="modal" data-target="#newMdstoreModal">create a new mdstore</a>
|
||||
<hr />
|
||||
|
@ -77,14 +77,11 @@
|
|||
|
||||
<div class="float-right">
|
||||
<a href="./mdrecords/{{md.id}}/50" class="btn btn-sm btn-primary" target="_blank">inspect</a>
|
||||
|
||||
<div class="btn-group">
|
||||
<button class="btn btn-sm btn-outline-warning dropdown-toggle" disabled="disabled" ng-show="zeppelinTemplates.length == 0">zeppelin <span class="caret"></span></button>
|
||||
<div class="btn-group" ng-show="zeppelinTemplates.length > 0">
|
||||
<button class="btn btn-sm btn-warning dropdown-toggle" data-toggle="dropdown">zeppelin <span class="caret"></span></button>
|
||||
<div class="dropdown-menu dropdown-menu-right">
|
||||
<a class="dropdown-item small" href="./zeppelin/{{md.id}}/default" target="_blank">default note</a>
|
||||
<a class="dropdown-item small" href="./zeppelin/{{md.id}}/dc_native" target="_blank">note for native stores (oai_dc)</a>
|
||||
<a class="dropdown-item small" href="./zeppelin/{{md.id}}/datacite_native" target="_blank">note for native stores (datacite)</a>
|
||||
<a class="dropdown-item small" href="./zeppelin/{{md.id}}/oaf_cleaned" target="_blank">note for transformed stores</a>
|
||||
<a class="dropdown-item small" href="./zeppelin/{{md.id}}/{{t}}" target="_blank" ng-repeat="t in zeppelinTemplates">{{t}}</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@ -5,6 +5,7 @@ app.controller('mdstoreManagerController', function($scope, $http) {
|
|||
$scope.versions = [];
|
||||
$scope.openMdstore = '';
|
||||
$scope.openCurrentVersion = ''
|
||||
$scope.zeppelinTemplates = [];
|
||||
|
||||
$scope.forceVersionDelete = false;
|
||||
|
||||
|
@ -14,7 +15,16 @@ app.controller('mdstoreManagerController', function($scope, $http) {
|
|||
}, function errorCallback(res) {
|
||||
alert('ERROR: ' + res.data.message);
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
$scope.obtainZeppelinTemplates = function() {
|
||||
$http.get('./zeppelin/templates?' + $.now()).then(function successCallback(res) {
|
||||
$scope.zeppelinTemplates = res.data;
|
||||
}, function errorCallback(res) {
|
||||
alert('ERROR: ' + res.data.message);
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
$scope.newMdstore = function(format, layout, interpretation, dsName, dsId, apiId) {
|
||||
var url = './mdstores/new/' + encodeURIComponent(format) + '/' + encodeURIComponent(layout) + '/' + encodeURIComponent(interpretation);
|
||||
|
@ -112,5 +122,5 @@ app.controller('mdstoreManagerController', function($scope, $http) {
|
|||
};
|
||||
|
||||
$scope.reload();
|
||||
|
||||
$scope.obtainZeppelinTemplates();
|
||||
});
|
||||
|
|
|
@ -40,6 +40,10 @@
|
|||
<script th:inline="javascript">
|
||||
/*<![CDATA[*/
|
||||
|
||||
function mdId() {
|
||||
return /*[[${mdId}]]*/ '';
|
||||
}
|
||||
|
||||
function versionId() {
|
||||
return /*[[${versionId}]]*/ '';
|
||||
}
|
||||
|
@ -62,7 +66,16 @@
|
|||
<div class="col">
|
||||
<h1>Metadata Inspector</h1>
|
||||
|
||||
<br />
|
||||
<hr />
|
||||
<div ng-show="zeppelinTemplates.length > 0">
|
||||
<div class="btn-group">
|
||||
<button class="btn btn-sm btn-warning dropdown-toggle" data-toggle="dropdown">zeppelin <span class="caret"></span></button>
|
||||
<div class="dropdown-menu dropdown-menu-right">
|
||||
<a class="dropdown-item small" href="../../zeppelin/{{mdId}}/{{t}}" target="_blank" ng-repeat="t in zeppelinTemplates">{{t}}</a>
|
||||
</div>
|
||||
</div>
|
||||
<hr />
|
||||
</div>
|
||||
|
||||
<table class="table table-condensed table-sm small">
|
||||
<tr>
|
||||
|
@ -111,8 +124,12 @@
|
|||
<br />
|
||||
|
||||
<div class="card mt-4" ng-repeat="rec in records | filter:recordsFilter">
|
||||
<div class="card-header text-white bg-primary small">{{rec.id}}</div>
|
||||
<table class="table table-condensed table-striped small">
|
||||
<div class="card-header text-white bg-primary small">
|
||||
<span ng-show="rec.id">{{rec.id}}</span>
|
||||
<span ng-hide="rec.id">the record is unreadable</span>
|
||||
</div>
|
||||
<div class="card-body" ng-hide="rec.id">Invalid record format</div>
|
||||
<table class="table table-condensed table-striped small" ng-show="rec.id">
|
||||
<tr>
|
||||
<th class="col-xs-3">Original Id</th>
|
||||
<td class="col-xs-9">{{rec.originalId}}</td>
|
||||
|
@ -134,7 +151,7 @@
|
|||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
<div class="card-body">
|
||||
<div class="card-body" ng-show="rec.id">
|
||||
<span class="badge badge-success float-right">{{rec.encoding}}</span>
|
||||
<br />
|
||||
<pre class="small">{{rec.body}}</pre>
|
||||
|
@ -181,9 +198,11 @@
|
|||
var app = angular.module('mdInspectorApp', []);
|
||||
|
||||
app.controller('mdInspectorController', function($scope, $http) {
|
||||
$scope.mdId = mdId();
|
||||
$scope.records = [];
|
||||
$scope.versionId = versionId();
|
||||
$scope.limit = limit();
|
||||
$scope.zeppelinTemplates = [];
|
||||
|
||||
$scope.reload = function() {
|
||||
showSpinner();
|
||||
|
@ -199,9 +218,17 @@
|
|||
alert('ERROR: ' + res.data.message);
|
||||
});
|
||||
};
|
||||
|
||||
$scope.obtainZeppelinTemplates = function() {
|
||||
$http.get('../../zeppelin/templates?' + $.now()).then(function successCallback(res) {
|
||||
$scope.zeppelinTemplates = res.data;
|
||||
}, function errorCallback(res) {
|
||||
alert('ERROR: ' + res.data.message);
|
||||
});
|
||||
};
|
||||
|
||||
$scope.reload();
|
||||
|
||||
$scope.obtainZeppelinTemplates();
|
||||
});
|
||||
</script>
|
||||
|
||||
|
|
|
@ -1,9 +0,0 @@
|
|||
%pyspark
|
||||
|
||||
mdId = "__MDSTORE_ID__"
|
||||
mdVersion = "__VERSION__"
|
||||
path = "__PATH__"
|
||||
|
||||
print "MdStore ID:", mdId
|
||||
print "Version ID:", mdVersion
|
||||
print "Version Data Path:", path
|
|
@ -0,0 +1,8 @@
|
|||
%spark
|
||||
|
||||
val dsName = "__DS_NAME__"
|
||||
val dsId = "__DS_ID__"
|
||||
val apiId = "__API_ID__"
|
||||
val mdId = "__MDSTORE_ID__"
|
||||
val mdVersion = "__VERSION__"
|
||||
val path = "__PATH__"
|
|
@ -1,8 +1,8 @@
|
|||
package eu.dnetlib.data.mdstore.manager.controller;
|
||||
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
package eu.dnetlib.data.mdstore.manager.controller;
|
||||
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import static org.mockito.ArgumentMatchers.anyString;
|
||||
import static org.mockito.Mockito.never;
|
||||
import static org.mockito.Mockito.verify;
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
SpringBoot application implementing OpenAIRE REST API to manage
|
||||
- Datasources
|
||||
- Contexts
|
||||
- Communities
|
||||
- Funders
|
||||
- Projects
|
|
@ -0,0 +1,169 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<parent>
|
||||
<groupId>eu.dnetlib.dhp</groupId>
|
||||
<artifactId>apps</artifactId>
|
||||
<version>3.5.5-SNAPSHOT</version>
|
||||
<relativePath>../</relativePath>
|
||||
</parent>
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<artifactId>dnet-exporter-api</artifactId>
|
||||
<packaging>jar</packaging>
|
||||
|
||||
<!-- Add typical dependencies for a web application -->
|
||||
<dependencies>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-jdbc</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-data-jpa</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-cache</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>eu.dnetlib.dhp</groupId>
|
||||
<artifactId>dnet-exporter-model</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>eu.dnetlib</groupId>
|
||||
<artifactId>cnr-rmi-api</artifactId>
|
||||
<version>[2.0.0,3.0.0)</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-transports-http</artifactId>
|
||||
<version>3.1.5</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>eu.dnetlib</groupId>
|
||||
<artifactId>cnr-service-common</artifactId>
|
||||
<version>[2.0.0,3.0.0)</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>eu.dnetlib</groupId>
|
||||
<artifactId>dnet-openaireplus-mapping-utils</artifactId>
|
||||
<version>[6.3.0,7.0.0)</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.sun.jersey</groupId>
|
||||
<artifactId>jersey-client</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>eu.dnetlib</groupId>
|
||||
<artifactId>dnet-hadoop-commons</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>eu.dnetlib</groupId>
|
||||
<artifactId>dnet-objectstore-rmi</artifactId>
|
||||
<version>[2.0.0,3.0.0)</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.solr</groupId>
|
||||
<artifactId>solr-solrj</artifactId>
|
||||
<version>7.5.0</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.postgresql</groupId>
|
||||
<artifactId>postgresql</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-dbcp2</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.antlr</groupId>
|
||||
<artifactId>stringtemplate</artifactId>
|
||||
<version>3.2.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpclient</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpcore</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpmime</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>net.sf.supercsv</groupId>
|
||||
<artifactId>super-csv</artifactId>
|
||||
<version>2.4.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.code.gson</groupId>
|
||||
<artifactId>gson</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.datatype</groupId>
|
||||
<artifactId>jackson-datatype-joda</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.datatype</groupId>
|
||||
<artifactId>jackson-datatype-jsr310</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
<version>2.8.2</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongo-java-driver</artifactId>
|
||||
<version>3.4.2</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>eu.dnetlib</groupId>
|
||||
<artifactId>dnet-datasource-manager-common</artifactId>
|
||||
<version>[2.0.1,3.0.0)</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-web</artifactId>
|
||||
<version>5.3.8</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.vladmihalcea</groupId>
|
||||
<artifactId>hibernate-types-52</artifactId>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-help-plugin</artifactId>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-maven-plugin</artifactId>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
|
||||
</project>
|
|
@ -0,0 +1,18 @@
|
|||
package eu.dnetlib;
|
||||
|
||||
import static java.util.Arrays.asList;
|
||||
|
||||
import org.springframework.boot.autoconfigure.cache.CacheManagerCustomizer;
|
||||
import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
@Component
|
||||
public class CacheCustomizer implements CacheManagerCustomizer<ConcurrentMapCacheManager> {
|
||||
|
||||
@Override
|
||||
public void customize(final ConcurrentMapCacheManager cacheManager) {
|
||||
cacheManager
|
||||
.setCacheNames(asList("fundingpath-ids", "indexdsinfo-cache", "objectstoreid-cache", "context-cache", "context-cache-funder", "context-cache-community", "dsm-aggregationhistory-cache-v1", "dsm-aggregationhistory-cache-v2", "dsm-firstharvestdate-cache", "vocabularies-cache", "community-cache", "info"));
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,88 @@
|
|||
package eu.dnetlib;
|
||||
|
||||
import org.springdoc.core.GroupedOpenApi;
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.boot.autoconfigure.solr.SolrAutoConfiguration;
|
||||
import org.springframework.cache.annotation.EnableCaching;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.scheduling.annotation.EnableScheduling;
|
||||
|
||||
import eu.dnetlib.common.app.AbstractDnetApp;
|
||||
import eu.dnetlib.openaire.community.CommunityApiController;
|
||||
import eu.dnetlib.openaire.context.ContextApiController;
|
||||
import eu.dnetlib.openaire.dsm.DsmApiController;
|
||||
import eu.dnetlib.openaire.funders.FundersApiController;
|
||||
import eu.dnetlib.openaire.info.InfoController;
|
||||
import eu.dnetlib.openaire.project.ProjectsController;
|
||||
|
||||
@EnableCaching
|
||||
@EnableScheduling
|
||||
@SpringBootApplication
|
||||
@EnableAutoConfiguration(exclude = {
|
||||
SolrAutoConfiguration.class
|
||||
})
|
||||
public class DNetOpenaireExporterApplication extends AbstractDnetApp {
|
||||
|
||||
public static final String V1 = "1.0.0";
|
||||
|
||||
public static void main(final String[] args) throws Exception {
|
||||
SpringApplication.run(DNetOpenaireExporterApplication.class, args);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String swaggerTitle() {
|
||||
return "D-Net Exporter APIs";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String swaggerVersion() {
|
||||
return V1;
|
||||
}
|
||||
|
||||
@Bean
|
||||
@ConditionalOnProperty(value = "openaire.exporter.enable.dsm", havingValue = "true")
|
||||
public GroupedOpenApi dsm() {
|
||||
return newGroupedOpenApi("Datasource Manager", DsmApiController.class.getPackage().getName());
|
||||
}
|
||||
|
||||
@Bean
|
||||
@ConditionalOnProperty(value = "openaire.exporter.enable.project", havingValue = "true")
|
||||
public GroupedOpenApi projects() {
|
||||
return newGroupedOpenApi("OpenAIRE Projects", ProjectsController.class.getPackage().getName());
|
||||
}
|
||||
|
||||
@Bean
|
||||
@ConditionalOnProperty(value = "openaire.exporter.enable.funders", havingValue = "true")
|
||||
public GroupedOpenApi funders() {
|
||||
return newGroupedOpenApi("OpenAIRE Funders", FundersApiController.class.getPackage().getName());
|
||||
}
|
||||
|
||||
@Bean
|
||||
@ConditionalOnProperty(value = "openaire.exporter.enable.community", havingValue = "true")
|
||||
public GroupedOpenApi communities() {
|
||||
return newGroupedOpenApi("OpenAIRE Communities", CommunityApiController.class.getPackage().getName());
|
||||
}
|
||||
|
||||
@Bean
|
||||
@ConditionalOnProperty(value = "openaire.exporter.enable.context", havingValue = "true")
|
||||
public GroupedOpenApi contexts() {
|
||||
return newGroupedOpenApi("OpenAIRE Contexts", ContextApiController.class.getPackage().getName());
|
||||
}
|
||||
|
||||
@Bean
|
||||
@ConditionalOnProperty(value = "openaire.exporter.enable.info", havingValue = "true")
|
||||
public GroupedOpenApi info() {
|
||||
return newGroupedOpenApi("OpenAIRE Info", InfoController.class.getPackage().getName());
|
||||
}
|
||||
|
||||
private GroupedOpenApi newGroupedOpenApi(final String groupName, final String controllerPackage) {
|
||||
return GroupedOpenApi.builder()
|
||||
.group(groupName)
|
||||
.packagesToScan(controllerPackage)
|
||||
.build();
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,106 @@
|
|||
package eu.dnetlib;
|
||||
|
||||
import javax.sql.DataSource;
|
||||
|
||||
import org.apache.commons.dbcp2.BasicDataSource;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.cxf.endpoint.Client;
|
||||
import org.apache.cxf.frontend.ClientProxy;
|
||||
import org.apache.cxf.jaxws.JaxWsProxyFactoryBean;
|
||||
import org.apache.cxf.transport.http.HTTPConduit;
|
||||
import org.apache.cxf.transports.http.configuration.HTTPClientPolicy;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoClientOptions;
|
||||
import com.mongodb.ServerAddress;
|
||||
|
||||
import eu.dnetlib.DnetOpenaireExporterProperties.Jdbc;
|
||||
import eu.dnetlib.data.objectstore.rmi.ObjectStoreService;
|
||||
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
|
||||
import eu.dnetlib.enabling.is.registry.rmi.ISRegistryService;
|
||||
|
||||
/**
|
||||
* Created by claudio on 07/07/2017.
|
||||
*/
|
||||
@Configuration
|
||||
public class DNetOpenaireExporterConfiguration {
|
||||
|
||||
private static final Log log = LogFactory.getLog(DNetOpenaireExporterConfiguration.class);
|
||||
|
||||
@Autowired
|
||||
private DnetOpenaireExporterProperties props;
|
||||
|
||||
@Bean
|
||||
public ISLookUpService getLookUpService() {
|
||||
return getServiceStub(ISLookUpService.class, props.getIsLookupUrl());
|
||||
}
|
||||
|
||||
@Bean
|
||||
public ObjectStoreService getObjectStoreService() {
|
||||
return getServiceStub(ObjectStoreService.class, props.getObjectStoreServiceUrl());
|
||||
}
|
||||
|
||||
@Bean
|
||||
public ISRegistryService getRegistryService() {
|
||||
return getServiceStub(ISRegistryService.class, props.getIsRegistryServiceUrl());
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private <T> T getServiceStub(final Class<T> clazz, final String endpoint) {
|
||||
log.info(String.format("Initializing service stub %s, endpoint %s", clazz.toString(), endpoint));
|
||||
final JaxWsProxyFactoryBean jaxWsProxyFactory = new JaxWsProxyFactoryBean();
|
||||
jaxWsProxyFactory.setServiceClass(clazz);
|
||||
jaxWsProxyFactory.setAddress(endpoint);
|
||||
|
||||
final T service = (T) jaxWsProxyFactory.create();
|
||||
|
||||
final Client client = ClientProxy.getClient(service);
|
||||
if (client != null) {
|
||||
final HTTPConduit conduit = (HTTPConduit) client.getConduit();
|
||||
final HTTPClientPolicy policy = new HTTPClientPolicy();
|
||||
|
||||
log.info(String.format("setting connectTimeout to %s, receiveTimeout to %s for service %s", props.getCxfClientConnectTimeout(), props
|
||||
.getCxfClientReceiveTimeout(), clazz.getCanonicalName()));
|
||||
|
||||
policy.setConnectionTimeout(props.getCxfClientConnectTimeout());
|
||||
policy.setReceiveTimeout(props.getCxfClientReceiveTimeout());
|
||||
conduit.setClient(policy);
|
||||
}
|
||||
|
||||
return service;
|
||||
}
|
||||
|
||||
@Bean
|
||||
public DataSource getSqlDataSource() {
|
||||
final Jdbc jdbc = props.getJdbc();
|
||||
return getDatasource(jdbc.getDriverClassName(), jdbc.getUrl(), jdbc.getUser(), jdbc.getPwd(), jdbc.getMinIdle(), jdbc.getMaxRows());
|
||||
}
|
||||
|
||||
private BasicDataSource getDatasource(final String driverClassName,
|
||||
final String jdbcUrl,
|
||||
final String jdbcUser,
|
||||
final String jdbcPwd,
|
||||
final int jdbcMinIdle,
|
||||
final int jdbcMaxIdle) {
|
||||
final BasicDataSource d = new BasicDataSource();
|
||||
d.setDriverClassName(driverClassName);
|
||||
d.setUrl(jdbcUrl);
|
||||
d.setUsername(jdbcUser);
|
||||
d.setPassword(jdbcPwd);
|
||||
d.setMinIdle(jdbcMinIdle);
|
||||
d.setMaxIdle(jdbcMaxIdle);
|
||||
return d;
|
||||
}
|
||||
|
||||
@Bean
|
||||
public MongoClient getMongoClient() {
|
||||
return new MongoClient(
|
||||
new ServerAddress(props.getDatasource().getMongoHost(), props.getDatasource().getMongoPort()),
|
||||
MongoClientOptions.builder().connectionsPerHost(props.getDatasource().getMongoConnectionsPerHost()).build());
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,489 @@
|
|||
package eu.dnetlib;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.PropertySource;
|
||||
import org.springframework.core.io.ClassPathResource;
|
||||
import org.springframework.core.io.Resource;
|
||||
|
||||
/**
|
||||
* Created by Alessia Bardi on 31/03/17.
|
||||
*
|
||||
* @author Alessia Bardi, Claudio Atzori
|
||||
*/
|
||||
@Configuration
@PropertySource("classpath:global.properties")
@ConfigurationProperties(prefix = "openaire.exporter")
public class DnetOpenaireExporterProperties {

	// ISLOOKUP
	// Classpath resources holding the query templates executed against the IS
	// lookup service — presumably XQuery files shipped with the module; TODO
	// confirm against src/main/resources.
	private ClassPathResource findSolrIndexUrl;
	private ClassPathResource findIndexDsInfo;
	private ClassPathResource findObjectStore;
	private ClassPathResource findFunderContexts;
	private ClassPathResource findCommunityContexts;
	private ClassPathResource findContextProfiles;
	private ClassPathResource findContextProfilesByType;
	private ClassPathResource getRepoProfile;

	// Remote service endpoints.
	private String isLookupUrl;
	private String objectStoreServiceUrl;
	private String isRegistryServiceUrl;

	// Request handling defaults, used when the property is absent.
	private int requestWorkers = 100;
	private int requestTimeout = 10;

	// CXF client timeouts; units are not stated here — TODO confirm (the CXF
	// HTTPClientPolicy expects milliseconds, 120 would then be very short).
	private int cxfClientConnectTimeout = 120;
	private int cxfClientReceiveTimeout = 120;

	// Nested configuration groups, bound from "openaire.exporter.datasource.*",
	// "openaire.exporter.project.*", "openaire.exporter.jdbc.*",
	// "openaire.exporter.vocabularies.*".
	private Datasource datasource;
	private Project project;
	private Jdbc jdbc;

	private Vocabularies vocabularies;

	/** MongoDB connection settings. */
	public static class Datasource {

		// MONGODB
		private String mongoHost;
		private int mongoPort;
		private String mongoCollectionName;
		private String mongoDbName;
		private int mongoConnectionsPerHost;
		private int mongoQueryLimit;

		public String getMongoHost() {
			return mongoHost;
		}

		public void setMongoHost(final String mongoHost) {
			this.mongoHost = mongoHost;
		}

		public int getMongoPort() {
			return mongoPort;
		}

		public void setMongoPort(final int mongoPort) {
			this.mongoPort = mongoPort;
		}

		public String getMongoCollectionName() {
			return mongoCollectionName;
		}

		public void setMongoCollectionName(final String mongoCollectionName) {
			this.mongoCollectionName = mongoCollectionName;
		}

		public String getMongoDbName() {
			return mongoDbName;
		}

		public void setMongoDbName(final String mongoDbName) {
			this.mongoDbName = mongoDbName;
		}

		public int getMongoConnectionsPerHost() {
			return mongoConnectionsPerHost;
		}

		public void setMongoConnectionsPerHost(final int mongoConnectionsPerHost) {
			this.mongoConnectionsPerHost = mongoConnectionsPerHost;
		}

		public int getMongoQueryLimit() {
			return mongoQueryLimit;
		}

		public void setMongoQueryLimit(final int mongoQueryLimit) {
			this.mongoQueryLimit = mongoQueryLimit;
		}
	}

	/** Settings for the project export (TSV / DSpace / EPrints templates). */
	public static class Project {

		private int flushSize;
		private String tsvFields;
		private Resource projectsFundingQueryTemplate;
		private Resource dspaceTemplate;
		private Resource dspaceHeadTemplate;
		private Resource dspaceTailTemplate;
		private Resource eprintsTemplate;

		public int getFlushSize() {
			return flushSize;
		}

		public void setFlushSize(final int flushSize) {
			this.flushSize = flushSize;
		}

		public String getTsvFields() {
			return tsvFields;
		}

		public void setTsvFields(final String tsvFields) {
			this.tsvFields = tsvFields;
		}

		public Resource getProjectsFundingQueryTemplate() {
			return projectsFundingQueryTemplate;
		}

		public void setProjectsFundingQueryTemplate(final Resource projectsFundingQueryTemplate) {
			this.projectsFundingQueryTemplate = projectsFundingQueryTemplate;
		}

		public Resource getDspaceTemplate() {
			return dspaceTemplate;
		}

		public void setDspaceTemplate(final Resource dspaceTemplate) {
			this.dspaceTemplate = dspaceTemplate;
		}

		public Resource getDspaceHeadTemplate() {
			return dspaceHeadTemplate;
		}

		public void setDspaceHeadTemplate(final Resource dspaceHeadTemplate) {
			this.dspaceHeadTemplate = dspaceHeadTemplate;
		}

		public Resource getDspaceTailTemplate() {
			return dspaceTailTemplate;
		}

		public void setDspaceTailTemplate(final Resource dspaceTailTemplate) {
			this.dspaceTailTemplate = dspaceTailTemplate;
		}

		public Resource getEprintsTemplate() {
			return eprintsTemplate;
		}

		public void setEprintsTemplate(final Resource eprintsTemplate) {
			this.eprintsTemplate = eprintsTemplate;
		}
	}

	/** JDBC connection-pool settings. */
	public static class Jdbc {

		// JDBC
		// Injected directly from the Spring datasource configuration; read-only
		// by design — there is intentionally no setter.
		@Value("${spring.datasource.driverClassName}")
		private String driverClassName;

		private String url;
		private String user;
		private String pwd;
		private int minIdle;
		// NOTE(review): "maxidle" breaks camelCase ("maxIdle"), but renaming the
		// field/accessors would change the bound property name under
		// @ConfigurationProperties relaxed binding, so it is left as-is.
		private int maxidle;
		private int maxRows;

		public String getDriverClassName() {
			return driverClassName;
		}

		public String getUrl() {
			return url;
		}

		public void setUrl(final String url) {
			this.url = url;
		}

		public String getUser() {
			return user;
		}

		public void setUser(final String user) {
			this.user = user;
		}

		public String getPwd() {
			return pwd;
		}

		public void setPwd(final String pwd) {
			this.pwd = pwd;
		}

		public int getMinIdle() {
			return minIdle;
		}

		public void setMinIdle(final int minIdle) {
			this.minIdle = minIdle;
		}

		public int getMaxidle() {
			return maxidle;
		}

		public void setMaxidle(final int maxidle) {
			this.maxidle = maxidle;
		}

		public int getMaxRows() {
			return maxRows;
		}

		public void setMaxRows(final int maxRows) {
			this.maxRows = maxRows;
		}
	}

	/**
	 * Swagger UI metadata.
	 * NOTE(review): no field of this type exists in the enclosing class —
	 * presumably bound or instantiated elsewhere; confirm before removing.
	 */
	public static class Swagger {

		private String apiTitle;
		private String apiDescription;
		private String apiLicense;
		private String apiLicenseUrl;
		private String apiContactName;
		private String apiContactUrl;
		private String apiContactEmail;

		public String getApiTitle() {
			return apiTitle;
		}

		public void setApiTitle(final String apiTitle) {
			this.apiTitle = apiTitle;
		}

		public String getApiDescription() {
			return apiDescription;
		}

		public void setApiDescription(final String apiDescription) {
			this.apiDescription = apiDescription;
		}

		public String getApiLicense() {
			return apiLicense;
		}

		public void setApiLicense(final String apiLicense) {
			this.apiLicense = apiLicense;
		}

		public String getApiLicenseUrl() {
			return apiLicenseUrl;
		}

		public void setApiLicenseUrl(final String apiLicenseUrl) {
			this.apiLicenseUrl = apiLicenseUrl;
		}

		public String getApiContactName() {
			return apiContactName;
		}

		public void setApiContactName(final String apiContactName) {
			this.apiContactName = apiContactName;
		}

		public String getApiContactUrl() {
			return apiContactUrl;
		}

		public void setApiContactUrl(final String apiContactUrl) {
			this.apiContactUrl = apiContactUrl;
		}

		public String getApiContactEmail() {
			return apiContactEmail;
		}

		public void setApiContactEmail(final String apiContactEmail) {
			this.apiContactEmail = apiContactEmail;
		}
	}

	/** Endpoints of the vocabularies service. */
	public static class Vocabularies {

		private String baseUrl;

		private String countriesEndpoint;

		private String datasourceTypologiesEndpoint;

		public String getCountriesEndpoint() {
			return countriesEndpoint;
		}

		public void setCountriesEndpoint(final String countriesEndpoint) {
			this.countriesEndpoint = countriesEndpoint;
		}

		public String getBaseUrl() {
			return baseUrl;
		}

		public void setBaseUrl(final String baseUrl) {
			this.baseUrl = baseUrl;
		}

		public String getDatasourceTypologiesEndpoint() {
			return datasourceTypologiesEndpoint;
		}

		public void setDatasourceTypologiesEndpoint(final String datasourceTypologiesEndpoint) {
			this.datasourceTypologiesEndpoint = datasourceTypologiesEndpoint;
		}
	}

	public ClassPathResource getFindSolrIndexUrl() {
		return findSolrIndexUrl;
	}

	public void setFindSolrIndexUrl(final ClassPathResource findSolrIndexUrl) {
		this.findSolrIndexUrl = findSolrIndexUrl;
	}

	public ClassPathResource getFindIndexDsInfo() {
		return findIndexDsInfo;
	}

	public ClassPathResource getFindObjectStore() {
		return findObjectStore;
	}

	public void setFindObjectStore(final ClassPathResource findObjectStore) {
		this.findObjectStore = findObjectStore;
	}

	public void setFindIndexDsInfo(final ClassPathResource findIndexDsInfo) {
		this.findIndexDsInfo = findIndexDsInfo;
	}

	public ClassPathResource getFindFunderContexts() {
		return findFunderContexts;
	}

	public void setFindFunderContexts(final ClassPathResource findFunderContexts) {
		this.findFunderContexts = findFunderContexts;
	}

	public ClassPathResource getFindCommunityContexts() {
		return findCommunityContexts;
	}

	public ClassPathResource getFindContextProfiles() {
		return findContextProfiles;
	}

	public ClassPathResource getFindContextProfilesByType() {
		return findContextProfilesByType;
	}

	public void setFindContextProfiles(final ClassPathResource findContextProfiles) {
		this.findContextProfiles = findContextProfiles;
	}

	public void setFindContextProfilesByType(final ClassPathResource findContextProfilesByType) {
		this.findContextProfilesByType = findContextProfilesByType;
	}

	public void setFindCommunityContexts(final ClassPathResource findCommunityContexts) {
		this.findCommunityContexts = findCommunityContexts;
	}

	public ClassPathResource getGetRepoProfile() {
		return getRepoProfile;
	}

	public void setGetRepoProfile(final ClassPathResource getRepoProfile) {
		this.getRepoProfile = getRepoProfile;
	}

	public String getIsLookupUrl() {
		return isLookupUrl;
	}

	public void setIsLookupUrl(final String isLookupUrl) {
		this.isLookupUrl = isLookupUrl;
	}

	public String getObjectStoreServiceUrl() {
		return objectStoreServiceUrl;
	}

	public void setObjectStoreServiceUrl(final String objectStoreServiceUrl) {
		this.objectStoreServiceUrl = objectStoreServiceUrl;
	}

	public String getIsRegistryServiceUrl() {
		return isRegistryServiceUrl;
	}

	public void setIsRegistryServiceUrl(final String isRegistryServiceUrl) {
		this.isRegistryServiceUrl = isRegistryServiceUrl;
	}

	public int getRequestWorkers() {
		return requestWorkers;
	}

	public void setRequestWorkers(final int requestWorkers) {
		this.requestWorkers = requestWorkers;
	}

	public int getRequestTimeout() {
		return requestTimeout;
	}

	public void setRequestTimeout(final int requestTimeout) {
		this.requestTimeout = requestTimeout;
	}

	public int getCxfClientConnectTimeout() {
		return cxfClientConnectTimeout;
	}

	public void setCxfClientConnectTimeout(final int cxfClientConnectTimeout) {
		this.cxfClientConnectTimeout = cxfClientConnectTimeout;
	}

	public int getCxfClientReceiveTimeout() {
		return cxfClientReceiveTimeout;
	}

	public void setCxfClientReceiveTimeout(final int cxfClientReceiveTimeout) {
		this.cxfClientReceiveTimeout = cxfClientReceiveTimeout;
	}

	public Datasource getDatasource() {
		return datasource;
	}

	public void setDatasource(final Datasource datasource) {
		this.datasource = datasource;
	}

	public Project getProject() {
		return project;
	}

	public void setProject(final Project project) {
		this.project = project;
	}

	public Jdbc getJdbc() {
		return jdbc;
	}

	public void setJdbc(final Jdbc jdbc) {
		this.jdbc = jdbc;
	}

	public Vocabularies getVocabularies() {
		return vocabularies;
	}

	public void setVocabularies(final Vocabularies vocabularies) {
		this.vocabularies = vocabularies;
	}
}
|
|
@ -0,0 +1,16 @@
|
|||
package eu.dnetlib;
|
||||
|
||||
import org.springframework.stereotype.Controller;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
|
||||
@Controller
|
||||
public class SwaggerController {
|
||||
|
||||
@RequestMapping(value = {
|
||||
"/", "/docs", "swagger-ui.html", "swagger-ui/"
|
||||
})
|
||||
public String index() {
|
||||
return "redirect:swagger-ui/index.html";
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,111 @@
|
|||
package eu.dnetlib.openaire.common;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.exception.ExceptionUtils;
|
||||
import org.apache.commons.lang3.time.StopWatch;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.web.bind.MethodArgumentNotValidException;
|
||||
import org.springframework.web.bind.annotation.ExceptionHandler;
|
||||
import org.springframework.web.bind.annotation.ResponseBody;
|
||||
import org.springframework.web.bind.annotation.ResponseStatus;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonAutoDetect;
|
||||
|
||||
import eu.dnetlib.openaire.exporter.exceptions.CommunityException;
|
||||
import eu.dnetlib.openaire.exporter.exceptions.DsmApiException;
|
||||
import eu.dnetlib.openaire.exporter.exceptions.ResourceNotFoundException;
|
||||
import eu.dnetlib.openaire.exporter.model.dsm.Response;
|
||||
|
||||
/**
|
||||
* Created by claudio on 18/07/2017.
|
||||
*/
|
||||
public abstract class AbstractExporterController {
|
||||
|
||||
private static final Log log = LogFactory.getLog(AbstractExporterController.class); // NOPMD by marko on 11/24/08 5:02 PM
|
||||
|
||||
@ResponseBody
|
||||
@ExceptionHandler({
|
||||
DsmApiException.class, CommunityException.class, Exception.class
|
||||
})
|
||||
@ResponseStatus(value = HttpStatus.INTERNAL_SERVER_ERROR)
|
||||
public ErrorMessage handle500(final Exception e) {
|
||||
return _handleError(e);
|
||||
}
|
||||
|
||||
@ResponseBody
|
||||
@ExceptionHandler({
|
||||
ResourceNotFoundException.class
|
||||
})
|
||||
@ResponseStatus(value = HttpStatus.NOT_FOUND)
|
||||
public ErrorMessage handle404(final Exception e) {
|
||||
return _handleError(e);
|
||||
}
|
||||
|
||||
@ResponseBody
|
||||
@ExceptionHandler(MethodArgumentNotValidException.class)
|
||||
@ResponseStatus(HttpStatus.BAD_REQUEST)
|
||||
public List<ErrorMessage> handle400(final MethodArgumentNotValidException e) {
|
||||
return e.getBindingResult()
|
||||
.getFieldErrors()
|
||||
.stream()
|
||||
.map(fe -> new ErrorMessage(
|
||||
String.format("field '%s'", fe.getField()),
|
||||
String.format("rejected value '%s'", fe.getRejectedValue()),
|
||||
fe.getDefaultMessage()))
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
private ErrorMessage _handleError(final Exception e) {
|
||||
log.error(e);
|
||||
if (StringUtils.containsIgnoreCase(ExceptionUtils.getRootCauseMessage(e), "Broken pipe")) {
|
||||
return null; // socket is closed, cannot return any response
|
||||
} else {
|
||||
return new ErrorMessage(e);
|
||||
}
|
||||
}
|
||||
|
||||
// HELPERS
|
||||
protected <T extends Response> T prepareResponse(final int page, final int size, final StopWatch stopWatch, final T rsp) {
|
||||
rsp.getHeader()
|
||||
.setTime(stopWatch.getTime())
|
||||
.setPage(page)
|
||||
.setSize(size);
|
||||
return rsp;
|
||||
}
|
||||
|
||||
@JsonAutoDetect
|
||||
public class ErrorMessage {
|
||||
|
||||
private final String message;
|
||||
private final String details;
|
||||
private final String stacktrace;
|
||||
|
||||
public ErrorMessage(final Exception e) {
|
||||
this(e.getMessage(), "", ExceptionUtils.getStackTrace(e));
|
||||
}
|
||||
|
||||
public ErrorMessage(final String message, final String details, final String stacktrace) {
|
||||
this.message = message;
|
||||
this.details = details;
|
||||
this.stacktrace = stacktrace;
|
||||
}
|
||||
|
||||
public String getMessage() {
|
||||
return this.message;
|
||||
}
|
||||
|
||||
public String getStacktrace() {
|
||||
return this.stacktrace;
|
||||
}
|
||||
|
||||
public String getDetails() {
|
||||
return details;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,58 @@
|
|||
package eu.dnetlib.openaire.common;
|
||||
|
||||
import java.sql.Array;
|
||||
import java.sql.Connection;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
import javax.persistence.AttributeConverter;
|
||||
import javax.persistence.Converter;
|
||||
import javax.sql.DataSource;
|
||||
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
|
||||
/**
|
||||
* Created by claudio on 05/07/2017.
|
||||
*/
|
||||
@Converter
|
||||
public class ConverterTextArray implements AttributeConverter<List<String>, Array>, ApplicationContextAware {
|
||||
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@Override
|
||||
public Array convertToDatabaseColumn(List<String> attribute) {
|
||||
|
||||
final Map<String, DataSource> datasources = applicationContext.getBeansOfType(DataSource.class);
|
||||
DataSource source = datasources.values().stream().findFirst().get();
|
||||
|
||||
try {
|
||||
Connection conn = source.getConnection();
|
||||
return conn.createArrayOf("text", attribute.toArray());
|
||||
|
||||
} catch (SQLException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
return null;
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> convertToEntityAttribute(Array dbData) {
|
||||
try {
|
||||
return Arrays.stream((Object[]) dbData.getArray()).map(d -> (String) d).collect(Collectors.toList());
|
||||
} catch (SQLException e) {
|
||||
e.printStackTrace();
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setApplicationContext(final ApplicationContext applicationContext) throws BeansException {
|
||||
this.applicationContext = applicationContext;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,31 @@
|
|||
package eu.dnetlib.openaire.common;
|
||||
|
||||
/**
 * String constants shared by the exporter controllers, mostly used to tag and
 * group operations on the Swagger UI.
 */
public final class ExporterConstants {

	/** Constants holder: not meant to be instantiated. */
	private ExporterConstants() {}

	/*
	 * Tags used to group the operations on the swagger UI
	 */
	public static final String C = "Community";
	public static final String C_CP = "Community content providers";
	public static final String C_PJ = "Community projects";
	public static final String C_ZC = "Community Zenodo Communities";
	public static final String C_O = "Community Organizations";
	public static final String C_SUB = "Subcommunities";

	public static final String DS = "Datasource";
	public static final String API = "Interface";
	public static final String R = "Read";
	public static final String W = "Write";

	public static final String D = "Deprecated";
	public static final String M = "Management";

	public static final String DSPACE = "DSpace";
	public static final String EPRINT = "EPrints";
	public static final String TSV = "TSV";
	public static final String STREAMING = "Streaming";

	public static final String OAI = "oai";
	public static final String SET = "set";

}
|
|
@ -0,0 +1,106 @@
|
|||
package eu.dnetlib.openaire.common;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.sql.*;
|
||||
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.engine.spi.SharedSessionContractImplementor;
|
||||
import org.hibernate.usertype.UserType;
|
||||
|
||||
/**
|
||||
* Created by claudio on 05/07/2017.
|
||||
*/
|
||||
/**
 * Hibernate {@link UserType} mapping a SQL ARRAY column to a Java array value.
 *
 * NOTE(review): several contract issues are flagged inline below; they are
 * left untouched because changing them safely requires testing against the
 * live Hibernate/JDBC stack.
 */
public class GenericArrayUserType<T extends Serializable> implements UserType {

	protected static final int[] SQL_TYPES = { Types.ARRAY };

	// NOTE(review): never assigned anywhere in this class, so returnedClass()
	// always returns null — confirm whether a subclass is expected to set it.
	private Class<T> typeParameterClass;

	@Override
	public Object assemble(Serializable cached, Object owner) throws HibernateException {
		return this.deepCopy(cached);
	}

	@Override
	public Object deepCopy(Object value) throws HibernateException {
		// NOTE(review): returns the same instance, which is only correct for an
		// immutable type, yet isMutable() below reports true — inconsistent.
		return value;
	}

	@SuppressWarnings("unchecked")
	@Override
	public Serializable disassemble(Object value) throws HibernateException {
		return (T) this.deepCopy(value);
	}

	@Override
	public boolean equals(Object x, Object y) throws HibernateException {

		if (x == null) {
			return y == null;
		}
		return x.equals(y);
	}

	@Override
	public int hashCode(Object x) throws HibernateException {
		return x.hashCode();
	}

	/**
	 * Reads the ARRAY column named in {@code names[0]} and returns its Java
	 * array representation.
	 */
	@Override
	public Object nullSafeGet(final ResultSet resultSet,
			final String[] names,
			final SharedSessionContractImplementor sharedSessionContractImplementor,
			final Object o)
			throws HibernateException, SQLException {
		// NOTE(review): wasNull() reports on the LAST column read; no column has
		// been read at this point, so this guard is ineffective per the JDBC spec.
		if (resultSet.wasNull()) {
			return null;
		}
		// NOTE(review): returns Integer[0] for a NULL array regardless of the
		// type parameter T — works only when T is Integer[]; confirm intent.
		if (resultSet.getArray(names[0]) == null) {
			return new Integer[0];
		}

		Array array = resultSet.getArray(names[0]);
		@SuppressWarnings("unchecked")
		T javaArray = (T) array.getArray();
		return javaArray;
	}

	/**
	 * Binds {@code value} (a Java array) to the statement as a SQL ARRAY, or
	 * sets the parameter to NULL.
	 */
	@Override
	public void nullSafeSet(final PreparedStatement statement,
			final Object value,
			final int index,
			final SharedSessionContractImplementor session)
			throws HibernateException, SQLException {
		Connection connection = statement.getConnection();
		if (value == null) {
			statement.setNull(index, SQL_TYPES[0]);
		} else {
			@SuppressWarnings("unchecked")
			T castObject = (T) value;
			// NOTE(review): the element type name is hardcoded to "integer",
			// which contradicts the generic parameter T — confirm usage.
			Array array = connection.createArrayOf("integer", (Object[]) castObject);
			statement.setArray(index, array);
		}
	}

	@Override
	public boolean isMutable() {
		return true;
	}

	@Override
	public Object replace(Object original, Object target, Object owner) throws HibernateException {
		return original;
	}

	@Override
	public Class<T> returnedClass() {
		return typeParameterClass;
	}

	@Override
	public int[] sqlTypes() {
		return new int[] { Types.ARRAY };
	}

}
|
|
@ -0,0 +1,60 @@
|
|||
package eu.dnetlib.openaire.common;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
|
||||
import eu.dnetlib.openaire.dsm.dao.utils.IndexDsInfo;
|
||||
import eu.dnetlib.openaire.exporter.model.context.Context;
|
||||
|
||||
/**
 * Client abstraction over the dnet Information Service (IS): index/objectstore
 * lookups and maintenance of context profiles. Most context-related operations
 * are deprecated — presumably superseded elsewhere; confirm with the
 * implementation before relying on them.
 */
public interface ISClient {

	/** Resolves the index datasource currently in use (id, format, layout, interpretation). */
	IndexDsInfo calculateCurrentIndexDsInfo() throws Exception;

	/** Returns the objectstore identifier associated with the given datasource id. */
	String getObjectStoreId(String dsId) throws Exception;

	/** Returns the funder context profiles, keyed by context id. */
	@Deprecated
	Map<String, Context> getFunderContextMap() throws IOException;

	/** Returns the community context profiles, keyed by context id. */
	@Deprecated
	Map<String, Context> getCommunityContextMap() throws IOException;

	/** Returns the context profiles matching any of the given types (all profiles when empty/null). */
	@Deprecated
	Map<String, Context> getContextMap(final List<String> type) throws IOException;

	/** Creates or updates a param of the given context; value is XML-escaped when toEscape is true. */
	@Deprecated
	void updateContextParam(String id, String name, String value, boolean toEscape);

	/** Updates an attribute of the given context element. */
	@Deprecated
	void updateContextAttribute(String id, String name, String value);

	/** Inserts the given XML fragment as a concept under a context category. */
	@Deprecated
	void addConcept(String id, String categoryId, String data);

	/** Removes a concept from a context category. */
	@Deprecated
	void removeConcept(String id, String categoryId, String conceptId);

	/** Invalidates any cached IS lookup results held by the implementation. */
	void dropCache();

	/**
	 *
	 * @param id
	 *            id of the concept to be updated (i.e. ni::projects::2)
	 * @param name
	 *            name of the attribute to be updated
	 * @param value
	 *            new value for the attribute
	 */
	@Deprecated
	void updateConceptAttribute(String id, String name, String value);

	/** Updates a param of the given concept (value is escaped by the implementation). */
	@Deprecated
	void updateConceptParam(String id, String name, String value);

	/** Same as {@link #updateConceptParam} but the value is stored verbatim. */
	@Deprecated
	void updateConceptParamNoEscape(String id, String name, String value);

	/** Returns the raw XML of the profile with the given identifier. */
	String getProfile(String profileId) throws ISLookUpException;

}
|
|
@ -0,0 +1,382 @@
|
|||
package eu.dnetlib.openaire.common;
|
||||
|
||||
import static eu.dnetlib.openaire.common.Utils.escape;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Queue;
|
||||
import java.util.concurrent.LinkedBlockingQueue;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.cache.annotation.CacheEvict;
|
||||
import org.springframework.cache.annotation.Cacheable;
|
||||
import org.springframework.core.io.ClassPathResource;
|
||||
import org.springframework.scheduling.annotation.Scheduled;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
import com.google.common.escape.Escaper;
|
||||
import com.google.common.xml.XmlEscapers;
|
||||
|
||||
import eu.dnetlib.DnetOpenaireExporterProperties;
|
||||
import eu.dnetlib.enabling.datasources.common.DsmException;
|
||||
import eu.dnetlib.enabling.datasources.common.DsmRuntimeException;
|
||||
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
|
||||
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
|
||||
import eu.dnetlib.openaire.context.ContextMappingUtils;
|
||||
import eu.dnetlib.openaire.dsm.dao.utils.IndexDsInfo;
|
||||
import eu.dnetlib.openaire.exporter.model.context.Context;
|
||||
|
||||
/**
|
||||
* Created by claudio on 20/10/2016.
|
||||
*/
|
||||
@Component
public class ISClientImpl implements ISClient {

	private static final Log log = LogFactory.getLog(ISClientImpl.class);

	// Exporter configuration: provides the classpath resources holding the
	// XQuery templates used by the lookup methods below.
	@Autowired
	private DnetOpenaireExporterProperties config;

	// Remote IS lookup service endpoint.
	@Autowired
	private ISLookUpService isLookUpService;
|
||||
|
||||
/**
 * Resolves the current index datasource info via two IS lookups (solr url +
 * index DS coordinates). Results are cached under "indexdsinfo-cache".
 */
@Override
@Cacheable("indexdsinfo-cache")
public IndexDsInfo calculateCurrentIndexDsInfo() throws Exception {
	log.warn("calculateCurrentIndexDsInfo(): not using cache");
	final String[] arr;
	try {
		// NOTE(review): assumes the lookup returns at least 3 "@@@"-separated
		// fields; a malformed reply raises ArrayIndexOutOfBoundsException,
		// which is not mapped to DsmException below.
		arr = _isLookUp(_getQuery(config.getFindIndexDsInfo())).split("@@@");
		return new IndexDsInfo(
				_isLookUp(_getQuery(config.getFindSolrIndexUrl())),
				arr[0].trim(), arr[1].trim(), arr[2].trim());
	} catch (IOException | ISLookUpException e) {
		// NOTE(review): the cause 'e' is dropped; pass it along if DsmException
		// offers a (String, Throwable) constructor — confirm.
		throw new DsmException("unable fetch index DS information from IS");
	}
}
|
||||
|
||||
/**
 * Looks up the objectstore id bound to the given datasource id; cached under
 * "objectstoreid-cache".
 */
@Override
@Cacheable("objectstoreid-cache")
public String getObjectStoreId(final String dsId) throws Exception {
	log.warn(String.format("getObjectStoreId(%s): not using cache", dsId));
	try {
		// The template expects the datasource id as its single %s placeholder.
		final String xqueryTemplate = _getQuery(config.getFindObjectStore());
		return _isLookUp(String.format(xqueryTemplate, dsId));
	} catch (IOException | ISLookUpException e) {
		// NOTE(review): the cause 'e' is dropped; pass it along if DsmException
		// offers a (String, Throwable) constructor — confirm.
		throw new DsmException("unable to find objectstore for ds " + dsId);
	}
}
|
||||
|
||||
/** Returns funder context profiles keyed by id; cached under "context-cache-funder". */
@Override
@Cacheable("context-cache-funder")
@Deprecated
public Map<String, Context> getFunderContextMap() throws IOException {
	return _processContext(_getQuery(config.getFindFunderContexts()));
}
|
||||
|
||||
/** Returns community context profiles keyed by id; cached under "context-cache-community". */
@Override
@Cacheable("context-cache-community")
@Deprecated
public Map<String, Context> getCommunityContextMap() throws IOException {
	return _processContext(_getQuery(config.getFindCommunityContexts()));
}
|
||||
|
||||
/**
 * Returns context profiles keyed by id: all profiles when no type filter is
 * given, otherwise only those whose context/@type matches one of the given
 * types. Cached under "context-cache".
 */
@Override
@Cacheable("context-cache")
@Deprecated
public Map<String, Context> getContextMap(final List<String> type) throws IOException {
	if (Objects.isNull(type) || type.isEmpty()) {
		return _processContext(_getQuery(config.getFindContextProfiles()));
	} else {
		try {
			// The template's %s placeholder receives an XPath predicate ORing
			// one @type comparison per requested type.
			final String xqueryTemplate = _getQuery(config.getFindContextProfilesByType());
			final String xquery = String.format(xqueryTemplate, type.stream()
					.map(t -> String.format("./RESOURCE_PROFILE/BODY/CONFIGURATION/context/@type = '%s'", t))
					.collect(Collectors.joining(" or ")));

			return _processContext(xquery);
		} catch (final Exception e) {
			// NOTE(review): log.error(e.getMessage()) drops the stack trace,
			// though the cause is preserved in the rethrown RuntimeException.
			log.error(e.getMessage());
			throw new RuntimeException(e);
		}

	}
}
|
||||
|
||||
/**
 * Creates or updates a param of the given context: updates in place when the
 * param already exists, inserts it otherwise. Evicts the context caches.
 *
 * NOTE(review): the existence check (getSize) and the subsequent write are
 * not atomic — concurrent callers may race; confirm whether this matters here.
 */
@Override
@CacheEvict(value = {
		"context-cache", "context-cache-funder"
}, allEntries = true)
@Deprecated
public void updateContextParam(final String id, final String name, final String value, final boolean toEscape) {
	if (getSize(id, name) > 0) {
		try {
			_quickSeachProfile(getXQuery(id, name, value, toEscape));
		} catch (final ISLookUpException e) {
			throw new DsmRuntimeException(String.format("unable to update context param [id: %s, name: %s, value: %s]", id, name, value), e);
		}
	} else {
		try {
			_quickSeachProfile(getInsertXQuery(id, name, value, toEscape));
		} catch (final ISLookUpException e) {
			throw new DsmRuntimeException(String.format("unable to insert context param [id: %s, name: %s, value: %s]", id, name, value), e);
		}
	}

}
|
||||
|
||||
/**
 * Counts the context profiles that already define a param with the given name
 * under the context with the given id (used to decide insert vs. update).
 */
private int getSize(final String id, final String name) {
	int size = 0;
	try {
		size = _quickSeachProfile(String
				.format("for $x in collection('/db/DRIVER/ContextDSResources/ContextDSResourceType') where $x//context[@id='%s']/param[@name='%s'] return $x", id, name))
				.size();
	} catch (final ISLookUpException e) {
		throw new DsmRuntimeException("unable to execute search query", e);
	}
	return size;
}
|
||||
|
||||
/**
 * Updates an attribute of the given context element; the value is XML-escaped
 * before being embedded in the XQuery. Evicts the context caches.
 */
@Override
@CacheEvict(value = {
		"context-cache", "context-cache-funder"
}, allEntries = true)
@Deprecated
public void updateContextAttribute(final String id, final String name, final String value) {
	final Escaper esc = XmlEscapers.xmlAttributeEscaper();
	try {
		_quickSeachProfile(String.format("update value collection('/db/DRIVER/ContextDSResources/ContextDSResourceType')" +
				"/RESOURCE_PROFILE/BODY/CONFIGURATION/context[./@id = '%s']/@%s with '%s'", id, name, escape(esc, value)));
	} catch (final ISLookUpException e) {
		throw new DsmRuntimeException(String.format("unable update context attribute [id: %s, name: %s, data: %s]", id, name, value), e);
	}
}
|
||||
|
||||
/**
 * Inserts the given XML fragment as a concept under the given context
 * category. Evicts the context caches.
 *
 * NOTE(review): 'data' is concatenated into the XQuery without escaping, so it
 * must be a trusted, well-formed XML fragment — confirm callers guarantee this.
 */
@Override
@CacheEvict(value = {
		"context-cache", "context-cache-funder"
}, allEntries = true)
@Deprecated
public void addConcept(final String id, final String categoryId, final String data) {
	try {
		_quickSeachProfile(String.format("update insert %s into collection('/db/DRIVER/ContextDSResources/ContextDSResourceType')" +
				"/RESOURCE_PROFILE/BODY/CONFIGURATION/context[./@id = '%s']/category[./@id = '%s']", data, id, categoryId));
	} catch (final ISLookUpException e) {
		throw new DsmRuntimeException(String.format("unable add concept [id: %s, categoryId: %s, data: %s]", id, categoryId, data), e);
	}
}
|
||||
|
||||
/**
 * Deletes a concept from the given context category. Evicts the context
 * caches. No error is raised when the concept does not exist — the XQuery
 * simply matches nothing.
 */
@Override
@CacheEvict(value = {
		"context-cache", "context-cache-funder"
}, allEntries = true)
@Deprecated
public void removeConcept(final String id, final String categoryId, final String conceptId) {
	try {
		_quickSeachProfile(String.format("for $concept in collection('/db/DRIVER/ContextDSResources/ContextDSResourceType')" +
				"/RESOURCE_PROFILE/BODY/CONFIGURATION/context[./@id = '%s']" +
				"/category[./@id = '%s']/concept[./@id = '%s'] " +
				"return update delete $concept", id, categoryId, conceptId));
	} catch (final ISLookUpException e) {
		throw new DsmRuntimeException(String.format("unable remove concept [id: %s, categoryId: %s, conceptId: %s]", id, categoryId, conceptId), e);
	}
}
|
||||
|
||||
@Override
|
||||
@CacheEvict(value = {
|
||||
"context-cache", "context-cache-community", "context-cache-funder"
|
||||
}, allEntries = true)
|
||||
@Deprecated
|
||||
public void updateConceptAttribute(final String id, final String name, final String value) {
|
||||
final Escaper esc = XmlEscapers.xmlAttributeEscaper();
|
||||
try {
|
||||
_quickSeachProfile(String.format("update value collection('/db/DRIVER/ContextDSResources/ContextDSResourceType')" +
|
||||
"/RESOURCE_PROFILE/BODY/CONFIGURATION/context/category/concept[./@id = '%s']/@%s with '%s'", id, name, escape(esc, value)));
|
||||
} catch (final ISLookUpException e) {
|
||||
throw new DsmRuntimeException(String.format("unable update concept attribute [id: %s, name: %s, value: %s]", id, name, value), e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
@CacheEvict(value = {
|
||||
"context-cache", "context-cache-funder"
|
||||
}, allEntries = true)
|
||||
@Deprecated
|
||||
public void updateConceptParam(final String id, final String name, final String value) {
|
||||
try {
|
||||
_quickSeachProfile(getConceptXQuery(id, name, value));
|
||||
} catch (final ISLookUpException e) {
|
||||
throw new DsmRuntimeException(String.format("unable update concept param [id: %s, name: %s, value: %s]", id, name, value), e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@CacheEvict(value = {
|
||||
"context-cache", "context-cache-funder"
|
||||
}, allEntries = true)
|
||||
@Deprecated
|
||||
public void updateConceptParamNoEscape(final String id, final String name, final String value) {
|
||||
try {
|
||||
_quickSeachProfile(getConceptXQueryNoEscape(id, name, value));
|
||||
} catch (final ISLookUpException e) {
|
||||
throw new DsmRuntimeException(String.format("unable update concept param [id: %s, name: %s, value: %s]", id, name, value), e);
|
||||
}
|
||||
}
|
||||
|
||||
/// HELPERS
|
||||
|
||||
@Deprecated
|
||||
private String getInsertXQuery(final String id, final String paramName, final String paramValue, final boolean toEscape) {
|
||||
String value;
|
||||
if (toEscape) {
|
||||
value = escape(XmlEscapers.xmlContentEscaper(), paramValue);
|
||||
} else {
|
||||
value = paramValue;
|
||||
}
|
||||
if (StringUtils.isNotBlank(value)) {
|
||||
return String.format("update insert <param name='%s'>%s</param> into collection('/db/DRIVER/ContextDSResources/ContextDSResourceType')" +
|
||||
"/RESOURCE_PROFILE/BODY/CONFIGURATION/context[./@id = '%s']", paramName, value, id);
|
||||
} else {
|
||||
return String.format("update insert <param name='%s'/> into collection('/db/DRIVER/ContextDSResources/ContextDSResourceType')" +
|
||||
"/RESOURCE_PROFILE/BODY/CONFIGURATION/context[./@id = '%s']", paramName, id);
|
||||
}
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
private String getXQuery(final String id, final String name, final String paramValue, final boolean toEscape) {
|
||||
String value = paramValue;
|
||||
if (toEscape) {
|
||||
value = escape(XmlEscapers.xmlContentEscaper(), paramValue);
|
||||
}
|
||||
|
||||
if (StringUtils.isNotBlank(value)) {
|
||||
return String.format("update replace collection('/db/DRIVER/ContextDSResources/ContextDSResourceType')" +
|
||||
"/RESOURCE_PROFILE/BODY/CONFIGURATION/context[./@id = '%s']/param[./@name = '%s'] with <param name='%s'>%s</param>", id, name, name, value);
|
||||
} else {
|
||||
return String.format("update replace collection('/db/DRIVER/ContextDSResources/ContextDSResourceType')" +
|
||||
"/RESOURCE_PROFILE/BODY/CONFIGURATION/context[./@id = '%s']/param[./@name = '%s'] with <param name='%s'/>", id, name, name);
|
||||
}
|
||||
}
|
||||
|
||||
// private String getXQueryNoEscape(final String id, final String name, final String value) {
|
||||
// if (StringUtils.isNotBlank(value)) {
|
||||
// return String.format("update replace collection('/db/DRIVER/ContextDSResources/ContextDSResourceType')" +
|
||||
// "/RESOURCE_PROFILE/BODY/CONFIGURATION/context[./@id = '%s']/param[./@name = '%s'] with <param name='%s'>%s</param>", id, name, name,
|
||||
// value);
|
||||
// } else {
|
||||
// return String.format("update replace collection('/db/DRIVER/ContextDSResources/ContextDSResourceType')" +
|
||||
// "/RESOURCE_PROFILE/BODY/CONFIGURATION/context[./@id = '%s']/param[./@name = '%s'] with <param name='%s'/>", id, name, name);
|
||||
// }
|
||||
// }
|
||||
|
||||
@Deprecated
|
||||
private String getConceptXQuery(final String id, final String name, final String value) {
|
||||
final Escaper esc = XmlEscapers.xmlContentEscaper();
|
||||
if (StringUtils.isNotBlank(value)) {
|
||||
return String.format("update replace collection('/db/DRIVER/ContextDSResources/ContextDSResourceType')//" +
|
||||
"concept[./@id = '%s']/param[./@name = '%s'] with <param name='%s'>%s</param>", id, name, name, escape(esc, value));
|
||||
} else {
|
||||
return String
|
||||
.format("update replace collection('/db/DRIVER/ContextDSResources/ContextDSResourceType')//concept[./@id = '%s']/param[./@name = '%s'] with <param name='%s'/>", id, name, name);
|
||||
}
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
private String getConceptXQueryNoEscape(final String id, final String name, final String value) {
|
||||
|
||||
if (StringUtils.isNotBlank(value)) {
|
||||
return String.format("update replace collection('/db/DRIVER/ContextDSResources/ContextDSResourceType')//" +
|
||||
"concept[./@id = '%s']/param[./@name = '%s'] with <param name='%s'>%s</param>", id, name, name, value);
|
||||
} else {
|
||||
return String
|
||||
.format("update replace collection('/db/DRIVER/ContextDSResources/ContextDSResourceType')//concept[./@id = '%s']/param[./@name = '%s'] with <param name='%s'/>", id, name, name);
|
||||
}
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
private Map<String, Context> _processContext(final String xquery) throws IOException {
|
||||
return _processContext(new LinkedBlockingQueue<>(), xquery);
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
private Map<String, Context> _processContext(final Queue<Throwable> errors, final String xquery) throws IOException {
|
||||
try {
|
||||
return getContextProfiles(errors, xquery).stream()
|
||||
.filter(StringUtils::isNotBlank)
|
||||
.map(s -> ContextMappingUtils.parseContext(s, errors))
|
||||
.collect(Collectors.toMap(Context::getId, Function.identity(), (c1, c2) -> {
|
||||
log.warn(String.format("found duplicate context profile '%s'", c1.getId()));
|
||||
return c1;
|
||||
}));
|
||||
} finally {
|
||||
if (!errors.isEmpty()) {
|
||||
log.error(errors);
|
||||
errors.forEach(Throwable::printStackTrace);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Retrieves the raw XML context profiles matching the given xquery.
 * Always hits the IS lookup service directly (no caching), hence the warning.
 *
 * @param errors error accumulator (currently unused here; populated by callers' parsing steps)
 * @param xquery the xquery to run
 * @return the list of profile XML strings
 * @throws DsmRuntimeException if the lookup fails
 */
@Deprecated
private List<String> getContextProfiles(final Queue<Throwable> errors, final String xquery) throws IOException {
	log.warn("getContextProfiles(): not using cache");
	try {
		return _quickSeachProfile(xquery);
	} catch (final ISLookUpException e) {
		throw new DsmRuntimeException("unable to get context profiles", e);
	}
}
|
||||
|
||||
/**
 * Loads an xquery template from a classpath resource using the platform
 * default charset.
 *
 * @param resource the classpath resource containing the query text
 * @return the resource content as a string
 * @throws IOException if the resource cannot be read
 */
private String _getQuery(final ClassPathResource resource) throws IOException {
	return IOUtils.toString(resource.getInputStream(), Charset.defaultCharset());
}
|
||||
|
||||
/**
 * Runs an xquery expected to return a single resource profile.
 *
 * @param xquery the xquery to execute
 * @return the profile returned by the IS lookup service
 * @throws ISLookUpException if the lookup fails
 */
private String _isLookUp(final String xquery) throws ISLookUpException {
	log.debug(String.format("running xquery:\n%s", xquery));
	// log.debug(String.format("query result: %s", res));
	return isLookUpService.getResourceProfileByQuery(xquery);
}
|
||||
|
||||
private List<String> _quickSeachProfile(final String xquery) throws ISLookUpException {
|
||||
final List<String> res = Lists.newArrayList();
|
||||
|
||||
log.debug(String.format("running xquery:\n%s", xquery));
|
||||
try {
|
||||
final List<String> list = isLookUpService.quickSearchProfile(xquery);
|
||||
if (list != null) {
|
||||
res.addAll(list);
|
||||
}
|
||||
log.debug(String.format("query result size: %s", res.size()));
|
||||
} catch (final Exception ex) {
|
||||
log.error(ex.getMessage());
|
||||
throw new ISLookUpException("");
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
/**
 * Evicts all IS-derived caches. Triggered periodically (TTL configured via
 * {@code openaire.exporter.cache.ttl}) and available for manual invocation.
 */
@Override
@CacheEvict(cacheNames = {
	"context-cache", "indexdsinfo-cache", "objectstoreid-cache"
}, allEntries = true)
@Scheduled(fixedDelayString = "${openaire.exporter.cache.ttl}")
public void dropCache() {
	log.debug("dropped dsManager IS cache");
}
|
||||
|
||||
/**
 * Fetches a resource profile by its identifier from the IS lookup service.
 *
 * @param profileId the profile identifier
 * @return the profile XML
 * @throws ISLookUpException if the profile cannot be retrieved
 */
@Override
public String getProfile(final String profileId) throws ISLookUpException {
	return isLookUpService.getResourceProfile(profileId);
}
|
||||
|
||||
}
|
|
@ -0,0 +1,60 @@
|
|||
package eu.dnetlib.openaire.common;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.concurrent.*;
|
||||
import javax.annotation.PostConstruct;
|
||||
import javax.annotation.PreDestroy;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
/**
 * Manages a bounded pool of asynchronous operations backed by a fixed-size
 * thread pool. Operations beyond the queue capacity are rejected by the
 * executor -- NOTE(review): confirm callers handle RejectedExecutionException.
 */
@Component
public class OperationManager {

	private static final Log log = LogFactory.getLog(OperationManager.class);

	// max time (ms) granted to in-flight operations during shutdown
	private static final long SLEEP_TIME = 1000;

	// capacity of the pending-operations queue
	private static final int Q_SIZE = 100;

	// number of worker threads
	private static final int POOL_SIZE = 5;

	// shared work queue; reused by the replacement executor created in dropAll()
	private final BlockingQueue<Runnable> ops = new ArrayBlockingQueue<>(Q_SIZE);

	private ExecutorService executor;

	/** Creates the worker pool once the bean is constructed. */
	@PostConstruct
	public void init() {
		executor = getExecutor();
	}

	/**
	 * Discards all pending operations by shutting the executor down and
	 * replacing it with a fresh one.
	 * NOTE(review): not synchronized -- concurrent calls to dropAll()/addOperation()
	 * may race on the executor field; confirm single-threaded management usage.
	 *
	 * @return the number of operations that were discarded
	 */
	public int dropAll() {
		final List<Runnable> lostOperations = executor.shutdownNow();
		log.warn(String.format("discarding %s operations", lostOperations.size()));
		executor = getExecutor();
		return lostOperations.size();
	}

	/** @return the number of operations currently queued (excludes running ones). */
	public int getOpSize() {
		return ops.size();
	}

	/** Submits an operation for asynchronous execution. */
	public void addOperation(final Runnable op) {
		executor.execute(op);
	}

	/**
	 * Gracefully stops the pool on container shutdown, waiting up to
	 * SLEEP_TIME ms for running operations to finish.
	 */
	@PreDestroy
	public void tearDown() throws InterruptedException {
		executor.shutdown();
		final boolean done = executor.awaitTermination(SLEEP_TIME, TimeUnit.MILLISECONDS);
		log.debug(String.format("All operations were completed so far? %s", done));
	}

	// HELPERS

	// fixed-size pool draining the shared bounded queue
	private ThreadPoolExecutor getExecutor() {
		return new ThreadPoolExecutor(POOL_SIZE, POOL_SIZE, 0L, TimeUnit.MILLISECONDS, ops);
	}

}
|
|
@ -0,0 +1,63 @@
|
|||
package eu.dnetlib.openaire.common;
|
||||
|
||||
import java.text.FieldPosition;
|
||||
import java.util.Calendar;
|
||||
import java.util.Date;
|
||||
import java.util.GregorianCalendar;
|
||||
import java.util.Locale;
|
||||
import java.util.TimeZone;
|
||||
|
||||
import com.fasterxml.jackson.databind.util.StdDateFormat;
|
||||
|
||||
public class RFC3339DateFormat extends StdDateFormat {
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
private static final long serialVersionUID = 8174507696046505992L;
|
||||
|
||||
private static final TimeZone TIMEZONE_Z = TimeZone.getTimeZone("UTC");
|
||||
|
||||
// Same as ISO8601DateFormat but serializing milliseconds.
|
||||
@Override
|
||||
public StringBuffer format(final Date date, final StringBuffer toAppendTo, final FieldPosition fieldPosition) {
|
||||
final String value = format(date, true, TIMEZONE_Z, Locale.US);
|
||||
toAppendTo.append(value);
|
||||
return toAppendTo;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format date into yyyy-MM-ddThh:mm:ss[.sss][Z|[+-]hh:mm]
|
||||
*
|
||||
* @param date
|
||||
* the date to format
|
||||
* @param millis
|
||||
* true to include millis precision otherwise false
|
||||
* @param tz
|
||||
* timezone to use for the formatting (UTC will produce 'Z')
|
||||
* @return the date formatted as yyyy-MM-ddThh:mm:ss[.sss][Z|[+-]hh:mm]
|
||||
*/
|
||||
private static String format(final Date date, final boolean millis, final TimeZone tz, final Locale loc) {
|
||||
final Calendar calendar = new GregorianCalendar(tz, loc);
|
||||
calendar.setTime(date);
|
||||
|
||||
// estimate capacity of buffer as close as we can (yeah, that's pedantic ;)
|
||||
final StringBuilder sb = new StringBuilder(30);
|
||||
sb.append(String.format("%04d-%02d-%02dT%02d:%02d:%02d", calendar.get(Calendar.YEAR), calendar.get(Calendar.MONTH) + 1, calendar
|
||||
.get(Calendar.DAY_OF_MONTH), calendar.get(Calendar.HOUR_OF_DAY), calendar.get(Calendar.MINUTE), calendar.get(Calendar.SECOND)));
|
||||
if (millis) {
|
||||
sb.append(String.format(".%03d", calendar.get(Calendar.MILLISECOND)));
|
||||
}
|
||||
|
||||
final int offset = tz.getOffset(calendar.getTimeInMillis());
|
||||
if (offset != 0) {
|
||||
final int hours = Math.abs(offset / (60 * 1000) / 60);
|
||||
final int minutes = Math.abs(offset / (60 * 1000) % 60);
|
||||
sb.append(String.format("%c%02d:%02d", offset < 0 ? '-' : '+', hours, minutes));
|
||||
} else {
|
||||
sb.append('Z');
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,22 @@
|
|||
package eu.dnetlib.openaire.common;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.repository.NoRepositoryBean;
|
||||
import org.springframework.data.repository.Repository;
|
||||
|
||||
/**
 * Base Spring Data repository exposing only read operations, preventing
 * accidental writes from services that should not mutate the underlying table.
 *
 * @param <T> the entity type
 * @param <ID> the entity identifier type
 */
@NoRepositoryBean
public interface ReadOnlyRepository<T, ID> extends Repository<T, ID> {

	/** Finds an entity by id, empty if absent. */
	Optional<T> findById(ID id);

	/** Returns true if an entity with the given id exists. */
	boolean existsById(ID id);

	/** Returns a page of entities. */
	Page<T> findAll(Pageable pageable);

	/** Returns all entities. */
	Iterable<T> findAll();

	/** Returns the total number of entities. */
	long count();
}
|
|
@ -0,0 +1,22 @@
|
|||
package eu.dnetlib.openaire.common;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.Spliterator;
|
||||
import java.util.Spliterators;
|
||||
import java.util.stream.Stream;
|
||||
import java.util.stream.StreamSupport;
|
||||
|
||||
import com.google.common.escape.Escaper;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
public class Utils {
|
||||
|
||||
public static <T> Stream<T> stream(Iterator<T> iterator) {
|
||||
return StreamSupport.stream(Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false);
|
||||
}
|
||||
|
||||
public static String escape(final Escaper esc, final String value) {
|
||||
return StringUtils.isNotBlank(value) ? esc.escape(value) : "";
|
||||
}
|
||||
|
||||
}
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,479 @@
|
|||
package eu.dnetlib.openaire.community;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.function.BiConsumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import javax.persistence.criteria.Predicate;
|
||||
import javax.transaction.Transactional;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.jpa.domain.Specification;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import eu.dnetlib.openaire.community.model.DbCommunity;
|
||||
import eu.dnetlib.openaire.community.model.DbDatasource;
|
||||
import eu.dnetlib.openaire.community.model.DbDatasourcePK;
|
||||
import eu.dnetlib.openaire.community.model.DbOrganization;
|
||||
import eu.dnetlib.openaire.community.model.DbProject;
|
||||
import eu.dnetlib.openaire.community.model.DbProjectPK;
|
||||
import eu.dnetlib.openaire.community.model.DbSubCommunity;
|
||||
import eu.dnetlib.openaire.community.model.DbSupportOrg;
|
||||
import eu.dnetlib.openaire.community.model.DbSupportOrgPK;
|
||||
import eu.dnetlib.openaire.community.repository.DbCommunityRepository;
|
||||
import eu.dnetlib.openaire.community.repository.DbDatasourceRepository;
|
||||
import eu.dnetlib.openaire.community.repository.DbOrganizationRepository;
|
||||
import eu.dnetlib.openaire.community.repository.DbProjectRepository;
|
||||
import eu.dnetlib.openaire.community.repository.DbSubCommunityRepository;
|
||||
import eu.dnetlib.openaire.community.repository.DbSupportOrgRepository;
|
||||
import eu.dnetlib.openaire.community.utils.CommunityMappingUtils;
|
||||
import eu.dnetlib.openaire.exporter.exceptions.CommunityException;
|
||||
import eu.dnetlib.openaire.exporter.exceptions.ResourceNotFoundException;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityContentprovider;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityDetails;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityOrganization;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityProject;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunitySummary;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityWritableProperties;
|
||||
import eu.dnetlib.openaire.exporter.model.community.SubCommunity;
|
||||
import eu.dnetlib.openaire.exporter.model.community.selectioncriteria.SelectionCriteria;
|
||||
import eu.dnetlib.openaire.exporter.model.context.IISConfigurationEntry;
|
||||
|
||||
@Service
|
||||
@ConditionalOnProperty(value = "openaire.exporter.enable.community", havingValue = "true")
|
||||
public class CommunityService {
|
||||
|
||||
@Autowired
|
||||
private DbCommunityRepository dbCommunityRepository;
|
||||
@Autowired
|
||||
private DbProjectRepository dbProjectRepository;
|
||||
@Autowired
|
||||
private DbDatasourceRepository dbDatasourceRepository;
|
||||
@Autowired
|
||||
private DbOrganizationRepository dbOrganizationRepository;
|
||||
@Autowired
|
||||
private DbSupportOrgRepository dbSupportOrgRepository;
|
||||
@Autowired
|
||||
private DbSubCommunityRepository dbSubCommunityRepository;
|
||||
|
||||
private static final Log log = LogFactory.getLog(CommunityService.class);
|
||||
|
||||
public List<CommunitySummary> listCommunities() {
|
||||
return dbCommunityRepository.findAll()
|
||||
.stream()
|
||||
.map(CommunityMappingUtils::toCommunitySummary)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public CommunityDetails newCommunity(final CommunityDetails details) throws CommunityException {
|
||||
if (StringUtils.isBlank(details.getId())) { throw new CommunityException("Empty Id"); }
|
||||
if (dbCommunityRepository.existsById(details.getId())) { throw new CommunityException("Community already exists: " + details.getId()); }
|
||||
details.setCreationDate(LocalDateTime.now());
|
||||
return saveCommunity(details);
|
||||
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public CommunityDetails saveCommunity(final CommunityDetails details) {
|
||||
details.setLastUpdateDate(LocalDateTime.now());
|
||||
dbCommunityRepository.save(CommunityMappingUtils.toCommunity(details));
|
||||
return getCommunity(details.getId());
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public CommunityDetails getCommunity(final String id) {
|
||||
final DbCommunity c = dbCommunityRepository.findById(id).orElseThrow(() -> new ResourceNotFoundException("Community not found: " + id));
|
||||
return CommunityMappingUtils.toCommunityDetails(c);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public void setCommunity(final String id, final CommunityWritableProperties details) {
|
||||
final DbCommunity c = dbCommunityRepository.findById(id).orElseThrow(() -> new ResourceNotFoundException("Community not found: " + id));
|
||||
CommunityMappingUtils.populateCommunity(c, details);
|
||||
c.setLastUpdateDate(LocalDateTime.now());
|
||||
dbCommunityRepository.save(c);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public Page<CommunityProject> getCommunityProjects(final String id,
|
||||
final String funder,
|
||||
final String filter,
|
||||
final int page,
|
||||
final int size,
|
||||
final String orderBy) throws CommunityException {
|
||||
if (StringUtils.isBlank(id)) { throw new CommunityException("Empty ID"); }
|
||||
try {
|
||||
final Sort sort;
|
||||
if (StringUtils.isBlank(orderBy)) {
|
||||
sort = Sort.by("projectName");
|
||||
} else if ("funder".equalsIgnoreCase(orderBy)) {
|
||||
sort = Sort.by("projectFunder").and(Sort.by("projectName"));
|
||||
} else if ("grantId".equalsIgnoreCase(orderBy)) {
|
||||
sort = Sort.by("projectCode");
|
||||
} else if ("acronym".equalsIgnoreCase(orderBy)) {
|
||||
sort = Sort.by("projectAcronym");
|
||||
} else if ("openaireId".equalsIgnoreCase(orderBy)) {
|
||||
sort = Sort.by("projectId");
|
||||
} else {
|
||||
sort = Sort.by("projectName");
|
||||
}
|
||||
|
||||
final PageRequest pageable = PageRequest.of(page, size, sort);
|
||||
if (StringUtils.isAllBlank(filter, funder)) {
|
||||
return dbProjectRepository.findByCommunity(id, pageable).map(CommunityMappingUtils::toCommunityProject);
|
||||
}
|
||||
final Specification<DbProject> projSpec = prepareProjectSpec(id, funder, filter);
|
||||
return dbProjectRepository.findAll(projSpec, pageable).map(CommunityMappingUtils::toCommunityProject);
|
||||
} catch (final Throwable e) {
|
||||
log.error(e);
|
||||
throw new CommunityException(e);
|
||||
}
|
||||
}
|
||||
|
||||
private Specification<DbProject> prepareProjectSpec(final String community, final String funder, final String other) {
|
||||
return (project, query, cb) -> {
|
||||
|
||||
final List<Predicate> andConds = new ArrayList<>();
|
||||
andConds.add(cb.equal(project.get("community"), community));
|
||||
|
||||
if (StringUtils.isNotBlank(funder)) {
|
||||
andConds.add(cb.equal(project.get("projectFunder"), funder));
|
||||
}
|
||||
|
||||
if (StringUtils.isNotBlank(other)) {
|
||||
final String s = other.toLowerCase().trim();
|
||||
|
||||
final List<Predicate> orConds = new ArrayList<>();
|
||||
orConds.add(cb.equal(cb.lower(project.get("projectId")), s));
|
||||
orConds.add(cb.equal(cb.lower(project.get("projectCode")), s));
|
||||
orConds.add(cb.equal(cb.lower(project.get("projectAcronym")), s));
|
||||
orConds.add(cb.like(cb.lower(project.get("projectName")), "%" + s + "%"));
|
||||
if (StringUtils.isBlank(funder)) {
|
||||
orConds.add(cb.equal(cb.lower(project.get("projectFunder")), s));
|
||||
}
|
||||
|
||||
andConds.add(cb.or(orConds.toArray(new Predicate[orConds.size()])));
|
||||
}
|
||||
|
||||
return cb.and(andConds.toArray(new Predicate[andConds.size()]));
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public CommunityProject addCommunityProject(final String id, final CommunityProject project) {
|
||||
final DbProject p = CommunityMappingUtils.toDbProject(id, project);
|
||||
dbProjectRepository.save(p);
|
||||
return project;
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public void addCommunityProjects(final String id, final CommunityProject... projects) throws CommunityException {
|
||||
try {
|
||||
final List<DbProject> list = Arrays.stream(projects)
|
||||
.map(p -> CommunityMappingUtils.toDbProject(id, p))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
dbProjectRepository.saveAll(list);
|
||||
} catch (final Throwable e) {
|
||||
log.error(e);
|
||||
throw new CommunityException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public void removeCommunityProjects(final String id, final String... ids) {
|
||||
final List<DbProjectPK> list = Arrays.stream(ids)
|
||||
.map(projectId -> new DbProjectPK(id, projectId))
|
||||
.collect(Collectors.toList());
|
||||
dbProjectRepository.deleteAllById(list);
|
||||
}
|
||||
|
||||
public List<CommunityContentprovider> getCommunityDatasources(final String id) {
|
||||
return dbDatasourceRepository.findByCommunity(id)
|
||||
.stream()
|
||||
.map(CommunityMappingUtils::toCommunityContentprovider)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
public List<CommunityContentprovider> getCommunityDatasourcesWithDeposit(final String id, final boolean deposit) {
|
||||
return dbDatasourceRepository.findByCommunityAndDeposit(id, deposit)
|
||||
.stream()
|
||||
.map(CommunityMappingUtils::toCommunityContentprovider)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public CommunityContentprovider updateCommunityDatasourcesDeposit(final String id, final String dsId, final Boolean deposit, final String message) {
|
||||
return dbDatasourceRepository.findById(new DbDatasourcePK(id, dsId))
|
||||
.map(ds -> {
|
||||
ds.setDeposit(deposit != null ? deposit : false);
|
||||
ds.setMessage(message);
|
||||
return ds;
|
||||
})
|
||||
.map(CommunityMappingUtils::toCommunityContentprovider)
|
||||
.orElseThrow(() -> new ResourceNotFoundException("Community and/or Datasource not found"));
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public void addCommunityDatasources(final String id, final CommunityContentprovider... contentproviders) {
|
||||
final List<DbDatasource> list = Arrays.stream(contentproviders)
|
||||
.map(cp -> CommunityMappingUtils.toDbDatasource(id, cp))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
dbDatasourceRepository.saveAll(list);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public void removeCommunityDatasources(final String id, final String... ids) {
|
||||
final List<DbDatasourcePK> list = Arrays.stream(ids)
|
||||
.map(dsId -> new DbDatasourcePK(id, dsId))
|
||||
.collect(Collectors.toList());
|
||||
dbDatasourceRepository.deleteAllById(list);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public void removeCommunityOrganizations(final String id, final String... orgNames) {
|
||||
final List<DbSupportOrgPK> list = Arrays.stream(orgNames)
|
||||
.map(name -> new DbSupportOrgPK(id, name))
|
||||
.collect(Collectors.toList());
|
||||
dbSupportOrgRepository.deleteAllById(list);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public List<CommunityOrganization> getCommunityOrganizations(final String id) {
|
||||
return dbSupportOrgRepository.findByCommunity(id)
|
||||
.stream()
|
||||
.map(CommunityMappingUtils::toCommunityOrganization)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public void addCommunityOrganizations(final String id, final CommunityOrganization... orgs) {
|
||||
final List<DbSupportOrg> list = Arrays.stream(orgs)
|
||||
.map(o -> CommunityMappingUtils.toDbSupportOrg(id, o))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
dbSupportOrgRepository.saveAll(list);
|
||||
}
|
||||
|
||||
/**
 * Deletes the given sub-communities.
 *
 * @param id the parent community identifier
 * @param subCommunityIds the sub-community identifiers to delete
 */
@Transactional
public void removeSubCommunities(final String id, final String... subCommunityIds) {
	// NOTE(review): the community 'id' parameter is not used in the delete -- this
	// assumes sub-community ids are globally unique; confirm they cannot collide
	// across communities before relying on it.
	dbSubCommunityRepository.deleteAllById(Arrays.asList(subCommunityIds));
}
|
||||
|
||||
@Transactional
|
||||
public List<SubCommunity> getSubCommunities(final String id) {
|
||||
return dbSubCommunityRepository.findByCommunity(id)
|
||||
.stream()
|
||||
.map(CommunityMappingUtils::toSubCommunity)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public void addSubCommunities(final String id, final SubCommunity... subs) {
|
||||
final List<DbSubCommunity> list = Arrays.stream(subs)
|
||||
.map(s -> CommunityMappingUtils.toDbSubCommunity(id, s))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
dbSubCommunityRepository.saveAll(list);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public CommunityDetails addCommunitySubjects(final String id, final String... subjects) {
|
||||
return modifyElementToArrayField(id, DbCommunity::getSubjects, DbCommunity::setSubjects, false, subjects);
|
||||
}
|
||||
|
||||
public CommunityDetails removeCommunitySubjects(final String id, final String... subjects) {
|
||||
return modifyElementToArrayField(id, DbCommunity::getSubjects, DbCommunity::setSubjects, true, subjects);
|
||||
}
|
||||
|
||||
public CommunityDetails addCommunityFOS(final String id, final String... foss) {
|
||||
return modifyElementToArrayField(id, DbCommunity::getFos, DbCommunity::setFos, false, foss);
|
||||
}
|
||||
|
||||
public CommunityDetails removeCommunityFOS(final String id, final String... foss) {
|
||||
return modifyElementToArrayField(id, DbCommunity::getFos, DbCommunity::setFos, true, foss);
|
||||
}
|
||||
|
||||
public CommunityDetails addCommunitySDG(final String id, final String... sdgs) {
|
||||
return modifyElementToArrayField(id, DbCommunity::getSdg, DbCommunity::setSdg, false, sdgs);
|
||||
}
|
||||
|
||||
public CommunityDetails removeCommunitySDG(final String id, final String... sdgs) {
|
||||
return modifyElementToArrayField(id, DbCommunity::getSdg, DbCommunity::setSdg, true, sdgs);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public CommunityDetails addCommunityAdvancedConstraint(final String id, final SelectionCriteria advancedCosntraint) {
|
||||
final DbCommunity dbEntry = dbCommunityRepository.findById(id).orElseThrow(() -> new ResourceNotFoundException("Community not found: " + id));
|
||||
dbEntry.setAdvancedConstraints(advancedCosntraint);
|
||||
dbEntry.setLastUpdateDate(LocalDateTime.now());
|
||||
dbCommunityRepository.save(dbEntry);
|
||||
return getCommunity(id);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public CommunityDetails removeCommunityAdvancedConstraint(final String id) {
|
||||
final DbCommunity dbEntry = dbCommunityRepository.findById(id).orElseThrow(() -> new ResourceNotFoundException("Community not found: " + id));
|
||||
dbEntry.setAdvancedConstraints(null);
|
||||
dbEntry.setLastUpdateDate(LocalDateTime.now());
|
||||
dbCommunityRepository.save(dbEntry);
|
||||
return getCommunity(id);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public CommunityDetails addCommunityRemoveConstraint(final String id, final SelectionCriteria removeConstraint) {
|
||||
final DbCommunity dbEntry = dbCommunityRepository.findById(id).orElseThrow(() -> new ResourceNotFoundException("Community not found: " + id));
|
||||
dbEntry.setRemoveConstraints(removeConstraint);
|
||||
dbEntry.setLastUpdateDate(LocalDateTime.now());
|
||||
dbCommunityRepository.save(dbEntry);
|
||||
return getCommunity(id);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public CommunityDetails removeCommunityRemoveConstraint(final String id) {
|
||||
final DbCommunity dbEntry = dbCommunityRepository.findById(id).orElseThrow(() -> new ResourceNotFoundException("Community not found: " + id));
|
||||
dbEntry.setRemoveConstraints(null);
|
||||
dbEntry.setLastUpdateDate(LocalDateTime.now());
|
||||
dbCommunityRepository.save(dbEntry);
|
||||
return getCommunity(id);
|
||||
}
|
||||
|
||||
public CommunityDetails removeCommunityZenodoCommunity(final String id, final String zenodoCommunity, final boolean isMain) {
|
||||
if (isMain) { return updateElementToSimpleField(id, DbCommunity::setMainZenodoCommunity, null); }
|
||||
return modifyElementToArrayField(id, DbCommunity::getOtherZenodoCommunities, DbCommunity::setOtherZenodoCommunities, true, zenodoCommunity);
|
||||
}
|
||||
|
||||
public CommunityDetails addCommunityZenodoCommunity(final String id, final String zenodoCommunity, final boolean isMain) {
|
||||
if (isMain) { return updateElementToSimpleField(id, DbCommunity::setMainZenodoCommunity, zenodoCommunity); }
|
||||
return modifyElementToArrayField(id, DbCommunity::getOtherZenodoCommunities, DbCommunity::setOtherZenodoCommunities, false, zenodoCommunity);
|
||||
}
|
||||
|
||||
/**
 * Sets a single scalar field of a community via the given setter, stamps the
 * last-update time, saves, and returns the refreshed details.
 *
 * NOTE(review): @Transactional on a private, self-invoked method is ignored by
 * proxy-based Spring AOP -- transactionality must come from the public callers.
 *
 * @param id the community identifier
 * @param setter writes the new value onto the entity
 * @param value the value to set (may be null to clear the field)
 * @return the refreshed community details
 * @throws ResourceNotFoundException if the community does not exist
 */
@Transactional
private CommunityDetails updateElementToSimpleField(final String id,
	final BiConsumer<DbCommunity, String> setter,
	final String value) {
	final DbCommunity dbEntry = dbCommunityRepository.findById(id).orElseThrow(() -> new ResourceNotFoundException("Community not found: " + id));
	setter.accept(dbEntry, value);
	dbEntry.setLastUpdateDate(LocalDateTime.now());
	dbCommunityRepository.save(dbEntry);
	return getCommunity(id);
}
|
||||
|
||||
@Transactional
|
||||
private CommunityDetails modifyElementToArrayField(final String id,
|
||||
final Function<DbCommunity, String[]> getter,
|
||||
final BiConsumer<DbCommunity, String[]> setter,
|
||||
final boolean remove,
|
||||
final String... values) {
|
||||
|
||||
final DbCommunity dbEntry = dbCommunityRepository.findById(id).orElseThrow(() -> new ResourceNotFoundException("Community not found: " + id));
|
||||
|
||||
final Set<String> tmpList = new LinkedHashSet<>();
|
||||
final String[] oldValues = getter.apply(dbEntry);
|
||||
if (oldValues != null) {
|
||||
Collections.addAll(tmpList, oldValues);
|
||||
}
|
||||
if (remove) {
|
||||
tmpList.removeAll(Arrays.asList(values));
|
||||
} else {
|
||||
tmpList.addAll(Arrays.asList(values));
|
||||
}
|
||||
|
||||
setter.accept(dbEntry, tmpList.toArray(new String[tmpList.size()]));
|
||||
|
||||
dbEntry.setLastUpdateDate(LocalDateTime.now());
|
||||
|
||||
dbCommunityRepository.save(dbEntry);
|
||||
|
||||
return getCommunity(id);
|
||||
}
|
||||
|
||||
/**
 * Returns the ids of the OpenAIRE communities associated with the given Zenodo community.
 *
 * @param zenodoId the Zenodo community identifier
 * @return the list of matching OpenAIRE community ids (delegates to the repository query)
 */
@Transactional
public List<String> getOpenAIRECommunitiesByZenodoId(final String zenodoId) {
	return dbCommunityRepository.findByZenodoId(zenodoId);
}
|
||||
|
||||
@Transactional
|
||||
public Map<String, Set<String>> getPropagationOrganizationCommunityMap() {
|
||||
return dbOrganizationRepository.findAll()
|
||||
.stream()
|
||||
.collect(Collectors.groupingBy(DbOrganization::getOrgId, Collectors.mapping(DbOrganization::getCommunity, Collectors.toSet())));
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public Set<String> getPropagationOrganizationsForCommunity(final String communityId) {
|
||||
return dbOrganizationRepository.findByCommunity(communityId)
|
||||
.stream()
|
||||
.map(DbOrganization::getOrgId)
|
||||
.collect(Collectors.toSet());
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public Set<String> addPropagationOrganizationForCommunity(final String communityId, final String... organizationIds) {
|
||||
for (final String orgId : organizationIds) {
|
||||
final DbOrganization o = new DbOrganization(communityId.trim(), orgId.trim());
|
||||
dbOrganizationRepository.save(o);
|
||||
}
|
||||
return getPropagationOrganizationsForCommunity(communityId);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public Set<String> removePropagationOrganizationForCommunity(final String communityId, final String... organizationIds) {
|
||||
for (final String orgId : organizationIds) {
|
||||
final DbOrganization o = new DbOrganization(communityId.trim(), orgId.trim());
|
||||
dbOrganizationRepository.delete(o);
|
||||
}
|
||||
return getPropagationOrganizationsForCommunity(communityId);
|
||||
}
|
||||
|
||||
/**
 * Deletes a community.
 *
 * @param id the community identifier
 * @param recursive when true, all dependent rows (projects, datasources, organizations,
 *        support organizations, sub-communities) are deleted first; when false only the
 *        community row itself is deleted
 */
@Transactional
public void deleteCommunity(final String id, final boolean recursive) {
	if (recursive) {
		// Children are removed before the community row itself.
		dbProjectRepository.deleteByCommunity(id);
		dbDatasourceRepository.deleteByCommunity(id);
		dbOrganizationRepository.deleteByCommunity(id);
		dbSupportOrgRepository.deleteByCommunity(id);
		dbSubCommunityRepository.deleteByCommunity(id);
	}
	dbCommunityRepository.deleteById(id);
}
|
||||
|
||||
@Transactional
|
||||
public List<IISConfigurationEntry> getIISConfiguration(final String id) {
|
||||
final List<IISConfigurationEntry> res = new ArrayList<>();
|
||||
|
||||
res.add(dbCommunityRepository.findById(id)
|
||||
.map(CommunityMappingUtils::asIISConfigurationEntry)
|
||||
.orElseThrow(() -> new ResourceNotFoundException("Community not found: " + id)));
|
||||
|
||||
for (final DbSubCommunity subc : dbSubCommunityRepository.findByCommunity(id)) {
|
||||
res.add(CommunityMappingUtils.asIISConfigurationEntry(subc));
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
/**
 * Returns the funders of the projects associated with a community.
 *
 * @param id the community identifier
 * @return the list of funder names (delegates to the repository query)
 */
@Transactional
public List<String> getCommunityFunders(final String id) {
	return dbProjectRepository.findFundersByCommunity(id);
}
|
||||
|
||||
}
|
|
@ -0,0 +1,87 @@
|
|||
package eu.dnetlib.openaire.community.importer;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.web.bind.annotation.CrossOrigin;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.RequestParam;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
import com.google.common.collect.Sets;
|
||||
|
||||
import eu.dnetlib.common.controller.AbstractDnetController;
|
||||
import eu.dnetlib.openaire.common.ISClient;
|
||||
import eu.dnetlib.openaire.community.model.DbOrganization;
|
||||
import eu.dnetlib.openaire.exporter.exceptions.CommunityException;
|
||||
import eu.dnetlib.openaire.exporter.model.context.Context;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
/**
 * REST controller exposing one-shot migration endpoints that import the legacy
 * community profiles (IS contexts) into the community database.
 *
 * <p>Enabled only when {@code openaire.exporter.enable.community.import=true}.</p>
 *
 * <p>NOTE(review): both endpoints are GET requests with persistent side effects
 * (they write to the database) — acceptable for a one-off migration tool, but
 * confirm they are not reachable in production.</p>
 */
@RestController
@CrossOrigin(origins = {
	"*"
})
@ConditionalOnProperty(value = "openaire.exporter.enable.community.import", havingValue = "true")
@Tag(name = "OpenAIRE Communities: Migration API", description = "OpenAIRE Communities: Migration API")
public class CommunityImporterController extends AbstractDnetController {

	// Communities excluded from the import; currently empty (previous exclusions kept for reference).
	// public final static Set<String> communityBlackList = Sets.newHashSet("fet-fp7", "fet-h2020");
	public final static Set<String> communityBlackList = Sets.newHashSet();

	@Autowired
	private CommunityImporterService importer;

	@Autowired
	private ISClient isClient;

	private static final Log log = LogFactory.getLog(CommunityImporterController.class);

	/**
	 * Imports all community context profiles (except the blacklisted ones) into the database.
	 *
	 * @return the ids of the imported communities
	 * @throws CommunityException if the import fails
	 */
	@GetMapping("/community_importer/communities")
	public List<String> importProfiles() throws CommunityException {
		try {
			final Map<String, Context> contextMap = getContextMap();

			// Skip blacklisted community ids.
			final List<String> list = contextMap.keySet()
				.stream()
				.filter(id -> !communityBlackList.contains(id))
				.collect(Collectors.toList());

			list.forEach(id -> {
				importer.importCommunity(contextMap.get(id));
			});

			return list;
		} catch (final Throwable e) {
			log.error("Error importing communities", e);
			// NOTE(review): only the message is propagated, the cause is dropped — if
			// CommunityException has a (Throwable) constructor, prefer passing e itself.
			throw new CommunityException(e.getMessage());
		}
	}

	/**
	 * Imports the propagation organizations declared in an IS profile.
	 *
	 * @param profileId the IS profile identifier
	 * @param simulation when true the organizations are parsed but not saved
	 * @return the parsed organizations
	 * @throws Exception if the profile cannot be fetched or parsed
	 */
	@GetMapping("/community_importer/propagationOrgs")
	public List<DbOrganization> importPropagationOrgs(@RequestParam final String profileId,
		@RequestParam(required = false, defaultValue = "false") final boolean simulation) throws Exception {
		try {
			final String xml = isClient.getProfile(profileId);
			return importer.importPropagationOrganizationsFromProfile(xml, simulation);
		} catch (final Throwable e) {
			log.error("Error importing communities", e);
			// NOTE(review): cause dropped here as well — see importProfiles().
			throw new CommunityException(e.getMessage());
		}
	}

	/**
	 * Fetches the map of community contexts from the IS, wrapping IO failures.
	 *
	 * @return map of context id -> context
	 * @throws CommunityException if the IS call fails
	 */
	private Map<String, Context> getContextMap() throws CommunityException {
		try {
			return isClient.getCommunityContextMap();
		} catch (final IOException e) {
			throw new CommunityException(e);
		}
	}

}
|
|
@ -0,0 +1,411 @@
|
|||
package eu.dnetlib.openaire.community.importer;
|
||||
|
||||
import java.time.LocalDate;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Base64;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import javax.transaction.Transactional;
|
||||
|
||||
import org.apache.commons.lang3.BooleanUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.dom4j.DocumentHelper;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.jdbc.core.JdbcTemplate;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
import eu.dnetlib.miscutils.functional.hash.Hashing;
|
||||
import eu.dnetlib.openaire.community.CommunityService;
|
||||
import eu.dnetlib.openaire.community.model.DbOrganization;
|
||||
import eu.dnetlib.openaire.community.repository.DbOrganizationRepository;
|
||||
import eu.dnetlib.openaire.community.utils.CommunityMappingUtils;
|
||||
import eu.dnetlib.openaire.exporter.exceptions.CommunityException;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityClaimType;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityContentprovider;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityDetails;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityMembershipType;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityOrganization;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityPlanType;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityProject;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityStatus;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityType;
|
||||
import eu.dnetlib.openaire.exporter.model.community.SubCommunity;
|
||||
import eu.dnetlib.openaire.exporter.model.community.selectioncriteria.SelectionCriteria;
|
||||
import eu.dnetlib.openaire.exporter.model.context.Category;
|
||||
import eu.dnetlib.openaire.exporter.model.context.Concept;
|
||||
import eu.dnetlib.openaire.exporter.model.context.Context;
|
||||
import eu.dnetlib.openaire.exporter.model.context.Param;
|
||||
|
||||
@Service
|
||||
@ConditionalOnProperty(value = "openaire.exporter.enable.community.import", havingValue = "true")
|
||||
public class CommunityImporterService {
|
||||
|
||||
// common
|
||||
public final static String OPENAIRE_ID = "openaireId";
|
||||
public final static String PIPE_SEPARATOR = "||";
|
||||
public final static String ID_SEPARATOR = "::";
|
||||
public final static String CSV_DELIMITER = ",";
|
||||
public final static String CLABEL = "label";
|
||||
|
||||
// id suffixes
|
||||
public final static String PROJECTS_ID_SUFFIX = ID_SEPARATOR + "projects";
|
||||
public final static String CONTENTPROVIDERS_ID_SUFFIX = ID_SEPARATOR + "contentproviders";
|
||||
public final static String ZENODOCOMMUNITY_ID_SUFFIX = ID_SEPARATOR + "zenodocommunities";
|
||||
public final static String ORGANIZATION_ID_SUFFIX = ID_SEPARATOR + "organizations";
|
||||
|
||||
// community summary
|
||||
public final static String CSUMMARY_DESCRIPTION = "description";
|
||||
public final static String CSUMMARY_LOGOURL = "logourl";
|
||||
public final static String CSUMMARY_STATUS = "status";
|
||||
public final static String CSUMMARY_NAME = "name";
|
||||
public final static String CSUMMARY_MANAGER = "manager";
|
||||
public final static String CSUMMARY_ZENODOC = "zenodoCommunity";
|
||||
|
||||
// community profile
|
||||
public final static String CPROFILE_SUBJECT = "subject";
|
||||
public final static String CPROFILE_CREATIONDATE = "creationdate";
|
||||
public final static String CPROFILE_FOS = "fos";
|
||||
public final static String CPROFILE_SDG = "sdg";
|
||||
public final static String CPROFILE_ADVANCED_CONSTRAINT = "advancedConstraints";
|
||||
public final static String CPROFILE_REMOVE_CONSTRAINT = "removeConstraints";
|
||||
public final static String CPROFILE_SUGGESTED_ACKNOWLEDGEMENT = "suggestedAcknowledgement";
|
||||
|
||||
// community project
|
||||
public final static String CPROJECT_FUNDER = "funder";
|
||||
public final static String CPROJECT_NUMBER = "CD_PROJECT_NUMBER";
|
||||
public final static String CPROJECT_FULLNAME = "projectfullname";
|
||||
public final static String CPROJECT_ACRONYM = "acronym";
|
||||
|
||||
// community content provider
|
||||
public final static String CCONTENTPROVIDER_NAME = "name";
|
||||
public final static String CCONTENTPROVIDER_OFFICIALNAME = "officialname";
|
||||
public final static String CCONTENTPROVIDER_ENABLED = "enabled";
|
||||
public final static String CCONTENTPROVIDERENABLED_DEFAULT = "true";
|
||||
public final static String CCONTENTPROVIDER_SELCRITERIA = "selcriteria";
|
||||
|
||||
// community zenodo community
|
||||
public final static String CZENODOCOMMUNITY_ID = "zenodoid";
|
||||
|
||||
// community organization
|
||||
public final static String CORGANIZATION_NAME = "name";
|
||||
public final static String CORGANIZATION_LOGOURL = "logourl";
|
||||
public final static String CORGANIZATION_WEBSITEURL = "websiteurl";
|
||||
|
||||
@Autowired
|
||||
private DbOrganizationRepository dbOrganizationRepository;
|
||||
|
||||
@Autowired
|
||||
private CommunityService service;
|
||||
|
||||
@Autowired
|
||||
private JdbcTemplate jdbcTemplate;
|
||||
|
||||
private static final Log log = LogFactory.getLog(CommunityImporterService.class);
|
||||
|
||||
public List<DbOrganization> importPropagationOrganizationsFromProfile(final String xml, final boolean simulation) throws Exception {
|
||||
final String json = DocumentHelper.parseText(xml)
|
||||
.selectSingleNode("//NODE[@name='setPropagationOrganizationCommunityMap']//PARAM[@name='parameterValue']")
|
||||
.getText();
|
||||
|
||||
final List<DbOrganization> list = new ObjectMapper()
|
||||
.readValue(json, new TypeReference<Map<String, List<String>>>() {})
|
||||
.entrySet()
|
||||
.stream()
|
||||
.flatMap(e -> e.getValue()
|
||||
.stream()
|
||||
.map(community -> {
|
||||
if (e.getKey().contains("|")) { return new DbOrganization(community, StringUtils.substringAfter(e.getKey(), "|")); }
|
||||
return new DbOrganization(community, e.getKey());
|
||||
}))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
if (!simulation) {
|
||||
list.forEach(o -> {
|
||||
try {
|
||||
dbOrganizationRepository.save(o);
|
||||
} catch (final Throwable e) {
|
||||
log.error("ERROR saving org: " + o);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public void importCommunity(final Context context) {
|
||||
try {
|
||||
|
||||
final CommunityDetails community = asCommunityDetails(context);
|
||||
|
||||
final List<CommunityContentprovider> datasources =
|
||||
getCommunityInfo(context, CONTENTPROVIDERS_ID_SUFFIX, c -> asCommunityDataprovider(context.getId(), c))
|
||||
.stream()
|
||||
.map(o -> {
|
||||
if (o.getOpenaireId() == null) {
|
||||
log.warn("Openaire ID is missing, organization: " + o.getOfficialname());
|
||||
} else if (o.getOpenaireId().contains("|")) {
|
||||
o.setOpenaireId(StringUtils.substringAfter(o.getOpenaireId(), "|"));
|
||||
}
|
||||
return o;
|
||||
})
|
||||
.filter(o -> o.getOpenaireId() != null)
|
||||
.collect(Collectors.toList());
|
||||
|
||||
final List<CommunityProject> projects =
|
||||
getCommunityInfo(context, PROJECTS_ID_SUFFIX, c -> asCommunityProject(context.getId(), c))
|
||||
.stream()
|
||||
.map(p -> {
|
||||
if (p.getOpenaireId() == null) {
|
||||
if ("EC".equalsIgnoreCase(p.getFunder())) {
|
||||
final String ns = findNamespaceForECProject(p.getGrantId());
|
||||
if (ns != null) {
|
||||
p.setOpenaireId(ns + "::" + Hashing.md5(p.getGrantId()));
|
||||
} else {
|
||||
log.warn("EC project not in the db: " + p.getGrantId());
|
||||
}
|
||||
} else if ("NSF".equalsIgnoreCase(p.getFunder())) {
|
||||
p.setOpenaireId("nsf_________::" + Hashing.md5(p.getGrantId()));
|
||||
} else if ("NIH".equalsIgnoreCase(p.getFunder())) {
|
||||
p.setOpenaireId("nih_________::" + Hashing.md5(p.getGrantId()));
|
||||
} else {
|
||||
log.warn("Openaire ID is missing, funder: " + p.getFunder());
|
||||
}
|
||||
} else if (p.getOpenaireId().contains("|")) {
|
||||
p.setOpenaireId(StringUtils.substringAfter(p.getOpenaireId(), "|"));
|
||||
}
|
||||
return p;
|
||||
})
|
||||
.filter(p -> p.getOpenaireId() != null)
|
||||
.collect(Collectors.toList());
|
||||
|
||||
final List<CommunityOrganization> orgs =
|
||||
getCommunityInfo(context, ORGANIZATION_ID_SUFFIX, c -> asCommunityOrganization(context.getId(), c));
|
||||
|
||||
final List<String> otherZenodoCommunities =
|
||||
getCommunityInfo(context, ZENODOCOMMUNITY_ID_SUFFIX, CommunityImporterService::asZenodoCommunity);
|
||||
|
||||
community.setOtherZenodoCommunities(otherZenodoCommunities);
|
||||
|
||||
final List<SubCommunity> subs = context.getCategories()
|
||||
.entrySet()
|
||||
.stream()
|
||||
.filter(e -> !(context.getId() + CONTENTPROVIDERS_ID_SUFFIX).equals(e.getKey()))
|
||||
.filter(e -> !(context.getId() + PROJECTS_ID_SUFFIX).equals(e.getKey()))
|
||||
.filter(e -> !(context.getId() + ORGANIZATION_ID_SUFFIX).equals(e.getKey()))
|
||||
.filter(e -> !(context.getId() + ZENODOCOMMUNITY_ID_SUFFIX).equals(e.getKey()))
|
||||
.map(Entry::getValue)
|
||||
.map(cat -> asSubCommunities(context.getId(), null, cat.getLabel(), cat.getConcepts()))
|
||||
.flatMap(List::stream)
|
||||
.collect(Collectors.toList());
|
||||
|
||||
service.saveCommunity(community);
|
||||
service.addCommunityProjects(context.getId(), projects.toArray(new CommunityProject[projects.size()]));
|
||||
service.addCommunityDatasources(context.getId(), datasources.toArray(new CommunityContentprovider[datasources.size()]));
|
||||
service.addCommunityOrganizations(context.getId(), orgs.toArray(new CommunityOrganization[orgs.size()]));
|
||||
service.addSubCommunities(context.getId(), subs.toArray(new SubCommunity[subs.size()]));
|
||||
} catch (
|
||||
|
||||
final Exception e) {
|
||||
throw new RuntimeException("Error importing community: " + context.getId(), e);
|
||||
}
|
||||
}
|
||||
|
||||
private <R> List<R> getCommunityInfo(final Context context, final String idSuffix, final Function<Concept, R> mapping)
|
||||
throws CommunityException {
|
||||
if (context != null) {
|
||||
final Map<String, Category> categories = context.getCategories();
|
||||
final Category category = categories.get(context.getId() + idSuffix);
|
||||
if (category != null) { return category.getConcepts()
|
||||
.stream()
|
||||
.map(mapping)
|
||||
.collect(Collectors.toList()); }
|
||||
}
|
||||
return Lists.newArrayList();
|
||||
}
|
||||
|
||||
private static CommunityDetails asCommunityDetails(final Context c) {
|
||||
|
||||
final CommunityDetails details = new CommunityDetails();
|
||||
|
||||
details.setId(c.getId());
|
||||
details.setShortName(c.getLabel());
|
||||
details.setDisplayShortName(c.getLabel());
|
||||
details.setLastUpdateDate(CommunityMappingUtils.asLocalDateTime(c.getLastUpdateDate()));
|
||||
details.setCreationDate(CommunityMappingUtils.asLocalDateTime(c.getCreationDate()));
|
||||
details.setQueryId(c.getId() + PIPE_SEPARATOR + c.getLabel());
|
||||
details.setType(CommunityType.valueOf(c.getType()));
|
||||
details.setMembership(CommunityMembershipType.open);
|
||||
details.setClaim(CommunityClaimType.all);
|
||||
details.setDescription(asCsv(CSUMMARY_DESCRIPTION, c.getParams()));
|
||||
details.setLogoUrl(asCsv(CSUMMARY_LOGOURL, c.getParams()));
|
||||
|
||||
final String status = firstValue(CSUMMARY_STATUS, c.getParams());
|
||||
if (StringUtils.isNotBlank(status)) {
|
||||
details.setStatus(CommunityStatus.valueOf(status));
|
||||
} else {
|
||||
details.setStatus(CommunityStatus.hidden);
|
||||
}
|
||||
|
||||
details.setName(StringUtils.firstNonBlank(asCsv(CSUMMARY_NAME, c.getParams()), c.getLabel()));
|
||||
details.setDisplayName(StringUtils.firstNonBlank(asCsv(CSUMMARY_NAME, c.getParams()), c.getLabel()));
|
||||
|
||||
details.setZenodoCommunity(asCsv(CSUMMARY_ZENODOC, c.getParams()));
|
||||
details.setSubjects(splitValues(asValues(CPROFILE_SUBJECT, c.getParams()), CSV_DELIMITER));
|
||||
details.setFos(splitValues(asValues(CPROFILE_FOS, c.getParams()), CSV_DELIMITER));
|
||||
details.setSdg(splitValues(asValues(CPROFILE_SDG, c.getParams()), CSV_DELIMITER));
|
||||
// In the map the string is the serialization of the json representing the selection criteria so it is a valid json
|
||||
details.setAdvancedConstraints(SelectionCriteria.fromJson(asCsv(CPROFILE_ADVANCED_CONSTRAINT, c.getParams())));
|
||||
// In the map the string is the serialization of the json representing the selection criteria so it is a valid json
|
||||
details.setRemoveConstraints(SelectionCriteria.fromJson(asCsv(CPROFILE_REMOVE_CONSTRAINT, c.getParams())));
|
||||
details.setSuggestedAcknowledgements(splitValues(asValues(CPROFILE_SUGGESTED_ACKNOWLEDGEMENT, c.getParams()), CSV_DELIMITER));
|
||||
details.setPlan(CommunityPlanType.Default);
|
||||
try {
|
||||
details.setCreationDate(CommunityMappingUtils.asLocalDateTime(asCsv(CPROFILE_CREATIONDATE, c.getParams())));
|
||||
} catch (final Exception e) {
|
||||
log.debug("Exception on date format: " + e.getMessage());
|
||||
}
|
||||
|
||||
return details;
|
||||
}
|
||||
|
||||
private static CommunityProject asCommunityProject(final String communityId, final Concept c) {
|
||||
final List<Param> p = c.getParams();
|
||||
final CommunityProject project = new CommunityProject();
|
||||
project.setCommunityId(communityId);
|
||||
project.setOpenaireId(firstValue(OPENAIRE_ID, p));
|
||||
project.setFunder(firstValue(CPROJECT_FUNDER, p));
|
||||
project.setGrantId(firstValue(CPROJECT_NUMBER, p));
|
||||
project.setName(firstValue(CPROJECT_FULLNAME, p));
|
||||
project.setAcronym(firstValue(CPROJECT_ACRONYM, p));
|
||||
project.setAvailableSince(LocalDate.of(2017, 2, 25)); // Birillo Birth Date
|
||||
return project;
|
||||
}
|
||||
|
||||
private static CommunityContentprovider asCommunityDataprovider(final String communityId, final Concept c) {
|
||||
final List<Param> p = c.getParams();
|
||||
final CommunityContentprovider d = new CommunityContentprovider();
|
||||
d.setCommunityId(communityId);
|
||||
d.setOpenaireId(firstValue(OPENAIRE_ID, p));
|
||||
d.setName(firstValue(CCONTENTPROVIDER_NAME, p));
|
||||
d.setOfficialname(firstValue(CCONTENTPROVIDER_OFFICIALNAME, p));
|
||||
d.setEnabled(BooleanUtils.toBoolean(firstValue(CCONTENTPROVIDER_ENABLED, p)));
|
||||
d.setSelectioncriteria(SelectionCriteria.fromJson(firstValue(CCONTENTPROVIDER_SELCRITERIA, p)));
|
||||
d.setDeposit(false);
|
||||
d.setMessage(null);
|
||||
return d;
|
||||
}
|
||||
|
||||
private static CommunityOrganization asCommunityOrganization(final String id, final Concept c) {
|
||||
final List<Param> p = c.getParams();
|
||||
final CommunityOrganization o = new CommunityOrganization();
|
||||
o.setCommunityId(id);
|
||||
o.setName(firstValue(CORGANIZATION_NAME, p));
|
||||
o.setLogo_url(getDecodedUrl(firstValue(CORGANIZATION_LOGOURL, p)));
|
||||
o.setWebsite_url(getDecodedUrl(firstValue(CORGANIZATION_WEBSITEURL, p)));
|
||||
return o;
|
||||
}
|
||||
|
||||
private static String asZenodoCommunity(final Concept c) {
|
||||
return firstValue(CZENODOCOMMUNITY_ID, c.getParams());
|
||||
}
|
||||
|
||||
private static List<SubCommunity> asSubCommunities(final String communityId, final String parent, final String category, final List<Concept> concepts) {
|
||||
final List<SubCommunity> list = new ArrayList<>();
|
||||
for (final Concept c : concepts) {
|
||||
final SubCommunity sc = new SubCommunity();
|
||||
sc.setSubCommunityId(c.getId());
|
||||
sc.setCommunityId(communityId);
|
||||
sc.setParent(parent);
|
||||
sc.setCategory(category);
|
||||
sc.setLabel(c.getLabel());
|
||||
sc.setParams(c.getParams());
|
||||
sc.setClaim(c.isClaim());
|
||||
sc.setBrowsable(false);
|
||||
list.add(sc);
|
||||
list.addAll(asSubCommunities(communityId, c.getId(), category, c.getConcepts()));
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
private String findNamespaceForECProject(final String code) {
|
||||
final List<String> list =
|
||||
jdbcTemplate.queryForList("SELECT substr(id, 1, 12) from projects where code = ? and id like 'corda%'", String.class, code);
|
||||
return list.isEmpty() ? null : list.get(0);
|
||||
}
|
||||
|
||||
private static String getDecodedUrl(final String encoded_url) {
|
||||
if (encoded_url == null || encoded_url.startsWith("http")) { return encoded_url; }
|
||||
try {
|
||||
return new String(Base64.getDecoder().decode(encoded_url));
|
||||
} catch (final Exception e) {
|
||||
log.warn("Invalid base64: " + encoded_url);
|
||||
return encoded_url;
|
||||
}
|
||||
}
|
||||
|
||||
private static List<String> splitValues(final Stream<String> stream, final String separator) {
|
||||
return stream.map(s -> s.split(separator))
|
||||
.map(Arrays::asList)
|
||||
.flatMap(List::stream)
|
||||
.filter(StringUtils::isNotBlank)
|
||||
.map(StringUtils::trim)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
private static String firstValue(final String name, final List<Param> params) {
|
||||
return asValues(name, params).findFirst().orElse(null);
|
||||
}
|
||||
|
||||
private static String asCsv(final String name, final List<Param> params) {
|
||||
return asValues(name, params).collect(Collectors.joining(CSV_DELIMITER));
|
||||
}
|
||||
|
||||
private static Stream<String> asValues(final String name, final List<Param> params) {
|
||||
return params == null ? Stream.empty()
|
||||
: params.stream()
|
||||
.filter(p -> p != null)
|
||||
.filter(p -> StringUtils.isNotBlank(p.getName()))
|
||||
.filter(p -> p.getName().trim().equals(name.trim()))
|
||||
.map(Param::getValue)
|
||||
.map(StringUtils::trim)
|
||||
.distinct();
|
||||
}
|
||||
|
||||
protected DbOrganizationRepository getDbOrganizationRepository() {
|
||||
return dbOrganizationRepository;
|
||||
}
|
||||
|
||||
protected void setDbOrganizationRepository(final DbOrganizationRepository dbOrganizationRepository) {
|
||||
this.dbOrganizationRepository = dbOrganizationRepository;
|
||||
}
|
||||
|
||||
protected CommunityService getService() {
|
||||
return service;
|
||||
}
|
||||
|
||||
protected void setService(final CommunityService service) {
|
||||
this.service = service;
|
||||
}
|
||||
|
||||
protected JdbcTemplate getJdbcTemplate() {
|
||||
return jdbcTemplate;
|
||||
}
|
||||
|
||||
protected void setJdbcTemplate(final JdbcTemplate jdbcTemplate) {
|
||||
this.jdbcTemplate = jdbcTemplate;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,304 @@
|
|||
package eu.dnetlib.openaire.community.model;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.time.LocalDateTime;
|
||||
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Convert;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.EnumType;
|
||||
import javax.persistence.Enumerated;
|
||||
import javax.persistence.GeneratedValue;
|
||||
import javax.persistence.GenerationType;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.Table;
|
||||
|
||||
import org.hibernate.annotations.Type;
|
||||
import org.hibernate.annotations.TypeDef;
|
||||
import org.hibernate.annotations.TypeDefs;
|
||||
import org.springframework.data.annotation.CreatedDate;
|
||||
import org.springframework.data.annotation.LastModifiedDate;
|
||||
|
||||
import com.vladmihalcea.hibernate.type.array.StringArrayType;
|
||||
import com.vladmihalcea.hibernate.type.json.JsonBinaryType;
|
||||
import com.vladmihalcea.hibernate.type.json.JsonStringType;
|
||||
|
||||
import eu.dnetlib.openaire.community.utils.CommunityClaimTypeConverter;
|
||||
import eu.dnetlib.openaire.community.utils.CommunityMembershipTypeConverter;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityClaimType;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityMembershipType;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityPlanType;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityStatus;
|
||||
import eu.dnetlib.openaire.exporter.model.community.CommunityType;
|
||||
import eu.dnetlib.openaire.exporter.model.community.selectioncriteria.SelectionCriteria;
|
||||
|
||||
@Entity
|
||||
@Table(name = "communities")
|
||||
@TypeDefs({
|
||||
@TypeDef(name = "string-array", typeClass = StringArrayType.class),
|
||||
@TypeDef(name = "json", typeClass = JsonStringType.class),
|
||||
@TypeDef(name = "jsonb", typeClass = JsonBinaryType.class)
|
||||
})
|
||||
public class DbCommunity implements Serializable {
|
||||
|
||||
private static final long serialVersionUID = 4315597783109726539L;
|
||||
|
||||
@Id
|
||||
@Column(name = "id")
|
||||
private String id;
|
||||
|
||||
@Column(name = "name")
|
||||
private String name;
|
||||
|
||||
@Column(name = "shortname")
|
||||
private String shortName;
|
||||
|
||||
@Column(name = "displayname")
|
||||
private String displayName;
|
||||
|
||||
@Column(name = "displayshortname")
|
||||
private String displayShortName;
|
||||
|
||||
@Column(name = "description")
|
||||
private String description;
|
||||
|
||||
@Column(name = "status")
|
||||
@Enumerated(EnumType.STRING)
|
||||
private CommunityStatus status = CommunityStatus.hidden;
|
||||
|
||||
@Column(name = "membership")
|
||||
@Convert(converter = CommunityMembershipTypeConverter.class)
|
||||
private CommunityMembershipType membership = CommunityMembershipType.byInvitation;
|
||||
|
||||
@Column(name = "type")
|
||||
@Enumerated(EnumType.STRING)
|
||||
private CommunityType type;
|
||||
|
||||
@Column(name = "claim")
|
||||
@Convert(converter = CommunityClaimTypeConverter.class)
|
||||
private CommunityClaimType claim;
|
||||
|
||||
@Type(type = "string-array")
|
||||
@Column(name = "subjects", columnDefinition = "text[]")
|
||||
private String[] subjects;
|
||||
|
||||
@Type(type = "string-array")
|
||||
@Column(name = "fos", columnDefinition = "text[]")
|
||||
private String[] fos;
|
||||
|
||||
@Type(type = "string-array")
|
||||
@Column(name = "sdg", columnDefinition = "text[]")
|
||||
private String[] sdg;
|
||||
|
||||
@Type(type = "jsonb")
|
||||
@Column(name = "adv_constraints")
|
||||
private SelectionCriteria advancedConstraints;
|
||||
|
||||
@Type(type = "jsonb")
|
||||
@Column(name = "remove_constraints")
|
||||
private SelectionCriteria removeConstraints;
|
||||
|
||||
@Column(name = "main_zenodo_community")
|
||||
private String mainZenodoCommunity;
|
||||
|
||||
@Type(type = "string-array")
|
||||
@Column(name = "other_zenodo_communities", columnDefinition = "text[]")
|
||||
private String[] otherZenodoCommunities;
|
||||
|
||||
@CreatedDate
|
||||
@Column(name = "creation_date")
|
||||
private LocalDateTime creationDate;
|
||||
|
||||
@LastModifiedDate
|
||||
@Column(name = "last_update")
|
||||
@GeneratedValue(strategy = GenerationType.IDENTITY)
|
||||
private LocalDateTime lastUpdateDate;
|
||||
|
||||
@Column(name = "logo_url")
|
||||
private String logoUrl;
|
||||
|
||||
@Type(type = "string-array")
|
||||
@Column(name = "suggested_acknowledgements", columnDefinition = "text[]")
|
||||
private String[] suggestedAcknowledgements;
|
||||
|
||||
@Column(name = "plan")
|
||||
@Enumerated(EnumType.STRING)
|
||||
private CommunityPlanType plan;
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(final String id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(final String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getShortName() {
|
||||
return shortName;
|
||||
}
|
||||
|
||||
public void setShortName(final String shortName) {
|
||||
this.shortName = shortName;
|
||||
}
|
||||
|
||||
public String getDisplayName() {
|
||||
return displayName;
|
||||
}
|
||||
|
||||
public void setDisplayName(final String displayName) {
|
||||
this.displayName = displayName;
|
||||
}
|
||||
|
||||
public String getDisplayShortName() {
|
||||
return displayShortName;
|
||||
}
|
||||
|
||||
public void setDisplayShortName(final String displayShortName) {
|
||||
this.displayShortName = displayShortName;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(final String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public CommunityStatus getStatus() {
|
||||
return status;
|
||||
}
|
||||
|
||||
public void setStatus(final CommunityStatus status) {
|
||||
this.status = status;
|
||||
}
|
||||
|
||||
public CommunityMembershipType getMembership() {
|
||||
return membership;
|
||||
}
|
||||
|
||||
public void setMembership(final CommunityMembershipType membership) {
|
||||
this.membership = membership;
|
||||
}
|
||||
|
||||
public CommunityType getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public void setType(final CommunityType type) {
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
public CommunityClaimType getClaim() {
|
||||
return claim;
|
||||
}
|
||||
|
||||
public void setClaim(final CommunityClaimType claim) {
|
||||
this.claim = claim;
|
||||
}
|
||||
|
||||
public String[] getSubjects() {
|
||||
return subjects;
|
||||
}
|
||||
|
||||
public void setSubjects(final String[] subjects) {
|
||||
this.subjects = subjects;
|
||||
}
|
||||
|
||||
public String[] getFos() {
|
||||
return fos;
|
||||
}
|
||||
|
||||
public void setFos(final String[] fos) {
|
||||
this.fos = fos;
|
||||
}
|
||||
|
||||
public String[] getSdg() {
|
||||
return sdg;
|
||||
}
|
||||
|
||||
public void setSdg(final String[] sdg) {
|
||||
this.sdg = sdg;
|
||||
}
|
||||
|
||||
public SelectionCriteria getAdvancedConstraints() {
|
||||
return advancedConstraints;
|
||||
}
|
||||
|
||||
public void setAdvancedConstraints(final SelectionCriteria advancedConstraints) {
|
||||
this.advancedConstraints = advancedConstraints;
|
||||
}
|
||||
|
||||
public SelectionCriteria getRemoveConstraints() {
|
||||
return removeConstraints;
|
||||
}
|
||||
|
||||
public void setRemoveConstraints(final SelectionCriteria removeConstraints) {
|
||||
this.removeConstraints = removeConstraints;
|
||||
}
|
||||
|
||||
public String getMainZenodoCommunity() {
|
||||
return mainZenodoCommunity;
|
||||
}
|
||||
|
||||
public void setMainZenodoCommunity(final String mainZenodoCommunity) {
|
||||
this.mainZenodoCommunity = mainZenodoCommunity;
|
||||
}
|
||||
|
||||
public String[] getOtherZenodoCommunities() {
|
||||
return otherZenodoCommunities;
|
||||
}
|
||||
|
||||
public void setOtherZenodoCommunities(final String[] otherZenodoCommunities) {
|
||||
this.otherZenodoCommunities = otherZenodoCommunities;
|
||||
}
|
||||
|
||||
public LocalDateTime getCreationDate() {
|
||||
return creationDate;
|
||||
}
|
||||
|
||||
public void setCreationDate(final LocalDateTime creationDate) {
|
||||
this.creationDate = creationDate;
|
||||
}
|
||||
|
||||
public LocalDateTime getLastUpdateDate() {
|
||||
return lastUpdateDate;
|
||||
}
|
||||
|
||||
public void setLastUpdateDate(final LocalDateTime lastUpdateDate) {
|
||||
this.lastUpdateDate = lastUpdateDate;
|
||||
}
|
||||
|
||||
public String getLogoUrl() {
|
||||
return logoUrl;
|
||||
}
|
||||
|
||||
public void setLogoUrl(final String logoUrl) {
|
||||
this.logoUrl = logoUrl;
|
||||
}
|
||||
|
||||
public String[] getSuggestedAcknowledgements() {
|
||||
return suggestedAcknowledgements;
|
||||
}
|
||||
|
||||
public void setSuggestedAcknowledgements(final String[] suggestedAcknowledgements) {
|
||||
this.suggestedAcknowledgements = suggestedAcknowledgements;
|
||||
}
|
||||
|
||||
public CommunityPlanType getPlan() {
|
||||
return plan;
|
||||
}
|
||||
|
||||
public void setPlan(final CommunityPlanType plan) {
|
||||
this.plan = plan;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,123 @@
|
|||
package eu.dnetlib.openaire.community.model;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.IdClass;
|
||||
import javax.persistence.Table;
|
||||
|
||||
import org.hibernate.annotations.Type;
|
||||
|
||||
import eu.dnetlib.openaire.exporter.model.community.selectioncriteria.SelectionCriteria;
|
||||
|
||||
/**
 * JPA entity mapping one row of the "community_datasources" table: the
 * association between a community and a datasource, with optional selection
 * constraints and deposition (deposit flag + message) information.
 *
 * <p>Composite primary key (community, dsId) — see {@link DbDatasourcePK}.</p>
 */
@Entity
@Table(name = "community_datasources")
@IdClass(DbDatasourcePK.class)
public class DbDatasource implements Serializable {

	private static final long serialVersionUID = -8782576185861694228L;

	// Primary-key part 1: the community identifier.
	@Id
	@Column(name = "community")
	private String community;

	// Primary-key part 2: the datasource identifier.
	@Id
	@Column(name = "ds_id")
	private String dsId;

	@Column(name = "ds_name")
	private String dsName;

	@Column(name = "ds_officialname")
	private String dsOfficialName;

	@Column(name = "enabled")
	private Boolean enabled;

	// Persisted as jsonb; the "jsonb" Hibernate type is presumably registered
	// via @TypeDef on a sibling entity in this package — TODO confirm.
	@Type(type = "jsonb")
	@Column(name = "constraints")
	private SelectionCriteria constraints;

	@Column(name = "deposit")
	private Boolean deposit;

	@Column(name = "message")
	private String message;

	/** No-arg constructor required by JPA. */
	public DbDatasource() {}

	/** Convenience constructor for the identifying and descriptive fields. */
	public DbDatasource(final String community, final String dsId, final String dsName, final String dsOfficialName, final SelectionCriteria constraints) {
		this.community = community;
		this.dsId = dsId;
		this.dsName = dsName;
		this.dsOfficialName = dsOfficialName;
		this.constraints = constraints;
	}

	public String getCommunity() {
		return community;
	}

	public void setCommunity(final String community) {
		this.community = community;
	}

	public String getDsId() {
		return dsId;
	}

	public void setDsId(final String dsId) {
		this.dsId = dsId;
	}

	public String getDsName() {
		return dsName;
	}

	public void setDsName(final String dsName) {
		this.dsName = dsName;
	}

	public String getDsOfficialName() {
		return dsOfficialName;
	}

	public void setDsOfficialName(final String dsOfficialName) {
		this.dsOfficialName = dsOfficialName;
	}

	public Boolean getEnabled() {
		return enabled;
	}

	public void setEnabled(final Boolean enabled) {
		this.enabled = enabled;
	}

	public SelectionCriteria getConstraints() {
		return constraints;
	}

	public void setConstraints(final SelectionCriteria constraints) {
		this.constraints = constraints;
	}

	public Boolean getDeposit() {
		return deposit;
	}

	public void setDeposit(final Boolean deposit) {
		this.deposit = deposit;
	}

	public String getMessage() {
		return message;
	}

	public void setMessage(final String message) {
		this.message = message;
	}

}
|
|
@ -0,0 +1,55 @@
|
|||
package eu.dnetlib.openaire.community.model;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Objects;
|
||||
|
||||
public class DbDatasourcePK implements Serializable {
|
||||
|
||||
private static final long serialVersionUID = -8073510491611213955L;
|
||||
|
||||
private String community;
|
||||
|
||||
private String dsId;
|
||||
|
||||
public DbDatasourcePK() {}
|
||||
|
||||
public DbDatasourcePK(final String community, final String dsId) {
|
||||
this.community = community;
|
||||
this.dsId = dsId;
|
||||
}
|
||||
|
||||
public String getCommunity() {
|
||||
return community;
|
||||
}
|
||||
|
||||
public void setCommunity(final String community) {
|
||||
this.community = community;
|
||||
}
|
||||
|
||||
public String getDsId() {
|
||||
return dsId;
|
||||
}
|
||||
|
||||
public void setDsId(final String dsId) {
|
||||
this.dsId = dsId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(community, dsId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(final Object obj) {
|
||||
if (this == obj) { return true; }
|
||||
if (!(obj instanceof DbDatasourcePK)) { return false; }
|
||||
final DbDatasourcePK other = (DbDatasourcePK) obj;
|
||||
return Objects.equals(community, other.community) && Objects.equals(dsId, other.dsId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("CommunityDatasourcePK [community=%s, dsId=%s]", community, dsId);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,54 @@
|
|||
package eu.dnetlib.openaire.community.model;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.IdClass;
|
||||
import javax.persistence.Table;
|
||||
|
||||
/**
 * JPA entity mapping one row of the "community_orgs" table: the plain
 * association between a community and an organization.
 *
 * <p>Composite primary key (community, orgId) — see {@link DbOrganizationPK}.</p>
 */
@Entity
@Table(name = "community_orgs")
@IdClass(DbOrganizationPK.class)
public class DbOrganization implements Serializable {

	private static final long serialVersionUID = -602114117980437763L;

	// Primary-key part 1: the community identifier.
	@Id
	@Column(name = "community")
	private String community;

	// Primary-key part 2: the organization identifier.
	@Id
	@Column(name = "org_id")
	private String orgId;

	/** No-arg constructor required by JPA. */
	public DbOrganization() {}

	public DbOrganization(final String community, final String orgId) {
		this.community = community;
		this.orgId = orgId;
	}

	public String getCommunity() {
		return community;
	}

	public void setCommunity(final String community) {
		this.community = community;
	}

	public String getOrgId() {
		return orgId;
	}

	public void setOrgId(final String orgId) {
		this.orgId = orgId;
	}

	@Override
	public String toString() {
		return String.format("DbOrganization [community=%s, orgId=%s]", community, orgId);
	}

}
|
|
@ -0,0 +1,54 @@
|
|||
package eu.dnetlib.openaire.community.model;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Objects;
|
||||
|
||||
public class DbOrganizationPK implements Serializable {
|
||||
|
||||
private static final long serialVersionUID = -6720182815397534837L;
|
||||
|
||||
private String community;
|
||||
|
||||
private String orgId;
|
||||
|
||||
public DbOrganizationPK() {}
|
||||
|
||||
public DbOrganizationPK(final String community, final String orgId) {
|
||||
this.community = community;
|
||||
this.orgId = orgId;
|
||||
}
|
||||
|
||||
public String getCommunity() {
|
||||
return community;
|
||||
}
|
||||
|
||||
public void setCommunity(final String community) {
|
||||
this.community = community;
|
||||
}
|
||||
|
||||
public String getOrgId() {
|
||||
return orgId;
|
||||
}
|
||||
|
||||
public void setOrgId(final String orgId) {
|
||||
this.orgId = orgId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(community, orgId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(final Object obj) {
|
||||
if (this == obj) { return true; }
|
||||
if (!(obj instanceof DbDatasourcePK)) { return false; }
|
||||
final DbOrganizationPK other = (DbOrganizationPK) obj;
|
||||
return Objects.equals(community, other.community) && Objects.equals(orgId, other.orgId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("CommunityOrgPK [community=%s, orgId=%s]", community, orgId);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,113 @@
|
|||
package eu.dnetlib.openaire.community.model;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.time.LocalDate;
|
||||
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.IdClass;
|
||||
import javax.persistence.Table;
|
||||
|
||||
import org.springframework.data.annotation.CreatedDate;
|
||||
|
||||
/**
 * JPA entity mapping one row of the "community_projects" table: the
 * association between a community and a funded project, with descriptive
 * project metadata.
 *
 * <p>Composite primary key (community, projectId) — see {@link DbProjectPK}.</p>
 */
@Entity
@Table(name = "community_projects")
@IdClass(DbProjectPK.class)
public class DbProject implements Serializable {

	private static final long serialVersionUID = 1649065971750517925L;

	// Primary-key part 1: the community identifier.
	@Id
	@Column(name = "community")
	private String community;

	// Primary-key part 2: the project identifier.
	@Id
	@Column(name = "project_id")
	private String projectId;

	@Column(name = "project_code")
	private String projectCode;

	@Column(name = "project_name")
	private String projectName;

	@Column(name = "project_acronym")
	private String projectAcronym;

	@Column(name = "project_funder")
	private String projectFunder;

	// Populated automatically by Spring Data auditing on creation.
	@CreatedDate
	@Column(name = "available_since")
	private LocalDate availableSince;

	/** No-arg constructor required by JPA. */
	public DbProject() {}

	/** Convenience all-field constructor. */
	public DbProject(final String community, final String projectId, final String projectCode, final String projectName, final String projectAcronym,
		final String projectFunder, final LocalDate availableSince) {
		this.community = community;
		this.projectId = projectId;
		this.projectCode = projectCode;
		this.projectName = projectName;
		this.projectAcronym = projectAcronym;
		this.projectFunder = projectFunder;
		this.availableSince = availableSince;
	}

	public String getCommunity() {
		return community;
	}

	public void setCommunity(final String community) {
		this.community = community;
	}

	public String getProjectId() {
		return projectId;
	}

	public void setProjectId(final String projectId) {
		this.projectId = projectId;
	}

	public String getProjectCode() {
		return projectCode;
	}

	public void setProjectCode(final String projectCode) {
		this.projectCode = projectCode;
	}

	public String getProjectName() {
		return projectName;
	}

	public void setProjectName(final String projectName) {
		this.projectName = projectName;
	}

	public String getProjectAcronym() {
		return projectAcronym;
	}

	public void setProjectAcronym(final String projectAcronym) {
		this.projectAcronym = projectAcronym;
	}

	public String getProjectFunder() {
		return projectFunder;
	}

	public void setProjectFunder(final String projectFunder) {
		this.projectFunder = projectFunder;
	}

	public LocalDate getAvailableSince() {
		return availableSince;
	}

	public void setAvailableSince(final LocalDate availableSince) {
		this.availableSince = availableSince;
	}
}
|
|
@ -0,0 +1,54 @@
|
|||
package eu.dnetlib.openaire.community.model;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Objects;
|
||||
|
||||
public class DbProjectPK implements Serializable {
|
||||
|
||||
private static final long serialVersionUID = -4236577148534835803L;
|
||||
|
||||
private String community;
|
||||
|
||||
private String projectId;
|
||||
|
||||
public DbProjectPK() {}
|
||||
|
||||
public DbProjectPK(final String community, final String projectId) {
|
||||
this.community = community;
|
||||
this.projectId = projectId;
|
||||
}
|
||||
|
||||
public String getCommunity() {
|
||||
return community;
|
||||
}
|
||||
|
||||
public void setCommunity(final String community) {
|
||||
this.community = community;
|
||||
}
|
||||
|
||||
public String getProjectId() {
|
||||
return projectId;
|
||||
}
|
||||
|
||||
public void setProjectId(final String projectId) {
|
||||
this.projectId = projectId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(community, projectId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(final Object obj) {
|
||||
if (this == obj) { return true; }
|
||||
if (!(obj instanceof DbProjectPK)) { return false; }
|
||||
final DbProjectPK other = (DbProjectPK) obj;
|
||||
return Objects.equals(community, other.community) && Objects.equals(projectId, other.projectId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("CommunityProjectPK [community=%s, projectId=%s]", community, projectId);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,122 @@
|
|||
package eu.dnetlib.openaire.community.model;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.Table;
|
||||
|
||||
import org.hibernate.annotations.Type;
|
||||
import org.hibernate.annotations.TypeDef;
|
||||
import org.hibernate.annotations.TypeDefs;
|
||||
|
||||
import com.vladmihalcea.hibernate.type.array.StringArrayType;
|
||||
import com.vladmihalcea.hibernate.type.json.JsonBinaryType;
|
||||
import com.vladmihalcea.hibernate.type.json.JsonStringType;
|
||||
|
||||
import eu.dnetlib.openaire.exporter.model.context.Param;
|
||||
|
||||
/**
 * JPA entity mapping one row of the "community_subs" table: a sub-community
 * (concept) belonging to a community, optionally nested under a parent
 * sub-community via the "parent" column.
 *
 * <p>Also registers the custom Hibernate type definitions ("string-array",
 * "json", "jsonb") used by the {@code @Type} annotations in this package.</p>
 */
@Entity
@Table(name = "community_subs")
@TypeDefs({
	@TypeDef(name = "string-array", typeClass = StringArrayType.class),
	@TypeDef(name = "json", typeClass = JsonStringType.class),
	@TypeDef(name = "jsonb", typeClass = JsonBinaryType.class)
})
public class DbSubCommunity implements Serializable {

	private static final long serialVersionUID = 7104936574383307358L;

	// Primary key: the sub-community identifier.
	@Id
	@Column(name = "sub_id")
	private String id;

	// Identifier of the owning community.
	@Column(name = "community")
	private String community;

	@Column(name = "label")
	private String label;

	@Column(name = "category")
	private String category;

	// Arbitrary key/value parameters, persisted as a jsonb column.
	@Type(type = "jsonb")
	@Column(name = "params")
	private List<Param> params = new ArrayList<>();

	// Optional id of the parent sub-community (null for top-level entries).
	@Column(name = "parent")
	private String parent;

	@Column(name = "claim")
	private boolean claim = false;

	@Column(name = "browsable")
	private boolean browsable = false;

	public String getId() {
		return id;
	}

	public void setId(final String id) {
		this.id = id;
	}

	public String getCommunity() {
		return community;
	}

	public void setCommunity(final String community) {
		this.community = community;
	}

	public String getLabel() {
		return label;
	}

	public void setLabel(final String label) {
		this.label = label;
	}

	public String getCategory() {
		return category;
	}

	public void setCategory(final String category) {
		this.category = category;
	}

	public List<Param> getParams() {
		return params;
	}

	public void setParams(final List<Param> params) {
		this.params = params;
	}

	public String getParent() {
		return parent;
	}

	public void setParent(final String parent) {
		this.parent = parent;
	}

	public boolean isClaim() {
		return claim;
	}

	public void setClaim(final boolean claim) {
		this.claim = claim;
	}

	public boolean isBrowsable() {
		return browsable;
	}

	public void setBrowsable(final boolean browsable) {
		this.browsable = browsable;
	}
}
|
|
@ -0,0 +1,64 @@
|
|||
package eu.dnetlib.openaire.community.model;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.IdClass;
|
||||
import javax.persistence.Table;
|
||||
|
||||
@Entity
|
||||
@Table(name = "community_support_orgs")
|
||||
@IdClass(DbSupportOrgPK.class)
|
||||
public class DbSupportOrg implements Serializable {
|
||||
|
||||
private static final long serialVersionUID = 1308759097276753411L;
|
||||
|
||||
@Id
|
||||
@Column(name = "community")
|
||||
private String community;
|
||||
|
||||
@Id
|
||||
@Column(name = "org_name")
|
||||
private String orgName;
|
||||
|
||||
@Column(name = "org_url")
|
||||
private String orgUrl;
|
||||
|
||||
@Column(name = "org_logourl")
|
||||
private String orgLogoUrl;
|
||||
|
||||
public String getCommunity() {
|
||||
return community;
|
||||
}
|
||||
|
||||
public void setCommunity(final String community) {
|
||||
this.community = community;
|
||||
}
|
||||
|
||||
public String getOrgName() {
|
||||
return orgName;
|
||||
}
|
||||
|
||||
public void setOrgName(final String orgName) {
|
||||
this.orgName = orgName;
|
||||
}
|
||||
|
||||
public String getOrgUrl() {
|
||||
return orgUrl;
|
||||
}
|
||||
|
||||
public void setOrgUrl(final String orgUrl) {
|
||||
this.orgUrl = orgUrl;
|
||||
}
|
||||
|
||||
public String getOrgLogoUrl() {
|
||||
return orgLogoUrl;
|
||||
}
|
||||
|
||||
public void setOrgLogoUrl(final String orgLogoUrl) {
|
||||
this.orgLogoUrl = orgLogoUrl;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,55 @@
|
|||
package eu.dnetlib.openaire.community.model;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Objects;
|
||||
|
||||
public class DbSupportOrgPK implements Serializable {
|
||||
|
||||
private static final long serialVersionUID = -4117154543803798310L;
|
||||
|
||||
private String community;
|
||||
|
||||
private String orgName;
|
||||
|
||||
public DbSupportOrgPK() {}
|
||||
|
||||
public DbSupportOrgPK(final String community, final String orgName) {
|
||||
this.community = community;
|
||||
this.orgName = orgName;
|
||||
}
|
||||
|
||||
public String getCommunity() {
|
||||
return community;
|
||||
}
|
||||
|
||||
public void setCommunity(final String community) {
|
||||
this.community = community;
|
||||
}
|
||||
|
||||
public String getOrgName() {
|
||||
return orgName;
|
||||
}
|
||||
|
||||
public void setOrgName(final String orgName) {
|
||||
this.orgName = orgName;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(community, orgName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(final Object obj) {
|
||||
if (this == obj) { return true; }
|
||||
if (!(obj instanceof DbSupportOrgPK)) { return false; }
|
||||
final DbSupportOrgPK other = (DbSupportOrgPK) obj;
|
||||
return Objects.equals(community, other.community) && Objects.equals(orgName, other.orgName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("CommunitySupportOrgPK [community=%s, orgName=%s]", community, orgName);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,39 @@
|
|||
package eu.dnetlib.openaire.community.model;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
/**
 * Plain DTO carrying deposition settings for a datasource: the OpenAIRE
 * datasource id, whether deposition is enabled, and an optional message.
 *
 * <p>Mirrors the deposit/message columns of {@link DbDatasource}.</p>
 */
public class DepositionInfo implements Serializable {

	private static final long serialVersionUID = 7538663287451167904L;

	private String openaireId;

	// Boxed Boolean: null means "not specified", distinct from false.
	private Boolean deposit;

	private String message;

	public String getOpenaireId() {
		return openaireId;
	}

	public void setOpenaireId(final String openaireId) {
		this.openaireId = openaireId;
	}

	public Boolean getDeposit() {
		return deposit;
	}

	public void setDeposit(final Boolean deposit) {
		this.deposit = deposit;
	}

	public String getMessage() {
		return message;
	}

	public void setMessage(final String message) {
		this.message = message;
	}

}
|
|
@ -0,0 +1,17 @@
|
|||
package eu.dnetlib.openaire.community.repository;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.data.jpa.repository.Query;
|
||||
|
||||
import eu.dnetlib.openaire.community.model.DbCommunity;
|
||||
|
||||
/**
 * Spring Data JPA repository for {@link DbCommunity} entities
 * (the "communities" table). Only instantiated when the
 * "openaire.exporter.enable.community" property is set to "true".
 */
@ConditionalOnProperty(value = "openaire.exporter.enable.community", havingValue = "true")
public interface DbCommunityRepository extends JpaRepository<DbCommunity, String> {

	/**
	 * Returns the ids of the communities linked to the given Zenodo community,
	 * matching either the main zenodo community or any entry of the
	 * other_zenodo_communities array (native PostgreSQL query: the main id is
	 * appended to the array before the ANY test).
	 */
	@Query(value = "select id from communities where ?1 = ANY(array_append(other_zenodo_communities, main_zenodo_community))", nativeQuery = true)
	List<String> findByZenodoId(String zenodoId);

}
|
|
@ -0,0 +1,20 @@
|
|||
package eu.dnetlib.openaire.community.repository;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
|
||||
import eu.dnetlib.openaire.community.model.DbDatasource;
|
||||
import eu.dnetlib.openaire.community.model.DbDatasourcePK;
|
||||
|
||||
/**
 * Spring Data JPA repository for {@link DbDatasource} entities
 * (the "community_datasources" table, composite key {@link DbDatasourcePK}).
 * Only instantiated when the "openaire.exporter.enable.community" property
 * is set to "true".
 */
@ConditionalOnProperty(value = "openaire.exporter.enable.community", havingValue = "true")
public interface DbDatasourceRepository extends JpaRepository<DbDatasource, DbDatasourcePK> {

	/** All datasources associated with the given community. */
	List<DbDatasource> findByCommunity(String community);

	/** Datasources of the given community filtered by their deposit flag. */
	List<DbDatasource> findByCommunityAndDeposit(String community, boolean deposit);

	/** Removes every datasource association of the given community. */
	void deleteByCommunity(String id);

}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue