Compare commits
118 Commits
Author | SHA1 | Date |
---|---|---|
Luca Frosini | c138a3fbd2 | |
Luca Frosini | 1618cb9c10 | |
Luca Frosini | f3c2b6197c | |
Luca Frosini | 00bce762f3 | |
Luca Frosini | 9ef16294ff | |
Luca Frosini | fa7be6370a | |
Luca Frosini | 34db58abe2 | |
Luca Frosini | abb35b6e7b | |
Luca Frosini | 77cfbf6a8a | |
luca.frosini | e76b1c3af3 | |
luca.frosini | 009083f335 | |
luca.frosini | dedf11256a | |
Massimiliano Assante | 8c26deb82f | |
Massimiliano Assante | deb8937a10 | |
Massimiliano Assante | 850d6674e5 | |
Massimiliano Assante | 284431ca8d | |
Massimiliano Assante | 4cbdef880e | |
luca.frosini | 89e744d769 | |
luca.frosini | 43865106df | |
luca.frosini | e0fd599c80 | |
Massimiliano Assante | d3ad4f43ae | |
luca.frosini | 9b27e35676 | |
Luca Frosini | bb3e645932 | |
Luca Frosini | 934545c8cf | |
Luca Frosini | 524c3a0411 | |
Luca Frosini | f0ce3c250c | |
Luca Frosini | ec9d30d886 | |
Luca Frosini | dbc270a523 | |
Luca Frosini | 1fe73af6bc | |
Luca Frosini | 120316d1b2 | |
Luca Frosini | 3ff630bbcb | |
Luca Frosini | 459a71bc0d | |
Luca Frosini | 6bd87cedc4 | |
Luca Frosini | ee3a6208a4 | |
Luca Frosini | 523c0d8e34 | |
Luca Frosini | d574e3c797 | |
Luca Frosini | c7a934bd4b | |
Luca Frosini | cc242dee6a | |
Luca Frosini | 9849e7f6ee | |
Luca Frosini | f3a61edbdf | |
Luca Frosini | cdd875bc47 | |
Luca Frosini | 273171704e | |
Luca Frosini | ab8ad166d8 | |
Luca Frosini | e3c8c42dbe | |
Luca Frosini | ac0fe1c671 | |
Luca Frosini | 126034537e | |
Luca Frosini | 40e12c1b85 | |
Luca Frosini | 2d312dbf0b | |
Luca Frosini | 759aaf59a3 | |
Luca Frosini | c567df6a9e | |
Luca Frosini | fd641c937f | |
Giancarlo Panichi | e82971ee29 | |
Giancarlo Panichi | 4a9ec0a773 | |
Giancarlo Panichi | 26b11e96af | |
Giancarlo Panichi | 87f5594109 | |
Luca Frosini | 713dee5082 | |
Giancarlo Panichi | 619e99f08b | |
Giancarlo Panichi | 81d792162d | |
Giancarlo Panichi | 94a558d3c1 | |
Luca Frosini | 77311be1aa | |
Luca Frosini | 8c7bf2c22b | |
Luca Frosini | 695bf798f9 | |
Luca Frosini | 1b500a2f3d | |
Luca Frosini | ba158f3187 | |
Luca Frosini | 72b7aeccf2 | |
Luca Frosini | fff6101491 | |
Luca Frosini | ac305c0a32 | |
Luca Frosini | 42527a425a | |
Luca Frosini | f2b37893a1 | |
Giancarlo Panichi | 9b5d0874ec | |
Giancarlo Panichi | 2bd73c2caa | |
Giancarlo Panichi | 24f2409df7 | |
Giancarlo Panichi | 38ec08e0a3 | |
Giancarlo Panichi | 13481c35a5 | |
Giancarlo Panichi | 7a335cbefd | |
Luca Frosini | afe8a52e5b | |
Luca Frosini | fa381d7313 | |
Luca Frosini | 1fd086e63d | |
Luca Frosini | c47d0bbd25 | |
Luca Frosini | 559926167a | |
Luca Frosini | f94288aa53 | |
Luca Frosini | 553427047f | |
Luca Frosini | f7e22e3d31 | |
Luca Frosini | 7b880efe30 | |
Luca Frosini | 9865f2a1ae | |
Luca Frosini | fc3a042c8f | |
Luca Frosini | 108cb1a767 | |
Luca Frosini | fc7ef68845 | |
Luca Frosini | 9d2b2e5010 | |
Luca Frosini | a6cf5c09a4 | |
Luca Frosini | 4a090b4926 | |
Luca Frosini | 586e0628d9 | |
Luca Frosini | 5125aba591 | |
Luca Frosini | 8f1bfc55a1 | |
Luca Frosini | 769df038c5 | |
Massimiliano Assante | b041f78e1f | |
Massimiliano Assante | baab4ba432 | |
Massimiliano Assante | f953c16195 | |
Luca Frosini | 0a6d49b22b | |
Luca Frosini | 72dff5082b | |
Luca Frosini | 4fe22a57c6 | |
Massimiliano Assante | 813531f5ae | |
Luca Frosini | 9ebbb89eb0 | |
Luca Frosini | 0069f4f363 | |
Luca Frosini | b659c45890 | |
Luca Frosini | c8a87abd73 | |
Massimiliano Assante | a9ab818768 | |
Luca Frosini | 6f7bb6e223 | |
Luca Frosini | 7b367aa471 | |
Luca Frosini | 368422d254 | |
Luca Frosini | cc8d8431ca | |
Luca Frosini | 36ed2a9abc | |
Luca Frosini | aae1bf4806 | |
Luca Frosini | 73ed0a8e6e | |
Luca Frosini | df233dfa36 | |
Luca Frosini | caa4e3eb64 | |
Luca Frosini | 45583f32f3 | |
Luca Frosini | 9f6b745c4d |
36
.classpath
36
.classpath
|
@ -1,36 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<classpath>
|
||||
<classpathentry kind="src" output="target/classes" path="src/main/java">
|
||||
<attributes>
|
||||
<attribute name="optional" value="true"/>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
|
||||
<attributes>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
|
||||
<attributes>
|
||||
<attribute name="optional" value="true"/>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources">
|
||||
<attributes>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
|
||||
<attributes>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8">
|
||||
<attributes>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="output" path="target/classes"/>
|
||||
</classpath>
|
|
@ -1 +1,5 @@
|
|||
/target/
|
||||
target
|
||||
.classpath
|
||||
.project
|
||||
.settings
|
||||
/.DS_Store
|
||||
|
|
23
.project
23
.project
|
@ -1,23 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<projectDescription>
|
||||
<name>accounting-dashboard-harvester-se-plugin</name>
|
||||
<comment></comment>
|
||||
<projects>
|
||||
</projects>
|
||||
<buildSpec>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.jdt.core.javabuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.m2e.core.maven2Builder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
</buildSpec>
|
||||
<natures>
|
||||
<nature>org.eclipse.jdt.core.javanature</nature>
|
||||
<nature>org.eclipse.m2e.core.maven2Nature</nature>
|
||||
</natures>
|
||||
</projectDescription>
|
|
@ -1,6 +0,0 @@
|
|||
eclipse.preferences.version=1
|
||||
encoding//src/main/java=UTF-8
|
||||
encoding//src/main/resources=UTF-8
|
||||
encoding//src/test/java=UTF-8
|
||||
encoding//src/test/resources=UTF-8
|
||||
encoding/<project>=UTF-8
|
|
@ -1,5 +0,0 @@
|
|||
eclipse.preferences.version=1
|
||||
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8
|
||||
org.eclipse.jdt.core.compiler.compliance=1.8
|
||||
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
|
||||
org.eclipse.jdt.core.compiler.source=1.8
|
|
@ -1,4 +0,0 @@
|
|||
activeProfiles=
|
||||
eclipse.preferences.version=1
|
||||
resolveWorkspaceProjects=true
|
||||
version=1
|
|
@ -0,0 +1,75 @@
|
|||
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
# Changelog for "accounting-dashboard-harvester-se-plugin"
|
||||
|
||||
## [v2.5.0]
|
||||
|
||||
- Upgraded smart-executor-bom to 3.3.0 [#27999]
|
||||
|
||||
|
||||
## [v2.4.0]
|
||||
|
||||
- Removed filter restriction on JobUsageRecord harvesting to get MethodInvocation
|
||||
- Fixed bug on getting ScopeDescriptor for new scopes.
|
||||
|
||||
|
||||
## [v2.3.0]
|
||||
|
||||
- Ported GA harvesters to Analytics Data API (GA4)
|
||||
|
||||
|
||||
## [v2.2.0]
|
||||
|
||||
- Switching security to the new IAM [#21904]
|
||||
|
||||
|
||||
## [v2.1.0]
|
||||
|
||||
- Storagehub-client-library get range from gcube-bom [#22822]
|
||||
|
||||
|
||||
## [v2.0.0]
|
||||
|
||||
- Ported plugin to smart-executor APIs 3.0.0 [#21616]
|
||||
- Added RStudio Harvester [#21557]
|
||||
- Added Jupyter Harvester [#21031]
|
||||
- Switched accounting JSON management to gcube-jackson [#19115]
|
||||
- Switched smart-executor JSON management to gcube-jackson [#19647]
|
||||
|
||||
|
||||
## [v1.6.0] - 2020-05-22
|
||||
|
||||
- [#19047] Added core services accesses
|
||||
|
||||
|
||||
## [v1.5.0] - 2020-03-30
|
||||
|
||||
- [#18290] Google Analytics Plugin for Catalogue pageviews
|
||||
- [#18848] Updated Catalogue Dashboard harvester ENUM
|
||||
|
||||
|
||||
## [v1.4.0] - 2019-12-19
|
||||
|
||||
- [#17800] Allowed partial harvesting of the current period
|
||||
|
||||
|
||||
## [v1.3.0] - 2019-11-06
|
||||
|
||||
- [#17800] Allowed partial harvesting of the current period
|
||||
|
||||
|
||||
## [v1.2.0] - 2019-09-11
|
||||
|
||||
- [#17128] Removed Home Library dependency
|
||||
- [#17128] Removed ckan-util-library dependency
|
||||
|
||||
|
||||
## [v1.1.0] [r4.13.1] - 2019-02-26
|
||||
|
||||
- [#12985] Fixed scope of dependencies
|
||||
|
||||
|
||||
## [v1.0.0] [r4.13.1] - 2018-10-10
|
||||
|
||||
- First Release
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
# Acknowledgments
|
||||
|
||||
The projects leading to this software have received funding from a series of European Union programmes including:
|
||||
|
||||
- the Sixth Framework Programme for Research and Technological Development
|
||||
- [DILIGENT](https://cordis.europa.eu/project/id/004260) (grant no. 004260).
|
||||
- the Seventh Framework Programme for research, technological development and demonstration
|
||||
- [D4Science](https://cordis.europa.eu/project/id/212488) (grant no. 212488);
|
||||
- [D4Science-II](https://cordis.europa.eu/project/id/239019) (grant no.239019);
|
||||
- [ENVRI](https://cordis.europa.eu/project/id/283465) (grant no. 283465);
|
||||
- [iMarine](https://cordis.europa.eu/project/id/283644) (grant no. 283644);
|
||||
- [EUBrazilOpenBio](https://cordis.europa.eu/project/id/288754) (grant no. 288754).
|
||||
- the H2020 research and innovation programme
|
||||
- [SoBigData](https://cordis.europa.eu/project/id/654024) (grant no. 654024);
|
||||
- [PARTHENOS](https://cordis.europa.eu/project/id/654119) (grant no. 654119);
|
||||
- [EGI-Engage](https://cordis.europa.eu/project/id/654142) (grant no. 654142);
|
||||
- [ENVRI PLUS](https://cordis.europa.eu/project/id/654182) (grant no. 654182);
|
||||
- [BlueBRIDGE](https://cordis.europa.eu/project/id/675680) (grant no. 675680);
|
||||
- [PerformFISH](https://cordis.europa.eu/project/id/727610) (grant no. 727610);
|
||||
- [AGINFRA PLUS](https://cordis.europa.eu/project/id/731001) (grant no. 731001);
|
||||
- [DESIRA](https://cordis.europa.eu/project/id/818194) (grant no. 818194);
|
||||
- [ARIADNEplus](https://cordis.europa.eu/project/id/823914) (grant no. 823914);
|
||||
- [RISIS 2](https://cordis.europa.eu/project/id/824091) (grant no. 824091);
|
||||
- [EOSC-Pillar](https://cordis.europa.eu/project/id/857650) (grant no. 857650);
|
||||
- [Blue Cloud](https://cordis.europa.eu/project/id/862409) (grant no. 862409);
|
||||
- [SoBigData-PlusPlus](https://cordis.europa.eu/project/id/871042) (grant no. 871042);
|
|
@ -0,0 +1,312 @@
|
|||
# European Union Public Licence V. 1.1
|
||||
|
||||
|
||||
EUPL © the European Community 2007
|
||||
|
||||
|
||||
This European Union Public Licence (the “EUPL”) applies to the Work or Software
|
||||
(as defined below) which is provided under the terms of this Licence. Any use of
|
||||
the Work, other than as authorised under this Licence is prohibited (to the
|
||||
extent such use is covered by a right of the copyright holder of the Work).
|
||||
|
||||
The Original Work is provided under the terms of this Licence when the Licensor
|
||||
(as defined below) has placed the following notice immediately following the
|
||||
copyright notice for the Original Work:
|
||||
|
||||
Licensed under the EUPL V.1.1
|
||||
|
||||
or has expressed by any other mean his willingness to license under the EUPL.
|
||||
|
||||
|
||||
|
||||
## 1. Definitions
|
||||
|
||||
In this Licence, the following terms have the following meaning:
|
||||
|
||||
- The Licence: this Licence.
|
||||
|
||||
- The Original Work or the Software: the software distributed and/or
|
||||
communicated by the Licensor under this Licence, available as Source Code and
|
||||
also as Executable Code as the case may be.
|
||||
|
||||
- Derivative Works: the works or software that could be created by the Licensee,
|
||||
based upon the Original Work or modifications thereof. This Licence does not
|
||||
define the extent of modification or dependence on the Original Work required
|
||||
in order to classify a work as a Derivative Work; this extent is determined by
|
||||
copyright law applicable in the country mentioned in Article 15.
|
||||
|
||||
- The Work: the Original Work and/or its Derivative Works.
|
||||
|
||||
- The Source Code: the human-readable form of the Work which is the most
|
||||
convenient for people to study and modify.
|
||||
|
||||
- The Executable Code: any code which has generally been compiled and which is
|
||||
meant to be interpreted by a computer as a program.
|
||||
|
||||
- The Licensor: the natural or legal person that distributes and/or communicates
|
||||
the Work under the Licence.
|
||||
|
||||
- Contributor(s): any natural or legal person who modifies the Work under the
|
||||
Licence, or otherwise contributes to the creation of a Derivative Work.
|
||||
|
||||
- The Licensee or “You”: any natural or legal person who makes any usage of the
|
||||
Software under the terms of the Licence.
|
||||
|
||||
- Distribution and/or Communication: any act of selling, giving, lending,
|
||||
renting, distributing, communicating, transmitting, or otherwise making
|
||||
available, on-line or off-line, copies of the Work or providing access to its
|
||||
essential functionalities at the disposal of any other natural or legal
|
||||
person.
|
||||
|
||||
|
||||
|
||||
## 2. Scope of the rights granted by the Licence
|
||||
|
||||
The Licensor hereby grants You a world-wide, royalty-free, non-exclusive,
|
||||
sub-licensable licence to do the following, for the duration of copyright vested
|
||||
in the Original Work:
|
||||
|
||||
- use the Work in any circumstance and for all usage, reproduce the Work, modify
|
||||
- the Original Work, and make Derivative Works based upon the Work, communicate
|
||||
- to the public, including the right to make available or display the Work or
|
||||
- copies thereof to the public and perform publicly, as the case may be, the
|
||||
- Work, distribute the Work or copies thereof, lend and rent the Work or copies
|
||||
- thereof, sub-license rights in the Work or copies thereof.
|
||||
|
||||
Those rights can be exercised on any media, supports and formats, whether now
|
||||
known or later invented, as far as the applicable law permits so.
|
||||
|
||||
In the countries where moral rights apply, the Licensor waives his right to
|
||||
exercise his moral right to the extent allowed by law in order to make effective
|
||||
the licence of the economic rights here above listed.
|
||||
|
||||
The Licensor grants to the Licensee royalty-free, non exclusive usage rights to
|
||||
any patents held by the Licensor, to the extent necessary to make use of the
|
||||
rights granted on the Work under this Licence.
|
||||
|
||||
|
||||
|
||||
## 3. Communication of the Source Code
|
||||
|
||||
The Licensor may provide the Work either in its Source Code form, or as
|
||||
Executable Code. If the Work is provided as Executable Code, the Licensor
|
||||
provides in addition a machine-readable copy of the Source Code of the Work
|
||||
along with each copy of the Work that the Licensor distributes or indicates, in
|
||||
a notice following the copyright notice attached to the Work, a repository where
|
||||
the Source Code is easily and freely accessible for as long as the Licensor
|
||||
continues to distribute and/or communicate the Work.
|
||||
|
||||
|
||||
|
||||
## 4. Limitations on copyright
|
||||
|
||||
Nothing in this Licence is intended to deprive the Licensee of the benefits from
|
||||
any exception or limitation to the exclusive rights of the rights owners in the
|
||||
Original Work or Software, of the exhaustion of those rights or of other
|
||||
applicable limitations thereto.
|
||||
|
||||
|
||||
|
||||
## 5. Obligations of the Licensee
|
||||
|
||||
The grant of the rights mentioned above is subject to some restrictions and
|
||||
obligations imposed on the Licensee. Those obligations are the following:
|
||||
|
||||
Attribution right: the Licensee shall keep intact all copyright, patent or
|
||||
trademarks notices and all notices that refer to the Licence and to the
|
||||
disclaimer of warranties. The Licensee must include a copy of such notices and a
|
||||
copy of the Licence with every copy of the Work he/she distributes and/or
|
||||
communicates. The Licensee must cause any Derivative Work to carry prominent
|
||||
notices stating that the Work has been modified and the date of modification.
|
||||
|
||||
Copyleft clause: If the Licensee distributes and/or communicates copies of the
|
||||
Original Works or Derivative Works based upon the Original Work, this
|
||||
Distribution and/or Communication will be done under the terms of this Licence
|
||||
or of a later version of this Licence unless the Original Work is expressly
|
||||
distributed only under this version of the Licence. The Licensee (becoming
|
||||
Licensor) cannot offer or impose any additional terms or conditions on the Work
|
||||
or Derivative Work that alter or restrict the terms of the Licence.
|
||||
|
||||
Compatibility clause: If the Licensee Distributes and/or Communicates Derivative
|
||||
Works or copies thereof based upon both the Original Work and another work
|
||||
licensed under a Compatible Licence, this Distribution and/or Communication can
|
||||
be done under the terms of this Compatible Licence. For the sake of this clause,
|
||||
“Compatible Licence” refers to the licences listed in the appendix attached to
|
||||
this Licence. Should the Licensee’s obligations under the Compatible Licence
|
||||
conflict with his/her obligations under this Licence, the obligations of the
|
||||
Compatible Licence shall prevail.
|
||||
|
||||
Provision of Source Code: When distributing and/or communicating copies of the
|
||||
Work, the Licensee will provide a machine-readable copy of the Source Code or
|
||||
indicate a repository where this Source will be easily and freely available for
|
||||
as long as the Licensee continues to distribute and/or communicate the Work.
|
||||
|
||||
Legal Protection: This Licence does not grant permission to use the trade names,
|
||||
trademarks, service marks, or names of the Licensor, except as required for
|
||||
reasonable and customary use in describing the origin of the Work and
|
||||
reproducing the content of the copyright notice.
|
||||
|
||||
|
||||
|
||||
## 6. Chain of Authorship
|
||||
|
||||
The original Licensor warrants that the copyright in the Original Work granted
|
||||
hereunder is owned by him/her or licensed to him/her and that he/she has the
|
||||
power and authority to grant the Licence.
|
||||
|
||||
Each Contributor warrants that the copyright in the modifications he/she brings
|
||||
to the Work are owned by him/her or licensed to him/her and that he/she has the
|
||||
power and authority to grant the Licence.
|
||||
|
||||
Each time You accept the Licence, the original Licensor and subsequent
|
||||
Contributors grant You a licence to their contributions to the Work, under the
|
||||
terms of this Licence.
|
||||
|
||||
|
||||
|
||||
## 7. Disclaimer of Warranty
|
||||
|
||||
The Work is a work in progress, which is continuously improved by numerous
|
||||
contributors. It is not a finished work and may therefore contain defects or
|
||||
“bugs” inherent to this type of software development.
|
||||
|
||||
For the above reason, the Work is provided under the Licence on an “as is” basis
|
||||
and without warranties of any kind concerning the Work, including without
|
||||
limitation merchantability, fitness for a particular purpose, absence of defects
|
||||
or errors, accuracy, non-infringement of intellectual property rights other than
|
||||
copyright as stated in Article 6 of this Licence.
|
||||
|
||||
This disclaimer of warranty is an essential part of the Licence and a condition
|
||||
for the grant of any rights to the Work.
|
||||
|
||||
|
||||
|
||||
## 8. Disclaimer of Liability
|
||||
|
||||
Except in the cases of wilful misconduct or damages directly caused to natural
|
||||
persons, the Licensor will in no event be liable for any direct or indirect,
|
||||
material or moral, damages of any kind, arising out of the Licence or of the use
|
||||
of the Work, including without limitation, damages for loss of goodwill, work
|
||||
stoppage, computer failure or malfunction, loss of data or any commercial
|
||||
damage, even if the Licensor has been advised of the possibility of such
|
||||
damage. However, the Licensor will be liable under statutory product liability
|
||||
laws as far such laws apply to the Work.
|
||||
|
||||
|
||||
|
||||
## 9. Additional agreements
|
||||
|
||||
While distributing the Original Work or Derivative Works, You may choose to
|
||||
conclude an additional agreement to offer, and charge a fee for, acceptance of
|
||||
support, warranty, indemnity, or other liability obligations and/or services
|
||||
consistent with this Licence. However, in accepting such obligations, You may
|
||||
act only on your own behalf and on your sole responsibility, not on behalf of
|
||||
the original Licensor or any other Contributor, and only if You agree to
|
||||
indemnify, defend, and hold each Contributor harmless for any liability incurred
|
||||
by, or claims asserted against such Contributor by the fact You have accepted
|
||||
any such warranty or additional liability.
|
||||
|
||||
|
||||
|
||||
## 10. Acceptance of the Licence
|
||||
|
||||
The provisions of this Licence can be accepted by clicking on an icon “I agree”
|
||||
placed under the bottom of a window displaying the text of this Licence or by
|
||||
affirming consent in any other similar way, in accordance with the rules of
|
||||
applicable law. Clicking on that icon indicates your clear and irrevocable
|
||||
acceptance of this Licence and all of its terms and conditions.
|
||||
|
||||
Similarly, you irrevocably accept this Licence and all of its terms and
|
||||
conditions by exercising any rights granted to You by Article 2 of this Licence,
|
||||
such as the use of the Work, the creation by You of a Derivative Work or the
|
||||
Distribution and/or Communication by You of the Work or copies thereof.
|
||||
|
||||
|
||||
|
||||
## 11. Information to the public
|
||||
|
||||
In case of any Distribution and/or Communication of the Work by means of
|
||||
electronic communication by You (for example, by offering to download the Work
|
||||
from a remote location) the distribution channel or media (for example, a
|
||||
website) must at least provide to the public the information requested by the
|
||||
applicable law regarding the Licensor, the Licence and the way it may be
|
||||
accessible, concluded, stored and reproduced by the Licensee.
|
||||
|
||||
|
||||
|
||||
## 12. Termination of the Licence
|
||||
|
||||
The Licence and the rights granted hereunder will terminate automatically upon
|
||||
any breach by the Licensee of the terms of the Licence.
|
||||
|
||||
Such a termination will not terminate the licences of any person who has
|
||||
received the Work from the Licensee under the Licence, provided such persons
|
||||
remain in full compliance with the Licence.
|
||||
|
||||
|
||||
|
||||
## 13. Miscellaneous
|
||||
|
||||
Without prejudice of Article 9 above, the Licence represents the complete
|
||||
agreement between the Parties as to the Work licensed hereunder.
|
||||
|
||||
If any provision of the Licence is invalid or unenforceable under applicable
|
||||
law, this will not affect the validity or enforceability of the Licence as a
|
||||
whole. Such provision will be construed and/or reformed so as necessary to make
|
||||
it valid and enforceable.
|
||||
|
||||
The European Commission may publish other linguistic versions and/or new
|
||||
versions of this Licence, so far this is required and reasonable, without
|
||||
reducing the scope of the rights granted by the Licence. New versions of the
|
||||
Licence will be published with a unique version number.
|
||||
|
||||
All linguistic versions of this Licence, approved by the European Commission,
|
||||
have identical value. Parties can take advantage of the linguistic version of
|
||||
their choice.
|
||||
|
||||
|
||||
|
||||
## 14. Jurisdiction
|
||||
|
||||
Any litigation resulting from the interpretation of this License, arising
|
||||
between the European Commission, as a Licensor, and any Licensee, will be
|
||||
subject to the jurisdiction of the Court of Justice of the European Communities,
|
||||
as laid down in article 238 of the Treaty establishing the European Community.
|
||||
|
||||
Any litigation arising between Parties, other than the European Commission, and
|
||||
resulting from the interpretation of this License, will be subject to the
|
||||
exclusive jurisdiction of the competent court where the Licensor resides or
|
||||
conducts its primary business.
|
||||
|
||||
|
||||
|
||||
## 15. Applicable Law
|
||||
|
||||
This Licence shall be governed by the law of the European Union country where
|
||||
the Licensor resides or has his registered office.
|
||||
|
||||
This licence shall be governed by the Belgian law if:
|
||||
|
||||
- a litigation arises between the European Commission, as a Licensor, and any
|
||||
- Licensee; the Licensor, other than the European Commission, has no residence
|
||||
- or registered office inside a European Union country.
|
||||
|
||||
|
||||
|
||||
## Appendix
|
||||
|
||||
|
||||
|
||||
“Compatible Licences” according to article 5 EUPL are:
|
||||
|
||||
|
||||
- GNU General Public License (GNU GPL) v. 2
|
||||
|
||||
- Open Software License (OSL) v. 2.1, v. 3.0
|
||||
|
||||
- Common Public License v. 1.0
|
||||
|
||||
- Eclipse Public License v. 1.0
|
||||
|
||||
- Cecill v. 2.0
|
||||
|
|
@ -0,0 +1,53 @@
|
|||
# Accounting Dashboard Harvester Smart Executor Plugin
|
||||
|
||||
Accounting Dashboard Harvester Smart Executor Plugin harvest accounting data
|
||||
from different sources, harmonize them and store in a PostgreSQL database to
|
||||
enable aggregated infrastructure analysis.
|
||||
|
||||
## Built With
|
||||
|
||||
* [OpenJDK](https://openjdk.java.net/) - The JDK used
|
||||
* [Maven](https://maven.apache.org/) - Dependency Management
|
||||
|
||||
## Documentation
|
||||
|
||||
[Accounting Dashboard Harvester Smart Executor Plugin](https://wiki.gcube-system.org/gcube/Accounting)
|
||||
|
||||
## Change log
|
||||
|
||||
See [Releases](https://code-repo.d4science.org/gCubeSystem/accounting-lib/releases).
|
||||
|
||||
## Authors
|
||||
|
||||
* **Luca Frosini** ([ORCID](https://orcid.org/0000-0003-3183-2291)) - [ISTI-CNR Infrascience Group](http://nemis.isti.cnr.it/groups/infrascience)
|
||||
* **Massimiliano Assante** ([ORCID](https://orcid.org/0000-0002-3761-1492)) - - [ISTI-CNR Infrascience Group](http://nemis.isti.cnr.it/groups/infrascience)
|
||||
* **Francesco Mangiacrapa** ([ORCID](https://orcid.org/0000-0002-6528-664X)) - - [ISTI-CNR Infrascience Group](http://nemis.isti.cnr.it/groups/infrascience)
|
||||
|
||||
## How to Cite this Software
|
||||
|
||||
Tell people how to cite this software.
|
||||
* Cite an associated paper?
|
||||
* Use a specific BibTeX entry for the software?
|
||||
|
||||
|
||||
@Manual{,
|
||||
title = {Accounting Dashboard Harvester Smart Executor Plugin},
|
||||
author = {{Frosini, Luca}, {Assante, Massimiliano}, {Mangiacrapa, Francesco}},
|
||||
organization = {ISTI - CNR},
|
||||
address = {Pisa, Italy},
|
||||
year = 2019,
|
||||
url = {http://www.gcube-system.org/}
|
||||
}
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under the EUPL V.1.1 License - see the [LICENSE.md](LICENSE.md) file for details.
|
||||
|
||||
|
||||
## About the gCube Framework
|
||||
This software is part of the [gCubeFramework](https://www.gcube-system.org/ "gCubeFramework"): an
|
||||
open-source software toolkit used for building and operating Hybrid Data
|
||||
Infrastructures enabling the dynamic deployment of Virtual Research Environments
|
||||
by favouring the realisation of reuse oriented policies.
|
||||
|
||||
The projects leading to this software have received funding from a series of European Union programmes see [FUNDING.md](FUNDING.md)
|
|
@ -1,4 +0,0 @@
|
|||
gCube System - License
|
||||
------------------------------------------------------------
|
||||
|
||||
${gcube.license}
|
|
@ -1,68 +0,0 @@
|
|||
The gCube System - ${name}
|
||||
--------------------------------------------------
|
||||
|
||||
${description}
|
||||
|
||||
|
||||
${gcube.description}
|
||||
|
||||
${gcube.funding}
|
||||
|
||||
|
||||
Version
|
||||
--------------------------------------------------
|
||||
|
||||
${version} (${buildDate})
|
||||
|
||||
Please see the file named "changelog.xml" in this directory for the release notes.
|
||||
|
||||
|
||||
Authors
|
||||
--------------------------------------------------
|
||||
|
||||
* Luca Frosini (luca.frosini-AT-isti.cnr.it), Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" - CNR, Pisa (Italy).
|
||||
* Eric Perrone (eric.perrone-AT-isti.cnr.it), Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" - CNR, Pisa (Italy).
|
||||
* Francesco Mangiacrapa (francesco.mangiacrapa-AT-isti.cnr.it), Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" - CNR, Pisa (Italy).
|
||||
* Massimiliano Assante (massimiliano.assante-AT-isti.cnr.it), Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" - CNR, Pisa (Italy).
|
||||
|
||||
Maintainers
|
||||
-----------
|
||||
|
||||
* Luca Frosini (luca.frosini-AT-isti.cnr.it), Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" - CNR, Pisa (Italy).
|
||||
|
||||
|
||||
Download information
|
||||
--------------------------------------------------
|
||||
|
||||
Source code is available from SVN:
|
||||
${scm.url}
|
||||
|
||||
Binaries can be downloaded from the gCube website:
|
||||
${gcube.website}
|
||||
|
||||
|
||||
Installation
|
||||
--------------------------------------------------
|
||||
|
||||
Installation documentation is available on-line in the gCube Wiki:
|
||||
${gcube.wikiRoot}/Accounting_Aggregator
|
||||
|
||||
|
||||
Documentation
|
||||
--------------------------------------------------
|
||||
|
||||
Documentation is available on-line in the gCube Wiki:
|
||||
${gcube.wikiRoot}/Accounting_Aggregator
|
||||
|
||||
|
||||
Support
|
||||
--------------------------------------------------
|
||||
|
||||
Bugs and support requests can be reported in the gCube issue tracking tool:
|
||||
${gcube.issueTracking}
|
||||
|
||||
|
||||
Licensing
|
||||
--------------------------------------------------
|
||||
|
||||
This software is licensed under the terms you may find in the file named "LICENSE" in this directory.
|
|
@ -1,17 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE xml>
|
||||
<ReleaseNotes>
|
||||
<Changeset component="org.gcube.accounting.accounting-dashboard-harvester-se-plugin.1.3.0" date="${buildDate}">
|
||||
<Change>Allowed partial harvesting of the current period #17800</Change>
|
||||
</Changeset>
|
||||
<Changeset component="org.gcube.accounting.accounting-dashboard-harvester-se-plugin.1.2.0" date="2019-10-04">
|
||||
<Change>Removed Home Library dependecy #17128</Change>
|
||||
<Change>Removed ckan-util-library dependecy #17128</Change>
|
||||
</Changeset>
|
||||
<Changeset component="org.gcube.accounting.accounting-dashboard-harvester-se-plugin.1.1.0" date="2019-02-26">
|
||||
<Change>Fixed scope of dependencies #12985</Change>
|
||||
</Changeset>
|
||||
<Changeset component="org.gcube.accounting.accounting-dashboard-harvester-se-plugin.1.0.0" date="2018-10-10">
|
||||
<Change>First Release</Change>
|
||||
</Changeset>
|
||||
</ReleaseNotes>
|
|
@ -1,28 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE xml>
|
||||
<Resource>
|
||||
<ID />
|
||||
<Type>Service</Type>
|
||||
<Profile>
|
||||
<Description>${description}</Description>
|
||||
<Class>${serviceClass}</Class>
|
||||
<Name>${artifactId}</Name>
|
||||
<Version>1.0.0</Version>
|
||||
<Packages>
|
||||
<Software>
|
||||
<Description>${description}</Description>
|
||||
<Name>${artifactId}</Name>
|
||||
<Version>${version}</Version>
|
||||
<MavenCoordinates>
|
||||
<groupId>${groupId}</groupId>
|
||||
<artifactId>${artifactId}</artifactId>
|
||||
<version>${version}</version>
|
||||
</MavenCoordinates>
|
||||
<Type>Plugin</Type>
|
||||
<Files>
|
||||
<File>${build.finalName}.${project.packaging}</File>
|
||||
</Files>
|
||||
</Software>
|
||||
</Packages>
|
||||
</Profile>
|
||||
</Resource>
|
|
@ -0,0 +1,4 @@
|
|||
groupId=${groupId}
|
||||
artifactId=${artifactId}
|
||||
version=${version}
|
||||
description=${description}
|
262
pom.xml
262
pom.xml
|
@ -5,16 +5,20 @@
|
|||
<parent>
|
||||
<artifactId>maven-parent</artifactId>
|
||||
<groupId>org.gcube.tools</groupId>
|
||||
<version>1.1.0</version>
|
||||
<version>1.2.0</version>
|
||||
<relativePath />
|
||||
</parent>
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<groupId>org.gcube.accounting</groupId>
|
||||
<artifactId>accounting-dashboard-harvester-se-plugin</artifactId>
|
||||
<version>1.3.0</version>
|
||||
<name>Accounting Dashboard Harvester SmartExecutor Plugin</name>
|
||||
<description>Accounting Dashboard Harvester SmartExecutor Plugin</description>
|
||||
<version>2.5.0</version>
|
||||
<name>Accounting Dashboard Harvester Smart Executor Plugin</name>
|
||||
<description>
|
||||
Accounting Dashboard Harvester Smart Executor Plugin harvest accounting
|
||||
data from different sources, harmonize them and store in a PostgreSQL
|
||||
database to enable aggregated infrastructure analysis.
|
||||
</description>
|
||||
|
||||
<scm>
|
||||
<connection>scm:git:https://code-repo.d4science.org/gCubeSystem/${project.artifactId}.git</connection>
|
||||
|
@ -24,23 +28,15 @@
|
|||
|
||||
<properties>
|
||||
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
|
||||
<distroDirectory>distro</distroDirectory>
|
||||
<serviceClass>Accounting</serviceClass>
|
||||
</properties>
|
||||
|
||||
<dependencyManagement>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.gcube.distribution</groupId>
|
||||
<artifactId>gcube-bom</artifactId>
|
||||
<version>LATEST</version>
|
||||
<type>pom</type>
|
||||
<scope>import</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.distribution</groupId>
|
||||
<artifactId>gcube-smartgears-bom</artifactId>
|
||||
<version>LATEST</version>
|
||||
<groupId>org.gcube.vremanagement</groupId>
|
||||
<artifactId>smart-executor-bom</artifactId>
|
||||
<version>3.3.0</version>
|
||||
<type>pom</type>
|
||||
<scope>import</scope>
|
||||
</dependency>
|
||||
|
@ -48,38 +44,9 @@
|
|||
</dependencyManagement>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.gcube.vremanagement</groupId>
|
||||
<artifactId>smart-executor-api</artifactId>
|
||||
<version>[1.5.0, 2.0.0-SNAPSHOT)</version>
|
||||
<scope>provided</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.vremanagement</groupId>
|
||||
<artifactId>smart-executor-client</artifactId>
|
||||
<version>[1.3.0,2.0.0-SNAPSHOT)</version>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-lang</groupId>
|
||||
<artifactId>commons-lang</artifactId>
|
||||
<version>2.6</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.core</groupId>
|
||||
<artifactId>common-encryption</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.portlets.admin</groupId>
|
||||
|
@ -87,113 +54,103 @@
|
|||
<version>[2.7.2,3.0.0-SNAPSHOT)</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.apis</groupId>
|
||||
<artifactId>google-api-services-analyticsreporting</artifactId>
|
||||
<version>v4-rev124-1.23.0</version>
|
||||
<groupId>com.google.analytics</groupId>
|
||||
<artifactId>google-analytics-data</artifactId>
|
||||
<version>0.16.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.guava</groupId>
|
||||
<artifactId>guava</artifactId>
|
||||
<version>23.6-jre</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.auth</groupId>
|
||||
<artifactId>google-auth-library-oauth2-http</artifactId>
|
||||
<version>1.12.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
<version>2.8.4</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.http-client</groupId>
|
||||
<artifactId>google-http-client-gson</artifactId>
|
||||
<version>1.21.0</version>
|
||||
</dependency>
|
||||
|
||||
<!-- <dependency> -->
|
||||
<!-- <groupId>org.gcube.common</groupId> -->
|
||||
<!-- <artifactId>storagehub-client-library</artifactId> -->
|
||||
<!-- <exclusions> -->
|
||||
<!-- <exclusion> -->
|
||||
<!-- <groupId>com.fasterxml.jackson.core</groupId> -->
|
||||
<!-- <artifactId>jackson-core</artifactId> -->
|
||||
<!-- </exclusion> -->
|
||||
<!-- </exclusions> -->
|
||||
<!-- </dependency> -->
|
||||
<dependency>
|
||||
<groupId>org.gcube.common</groupId>
|
||||
<artifactId>storagehub-client-library</artifactId>
|
||||
<version>[1.0.0, 2.0.0-SNAPSHOT)</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.gcube.data-publishing</groupId>
|
||||
<groupId>org.gcube.data-catalogue</groupId>
|
||||
<artifactId>gcat-client</artifactId>
|
||||
<version>[1.0.0, 2.0.0-SNAPSHOT)</version>
|
||||
<version>[2.0.0, 3.0.0-SNAPSHOT)</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Dependencies forced to provided -->
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-annotations</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.resources</groupId>
|
||||
<artifactId>common-gcore-resources</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.common</groupId>
|
||||
<artifactId>authorization-client</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.common</groupId>
|
||||
<artifactId>common-authorization</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.core</groupId>
|
||||
<artifactId>common-gcore-stubs</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.core</groupId>
|
||||
<artifactId>common-scope-maps</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<!-- END Dependencies forced to provided -->
|
||||
|
||||
<dependency>
|
||||
<groupId>org.gcube.resources.discovery</groupId>
|
||||
<artifactId>ic-client</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.resources</groupId>
|
||||
<artifactId>registry-publisher</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.gcube.accounting</groupId>
|
||||
<artifactId>accounting-analytics</artifactId>
|
||||
<version>[2.0.0,3.0.0-SNAPSHOT)</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.accounting</groupId>
|
||||
<artifactId>accounting-analytics-persistence-couchbase</artifactId>
|
||||
<version>[1.0.0,2.0.0-SNAPSHOT)</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.gcube.accounting</groupId>
|
||||
<artifactId>accounting-lib</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.gcube.core</groupId>
|
||||
<artifactId>common-encryption</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.vremanagement</groupId>
|
||||
<artifactId>smart-executor-api</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.accounting</groupId>
|
||||
<artifactId>accounting-analytics</artifactId>
|
||||
<version>[3.0.0,4.0.0-SNAPSHOT)</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.accounting</groupId>
|
||||
<artifactId>accounting-analytics-persistence-postgresql</artifactId>
|
||||
<version>[1.0.0,2.0.0-SNAPSHOT)</version>
|
||||
<scope>runtime</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.accounting</groupId>
|
||||
<artifactId>accounting-summary-access</artifactId>
|
||||
<version>[1.0.0,2.0.0-SNAPSHOT)</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>org.ancoron.postgresql</groupId>
|
||||
<artifactId>org.postgresql</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.social-networking</groupId>
|
||||
<artifactId>social-service-client</artifactId>
|
||||
<version>[1.0.0, 2.0.0-SNAPSHOT)</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.json</groupId>
|
||||
<artifactId>json</artifactId>
|
||||
<version>20171018</version>
|
||||
<scope>compile</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.common</groupId>
|
||||
<artifactId>authorization-utils</artifactId>
|
||||
<version>[2.2.0, 3.0.0-SNAPSHOT)</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Test Dependencies -->
|
||||
<!-- Test Dependencies. Setting scope to provided to allow proper creation
|
||||
of uber-jar -->
|
||||
<dependency>
|
||||
<groupId>org.gcube.vremanagement</groupId>
|
||||
<artifactId>smart-executor-client</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
|
@ -203,24 +160,69 @@
|
|||
<dependency>
|
||||
<groupId>ch.qos.logback</groupId>
|
||||
<artifactId>logback-classic</artifactId>
|
||||
<version>1.0.13</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-dependency-plugin</artifactId>
|
||||
<version>3.1.1</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>copy-dependencies</id>
|
||||
<phase>compile</phase>
|
||||
<goals>
|
||||
<goal>copy-dependencies</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<includeScope>runtime</includeScope>
|
||||
<outputDirectory>${basedir}${file.separator}target${file.separator}libs</outputDirectory>
|
||||
<overWriteReleases>false</overWriteReleases>
|
||||
<overWriteSnapshots>false</overWriteSnapshots>
|
||||
<overWriteIfNewer>true</overWriteIfNewer>
|
||||
<excludeTypes>war</excludeTypes>
|
||||
<stripVersion>false</stripVersion>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-assembly-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>make-uberjar</id>
|
||||
<phase>install</phase>
|
||||
<id>uberjar</id>
|
||||
<configuration>
|
||||
<appendAssemblyId>true</appendAssemblyId>
|
||||
<descriptors>
|
||||
<descriptor>uberjar.xml</descriptor>
|
||||
</descriptors>
|
||||
</configuration>
|
||||
<phase>package</phase>
|
||||
<goals>
|
||||
<goal>single</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-javadoc-plugin</artifactId>
|
||||
<configuration>
|
||||
<additionalparam>-Xdoclint:none</additionalparam>
|
||||
<additionalJOption>-Xdoclint:none</additionalJOption>
|
||||
</configuration>
|
||||
<version>3.1.0</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>make-servicearchive</id>
|
||||
<id>generate-doc</id>
|
||||
<phase>install</phase>
|
||||
<goals>
|
||||
<goal>jar</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
|
|
@ -16,14 +16,18 @@ import org.gcube.accounting.accounting.summary.access.AccountingDao;
|
|||
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
|
||||
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
|
||||
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
|
||||
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
|
||||
import org.gcube.com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import org.gcube.common.authorization.utils.secret.Secret;
|
||||
import org.gcube.common.scope.impl.ScopeBean;
|
||||
import org.gcube.common.scope.impl.ScopeBean.Type;
|
||||
import org.gcube.dataharvest.harvester.CatalogueAccessesHarvester;
|
||||
import org.gcube.dataharvest.harvester.CoreServicesAccessesHarvester;
|
||||
import org.gcube.dataharvest.harvester.JupyterAccessesHarvester;
|
||||
import org.gcube.dataharvest.harvester.MethodInvocationHarvester;
|
||||
import org.gcube.dataharvest.harvester.RStudioAccessesHarvester;
|
||||
import org.gcube.dataharvest.harvester.SocialInteractionsHarvester;
|
||||
import org.gcube.dataharvest.harvester.VREAccessesHarvester;
|
||||
import org.gcube.dataharvest.harvester.VREUsersHarvester;
|
||||
import org.gcube.dataharvest.harvester.sobigdata.DataMethodDownloadHarvester;
|
||||
import org.gcube.dataharvest.harvester.sobigdata.ResourceCatalogueHarvester;
|
||||
import org.gcube.dataharvest.harvester.sobigdata.TagMeMethodInvocationHarvester;
|
||||
import org.gcube.dataharvest.utils.AggregationType;
|
||||
|
@ -38,9 +42,9 @@ import org.slf4j.LoggerFactory;
|
|||
* @author Eric Perrone (ISTI - CNR)
|
||||
* @author Luca Frosini (ISTI - CNR)
|
||||
*/
|
||||
public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDeclaration> {
|
||||
public class AccountingDashboardHarvesterPlugin extends Plugin {
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterPlugin.class);
|
||||
private static Logger logger = LoggerFactory.getLogger(AccountingDashboardHarvesterPlugin.class);
|
||||
|
||||
private static final String PROPERTY_FILENAME = "config.properties";
|
||||
|
||||
|
@ -51,10 +55,11 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
|
|||
public static final String DRY_RUN_INPUT_PARAMETER = "dryRun";
|
||||
|
||||
/**
|
||||
* Allows partial harvesting of data of the current period.
|
||||
* This means that in MONTHLY aggregation type the current month is harvested instead of the previous month which
|
||||
* is done when the month is completed.
|
||||
* This allow the portlet to display monthly data in the current moth even the data is partial (till the current day).
|
||||
* Allows partial harvesting of data of the current period. This means that
|
||||
* in MONTHLY aggregation type the current month is harvested instead of the
|
||||
* previous month which is done when the month is completed. This allow the
|
||||
* portlet to display monthly data in the current moth even the data is
|
||||
* partial (till the current day).
|
||||
*/
|
||||
public static final String PARTIAL_HARVESTING = "partialHarvesting";
|
||||
|
||||
|
@ -70,27 +75,13 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
|
|||
protected Date start;
|
||||
protected Date end;
|
||||
|
||||
public AccountingDataHarvesterPlugin(DataHarvestPluginDeclaration pluginDeclaration) {
|
||||
super(pluginDeclaration);
|
||||
}
|
||||
|
||||
private static final InheritableThreadLocal<Properties> properties = new InheritableThreadLocal<Properties>() {
|
||||
|
||||
@Override
|
||||
protected Properties initialValue() {
|
||||
return new Properties();
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
|
||||
public static InheritableThreadLocal<Properties> getProperties() {
|
||||
return properties;
|
||||
public AccountingDashboardHarvesterPlugin() {
|
||||
super();
|
||||
}
|
||||
|
||||
public static Dimension getDimension(String key) {
|
||||
Dimension dimension = dimensions.get().get(key);
|
||||
if(dimension == null) {
|
||||
if (dimension == null) {
|
||||
dimension = new Dimension(key, key, null, key);
|
||||
}
|
||||
return dimension;
|
||||
|
@ -105,12 +96,11 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
|
|||
|
||||
};
|
||||
|
||||
|
||||
public static ScopeDescriptor getScopeDescriptor(String context) {
|
||||
return scopeDescriptors.get().get(context);
|
||||
}
|
||||
|
||||
protected static final InheritableThreadLocal<Map<String, ScopeDescriptor>> scopeDescriptors = new InheritableThreadLocal<Map<String, ScopeDescriptor>>() {
|
||||
public static final InheritableThreadLocal<Map<String, ScopeDescriptor>> scopeDescriptors = new InheritableThreadLocal<Map<String, ScopeDescriptor>>() {
|
||||
|
||||
@Override
|
||||
protected Map<String, ScopeDescriptor> initialValue() {
|
||||
|
@ -119,29 +109,15 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
|
|||
|
||||
};
|
||||
|
||||
public static ScopeDescriptor getScopeDescriptor() {
|
||||
return scopeDescriptor.get();
|
||||
}
|
||||
|
||||
public static final InheritableThreadLocal<ScopeDescriptor> scopeDescriptor = new InheritableThreadLocal<ScopeDescriptor>() {
|
||||
|
||||
@Override
|
||||
protected ScopeDescriptor initialValue() {
|
||||
return new ScopeDescriptor("","");
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
|
||||
|
||||
public Properties getConfigParameters() throws IOException {
|
||||
@JsonIgnore
|
||||
public static Properties getConfigParameters() throws IOException {
|
||||
Properties properties = new Properties();
|
||||
try {
|
||||
InputStream input = AccountingDataHarvesterPlugin.class.getClassLoader()
|
||||
InputStream input = AccountingDashboardHarvesterPlugin.class.getClassLoader()
|
||||
.getResourceAsStream(PROPERTY_FILENAME);
|
||||
properties.load(input);
|
||||
return properties;
|
||||
} catch(Exception e) {
|
||||
} catch (Exception e) {
|
||||
logger.warn(
|
||||
"Unable to load {} file containing configuration properties. AccountingDataHarvesterPlugin will use defaults",
|
||||
PROPERTY_FILENAME);
|
||||
|
@ -151,136 +127,198 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
|
|||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public void launch(Map<String,Object> inputs) throws Exception {
|
||||
public void launch(Map<String, Object> inputs) throws Exception {
|
||||
logger.debug("{} is starting", this.getClass().getSimpleName());
|
||||
|
||||
if(inputs == null || inputs.isEmpty()) {
|
||||
if (inputs == null || inputs.isEmpty()) {
|
||||
throw new IllegalArgumentException("The can only be launched providing valid input parameters");
|
||||
}
|
||||
|
||||
if(!inputs.containsKey(MEASURE_TYPE_INPUT_PARAMETER)) {
|
||||
if (!inputs.containsKey(MEASURE_TYPE_INPUT_PARAMETER)) {
|
||||
throw new IllegalArgumentException("Please set required parameter '" + MEASURE_TYPE_INPUT_PARAMETER + "'");
|
||||
}
|
||||
|
||||
AggregationType aggregationType = AggregationType.valueOf((String) inputs.get(MEASURE_TYPE_INPUT_PARAMETER));
|
||||
|
||||
boolean reRun = true;
|
||||
if(inputs.containsKey(RERUN_INPUT_PARAMETER)) {
|
||||
if (inputs.containsKey(RERUN_INPUT_PARAMETER)) {
|
||||
try {
|
||||
reRun = (boolean) inputs.get(RERUN_INPUT_PARAMETER);
|
||||
} catch(Exception e) {
|
||||
} catch (Exception e) {
|
||||
throw new IllegalArgumentException("'" + RERUN_INPUT_PARAMETER + "' must be a boolean");
|
||||
}
|
||||
}
|
||||
|
||||
boolean getVREUsers = true;
|
||||
if(inputs.containsKey(GET_VRE_USERS_INPUT_PARAMETER)) {
|
||||
if (inputs.containsKey(GET_VRE_USERS_INPUT_PARAMETER)) {
|
||||
try {
|
||||
reRun = (boolean) inputs.get(GET_VRE_USERS_INPUT_PARAMETER);
|
||||
} catch(Exception e) {
|
||||
getVREUsers = (boolean) inputs.get(GET_VRE_USERS_INPUT_PARAMETER);
|
||||
} catch (Exception e) {
|
||||
throw new IllegalArgumentException("'" + GET_VRE_USERS_INPUT_PARAMETER + "' must be a boolean");
|
||||
}
|
||||
}
|
||||
|
||||
boolean dryRun = true;
|
||||
if(inputs.containsKey(DRY_RUN_INPUT_PARAMETER)) {
|
||||
if (inputs.containsKey(DRY_RUN_INPUT_PARAMETER)) {
|
||||
try {
|
||||
dryRun = (boolean) inputs.get(DRY_RUN_INPUT_PARAMETER);
|
||||
} catch(Exception e) {
|
||||
} catch (Exception e) {
|
||||
throw new IllegalArgumentException("'" + DRY_RUN_INPUT_PARAMETER + "' must be a boolean");
|
||||
}
|
||||
}
|
||||
|
||||
boolean partialHarvesting = false;
|
||||
if(inputs.containsKey(PARTIAL_HARVESTING)) {
|
||||
if (inputs.containsKey(PARTIAL_HARVESTING)) {
|
||||
partialHarvesting = (boolean) inputs.get(PARTIAL_HARVESTING);
|
||||
}
|
||||
|
||||
if(inputs.containsKey(START_DATE_INPUT_PARAMETER)) {
|
||||
if (inputs.containsKey(START_DATE_INPUT_PARAMETER)) {
|
||||
String startDateString = (String) inputs.get(START_DATE_INPUT_PARAMETER);
|
||||
start = DateUtils.UTC_DATE_FORMAT.parse(startDateString + " " + DateUtils.UTC);
|
||||
} else {
|
||||
start = DateUtils.getPreviousPeriod(aggregationType, partialHarvesting).getTime();
|
||||
}
|
||||
|
||||
end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1, partialHarvesting);
|
||||
end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1);
|
||||
|
||||
logger.debug("Harvesting from {} to {} (ReRun:{} - GetVREUsers:{} - DryRun:{})",
|
||||
DateUtils.format(start), DateUtils.format(end), reRun, getVREUsers, dryRun);
|
||||
|
||||
Properties properties = getConfigParameters();
|
||||
getProperties().set(properties);
|
||||
logger.debug("Harvesting from {} to {} (ReRun:{} - GetVREUsers:{} - DryRun:{})", DateUtils.format(start),
|
||||
DateUtils.format(end), reRun, getVREUsers, dryRun);
|
||||
|
||||
ContextAuthorization contextAuthorization = new ContextAuthorization();
|
||||
SortedSet<String> contexts = contextAuthorization.getContexts();
|
||||
String root = contexts.first();
|
||||
Utils.setContext(contextAuthorization.getSecretForContext(root));
|
||||
|
||||
// DatabaseManager dbaseManager = new DatabaseManager();
|
||||
AccountingDao dao = AccountingDao.get();
|
||||
|
||||
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
|
||||
Map<String,ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
|
||||
for(ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
|
||||
Map<String, ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
|
||||
for (ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
|
||||
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
|
||||
}
|
||||
scopeDescriptors.set(scopeDescriptorMap);
|
||||
|
||||
|
||||
Set<Dimension> dimensionSet = dao.getDimensions();
|
||||
Map<String,Dimension> dimensionMap = new HashMap<>();
|
||||
for(Dimension dimension : dimensionSet) {
|
||||
Map<String, Dimension> dimensionMap = new HashMap<>();
|
||||
for (Dimension dimension : dimensionSet) {
|
||||
dimensionMap.put(dimension.getId(), dimension);
|
||||
}
|
||||
|
||||
dimensions.set(dimensionMap);
|
||||
|
||||
|
||||
SortedSet<String> contexts = contextAuthorization.getContexts();
|
||||
|
||||
// ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
|
||||
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
|
||||
|
||||
String initialToken = SecurityTokenProvider.instance.get();
|
||||
|
||||
VREAccessesHarvester vreAccessesHarvester = null;
|
||||
JupyterAccessesHarvester jupyterAccessesHarvester = null;
|
||||
RStudioAccessesHarvester rstudioAccessesHarvester = null;
|
||||
|
||||
for(String context : contexts) {
|
||||
Secret rootSecret = null;
|
||||
|
||||
for (String context : contexts) {
|
||||
// Setting the token for the context
|
||||
Utils.setContext(contextAuthorization.getTokenForContext(context));
|
||||
Secret secret = contextAuthorization.getSecretForContext(context);
|
||||
Utils.setContext(secret);
|
||||
|
||||
ScopeBean scopeBean = new ScopeBean(context);
|
||||
|
||||
ScopeDescriptor actualScopeDescriptor = scopeDescriptorMap.get(context);
|
||||
if(actualScopeDescriptor==null) {
|
||||
if (actualScopeDescriptor == null) {
|
||||
actualScopeDescriptor = new ScopeDescriptor(scopeBean.name(), context);
|
||||
scopeDescriptorMap.put(actualScopeDescriptor.getId(), actualScopeDescriptor);
|
||||
}
|
||||
|
||||
scopeDescriptor.set(actualScopeDescriptor);
|
||||
if (scopeBean.is(Type.INFRASTRUCTURE)) {
|
||||
try {
|
||||
|
||||
rootSecret = secret;
|
||||
|
||||
if(vreAccessesHarvester == null) {
|
||||
CatalogueAccessesHarvester catalogueHarvester = new CatalogueAccessesHarvester(start, end);
|
||||
List<AccountingRecord> harvested = catalogueHarvester.getAccountingRecords();
|
||||
accountingRecords.addAll(harvested);
|
||||
|
||||
if(scopeBean.is(Type.INFRASTRUCTURE)) {
|
||||
CoreServicesAccessesHarvester coreServicesHarvester = new CoreServicesAccessesHarvester(start, end);
|
||||
List<AccountingRecord> records = coreServicesHarvester.getAccountingRecords();
|
||||
accountingRecords.addAll(records);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error harvesting {} for {}", CatalogueAccessesHarvester.class.getSimpleName(),
|
||||
context, e);
|
||||
}
|
||||
}
|
||||
|
||||
if (vreAccessesHarvester == null) {
|
||||
|
||||
if (scopeBean.is(Type.INFRASTRUCTURE)) {
|
||||
vreAccessesHarvester = new VREAccessesHarvester(start, end);
|
||||
} else {
|
||||
// This code should be never used because the scopes are sorted by fullname
|
||||
// This code should be never used because the scopes are
|
||||
// sorted by fullname
|
||||
|
||||
ScopeBean parent = scopeBean.enclosingScope();
|
||||
while(!parent.is(Type.INFRASTRUCTURE)) {
|
||||
while (!parent.is(Type.INFRASTRUCTURE)) {
|
||||
parent = scopeBean.enclosingScope();
|
||||
}
|
||||
|
||||
// Setting back token for the context
|
||||
Utils.setContext(contextAuthorization.getTokenForContext(parent.toString()));
|
||||
Utils.setContext(contextAuthorization.getSecretForContext(parent.toString()));
|
||||
|
||||
vreAccessesHarvester = new VREAccessesHarvester(start, end);
|
||||
|
||||
// Setting back token for the context
|
||||
Utils.setContext(contextAuthorization.getTokenForContext(context));
|
||||
Utils.setContext(contextAuthorization.getSecretForContext(context));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if (rstudioAccessesHarvester == null) {
|
||||
|
||||
if((context.startsWith(SO_BIG_DATA_VO) || context.startsWith(SO_BIG_DATA_EU_VRE)
|
||||
if (scopeBean.is(Type.INFRASTRUCTURE)) {
|
||||
rstudioAccessesHarvester = new RStudioAccessesHarvester(start, end);
|
||||
} else {
|
||||
// This code should be never used because the scopes are
|
||||
// sorted by fullname
|
||||
|
||||
ScopeBean parent = scopeBean.enclosingScope();
|
||||
while (!parent.is(Type.INFRASTRUCTURE)) {
|
||||
parent = scopeBean.enclosingScope();
|
||||
}
|
||||
|
||||
// Setting back token for the context
|
||||
Utils.setContext(contextAuthorization.getSecretForContext(parent.toString()));
|
||||
|
||||
rstudioAccessesHarvester = new RStudioAccessesHarvester(start, end);
|
||||
|
||||
// Setting back token for the context
|
||||
Utils.setContext(contextAuthorization.getSecretForContext(context));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if (jupyterAccessesHarvester == null) {
|
||||
|
||||
if (scopeBean.is(Type.INFRASTRUCTURE)) {
|
||||
jupyterAccessesHarvester = new JupyterAccessesHarvester(start, end);
|
||||
} else {
|
||||
// This code should be never used because the scopes are
|
||||
// sorted by fullname
|
||||
|
||||
ScopeBean parent = scopeBean.enclosingScope();
|
||||
while (!parent.is(Type.INFRASTRUCTURE)) {
|
||||
parent = scopeBean.enclosingScope();
|
||||
}
|
||||
|
||||
// Setting back token for the context
|
||||
Utils.setContext(contextAuthorization.getSecretForContext(parent.toString()));
|
||||
|
||||
jupyterAccessesHarvester = new JupyterAccessesHarvester(start, end);
|
||||
|
||||
// Setting back token for the context
|
||||
Utils.setContext(contextAuthorization.getSecretForContext(context));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if ((context.startsWith(SO_BIG_DATA_VO) || context.startsWith(SO_BIG_DATA_EU_VRE)
|
||||
|| context.startsWith(SO_BIG_DATA_IT_VRE))
|
||||
&& start.before(DateUtils.getStartCalendar(2018, Calendar.APRIL, 1).getTime())) {
|
||||
logger.info("Not Harvesting for {} from {} to {}", context, DateUtils.format(start),
|
||||
|
@ -295,13 +333,36 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
|
|||
accountingRecords.addAll(harvested);
|
||||
|
||||
/*
|
||||
List<HarvestedData> harvested = vreAccessesHarvester.getData();
|
||||
data.addAll(harvested);
|
||||
* List<HarvestedData> harvested =
|
||||
* vreAccessesHarvester.getData(); data.addAll(harvested);
|
||||
*/
|
||||
} catch(Exception e) {
|
||||
} catch (Exception e) {
|
||||
logger.error("Error harvesting VRE Accesses for {}", context, e);
|
||||
}
|
||||
|
||||
try {
|
||||
// Collecting Google Analytics Data for R Studio Accesses
|
||||
logger.info("Going to harvest R Studio Accesses for {}", context);
|
||||
|
||||
List<AccountingRecord> harvested = rstudioAccessesHarvester.getAccountingRecords();
|
||||
accountingRecords.addAll(harvested);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error harvesting R Studio Accesses for {}", context, e);
|
||||
}
|
||||
|
||||
|
||||
try {
|
||||
// Collecting Google Analytics Data for Jupyters Accesses
|
||||
logger.info("Going to harvest Jupyter Accesses for {}", context);
|
||||
|
||||
List<AccountingRecord> harvested = jupyterAccessesHarvester.getAccountingRecords();
|
||||
accountingRecords.addAll(harvested);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error harvesting Jupyeter Accesses for {}", context, e);
|
||||
}
|
||||
|
||||
try {
|
||||
// Collecting info on social (posts, replies and likes)
|
||||
logger.info("Going to harvest Social Interactions for {}", context);
|
||||
|
@ -310,20 +371,18 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
|
|||
List<AccountingRecord> harvested = socialHarvester.getAccountingRecords();
|
||||
accountingRecords.addAll(harvested);
|
||||
|
||||
/*
|
||||
List<HarvestedData> harvested = socialHarvester.getData();
|
||||
data.addAll(harvested);
|
||||
*/
|
||||
} catch(Exception e) {
|
||||
} catch (Exception e) {
|
||||
logger.error("Error harvesting Social Interactions for {}", context, e);
|
||||
}
|
||||
|
||||
try {
|
||||
// Collecting info on VRE users
|
||||
if(getVREUsers) {
|
||||
// Harvesting Users only for VREs (not for VO and ROOT which is the sum of the children contexts)
|
||||
if (getVREUsers) {
|
||||
// Harvesting Users only for VREs (not for VO and ROOT
|
||||
// which is the sum of the children contexts)
|
||||
// The VREUsers can be only Harvested for the last month
|
||||
if(scopeBean.is(Type.VRE) && start.equals(DateUtils.getPreviousPeriod(aggregationType, partialHarvesting).getTime())) {
|
||||
if (scopeBean.is(Type.VRE) && start
|
||||
.equals(DateUtils.getPreviousPeriod(aggregationType, partialHarvesting).getTime())) {
|
||||
logger.info("Going to harvest Context Users for {}", context);
|
||||
VREUsersHarvester vreUsersHarvester = new VREUsersHarvester(start, end);
|
||||
|
||||
|
@ -331,56 +390,54 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
|
|||
accountingRecords.addAll(harvested);
|
||||
|
||||
/*
|
||||
List<HarvestedData> harvested = vreUsersHarvester.getData();
|
||||
data.addAll(harvested);
|
||||
* List<HarvestedData> harvested =
|
||||
* vreUsersHarvester.getData();
|
||||
* data.addAll(harvested);
|
||||
*/
|
||||
}
|
||||
}
|
||||
} catch(Exception e) {
|
||||
} catch (Exception e) {
|
||||
logger.error("Error harvesting Context Users for {}", context, e);
|
||||
}
|
||||
|
||||
if(context.startsWith(SO_BIG_DATA_CATALOGUE_CONTEXT)) {
|
||||
if (context.startsWith(SO_BIG_DATA_CATALOGUE_CONTEXT)) {
|
||||
|
||||
try {
|
||||
// Collecting info on Resource Catalogue (Dataset, Application, Deliverables, Methods)
|
||||
// Collecting info on Resource Catalogue (Dataset,
|
||||
// Application, Deliverables, Methods)
|
||||
logger.info("Going to harvest Resource Catalogue Information for {}", context);
|
||||
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end,
|
||||
contexts);
|
||||
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start,
|
||||
end, contexts);
|
||||
|
||||
List<AccountingRecord> harvested = resourceCatalogueHarvester.getAccountingRecords();
|
||||
accountingRecords.addAll(harvested);
|
||||
|
||||
/*
|
||||
List<HarvestedData> harvested = resourceCatalogueHarvester.getData();
|
||||
data.addAll(harvested);
|
||||
* List<HarvestedData> harvested =
|
||||
* resourceCatalogueHarvester.getData();
|
||||
* data.addAll(harvested);
|
||||
*/
|
||||
|
||||
} catch(Exception e) {
|
||||
} catch (Exception e) {
|
||||
logger.error("Error harvesting Resource Catalogue Information for {}", context, e);
|
||||
}
|
||||
|
||||
try {
|
||||
// Collecting info on Data/Method download
|
||||
logger.info("Going to harvest Data Method Download for {}", context);
|
||||
DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start,
|
||||
end, contexts);
|
||||
|
||||
List<AccountingRecord> harvested = dataMethodDownloadHarvester.getAccountingRecords();
|
||||
accountingRecords.addAll(harvested);
|
||||
|
||||
/*
|
||||
List<HarvestedData> harvested = dataMethodDownloadHarvester.getData();
|
||||
data.addAll(harvested);
|
||||
*/
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("Error harvesting Data Method Download for {}", context, e);
|
||||
}
|
||||
// try {
|
||||
// // Collecting info on Data/Method download
|
||||
// logger.info("Going to harvest Data Method Download for {}", context);
|
||||
// DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start,
|
||||
// end, contexts);
|
||||
//
|
||||
// List<AccountingRecord> harvested = dataMethodDownloadHarvester.getAccountingRecords();
|
||||
// accountingRecords.addAll(harvested);
|
||||
//
|
||||
// } catch (Exception e) {
|
||||
// logger.error("Error harvesting Data Method Download for {}", context, e);
|
||||
// }
|
||||
|
||||
}
|
||||
|
||||
if(context.startsWith(TAGME_CONTEXT)) {
|
||||
if (context.startsWith(TAGME_CONTEXT)) {
|
||||
try {
|
||||
// Collecting info on method invocation
|
||||
logger.info("Going to harvest Method Invocations for {}", context);
|
||||
|
@ -391,11 +448,12 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
|
|||
accountingRecords.addAll(harvested);
|
||||
|
||||
/*
|
||||
List<HarvestedData> harvested = tagMeMethodInvocationHarvester.getData();
|
||||
data.addAll(harvested);
|
||||
* List<HarvestedData> harvested =
|
||||
* tagMeMethodInvocationHarvester.getData();
|
||||
* data.addAll(harvested);
|
||||
*/
|
||||
|
||||
} catch(Exception e) {
|
||||
} catch (Exception e) {
|
||||
logger.error("Error harvesting Method Invocations for {}", context, e);
|
||||
}
|
||||
} else {
|
||||
|
@ -404,28 +462,28 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
|
|||
logger.info("Going to harvest Method Invocations for {}", context);
|
||||
MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(start, end);
|
||||
|
||||
|
||||
List<AccountingRecord> harvested = methodInvocationHarvester.getAccountingRecords();
|
||||
accountingRecords.addAll(harvested);
|
||||
|
||||
/*
|
||||
List<HarvestedData> harvested = methodInvocationHarvester.getData();
|
||||
data.addAll(harvested);
|
||||
* List<HarvestedData> harvested =
|
||||
* methodInvocationHarvester.getData();
|
||||
* data.addAll(harvested);
|
||||
*/
|
||||
} catch(Exception e) {
|
||||
} catch (Exception e) {
|
||||
logger.error("Error harvesting Method Invocations for {}", context, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Utils.setContext(initialToken);
|
||||
Utils.setContext(rootSecret);
|
||||
|
||||
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end), accountingRecords);
|
||||
if(!dryRun) {
|
||||
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end),
|
||||
accountingRecords);
|
||||
if (!dryRun) {
|
||||
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
|
||||
//dbaseManager.insertMonthlyData(start, end, data, reRun);
|
||||
}else {
|
||||
} else {
|
||||
logger.debug("Harvested measures are {}", accountingRecords);
|
||||
}
|
||||
|
|
@ -1,75 +0,0 @@
|
|||
package org.gcube.dataharvest;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.gcube.vremanagement.executor.plugin.Plugin;
|
||||
import org.gcube.vremanagement.executor.plugin.PluginDeclaration;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* @author Eric Perrone (ISTI - CNR)
|
||||
* @author Luca Frosini (ISTI - CNR)
|
||||
*/
|
||||
public class DataHarvestPluginDeclaration implements PluginDeclaration {
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(DataHarvestPluginDeclaration.class);
|
||||
|
||||
public static final String NAME = "AccountingDataHarvester";
|
||||
public static final String DESCRIPTION = "Data Harvest for Accounting Summary Dashboard";
|
||||
public static final String VERSION = "1.0.0";
|
||||
|
||||
/**{@inheritDoc}*/
|
||||
@Override
|
||||
public void init() {
|
||||
logger.debug(String.format("%s initialized", AccountingDataHarvesterPlugin.class.getSimpleName()));
|
||||
}
|
||||
|
||||
/**{@inheritDoc}*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
/**{@inheritDoc}*/
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return DESCRIPTION;
|
||||
}
|
||||
|
||||
/**{@inheritDoc}*/
|
||||
@Override
|
||||
public String getVersion() {
|
||||
return VERSION;
|
||||
}
|
||||
|
||||
/**{@inheritDoc}*/
|
||||
@Override
|
||||
public Map<String, String> getSupportedCapabilities() {
|
||||
Map<String, String> discoveredCapabilities = new HashMap<String, String>();
|
||||
discoveredCapabilities.put("FakeKey", "FakeValue");
|
||||
return discoveredCapabilities;
|
||||
}
|
||||
|
||||
/**{@inheritDoc}*/
|
||||
@Override
|
||||
public Class<? extends Plugin<? extends PluginDeclaration>> getPluginImplementation() {
|
||||
return AccountingDataHarvesterPlugin.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(){
|
||||
return String.format("{"
|
||||
+ "name:%s,"
|
||||
+ "version:%s,"
|
||||
+ "description:%s,"
|
||||
+ "pluginImplementation:%s,"
|
||||
+ "}",
|
||||
getName(),
|
||||
getVersion(),
|
||||
getDescription(),
|
||||
getPluginImplementation().getClass().getSimpleName());
|
||||
}
|
||||
|
||||
}
|
|
@ -59,12 +59,12 @@ public class AnalyticsReportCredentials {
|
|||
/**
|
||||
* Please note:
|
||||
* The key is stored in the resource with blanks " " instead of "\n" as it causes issues and
|
||||
* without the BEGIN and END Delimiters (e.g. -----END PRIVATE KEY-----) which myst be readded
|
||||
* without the BEGIN and END Delimiters (e.g. -----END PRIVATE KEY-----) which must be readded
|
||||
* @param privateKeyPem
|
||||
*/
|
||||
public void setPrivateKeyPem(String privateKeyPem) {
|
||||
privateKeyPem = privateKeyPem.replace(" ", "\n");
|
||||
this.privateKeyPem = "-----BEGIN PRIVATE KEY-----\n"+privateKeyPem+"\n-----END PRIVATE KEY-----";
|
||||
this.privateKeyPem = privateKeyPem.replace(" ", "\n");
|
||||
this.privateKeyPem = "-----BEGIN PRIVATE KEY-----\n"+this.privateKeyPem+"\n-----END PRIVATE KEY-----\n";
|
||||
}
|
||||
|
||||
public String getPrivateKeyId() {
|
||||
|
|
|
@ -0,0 +1,44 @@
|
|||
package org.gcube.dataharvest.datamodel;
|
||||
|
||||
public class CatalogueAccessesReportRow {
|
||||
private String dashboardContext;
|
||||
private HarvestedDataKey key;
|
||||
private String pagePath;
|
||||
private int visitNumber;
|
||||
|
||||
public CatalogueAccessesReportRow() {
|
||||
// TODO Auto-generated constructor stub
|
||||
}
|
||||
public HarvestedDataKey getKey() {
|
||||
return key;
|
||||
}
|
||||
public void setKey(HarvestedDataKey key) {
|
||||
this.key = key;
|
||||
}
|
||||
public String getPagePath() {
|
||||
return pagePath;
|
||||
}
|
||||
public void setPagePath(String pagePath) {
|
||||
this.pagePath = pagePath;
|
||||
}
|
||||
public int getVisitNumber() {
|
||||
return visitNumber;
|
||||
}
|
||||
public void setVisitNumber(int visitNumber) {
|
||||
this.visitNumber = visitNumber;
|
||||
}
|
||||
public String getDashboardContext() {
|
||||
return dashboardContext;
|
||||
}
|
||||
public void setDashboardContext(String dashboardContext) {
|
||||
this.dashboardContext = dashboardContext;
|
||||
}
|
||||
@Override
|
||||
public String toString() {
|
||||
return "CatalogueAccessesReportRow [dashboardContext=" + dashboardContext + ", key=" + key + ", pagePath="
|
||||
+ pagePath + ", visitNumber=" + visitNumber + "]";
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,44 @@
|
|||
package org.gcube.dataharvest.datamodel;
|
||||
|
||||
public class CoreServiceAccessesReportRow {
|
||||
private String dashboardContext;
|
||||
private HarvestedDataKey key;
|
||||
private String pagePath;
|
||||
private int visitNumber;
|
||||
|
||||
public CoreServiceAccessesReportRow() {
|
||||
// TODO Auto-generated constructor stub
|
||||
}
|
||||
public HarvestedDataKey getKey() {
|
||||
return key;
|
||||
}
|
||||
public void setKey(HarvestedDataKey key) {
|
||||
this.key = key;
|
||||
}
|
||||
public String getPagePath() {
|
||||
return pagePath;
|
||||
}
|
||||
public void setPagePath(String pagePath) {
|
||||
this.pagePath = pagePath;
|
||||
}
|
||||
public int getVisitNumber() {
|
||||
return visitNumber;
|
||||
}
|
||||
public void setVisitNumber(int visitNumber) {
|
||||
this.visitNumber = visitNumber;
|
||||
}
|
||||
public String getDashboardContext() {
|
||||
return dashboardContext;
|
||||
}
|
||||
public void setDashboardContext(String dashboardContext) {
|
||||
this.dashboardContext = dashboardContext;
|
||||
}
|
||||
@Override
|
||||
public String toString() {
|
||||
return "CoreServiceAccessesReportRow [dashboardContext=" + dashboardContext + ", key=" + key + ", pagePath="
|
||||
+ pagePath + ", visitNumber=" + visitNumber + "]";
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
|
@ -5,24 +5,29 @@ package org.gcube.dataharvest.datamodel;
|
|||
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
|
||||
* May 22, 2018
|
||||
* @author M. Assante, ISTI-CNR
|
||||
*/
|
||||
public enum HarvestedDataKey {
|
||||
|
||||
WORKSPACE_ACCESSES("Workspace Accesses"),
|
||||
MESSAGES_ACCESSES("Messages Accesses"),
|
||||
NOTIFICATIONS_ACCESSES("Notifications Accesses"),
|
||||
PROFILE_ACCESSES("Profile Accesses"),
|
||||
JUPYTER_ACCESSES("Jupyter Accesses"),
|
||||
RSTUDIO_ACCESSES("R Studio Accesses"),
|
||||
|
||||
CATALOGUE_ACCESSES("Catalogue Accesses"),
|
||||
CATALOGUE_DATASET_LIST_ACCESSES("Item List"),
|
||||
CATALOGUE_DATASET_ACCESSES("Item Metadata"),
|
||||
CATALOGUE_RESOURCE_ACCESSES("Item Resource"),
|
||||
ACCESSES("VRE Accesses"),
|
||||
USERS("VRE Users"),
|
||||
DATA_METHOD_DOWNLOAD("Data/Method download"),
|
||||
NEW_CATALOGUE_METHODS("New Catalogue Methods"),
|
||||
NEW_CATALOGUE_DATASETS("New Catalogue Datasets"),
|
||||
NEW_CATALOGUE_DELIVERABLES("New Catalogue Deliverables"),
|
||||
NEW_CATALOGUE_APPLICATIONS("New Catalogue Applications"),
|
||||
|
||||
SOCIAL_POSTS("VRE Social Interations Posts"),
|
||||
SOCIAL_REPLIES("VRE Social Interations Replies"),
|
||||
SOCIAL_LIKES("VRE Social Interations Likes"),
|
||||
METHOD_INVOCATIONS("VRE Methods Invocation"),
|
||||
VISUAL_TOOLS("VRE Visual Tools");
|
||||
METHOD_INVOCATIONS("VRE Methods Invocation");
|
||||
|
||||
private String key;
|
||||
|
||||
|
|
|
@ -12,7 +12,7 @@ import org.gcube.common.authorization.client.Constants;
|
|||
import org.gcube.common.authorization.library.AuthorizationEntry;
|
||||
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
|
||||
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
|
||||
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
|
||||
import org.gcube.dataharvest.utils.DateUtils;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -37,27 +37,10 @@ public abstract class BasicHarvester {
|
|||
logger.debug("Creating {} for the period {} {} ", this.getClass().getSimpleName(), DateUtils.format(start), DateUtils.format(end));
|
||||
}
|
||||
|
||||
public static String getCurrentContext(String token) throws Exception {
|
||||
AuthorizationEntry authorizationEntry = Constants.authorizationService().get(token);
|
||||
String context = authorizationEntry.getContext();
|
||||
logger.info("Context of token {} is {}", token, context);
|
||||
return context;
|
||||
}
|
||||
|
||||
public static void setContext(String token) throws Exception {
|
||||
SecurityTokenProvider.instance.set(token);
|
||||
ScopeProvider.instance.set(getCurrentContext(token));
|
||||
}
|
||||
|
||||
public static String getCurrentContext() throws Exception {
|
||||
String token = SecurityTokenProvider.instance.get();
|
||||
return getCurrentContext(token);
|
||||
}
|
||||
|
||||
public abstract List<AccountingRecord> getAccountingRecords() throws Exception;
|
||||
|
||||
public Dimension getDimension(HarvestedDataKey harvestedDataKey) {
|
||||
return AccountingDataHarvesterPlugin.getDimension(harvestedDataKey.getKey());
|
||||
return AccountingDashboardHarvesterPlugin.getDimension(harvestedDataKey.getKey());
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -0,0 +1,395 @@
|
|||
package org.gcube.dataharvest.harvester;
|
||||
|
||||
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
|
||||
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.StringReader;
|
||||
import java.security.GeneralSecurityException;
|
||||
import java.time.Instant;
|
||||
import java.time.LocalDate;
|
||||
import java.time.ZoneId;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
|
||||
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
|
||||
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
|
||||
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
|
||||
import org.gcube.common.encryption.encrypter.StringEncrypter;
|
||||
import org.gcube.common.resources.gcore.GenericResource;
|
||||
import org.gcube.common.resources.gcore.ServiceEndpoint;
|
||||
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
|
||||
import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
|
||||
import org.gcube.common.resources.gcore.utils.Group;
|
||||
import org.gcube.common.resources.gcore.utils.XPathHelper;
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.common.scope.impl.ScopeBean;
|
||||
import org.gcube.dataharvest.datamodel.AnalyticsReportCredentials;
|
||||
import org.gcube.dataharvest.datamodel.CatalogueAccessesReportRow;
|
||||
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
|
||||
import org.gcube.resources.discovery.client.api.DiscoveryClient;
|
||||
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.w3c.dom.Node;
|
||||
import org.xml.sax.InputSource;
|
||||
|
||||
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
|
||||
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
|
||||
import com.google.analytics.data.v1beta.DateRange;
|
||||
import com.google.analytics.data.v1beta.DateRange.Builder;
|
||||
import com.google.analytics.data.v1beta.Dimension;
|
||||
import com.google.analytics.data.v1beta.Metric;
|
||||
import com.google.analytics.data.v1beta.Row;
|
||||
import com.google.analytics.data.v1beta.RunReportRequest;
|
||||
import com.google.analytics.data.v1beta.RunReportResponse;
|
||||
import com.google.api.client.json.JsonFactory;
|
||||
import com.google.api.client.json.gson.GsonFactory;
|
||||
import com.google.api.gax.core.FixedCredentialsProvider;
|
||||
import com.google.auth.oauth2.ServiceAccountCredentials;
|
||||
|
||||
|
||||
public class CatalogueAccessesHarvester extends BasicHarvester {
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(CatalogueAccessesHarvester.class);
|
||||
|
||||
private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
|
||||
|
||||
|
||||
private static final String MAPPING_RESOURCE_CATEGORY = "BigGAnalyticsMapping";
|
||||
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
|
||||
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
|
||||
private static final String AP_CATALOGUE_PAGEVIEWS_PROPERTY = "catalogue-pageviews";
|
||||
private static final String AP_CLIENT_PROPERTY = "client_id";
|
||||
private static final String AP_PRIVATEKEY_PROPERTY = "private_key_id";
|
||||
|
||||
private static final String REGEX_CATALOGUE_ACCESSES = "^\\/$";
|
||||
private static final String REGEX_CATALOGUE_DATASET_LIST_ACCESSES = "^\\/dataset(\\?([a-zA-Z0-9_.-]*.+))*";
|
||||
private static final String REGEX_CATALOGUE_DATASET_ACCESSES = "^\\/dataset\\/[a-zA-Z0-9_.-]+$";
|
||||
private static final String REGEX_CATALOGUE_RESOURCE_ACCESSES = "^\\/dataset\\/[a-zA-Z0-9_.-]+\\/resource\\/[a-zA-Z0-9_.-]+$";
|
||||
|
||||
private HashMap<String, List<CatalogueAccessesReportRow>> catalogueAccesses;
|
||||
|
||||
public CatalogueAccessesHarvester(Date start, Date end) throws Exception {
|
||||
super(start, end);
|
||||
catalogueAccesses = getAllAccesses(start, end);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<AccountingRecord> getAccountingRecords() throws Exception {
|
||||
try {
|
||||
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
|
||||
for (String dashboardContext : catalogueAccesses.keySet()) {
|
||||
int catalogueTotalAccesses = 0;
|
||||
int catalogueDatasetListAccesses = 0;
|
||||
int catalogueDatasetAccesses = 0;
|
||||
int catalogueResourceAccesses = 0;
|
||||
logger.debug("Catalogue accesses for {} ", dashboardContext);
|
||||
for(CatalogueAccessesReportRow row : catalogueAccesses.get(dashboardContext)) {
|
||||
// String pagePath = row.getPagePath();
|
||||
switch (row.getKey()) {
|
||||
case CATALOGUE_ACCESSES:
|
||||
catalogueTotalAccesses += row.getVisitNumber();
|
||||
break;
|
||||
case CATALOGUE_DATASET_LIST_ACCESSES:
|
||||
catalogueDatasetListAccesses += row.getVisitNumber();
|
||||
break;
|
||||
case CATALOGUE_DATASET_ACCESSES:
|
||||
catalogueDatasetAccesses += row.getVisitNumber();
|
||||
break;
|
||||
case CATALOGUE_RESOURCE_ACCESSES:
|
||||
catalogueResourceAccesses += row.getVisitNumber();
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
ScopeDescriptor scopeDescriptor = new ScopeDescriptor();
|
||||
|
||||
try {
|
||||
ScopeBean scopeBean = new ScopeBean(dashboardContext);
|
||||
scopeDescriptor.setId(dashboardContext);
|
||||
scopeDescriptor.setName(scopeBean.name());
|
||||
|
||||
AccountingRecord ar1 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_ACCESSES), (long) catalogueTotalAccesses);
|
||||
AccountingRecord ar2 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_DATASET_LIST_ACCESSES), (long) catalogueDatasetListAccesses);
|
||||
AccountingRecord ar3 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_DATASET_ACCESSES), (long) catalogueDatasetAccesses);
|
||||
AccountingRecord ar4 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_RESOURCE_ACCESSES), (long) catalogueResourceAccesses);
|
||||
logger.debug("{} : {}", ar1.getDimension().getId(), ar1.getMeasure());
|
||||
accountingRecords.add(ar1);
|
||||
logger.debug("{} : {}", ar2.getDimension().getId(), ar2.getMeasure());
|
||||
accountingRecords.add(ar2);
|
||||
logger.debug("{} : {}", ar3.getDimension().getId(), ar3.getMeasure());
|
||||
accountingRecords.add(ar3);
|
||||
logger.debug("{} : {}", ar4.getDimension().getId(), ar4.getMeasure());
|
||||
accountingRecords.add(ar4);
|
||||
} catch (NullPointerException e) {
|
||||
logger.warn("I found no correspondance in the Genereric Resource for a PropertyId you should check this, type: BigGAnalyticsMapping name: AccountingDashboardMapping");
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
}
|
||||
logger.debug("Returning {} accountingRecords ", accountingRecords.size());
|
||||
return accountingRecords;
|
||||
|
||||
} catch(Exception e) {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
private static HashMap<String, List<CatalogueAccessesReportRow>> getAllAccesses(Date start, Date end) throws Exception {
|
||||
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
|
||||
logger.debug("Getting Catalogue accesses in this time range {}", dateRangeBuilder.toString());
|
||||
|
||||
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
|
||||
|
||||
logger.debug("Getting credentials credentialsFromD4S");
|
||||
|
||||
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
|
||||
|
||||
logger.debug("initializeAnalyticsReporting service settings");
|
||||
|
||||
|
||||
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
|
||||
HashMap<String, List<CatalogueAccessesReportRow>> toReturn = new HashMap<>();
|
||||
|
||||
for(String view : responses.keySet()) {
|
||||
String dashboardContext = getAccountingDashboardContextGivenGAViewID(view);
|
||||
logger.info("\n\n**************** Parsing responses for this catalogue view, which corresponds to Dashboard Context: " + dashboardContext);
|
||||
List<CatalogueAccessesReportRow> viewReport = parseResponse(view, responses.get(view), dashboardContext);
|
||||
logger.trace("Got {} entries from view id={}", viewReport.size(), view);
|
||||
toReturn.put(dashboardContext, viewReport);
|
||||
}
|
||||
return toReturn;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes an Google Analytics Data API service object.
|
||||
*
|
||||
* @return An authorized Google Analytics Data API
|
||||
* @throws IOException
|
||||
* @throws GeneralSecurityException
|
||||
*/
|
||||
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
|
||||
return BetaAnalyticsDataSettings.newBuilder()
|
||||
.setCredentialsProvider(FixedCredentialsProvider.create(
|
||||
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Queries Analytics Data API service
|
||||
*
|
||||
* @param service Analytics Data API service service settings.
|
||||
* @return Row Analytics Data API service
|
||||
* @throws IOException
|
||||
*/
|
||||
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
|
||||
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
|
||||
|
||||
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
|
||||
|
||||
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
|
||||
|
||||
for(String propertyId : viewIDs) {
|
||||
List<RunReportResponse> gReportResponses = new ArrayList<>();
|
||||
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
|
||||
RunReportRequest request =
|
||||
RunReportRequest.newBuilder()
|
||||
.setProperty("properties/" + propertyId)
|
||||
.addDimensions(Dimension.newBuilder().setName("pagePath"))
|
||||
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
|
||||
.addDateRanges(dateRangeBuilder)
|
||||
.build();
|
||||
|
||||
// Make the request.
|
||||
RunReportResponse response = analyticsData.runReport(request);
|
||||
gReportResponses.add(response);
|
||||
// Iterate through every row of the API response.
|
||||
// for (Row row : response.getRowsList()) {
|
||||
// System.out.printf(
|
||||
// "%s, %s%n", row.getDimensionValues(0).getValue(), row.getMetricValues(0).getValue());
|
||||
// }
|
||||
reports.put(propertyId, gReportResponses);
|
||||
}
|
||||
}
|
||||
return reports;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses and prints the Analytics Data API service respose
|
||||
*
|
||||
* @param dashboardContext
|
||||
*/
|
||||
private static List<CatalogueAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses, String dashboardContext) {
|
||||
logger.debug("parsing Response for " + viewId);
|
||||
|
||||
List<CatalogueAccessesReportRow> toReturn = new ArrayList<>();
|
||||
for (RunReportResponse response : responses) {
|
||||
for (Row row: response.getRowsList()) {
|
||||
String dimension = row.getDimensionValues(0).getValue();
|
||||
String metric = row.getMetricValues(0).getValue();
|
||||
CatalogueAccessesReportRow var = new CatalogueAccessesReportRow();
|
||||
boolean validEntry = false;
|
||||
String pagePath = dimension;
|
||||
logger.trace("parsing pagepath {}: value: {}", pagePath, Integer.parseInt(metric));
|
||||
if (pagePath.matches(REGEX_CATALOGUE_RESOURCE_ACCESSES)) {
|
||||
var.setKey(HarvestedDataKey.CATALOGUE_RESOURCE_ACCESSES);
|
||||
validEntry = true;
|
||||
}
|
||||
else if (pagePath.matches(REGEX_CATALOGUE_DATASET_ACCESSES)) {
|
||||
var.setKey(HarvestedDataKey.CATALOGUE_DATASET_ACCESSES);
|
||||
validEntry = true;
|
||||
}
|
||||
else if (pagePath.matches(REGEX_CATALOGUE_DATASET_LIST_ACCESSES)) {
|
||||
var.setKey(HarvestedDataKey.CATALOGUE_DATASET_LIST_ACCESSES);
|
||||
validEntry = true;
|
||||
}
|
||||
else if (pagePath.matches(REGEX_CATALOGUE_ACCESSES)) {
|
||||
var.setKey(HarvestedDataKey.CATALOGUE_ACCESSES);
|
||||
validEntry = true;
|
||||
}
|
||||
if (validEntry) {
|
||||
var.setDashboardContext(dashboardContext);
|
||||
var.setPagePath(dimension);
|
||||
var.setVisitNumber(Integer.parseInt(metric));
|
||||
toReturn.add(var);
|
||||
}
|
||||
}
|
||||
}
|
||||
return toReturn;
|
||||
}
|
||||
|
||||
|
||||
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
|
||||
throws Exception {
|
||||
String scope = infrastructureScope;
|
||||
String currScope = ScopeProvider.instance.get();
|
||||
ScopeProvider.instance.set(scope);
|
||||
SimpleQuery query = queryFor(ServiceEndpoint.class);
|
||||
query.addCondition("$resource/Profile/Category/text() eq '" + SERVICE_ENDPOINT_CATEGORY + "'");
|
||||
query.addCondition("$resource/Profile/Name/text() eq '" + SERVICE_ENDPOINT_NAME + "'");
|
||||
DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
|
||||
List<ServiceEndpoint> toReturn = client.submit(query);
|
||||
ScopeProvider.instance.set(currScope);
|
||||
return toReturn;
|
||||
}
|
||||
|
||||
/**
|
||||
* This method look up in the IS the Gateway which corresponds to a given Google Analytics viewId
|
||||
* @param viewID
|
||||
* @return the gateway name, e.g. "Blue-Cloud Gateway" or null if no correspondance was found
|
||||
* @throws Exception
|
||||
* @throws ObjectNotFound
|
||||
*/
|
||||
private static String getAccountingDashboardContextGivenGAViewID(String viewID) throws ObjectNotFound, Exception {
|
||||
String toReturn = null;
|
||||
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
|
||||
String currScope = ScopeProvider.instance.get();
|
||||
ScopeProvider.instance.set(context);
|
||||
SimpleQuery query = queryFor(GenericResource.class);
|
||||
query.addCondition("$resource/Profile/SecondaryType/text() eq '" + MAPPING_RESOURCE_CATEGORY + "'");
|
||||
query.addCondition("$resource/Profile/Body/Property/viewID/text() eq '" + viewID + "'");
|
||||
DiscoveryClient<GenericResource> client = clientFor(GenericResource.class);
|
||||
List<GenericResource> list = client.submit(query);
|
||||
if(list.size() > 1) {
|
||||
logger.error("Too many Generic Resources having GA viewID " + viewID
|
||||
+ " in this scope having SecondaryType " + MAPPING_RESOURCE_CATEGORY);
|
||||
} else if(list.size() == 0) {
|
||||
logger.warn("There is no Generic Resources having GA viewID " + viewID + " and SecondaryType "
|
||||
+ MAPPING_RESOURCE_CATEGORY + " in this context: " + context);
|
||||
} else {
|
||||
GenericResource found = list.get(0);
|
||||
String elem = new StringBuilder("<body>").append(found.profile().bodyAsString()).append("</body>").toString();
|
||||
DocumentBuilder docBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
|
||||
Node node = docBuilder.parse(new InputSource(new StringReader(elem))).getDocumentElement();
|
||||
XPathHelper helper = new XPathHelper(node);
|
||||
List<String> currValue = helper.evaluate("//Property/viewID/text()");
|
||||
if (currValue != null && currValue.size() > 0) {
|
||||
List<String> contexts = currValue;
|
||||
for (int i = 0; i < contexts.size(); i++) {
|
||||
if (currValue.get(i).trim().compareTo(viewID) == 0) {
|
||||
toReturn = helper.evaluate("//Property/DashboardContext/text()").get(i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
logger.debug("Found DashboardContext for viewId {} : {} ", viewID, toReturn);
|
||||
}
|
||||
ScopeProvider.instance.set(currScope);
|
||||
return toReturn;
|
||||
}
|
||||
|
||||
/**
|
||||
* @throws Exception
|
||||
*/
|
||||
private static AnalyticsReportCredentials getAuthorisedApplicationInfoFromIs() throws Exception {
|
||||
AnalyticsReportCredentials reportCredentials = new AnalyticsReportCredentials();
|
||||
|
||||
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
|
||||
try {
|
||||
List<ServiceEndpoint> list = getAnalyticsReportingConfigurationFromIS(context);
|
||||
if(list.size() > 1) {
|
||||
logger.error("Too many Service Endpoints having name " + SERVICE_ENDPOINT_NAME
|
||||
+ " in this scope having Category " + SERVICE_ENDPOINT_CATEGORY);
|
||||
} else if(list.size() == 0) {
|
||||
logger.warn("There is no Service Endpoint having name " + SERVICE_ENDPOINT_NAME + " and Category "
|
||||
+ SERVICE_ENDPOINT_CATEGORY + " in this context: " + context);
|
||||
} else {
|
||||
|
||||
for(ServiceEndpoint res : list) {
|
||||
Group<AccessPoint> apGroup = res.profile().accessPoints();
|
||||
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
|
||||
AccessPoint found = accessPoints[0];
|
||||
reportCredentials.setClientEmail(found.username());
|
||||
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
|
||||
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
|
||||
|
||||
for(Property prop : found.properties()) {
|
||||
if(prop.name().compareTo(AP_CATALOGUE_PAGEVIEWS_PROPERTY) == 0) {
|
||||
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
|
||||
String[] views = decryptedValue.split(";");
|
||||
reportCredentials.setViewIds(Arrays.asList(views));
|
||||
}
|
||||
if(prop.name().compareTo(AP_CLIENT_PROPERTY) == 0) {
|
||||
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
|
||||
reportCredentials.setClientId(decryptedValue);
|
||||
}
|
||||
if(prop.name().compareTo(AP_PRIVATEKEY_PROPERTY) == 0) {
|
||||
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
|
||||
reportCredentials.setPrivateKeyId(decryptedValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch(Exception e) {
|
||||
e.printStackTrace();
|
||||
return null;
|
||||
}
|
||||
return reportCredentials;
|
||||
}
|
||||
|
||||
private static LocalDate asLocalDate(Date date) {
|
||||
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
|
||||
}
|
||||
|
||||
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
|
||||
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
|
||||
String startDate = asLocalDate(start).format(formatter);
|
||||
String endDate = asLocalDate(end).format(formatter);
|
||||
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
|
||||
|
||||
return dateRangeBuilder;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,397 @@
|
|||
package org.gcube.dataharvest.harvester;
|
||||
|
||||
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
|
||||
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.StringReader;
|
||||
import java.security.GeneralSecurityException;
|
||||
import java.time.Instant;
|
||||
import java.time.LocalDate;
|
||||
import java.time.ZoneId;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
|
||||
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
|
||||
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
|
||||
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
|
||||
import org.gcube.common.encryption.encrypter.StringEncrypter;
|
||||
import org.gcube.common.resources.gcore.GenericResource;
|
||||
import org.gcube.common.resources.gcore.ServiceEndpoint;
|
||||
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
|
||||
import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
|
||||
import org.gcube.common.resources.gcore.utils.Group;
|
||||
import org.gcube.common.resources.gcore.utils.XPathHelper;
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.common.scope.impl.ScopeBean;
|
||||
import org.gcube.dataharvest.datamodel.AnalyticsReportCredentials;
|
||||
import org.gcube.dataharvest.datamodel.CoreServiceAccessesReportRow;
|
||||
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
|
||||
import org.gcube.resources.discovery.client.api.DiscoveryClient;
|
||||
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.w3c.dom.Node;
|
||||
import org.xml.sax.InputSource;
|
||||
|
||||
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
|
||||
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
|
||||
import com.google.analytics.data.v1beta.DateRange;
|
||||
import com.google.analytics.data.v1beta.DateRange.Builder;
|
||||
import com.google.analytics.data.v1beta.Dimension;
|
||||
import com.google.analytics.data.v1beta.Metric;
|
||||
import com.google.analytics.data.v1beta.Row;
|
||||
import com.google.analytics.data.v1beta.RunReportRequest;
|
||||
import com.google.analytics.data.v1beta.RunReportResponse;
|
||||
import com.google.api.gax.core.FixedCredentialsProvider;
|
||||
import com.google.auth.oauth2.ServiceAccountCredentials;
|
||||
|
||||
/**
|
||||
* @author Massimiliano Assante (ISTI - CNR)
|
||||
*/
|
||||
public class CoreServicesAccessesHarvester extends BasicHarvester {
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(CoreServicesAccessesHarvester.class);
|
||||
|
||||
private static final String MAPPING_RESOURCE_CATEGORY = "BigGAnalyticsMapping";
|
||||
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
|
||||
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
|
||||
private static final String AP_VIEWS_PROPERTY = "views";
|
||||
private static final String AP_CLIENT_ID = "client_id";
|
||||
private static final String AP_PRIVATEKEY_ID_PROPERTY = "private_key_id";
|
||||
|
||||
private static final String PAGE_WORKSPACE_ACCESSES = "/workspace";
|
||||
private static final String PAGE_MESSAGES_ACCESSES = "/messages";
|
||||
private static final String PAGE_PROFILE_ACCESSES = "/profile";
|
||||
private static final String PAGE_NOTIFICATION_ACCESSES = "/notifications";
|
||||
|
||||
private HashMap<String, List<CoreServiceAccessesReportRow>> coreServicesAccesses;
|
||||
|
||||
/**
 * Creates the harvester and immediately collects all core-services accesses
 * for the given time window.
 */
public CoreServicesAccessesHarvester(Date start, Date end) throws Exception {
    super(start, end);
    this.coreServicesAccesses = getAllAccesses(start, end);
}
|
||||
|
||||
@Override
|
||||
public List<AccountingRecord> getAccountingRecords() throws Exception {
|
||||
try {
|
||||
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
|
||||
for (String dashboardContext : coreServicesAccesses.keySet()) {
|
||||
int workspaceAccesses = 0;
|
||||
int messagesAccesses = 0;
|
||||
int notificationsAccesses = 0;
|
||||
int profileAccesses = 0;
|
||||
logger.debug("{};", dashboardContext);
|
||||
for(CoreServiceAccessesReportRow row : coreServicesAccesses.get(dashboardContext)) {
|
||||
// String pagePath = row.getPagePath();
|
||||
switch (row.getKey()) {
|
||||
case WORKSPACE_ACCESSES:
|
||||
workspaceAccesses += row.getVisitNumber();
|
||||
break;
|
||||
case MESSAGES_ACCESSES:
|
||||
messagesAccesses += row.getVisitNumber();
|
||||
break;
|
||||
case NOTIFICATIONS_ACCESSES:
|
||||
notificationsAccesses += row.getVisitNumber();
|
||||
break;
|
||||
case PROFILE_ACCESSES:
|
||||
profileAccesses += row.getVisitNumber();
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
ScopeDescriptor scopeDescriptor = new ScopeDescriptor();
|
||||
ScopeBean scopeBean = new ScopeBean(dashboardContext);
|
||||
scopeDescriptor.setId(dashboardContext);
|
||||
scopeDescriptor.setName(scopeBean.name());
|
||||
|
||||
AccountingRecord ar1 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.WORKSPACE_ACCESSES), (long) workspaceAccesses);
|
||||
AccountingRecord ar2 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.MESSAGES_ACCESSES), (long) messagesAccesses);
|
||||
AccountingRecord ar3 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.NOTIFICATIONS_ACCESSES), (long) notificationsAccesses);
|
||||
AccountingRecord ar4 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.PROFILE_ACCESSES), (long) profileAccesses);
|
||||
logger.debug("{};{}", ar1.getDimension().getId(), ar1.getMeasure());
|
||||
accountingRecords.add(ar1);
|
||||
logger.debug("{};{}", ar2.getDimension().getId(), ar2.getMeasure());
|
||||
accountingRecords.add(ar2);
|
||||
logger.debug("{};{}", ar3.getDimension().getId(), ar3.getMeasure());
|
||||
accountingRecords.add(ar3);
|
||||
logger.debug("{};{}", ar4.getDimension().getId(), ar4.getMeasure());
|
||||
accountingRecords.add(ar4);
|
||||
|
||||
}
|
||||
logger.debug("Returning {} accountingRecords ", accountingRecords.size());
|
||||
return accountingRecords;
|
||||
|
||||
} catch(Exception e) {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
private static HashMap<String, List<CoreServiceAccessesReportRow>> getAllAccesses(Date start, Date end) throws Exception {
|
||||
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
|
||||
logger.trace("Getting core services accesses in this time range {}", dateRangeBuilder.toString());
|
||||
|
||||
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
|
||||
|
||||
logger.trace("gotten credentialsFromD4S id = {}", credentialsFromD4S.getClientId());
|
||||
|
||||
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
|
||||
|
||||
logger.trace("gotten credentialsFromD4S viewIds= {}", credentialsFromD4S.getViewIds().toString());
|
||||
|
||||
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
|
||||
HashMap<String, List<CoreServiceAccessesReportRow>> toReturn = new HashMap<>();
|
||||
|
||||
int i = 1;
|
||||
for(String view : responses.keySet()) {
|
||||
String dashboardContext = getAccountingDashboardContextGivenGAViewID(view);
|
||||
if (dashboardContext != null ) {
|
||||
logger.trace("\n ({}) *** Parsing responses for this Gateway view, which corresponds to Dashboard Context: {} \n", i, dashboardContext );
|
||||
List<CoreServiceAccessesReportRow> viewReport = parseResponse(view, responses.get(view), dashboardContext);
|
||||
logger.trace("Got {} entries from view id={}", viewReport.size(), view);
|
||||
toReturn.put(dashboardContext, viewReport);
|
||||
} else {
|
||||
logger.warn("Got entries from view id={} but cannot find Dashboard Context correspondance, I think you need to update the Generic Resource of the Mappings", view);
|
||||
}
|
||||
i++;
|
||||
}
|
||||
return toReturn;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes an Google Analytics Data API service object.
|
||||
*
|
||||
* @return An authorized Google Analytics Data API
|
||||
* @throws IOException
|
||||
* @throws GeneralSecurityException
|
||||
*/
|
||||
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
|
||||
return BetaAnalyticsDataSettings.newBuilder()
|
||||
.setCredentialsProvider(FixedCredentialsProvider.create(
|
||||
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Queries Analytics Data API service
|
||||
*
|
||||
* @param service Analytics Data API service service settings.
|
||||
* @return Row Analytics Data API service
|
||||
* @throws IOException
|
||||
*/
|
||||
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
|
||||
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
|
||||
|
||||
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
|
||||
|
||||
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
|
||||
|
||||
for(String propertyId : viewIDs) {
|
||||
List<RunReportResponse> gReportResponses = new ArrayList<>();
|
||||
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
|
||||
RunReportRequest request =
|
||||
RunReportRequest.newBuilder()
|
||||
.setProperty("properties/" + propertyId)
|
||||
.addDimensions(Dimension.newBuilder().setName("pagePath"))
|
||||
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
|
||||
.addDateRanges(dateRangeBuilder)
|
||||
.build();
|
||||
|
||||
// Make the request.
|
||||
RunReportResponse response = analyticsData.runReport(request);
|
||||
gReportResponses.add(response);
|
||||
reports.put(propertyId, gReportResponses);
|
||||
}
|
||||
}
|
||||
return reports;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Parses and prints the Analytics Data API service respose
|
||||
*
|
||||
* @param response An Analytics Data API service response.
|
||||
*/
|
||||
private static List<CoreServiceAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses, String dashboardContext) {
|
||||
logger.debug("parsing Response for propertyID=" + viewId);
|
||||
List<CoreServiceAccessesReportRow> toReturn = new ArrayList<>();
|
||||
|
||||
for (RunReportResponse response : responses) {
|
||||
for (Row row: response.getRowsList()) {
|
||||
String dimension = row.getDimensionValues(0).getValue();
|
||||
String metric = row.getMetricValues(0).getValue();
|
||||
CoreServiceAccessesReportRow var = new CoreServiceAccessesReportRow();
|
||||
boolean validEntry = false;
|
||||
String pagePath = dimension;
|
||||
logger.trace("parsing pagepath {}: value: {}", pagePath, Integer.parseInt(metric));
|
||||
|
||||
if (!pagePath.contains("_redirect=/group")) {
|
||||
if ( pagePath.contains(PAGE_WORKSPACE_ACCESSES)) {
|
||||
var.setKey(HarvestedDataKey.WORKSPACE_ACCESSES);
|
||||
logger.trace("**matched "+pagePath);
|
||||
validEntry = true;
|
||||
}
|
||||
else if ( pagePath.contains(PAGE_MESSAGES_ACCESSES)) {
|
||||
var.setKey(HarvestedDataKey.MESSAGES_ACCESSES);
|
||||
logger.trace("**matched "+pagePath);
|
||||
validEntry = true;
|
||||
}
|
||||
else if ( pagePath.contains(PAGE_PROFILE_ACCESSES)) {
|
||||
var.setKey(HarvestedDataKey.PROFILE_ACCESSES);
|
||||
logger.trace("**matched "+pagePath);
|
||||
validEntry = true;
|
||||
}
|
||||
else if ( pagePath.contains(PAGE_NOTIFICATION_ACCESSES)) {
|
||||
var.setKey(HarvestedDataKey.NOTIFICATIONS_ACCESSES);
|
||||
logger.trace("**matched "+pagePath);
|
||||
validEntry = true;
|
||||
}
|
||||
}
|
||||
if (validEntry) {
|
||||
var.setDashboardContext(dashboardContext);
|
||||
var.setPagePath(dimension);
|
||||
var.setVisitNumber(Integer.parseInt(metric));
|
||||
toReturn.add(var);
|
||||
}
|
||||
}
|
||||
}
|
||||
return toReturn;
|
||||
}
|
||||
|
||||
|
||||
|
||||
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
|
||||
throws Exception {
|
||||
String scope = infrastructureScope;
|
||||
String currScope = ScopeProvider.instance.get();
|
||||
ScopeProvider.instance.set(scope);
|
||||
SimpleQuery query = queryFor(ServiceEndpoint.class);
|
||||
query.addCondition("$resource/Profile/Category/text() eq '" + SERVICE_ENDPOINT_CATEGORY + "'");
|
||||
query.addCondition("$resource/Profile/Name/text() eq '" + SERVICE_ENDPOINT_NAME + "'");
|
||||
DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
|
||||
List<ServiceEndpoint> toReturn = client.submit(query);
|
||||
ScopeProvider.instance.set(currScope);
|
||||
return toReturn;
|
||||
}
|
||||
|
||||
/**
|
||||
* This method look up in the IS the Gateway which corresponds to a given Google Analytics viewId
|
||||
* @param viewID
|
||||
* @return the gateway name, e.g. "Blue-Cloud Gateway" or null if no correspondance was found
|
||||
* @throws Exception
|
||||
* @throws ObjectNotFound
|
||||
*/
|
||||
private static String getAccountingDashboardContextGivenGAViewID(String viewID) throws ObjectNotFound, Exception {
|
||||
String toReturn = null;
|
||||
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
|
||||
String currScope = ScopeProvider.instance.get();
|
||||
ScopeProvider.instance.set(context);
|
||||
SimpleQuery query = queryFor(GenericResource.class);
|
||||
query.addCondition("$resource/Profile/SecondaryType/text() eq '" + MAPPING_RESOURCE_CATEGORY + "'");
|
||||
query.addCondition("$resource/Profile/Body/Property/viewID/text() eq '" + viewID + "'");
|
||||
DiscoveryClient<GenericResource> client = clientFor(GenericResource.class);
|
||||
List<GenericResource> list = client.submit(query);
|
||||
if(list.size() > 1) {
|
||||
logger.error("Too many Generic Resources having GA viewID " + viewID
|
||||
+ " in this scope having SecondaryType " + MAPPING_RESOURCE_CATEGORY);
|
||||
} else if(list.size() == 0) {
|
||||
logger.warn("There is no Generic Resources having GA viewID " + viewID + " and SecondaryType "
|
||||
+ MAPPING_RESOURCE_CATEGORY + " in this context: " + context);
|
||||
} else {
|
||||
GenericResource found = list.get(0);
|
||||
String elem = new StringBuilder("<body>").append(found.profile().bodyAsString()).append("</body>").toString();
|
||||
DocumentBuilder docBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
|
||||
Node node = docBuilder.parse(new InputSource(new StringReader(elem))).getDocumentElement();
|
||||
XPathHelper helper = new XPathHelper(node);
|
||||
List<String> currValue = helper.evaluate("//Property/viewID/text()");
|
||||
if (currValue != null && currValue.size() > 0) {
|
||||
List<String> contexts = currValue;
|
||||
for (int i = 0; i < contexts.size(); i++) {
|
||||
if (currValue.get(i).trim().compareTo(viewID) == 0) {
|
||||
toReturn = helper.evaluate("//Property/DashboardContext/text()").get(i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
logger.debug("Found DashboardContext for viewId {} : {} ", viewID, toReturn);
|
||||
}
|
||||
ScopeProvider.instance.set(currScope);
|
||||
return toReturn;
|
||||
}
|
||||
|
||||
/**
|
||||
* l
|
||||
* @throws Exception
|
||||
*/
|
||||
private static AnalyticsReportCredentials getAuthorisedApplicationInfoFromIs() throws Exception {
|
||||
AnalyticsReportCredentials reportCredentials = new AnalyticsReportCredentials();
|
||||
|
||||
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
|
||||
try {
|
||||
List<ServiceEndpoint> list = getAnalyticsReportingConfigurationFromIS(context);
|
||||
if(list.size() > 1) {
|
||||
logger.error("Too many Service Endpoints having name " + SERVICE_ENDPOINT_NAME
|
||||
+ " in this scope having Category " + SERVICE_ENDPOINT_CATEGORY);
|
||||
} else if(list.size() == 0) {
|
||||
logger.warn("There is no Service Endpoint having name " + SERVICE_ENDPOINT_NAME + " and Category "
|
||||
+ SERVICE_ENDPOINT_CATEGORY + " in this context: " + context);
|
||||
} else {
|
||||
|
||||
for(ServiceEndpoint res : list) {
|
||||
Group<AccessPoint> apGroup = res.profile().accessPoints();
|
||||
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
|
||||
AccessPoint found = accessPoints[0];
|
||||
reportCredentials.setClientEmail(found.username());
|
||||
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
|
||||
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
|
||||
|
||||
for(Property prop : found.properties()) {
|
||||
if(prop.name().compareTo(AP_VIEWS_PROPERTY) == 0) {
|
||||
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
|
||||
String[] views = decryptedValue.split(";");
|
||||
reportCredentials.setViewIds(Arrays.asList(views));
|
||||
}
|
||||
if(prop.name().compareTo(AP_CLIENT_ID) == 0) {
|
||||
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
|
||||
reportCredentials.setClientId(decryptedValue);
|
||||
}
|
||||
if(prop.name().compareTo(AP_PRIVATEKEY_ID_PROPERTY) == 0) {
|
||||
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
|
||||
reportCredentials.setPrivateKeyId(decryptedValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch(Exception e) {
|
||||
e.printStackTrace();
|
||||
return null;
|
||||
}
|
||||
return reportCredentials;
|
||||
}
|
||||
|
||||
|
||||
private static LocalDate asLocalDate(Date date) {
|
||||
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
|
||||
}
|
||||
|
||||
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
|
||||
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
|
||||
String startDate = asLocalDate(start).format(formatter);
|
||||
String endDate = asLocalDate(end).format(formatter);
|
||||
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
|
||||
|
||||
return dateRangeBuilder;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,312 @@
|
|||
package org.gcube.dataharvest.harvester;
|
||||
|
||||
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
|
||||
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.security.GeneralSecurityException;
|
||||
import java.time.Instant;
|
||||
import java.time.LocalDate;
|
||||
import java.time.ZoneId;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
|
||||
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
|
||||
import org.gcube.common.encryption.encrypter.StringEncrypter;
|
||||
import org.gcube.common.resources.gcore.ServiceEndpoint;
|
||||
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
|
||||
import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
|
||||
import org.gcube.common.resources.gcore.utils.Group;
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.common.scope.impl.ScopeBean;
|
||||
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
|
||||
import org.gcube.dataharvest.datamodel.AnalyticsReportCredentials;
|
||||
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
|
||||
import org.gcube.dataharvest.datamodel.VREAccessesReportRow;
|
||||
import org.gcube.resources.discovery.client.api.DiscoveryClient;
|
||||
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
|
||||
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
|
||||
import com.google.analytics.data.v1beta.DateRange;
|
||||
import com.google.analytics.data.v1beta.DateRange.Builder;
|
||||
import com.google.analytics.data.v1beta.Dimension;
|
||||
import com.google.analytics.data.v1beta.Metric;
|
||||
import com.google.analytics.data.v1beta.Row;
|
||||
import com.google.analytics.data.v1beta.RunReportRequest;
|
||||
import com.google.analytics.data.v1beta.RunReportResponse;
|
||||
import com.google.api.client.json.JsonFactory;
|
||||
import com.google.api.client.json.gson.GsonFactory;
|
||||
import com.google.api.gax.core.FixedCredentialsProvider;
|
||||
import com.google.auth.oauth2.ServiceAccountCredentials;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Giancarlo Panichi (ISTI CNR)
|
||||
*
|
||||
*/
|
||||
public class JupyterAccessesHarvester extends BasicHarvester {
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(JupyterAccessesHarvester.class);
|
||||
|
||||
private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
|
||||
|
||||
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
|
||||
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
|
||||
private static final String AP_VIEWS_PROPERTY = "views";
|
||||
private static final String AP_CLIENT_ID = "client_id";
|
||||
private static final String AP_PRIVATEKEY_ID_PROPERTY = "private_key_id";
|
||||
|
||||
private List<VREAccessesReportRow> vreAccesses;
|
||||
|
||||
/**
 * Creates the harvester and immediately collects all VRE page accesses for
 * the given time window.
 */
public JupyterAccessesHarvester(Date start, Date end) throws Exception {
    super(start, end);
    // Fixed garbled log message (was "JupyerAccessHArvester")
    logger.debug("JupyterAccessesHarvester: {}, {}", start, end);
    vreAccesses = getAllAccesses(start, end);
}
|
||||
|
||||
@Override
|
||||
public List<AccountingRecord> getAccountingRecords() throws Exception {
|
||||
try {
|
||||
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
|
||||
|
||||
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
|
||||
|
||||
int measure = 0;
|
||||
|
||||
ScopeBean scopeBean = new ScopeBean(context);
|
||||
String lowerCasedContext = scopeBean.name().toLowerCase();
|
||||
logger.debug("JupyerAccessHArvester lowerCasedContext: {}", lowerCasedContext);
|
||||
for (VREAccessesReportRow row : vreAccesses) {
|
||||
String pagePath = row.getPagePath().toLowerCase();
|
||||
if (pagePath != null && !pagePath.isEmpty()) {
|
||||
if (pagePath.contains(lowerCasedContext)) {
|
||||
if (!pagePath.contains("catalogue")) {
|
||||
if (pagePath.contains("jupyter") || pagePath.contains("jupiter")) {
|
||||
logger.trace("Matched jupyter or jupiter ({}) : {}", lowerCasedContext, pagePath);
|
||||
measure += row.getVisitNumber();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
|
||||
|
||||
if (measure > 0) {
|
||||
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant,
|
||||
getDimension(HarvestedDataKey.JUPYTER_ACCESSES), (long) measure);
|
||||
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
|
||||
accountingRecords.add(ar);
|
||||
}
|
||||
|
||||
return accountingRecords;
|
||||
|
||||
} catch (Exception e) {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @return a list of {@link VREAccessesReportRow} objects containing the
|
||||
* pagePath and the visit number e.g. VREAccessesReportRow
|
||||
* [pagePath=/group/agroclimaticmodelling/add-new-users,
|
||||
* visitNumber=1] VREAccessesReportRow
|
||||
* [pagePath=/group/agroclimaticmodelling/administration,
|
||||
* visitNumber=2] VREAccessesReportRow
|
||||
* [pagePath=/group/agroclimaticmodelling/agroclimaticmodelling,
|
||||
* visitNumber=39]
|
||||
*/
|
||||
private static List<VREAccessesReportRow> getAllAccesses(Date start, Date end) throws Exception {
|
||||
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
|
||||
logger.debug("Getting accesses in this time range {}", dateRangeBuilder.toString());
|
||||
|
||||
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
|
||||
|
||||
logger.debug("Getting credentials credentialsFromD4S");
|
||||
|
||||
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
|
||||
|
||||
logger.debug("initializeAnalyticsReporting service settings");
|
||||
|
||||
|
||||
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
|
||||
List<VREAccessesReportRow> totalAccesses = new ArrayList<>();
|
||||
|
||||
for(String view : responses.keySet()) {
|
||||
List<VREAccessesReportRow> viewReport = parseResponse(view, responses.get(view));
|
||||
logger.debug("Got {} entries from view id={}", viewReport.size(), view);
|
||||
totalAccesses.addAll(viewReport);
|
||||
}
|
||||
logger.debug("Merged in {} total entries from all views", totalAccesses.size());
|
||||
return totalAccesses;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Initializes an Google Analytics Data API service object.
|
||||
*
|
||||
* @return An authorized Google Analytics Data API
|
||||
* @throws IOException
|
||||
* @throws GeneralSecurityException
|
||||
*/
|
||||
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
|
||||
return BetaAnalyticsDataSettings.newBuilder()
|
||||
.setCredentialsProvider(FixedCredentialsProvider.create(
|
||||
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Queries Analytics Data API service
|
||||
*
|
||||
* @param service Analytics Data API service service settings.
|
||||
* @return Row Analytics Data API service
|
||||
* @throws IOException
|
||||
*/
|
||||
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
|
||||
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
|
||||
|
||||
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
|
||||
|
||||
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
|
||||
|
||||
for(String propertyId : viewIDs) {
|
||||
List<RunReportResponse> gReportResponses = new ArrayList<>();
|
||||
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
|
||||
RunReportRequest request =
|
||||
RunReportRequest.newBuilder()
|
||||
.setProperty("properties/" + propertyId)
|
||||
.addDimensions(Dimension.newBuilder().setName("pagePath"))
|
||||
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
|
||||
.addDateRanges(dateRangeBuilder)
|
||||
.build();
|
||||
|
||||
// Make the request.
|
||||
RunReportResponse response = analyticsData.runReport(request);
|
||||
gReportResponses.add(response);
|
||||
reports.put(propertyId, gReportResponses);
|
||||
}
|
||||
}
|
||||
return reports;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Parses and prints the Analytics Data API service respose
|
||||
*
|
||||
* @param response An Analytics Data API service response.
|
||||
*/
|
||||
private static List<VREAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses) {
|
||||
logger.debug("parsing Response for propertyID=" + viewId);
|
||||
|
||||
List<VREAccessesReportRow> toReturn = new ArrayList<>();
|
||||
for (RunReportResponse response : responses) {
|
||||
for (Row row: response.getRowsList()) {
|
||||
String dimension = row.getDimensionValues(0).getValue();
|
||||
String metric = row.getMetricValues(0).getValue();
|
||||
VREAccessesReportRow var = new VREAccessesReportRow();
|
||||
boolean validEntry = false;
|
||||
String pagePath = dimension;
|
||||
if (pagePath.startsWith("/group") || pagePath.startsWith("/web")) {
|
||||
var.setPagePath(dimension);
|
||||
validEntry = true;
|
||||
}
|
||||
if (validEntry) {
|
||||
var.setVisitNumber(Integer.parseInt(metric));
|
||||
toReturn.add(var);
|
||||
}
|
||||
}
|
||||
}
|
||||
return toReturn;
|
||||
}
|
||||
|
||||
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
|
||||
throws Exception {
|
||||
String scope = infrastructureScope;
|
||||
String currScope = ScopeProvider.instance.get();
|
||||
ScopeProvider.instance.set(scope);
|
||||
SimpleQuery query = queryFor(ServiceEndpoint.class);
|
||||
query.addCondition("$resource/Profile/Category/text() eq '" + SERVICE_ENDPOINT_CATEGORY + "'");
|
||||
query.addCondition("$resource/Profile/Name/text() eq '" + SERVICE_ENDPOINT_NAME + "'");
|
||||
DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
|
||||
List<ServiceEndpoint> toReturn = client.submit(query);
|
||||
ScopeProvider.instance.set(currScope);
|
||||
return toReturn;
|
||||
}
|
||||
|
||||
/**
|
||||
* l
|
||||
* @throws Exception
|
||||
*/
|
||||
private static AnalyticsReportCredentials getAuthorisedApplicationInfoFromIs() throws Exception {
|
||||
AnalyticsReportCredentials reportCredentials = new AnalyticsReportCredentials();
|
||||
|
||||
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
|
||||
try {
|
||||
List<ServiceEndpoint> list = getAnalyticsReportingConfigurationFromIS(context);
|
||||
if(list.size() > 1) {
|
||||
logger.error("Too many Service Endpoints having name " + SERVICE_ENDPOINT_NAME
|
||||
+ " in this scope having Category " + SERVICE_ENDPOINT_CATEGORY);
|
||||
} else if(list.size() == 0) {
|
||||
logger.warn("There is no Service Endpoint having name " + SERVICE_ENDPOINT_NAME + " and Category "
|
||||
+ SERVICE_ENDPOINT_CATEGORY + " in this context: " + context);
|
||||
} else {
|
||||
|
||||
for(ServiceEndpoint res : list) {
|
||||
Group<AccessPoint> apGroup = res.profile().accessPoints();
|
||||
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
|
||||
AccessPoint found = accessPoints[0];
|
||||
reportCredentials.setClientEmail(found.username());
|
||||
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
|
||||
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
|
||||
|
||||
for(Property prop : found.properties()) {
|
||||
if(prop.name().compareTo(AP_VIEWS_PROPERTY) == 0) {
|
||||
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
|
||||
String[] views = decryptedValue.split(";");
|
||||
reportCredentials.setViewIds(Arrays.asList(views));
|
||||
}
|
||||
if(prop.name().compareTo(AP_CLIENT_ID) == 0) {
|
||||
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
|
||||
reportCredentials.setClientId(decryptedValue);
|
||||
}
|
||||
if(prop.name().compareTo(AP_PRIVATEKEY_ID_PROPERTY) == 0) {
|
||||
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
|
||||
reportCredentials.setPrivateKeyId(decryptedValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch(Exception e) {
|
||||
e.printStackTrace();
|
||||
return null;
|
||||
}
|
||||
return reportCredentials;
|
||||
}
|
||||
|
||||
|
||||
private static LocalDate asLocalDate(Date date) {
|
||||
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
|
||||
}
|
||||
|
||||
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
|
||||
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
|
||||
String startDate = asLocalDate(start).format(formatter);
|
||||
String endDate = asLocalDate(end).format(formatter);
|
||||
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
|
||||
|
||||
return dateRangeBuilder;
|
||||
}
|
||||
|
||||
}
|
|
@ -19,7 +19,7 @@ import org.gcube.accounting.datamodel.AggregatedUsageRecord;
|
|||
import org.gcube.accounting.datamodel.aggregation.AggregatedJobUsageRecord;
|
||||
import org.gcube.accounting.datamodel.aggregation.AggregatedServiceUsageRecord;
|
||||
import org.gcube.accounting.datamodel.usagerecords.ServiceUsageRecord;
|
||||
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
|
||||
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
|
||||
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
|
||||
import org.gcube.dataharvest.utils.DateUtils;
|
||||
import org.gcube.dataharvest.utils.Utils;
|
||||
|
@ -28,7 +28,6 @@ import org.slf4j.Logger;
|
|||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* @author Eric Perrone (ISTI - CNR)
|
||||
* @author Luca Frosini (ISTI - CNR)
|
||||
*/
|
||||
public class MethodInvocationHarvester extends BasicHarvester {
|
||||
|
@ -72,7 +71,7 @@ public class MethodInvocationHarvester extends BasicHarvester {
|
|||
AggregatedServiceUsageRecord.class, temporalConstraint, filters, contexts, true);
|
||||
}
|
||||
|
||||
ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
|
||||
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
|
||||
Dimension dimension = getDimension(HarvestedDataKey.METHOD_INVOCATIONS);
|
||||
|
||||
if(result != null) {
|
||||
|
|
|
@ -0,0 +1,313 @@
|
|||
package org.gcube.dataharvest.harvester;
|
||||
|
||||
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
|
||||
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.security.GeneralSecurityException;
|
||||
import java.time.Instant;
|
||||
import java.time.LocalDate;
|
||||
import java.time.ZoneId;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
|
||||
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
|
||||
import org.gcube.common.encryption.encrypter.StringEncrypter;
|
||||
import org.gcube.common.resources.gcore.ServiceEndpoint;
|
||||
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
|
||||
import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
|
||||
import org.gcube.common.resources.gcore.utils.Group;
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.common.scope.impl.ScopeBean;
|
||||
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
|
||||
import org.gcube.dataharvest.datamodel.AnalyticsReportCredentials;
|
||||
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
|
||||
import org.gcube.dataharvest.datamodel.VREAccessesReportRow;
|
||||
import org.gcube.resources.discovery.client.api.DiscoveryClient;
|
||||
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
|
||||
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
|
||||
import com.google.analytics.data.v1beta.DateRange;
|
||||
import com.google.analytics.data.v1beta.DateRange.Builder;
|
||||
import com.google.analytics.data.v1beta.Dimension;
|
||||
import com.google.analytics.data.v1beta.Metric;
|
||||
import com.google.analytics.data.v1beta.Row;
|
||||
import com.google.analytics.data.v1beta.RunReportRequest;
|
||||
import com.google.analytics.data.v1beta.RunReportResponse;
|
||||
import com.google.api.client.json.JsonFactory;
|
||||
import com.google.api.client.json.gson.GsonFactory;
|
||||
import com.google.api.gax.core.FixedCredentialsProvider;
|
||||
import com.google.auth.oauth2.ServiceAccountCredentials;
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Giancarlo Panichi (ISTI CNR)
|
||||
*
|
||||
*/
|
||||
public class RStudioAccessesHarvester extends BasicHarvester {
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(RStudioAccessesHarvester.class);
|
||||
|
||||
private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
|
||||
|
||||
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
|
||||
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
|
||||
private static final String AP_VIEWS_PROPERTY = "views";
|
||||
private static final String AP_CLIENT_ID = "client_id";
|
||||
private static final String AP_PRIVATEKEY_ID_PROPERTY = "private_key_id";
|
||||
|
||||
private List<VREAccessesReportRow> vreAccesses;
|
||||
|
||||
public RStudioAccessesHarvester(Date start, Date end) throws Exception {
|
||||
super(start, end);
|
||||
logger.debug("RStudioAccessHArvester: {}, {}", start, end);
|
||||
vreAccesses = getAllAccesses(start, end);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<AccountingRecord> getAccountingRecords() throws Exception {
|
||||
try {
|
||||
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
|
||||
|
||||
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
|
||||
|
||||
int measure = 0;
|
||||
|
||||
ScopeBean scopeBean = new ScopeBean(context);
|
||||
String lowerCasedContext = scopeBean.name().toLowerCase();
|
||||
logger.debug("RStudioAccessHArvester lowerCasedContext: {}", lowerCasedContext);
|
||||
for (VREAccessesReportRow row : vreAccesses) {
|
||||
String pagePath = row.getPagePath().toLowerCase();
|
||||
if (pagePath != null && !pagePath.isEmpty()) {
|
||||
if (pagePath.contains(lowerCasedContext)) {
|
||||
if (!pagePath.contains("catalogue")) {
|
||||
if (pagePath.contains("rstudio") || pagePath.contains("r-studio")) {
|
||||
logger.trace("Matched rstudio or rstudio ({}) : {}", lowerCasedContext, pagePath);
|
||||
measure += row.getVisitNumber();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
|
||||
|
||||
if (measure > 0) {
|
||||
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant,
|
||||
getDimension(HarvestedDataKey.RSTUDIO_ACCESSES), (long) measure);
|
||||
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
|
||||
accountingRecords.add(ar);
|
||||
}
|
||||
|
||||
return accountingRecords;
|
||||
|
||||
} catch (Exception e) {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @return a list of {@link VREAccessesReportRow} objects containing the
|
||||
* pagePath and the visit number e.g. VREAccessesReportRow
|
||||
* [pagePath=/group/agroclimaticmodelling/add-new-users,
|
||||
* visitNumber=1] VREAccessesReportRow
|
||||
* [pagePath=/group/agroclimaticmodelling/administration,
|
||||
* visitNumber=2] VREAccessesReportRow
|
||||
* [pagePath=/group/agroclimaticmodelling/agroclimaticmodelling,
|
||||
* visitNumber=39]
|
||||
*/
|
||||
private static List<VREAccessesReportRow> getAllAccesses(Date start, Date end) throws Exception {
|
||||
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
|
||||
logger.debug("Getting accesses in this time range {}", dateRangeBuilder.toString());
|
||||
|
||||
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
|
||||
|
||||
logger.debug("Getting credentials credentialsFromD4S");
|
||||
|
||||
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
|
||||
|
||||
logger.debug("initializeAnalyticsReporting service settings");
|
||||
|
||||
|
||||
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
|
||||
List<VREAccessesReportRow> totalAccesses = new ArrayList<>();
|
||||
|
||||
for(String view : responses.keySet()) {
|
||||
List<VREAccessesReportRow> viewReport = parseResponse(view, responses.get(view));
|
||||
logger.debug("Got {} entries from view id={}", viewReport.size(), view);
|
||||
totalAccesses.addAll(viewReport);
|
||||
}
|
||||
logger.debug("Merged in {} total entries from all views", totalAccesses.size());
|
||||
return totalAccesses;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Initializes an Google Analytics Data API service object.
|
||||
*
|
||||
* @return An authorized Google Analytics Data API
|
||||
* @throws IOException
|
||||
* @throws GeneralSecurityException
|
||||
*/
|
||||
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
|
||||
return BetaAnalyticsDataSettings.newBuilder()
|
||||
.setCredentialsProvider(FixedCredentialsProvider.create(
|
||||
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Queries Analytics Data API service
|
||||
*
|
||||
* @param service Analytics Data API service service settings.
|
||||
* @return Row Analytics Data API service
|
||||
* @throws IOException
|
||||
*/
|
||||
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
|
||||
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
|
||||
|
||||
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
|
||||
|
||||
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
|
||||
|
||||
for(String propertyId : viewIDs) {
|
||||
List<RunReportResponse> gReportResponses = new ArrayList<>();
|
||||
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
|
||||
RunReportRequest request =
|
||||
RunReportRequest.newBuilder()
|
||||
.setProperty("properties/" + propertyId)
|
||||
.addDimensions(Dimension.newBuilder().setName("pagePath"))
|
||||
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
|
||||
.addDateRanges(dateRangeBuilder)
|
||||
.build();
|
||||
|
||||
// Make the request.
|
||||
RunReportResponse response = analyticsData.runReport(request);
|
||||
gReportResponses.add(response);
|
||||
reports.put(propertyId, gReportResponses);
|
||||
}
|
||||
}
|
||||
return reports;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Parses and prints the Analytics Data API service respose
|
||||
*
|
||||
* @param response An Analytics Data API service response.
|
||||
*/
|
||||
private static List<VREAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses) {
|
||||
logger.debug("parsing Response for propertyID=" + viewId);
|
||||
|
||||
List<VREAccessesReportRow> toReturn = new ArrayList<>();
|
||||
for (RunReportResponse response : responses) {
|
||||
for (Row row: response.getRowsList()) {
|
||||
String dimension = row.getDimensionValues(0).getValue();
|
||||
String metric = row.getMetricValues(0).getValue();
|
||||
VREAccessesReportRow var = new VREAccessesReportRow();
|
||||
boolean validEntry = false;
|
||||
String pagePath = dimension;
|
||||
if (pagePath.startsWith("/group") || pagePath.startsWith("/web")) {
|
||||
var.setPagePath(dimension);
|
||||
validEntry = true;
|
||||
}
|
||||
if (validEntry) {
|
||||
var.setVisitNumber(Integer.parseInt(metric));
|
||||
toReturn.add(var);
|
||||
}
|
||||
}
|
||||
}
|
||||
return toReturn;
|
||||
}
|
||||
|
||||
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
|
||||
throws Exception {
|
||||
String scope = infrastructureScope;
|
||||
String currScope = ScopeProvider.instance.get();
|
||||
ScopeProvider.instance.set(scope);
|
||||
SimpleQuery query = queryFor(ServiceEndpoint.class);
|
||||
query.addCondition("$resource/Profile/Category/text() eq '" + SERVICE_ENDPOINT_CATEGORY + "'");
|
||||
query.addCondition("$resource/Profile/Name/text() eq '" + SERVICE_ENDPOINT_NAME + "'");
|
||||
DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
|
||||
List<ServiceEndpoint> toReturn = client.submit(query);
|
||||
ScopeProvider.instance.set(currScope);
|
||||
return toReturn;
|
||||
}
|
||||
|
||||
/**
|
||||
* l
|
||||
* @throws Exception
|
||||
*/
|
||||
private static AnalyticsReportCredentials getAuthorisedApplicationInfoFromIs() throws Exception {
|
||||
AnalyticsReportCredentials reportCredentials = new AnalyticsReportCredentials();
|
||||
|
||||
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
|
||||
try {
|
||||
List<ServiceEndpoint> list = getAnalyticsReportingConfigurationFromIS(context);
|
||||
if(list.size() > 1) {
|
||||
logger.error("Too many Service Endpoints having name " + SERVICE_ENDPOINT_NAME
|
||||
+ " in this scope having Category " + SERVICE_ENDPOINT_CATEGORY);
|
||||
} else if(list.size() == 0) {
|
||||
logger.warn("There is no Service Endpoint having name " + SERVICE_ENDPOINT_NAME + " and Category "
|
||||
+ SERVICE_ENDPOINT_CATEGORY + " in this context: " + context);
|
||||
} else {
|
||||
|
||||
for(ServiceEndpoint res : list) {
|
||||
Group<AccessPoint> apGroup = res.profile().accessPoints();
|
||||
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
|
||||
AccessPoint found = accessPoints[0];
|
||||
reportCredentials.setClientEmail(found.username());
|
||||
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
|
||||
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
|
||||
|
||||
for(Property prop : found.properties()) {
|
||||
if(prop.name().compareTo(AP_VIEWS_PROPERTY) == 0) {
|
||||
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
|
||||
String[] views = decryptedValue.split(";");
|
||||
reportCredentials.setViewIds(Arrays.asList(views));
|
||||
}
|
||||
if(prop.name().compareTo(AP_CLIENT_ID) == 0) {
|
||||
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
|
||||
reportCredentials.setClientId(decryptedValue);
|
||||
}
|
||||
if(prop.name().compareTo(AP_PRIVATEKEY_ID_PROPERTY) == 0) {
|
||||
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
|
||||
reportCredentials.setPrivateKeyId(decryptedValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch(Exception e) {
|
||||
e.printStackTrace();
|
||||
return null;
|
||||
}
|
||||
return reportCredentials;
|
||||
}
|
||||
|
||||
|
||||
private static LocalDate asLocalDate(Date date) {
|
||||
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
|
||||
}
|
||||
|
||||
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
|
||||
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
|
||||
String startDate = asLocalDate(start).format(formatter);
|
||||
String endDate = asLocalDate(end).format(formatter);
|
||||
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
|
||||
|
||||
return dateRangeBuilder;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,17 +1,16 @@
|
|||
package org.gcube.dataharvest.harvester;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
|
||||
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
|
||||
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
|
||||
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
|
||||
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
|
||||
import org.gcube.dataharvest.utils.Utils;
|
||||
import org.json.JSONArray;
|
||||
import org.json.JSONObject;
|
||||
import org.gcube.portal.databook.shared.Feed;
|
||||
import org.gcube.social_networking.social_networking_client_library.PostClient;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
|
@ -19,7 +18,7 @@ import org.slf4j.LoggerFactory;
|
|||
* @author Eric Perrone (ISTI - CNR)
|
||||
* @author Luca Frosini (ISTI - CNR)
|
||||
*/
|
||||
public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
|
||||
public class SocialInteractionsHarvester extends BasicHarvester {
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(SocialInteractionsHarvester.class);
|
||||
|
||||
|
@ -27,7 +26,7 @@ public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
|
|||
private int replies;
|
||||
private int posts;
|
||||
|
||||
public static final String PATH = "/2/posts/get-posts-vre?gcube-token=";
|
||||
// public static final String PATH = "/2/posts/get-posts-vre?gcube-token=";
|
||||
|
||||
public SocialInteractionsHarvester(Date start, Date end) throws Exception {
|
||||
super(start, end);
|
||||
|
@ -44,7 +43,7 @@ public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
|
|||
|
||||
getJson();
|
||||
|
||||
ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
|
||||
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
|
||||
|
||||
AccountingRecord likesAR = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.SOCIAL_LIKES), (long) likes);
|
||||
logger.debug("{} : {}", likesAR.getDimension().getId(), likesAR.getMeasure());
|
||||
|
@ -66,30 +65,20 @@ public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
|
|||
|
||||
}
|
||||
|
||||
|
||||
|
||||
private void getJson() throws Exception {
|
||||
JSONObject jsonObject = getJSONObject(PATH);
|
||||
|
||||
Boolean success = (Boolean) jsonObject.get("success");
|
||||
if(success == false) {
|
||||
throw new IOException("Erro while getting posts");
|
||||
}
|
||||
|
||||
JSONArray res = jsonObject.getJSONArray("result");
|
||||
int len = res.length();
|
||||
PostClient postClient = new PostClient();
|
||||
List<Feed> vrePosts = postClient.getPostsVRE();
|
||||
|
||||
likes = replies = posts = 0;
|
||||
|
||||
for(int i = 0; i < len; i++) {
|
||||
for(Feed feed : vrePosts) {
|
||||
|
||||
JSONObject item = res.getJSONObject(i);
|
||||
long time = item.getLong("time");
|
||||
long time = feed.getTime().getTime();
|
||||
|
||||
if(start.getTime() <= time && time <= end.getTime()) {
|
||||
posts++;
|
||||
replies += item.getInt("comments_no");
|
||||
likes += item.getInt("likes_no");
|
||||
replies += Integer.valueOf(feed.getCommentsNo());
|
||||
likes += Integer.valueOf(feed.getLikesNo());
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,58 +0,0 @@
|
|||
package org.gcube.dataharvest.harvester;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
|
||||
import org.gcube.common.clients.exceptions.DiscoveryException;
|
||||
import org.gcube.common.resources.gcore.GCoreEndpoint;
|
||||
import org.gcube.dataharvest.utils.Utils;
|
||||
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
|
||||
import org.gcube.resources.discovery.icclient.ICFactory;
|
||||
import org.json.JSONObject;
|
||||
|
||||
public abstract class SocialNetworkingHarvester extends BasicHarvester {
|
||||
|
||||
public SocialNetworkingHarvester(Date start, Date end) throws Exception {
|
||||
super(start, end);
|
||||
}
|
||||
|
||||
public static String CLASS_FORMAT = "$resource/Profile/ServiceClass/text() eq '%1s'";
|
||||
public static String NAME_FORMAT = "$resource/Profile/ServiceName/text() eq '%1s'";
|
||||
public static String STATUS_FORMAT = "$resource/Profile/DeploymentData/Status/text() eq 'ready'";
|
||||
public static String CONTAINS_FORMAT = "$entry/@EntryName eq '%1s'";
|
||||
|
||||
public static String SERVICE_CLASS = "Portal";
|
||||
public static String SERVICE_NAME = "SocialNetworking";
|
||||
public static String ENTRY_NAME = "jersey-servlet";
|
||||
|
||||
protected SimpleQuery getGCoreEndpointQuery() {
|
||||
return ICFactory.queryFor(GCoreEndpoint.class)
|
||||
.addCondition(String.format(CLASS_FORMAT, SERVICE_CLASS))
|
||||
.addCondition(String.format(NAME_FORMAT, SERVICE_NAME))
|
||||
.addCondition(String.format(STATUS_FORMAT))
|
||||
.addVariable("$entry", "$resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint")
|
||||
.addCondition(String.format(CONTAINS_FORMAT, ENTRY_NAME))
|
||||
.setResult("$entry/text()");
|
||||
}
|
||||
|
||||
protected String getAddress() {
|
||||
SimpleQuery gCoreEndpointQuery = getGCoreEndpointQuery();
|
||||
List<String> addresses = ICFactory.client().submit(gCoreEndpointQuery);
|
||||
if(addresses.size()==0) {
|
||||
throw new DiscoveryException("No running Social Networking Service");
|
||||
}
|
||||
return addresses.get(0);
|
||||
}
|
||||
|
||||
|
||||
protected JSONObject getJSONObject(String path) throws Exception {
|
||||
String token = SecurityTokenProvider.instance.get();
|
||||
String baseAddress = getAddress();
|
||||
StringBuffer sb = new StringBuffer(baseAddress);
|
||||
sb.append(path);
|
||||
sb.append(token);
|
||||
return new JSONObject(Utils.getJson(sb.toString()));
|
||||
}
|
||||
|
||||
}
|
|
@ -4,22 +4,13 @@ import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
|
|||
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.Reader;
|
||||
import java.io.StringReader;
|
||||
import java.security.GeneralSecurityException;
|
||||
import java.security.KeyFactory;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.security.PrivateKey;
|
||||
import java.security.spec.InvalidKeySpecException;
|
||||
import java.security.spec.PKCS8EncodedKeySpec;
|
||||
import java.time.Instant;
|
||||
import java.time.LocalDate;
|
||||
import java.time.ZoneId;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
@ -33,7 +24,7 @@ import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
|
|||
import org.gcube.common.resources.gcore.utils.Group;
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.common.scope.impl.ScopeBean;
|
||||
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
|
||||
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
|
||||
import org.gcube.dataharvest.datamodel.AnalyticsReportCredentials;
|
||||
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
|
||||
import org.gcube.dataharvest.datamodel.VREAccessesReportRow;
|
||||
|
@ -42,39 +33,31 @@ import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
|
|||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
|
||||
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential.Builder;
|
||||
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
|
||||
import com.google.api.client.googleapis.util.Utils;
|
||||
import com.google.api.client.http.HttpTransport;
|
||||
import com.google.api.client.json.JsonFactory;
|
||||
import com.google.api.client.json.gson.GsonFactory;
|
||||
import com.google.api.client.util.PemReader;
|
||||
import com.google.api.client.util.PemReader.Section;
|
||||
import com.google.api.client.util.SecurityUtils;
|
||||
import com.google.api.services.analyticsreporting.v4.AnalyticsReporting;
|
||||
import com.google.api.services.analyticsreporting.v4.AnalyticsReportingScopes;
|
||||
import com.google.api.services.analyticsreporting.v4.model.DateRange;
|
||||
import com.google.api.services.analyticsreporting.v4.model.DateRangeValues;
|
||||
import com.google.api.services.analyticsreporting.v4.model.GetReportsRequest;
|
||||
import com.google.api.services.analyticsreporting.v4.model.GetReportsResponse;
|
||||
import com.google.api.services.analyticsreporting.v4.model.Metric;
|
||||
import com.google.api.services.analyticsreporting.v4.model.Report;
|
||||
import com.google.api.services.analyticsreporting.v4.model.ReportRequest;
|
||||
import com.google.api.services.analyticsreporting.v4.model.ReportRow;
|
||||
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
|
||||
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
|
||||
import com.google.analytics.data.v1beta.DateRange;
|
||||
import com.google.analytics.data.v1beta.DateRange.Builder;
|
||||
import com.google.analytics.data.v1beta.Dimension;
|
||||
import com.google.analytics.data.v1beta.Metric;
|
||||
import com.google.analytics.data.v1beta.Row;
|
||||
import com.google.analytics.data.v1beta.RunReportRequest;
|
||||
import com.google.analytics.data.v1beta.RunReportResponse;
|
||||
import com.google.api.gax.core.FixedCredentialsProvider;
|
||||
import com.google.auth.oauth2.ServiceAccountCredentials;
|
||||
|
||||
|
||||
public class VREAccessesHarvester extends BasicHarvester {
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(VREAccessesHarvester.class);
|
||||
|
||||
private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
|
||||
// private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
|
||||
|
||||
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
|
||||
private static final String SERVICE_ENDPOINT_NAME = "BigGAnalyticsReportService";
|
||||
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
|
||||
private static final String AP_VIEWS_PROPERTY = "views";
|
||||
private static final String AP_CLIENT_PROPERTY = "clientId";
|
||||
private static final String AP_PRIVATEKEY_PROPERTY = "privateKeyId";
|
||||
private static final String APPLICATION_NAME = "Analytics Reporting";
|
||||
private static final String AP_CLIENT_ID = "client_id";
|
||||
private static final String AP_PRIVATEKEY_ID_PROPERTY = "private_key_id";
|
||||
|
||||
|
||||
private List<VREAccessesReportRow> vreAccesses;
|
||||
|
||||
|
@ -100,16 +83,16 @@ public class VREAccessesHarvester extends BasicHarvester {
|
|||
String pagePath = row.getPagePath();
|
||||
if (!pagePath.contains("_redirect=/group") && !pagePath.contains("workspace")) {
|
||||
if(pagePath.endsWith(lowerCasedContext)) {
|
||||
logger.trace("Matched endsWith({}) : {}", lowerCasedContext, pagePath);
|
||||
logger.debug("Matched endsWith({}) : {}", lowerCasedContext, pagePath);
|
||||
measure += row.getVisitNumber();
|
||||
} else if(pagePath.contains(case1) || pagePath.contains(case2)) {
|
||||
logger.trace("Matched contains({}) || contains({}) : {}", case1, case2, pagePath);
|
||||
logger.debug("Matched contains({}) || contains({}) : {}", case1, case2, pagePath);
|
||||
measure += row.getVisitNumber();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
|
||||
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
|
||||
|
||||
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.ACCESSES), (long) measure);
|
||||
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
|
||||
|
@ -130,108 +113,96 @@ public class VREAccessesHarvester extends BasicHarvester {
|
|||
* VREAccessesReportRow [pagePath=/group/agroclimaticmodelling/agroclimaticmodelling, visitNumber=39]
|
||||
*/
|
||||
private static List<VREAccessesReportRow> getAllAccesses(Date start, Date end) throws Exception {
|
||||
DateRange dateRange = getDateRangeForAnalytics(start, end);
|
||||
logger.trace("Getting accesses in this time range {}", dateRange.toPrettyString());
|
||||
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
|
||||
logger.debug("Getting accesses in this time range {}", dateRangeBuilder.toString());
|
||||
|
||||
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
|
||||
AnalyticsReporting service = initializeAnalyticsReporting(credentialsFromD4S);
|
||||
HashMap<String,List<GetReportsResponse>> responses = getReportResponses(service, credentialsFromD4S.getViewIds(), dateRange);
|
||||
|
||||
logger.debug("Getting credentials credentialsFromD4S");
|
||||
|
||||
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
|
||||
|
||||
logger.debug("initializeAnalyticsReporting service settings");
|
||||
|
||||
|
||||
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
|
||||
List<VREAccessesReportRow> totalAccesses = new ArrayList<>();
|
||||
|
||||
for(String view : responses.keySet()) {
|
||||
List<VREAccessesReportRow> viewReport = parseResponse(view, responses.get(view));
|
||||
logger.trace("Got {} entries from view id={}", viewReport.size(), view);
|
||||
logger.debug("Got {} entries from view id={}", viewReport.size(), view);
|
||||
totalAccesses.addAll(viewReport);
|
||||
}
|
||||
logger.trace("Merged in {} total entries from all views", totalAccesses.size());
|
||||
logger.debug("Merged in {} total entries from all views", totalAccesses.size());
|
||||
return totalAccesses;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes an Analytics Reporting API V4 service object.
|
||||
* Initializes an Google Analytics Data API service object.
|
||||
*
|
||||
* @return An authorized Analytics Reporting API V4 service object.
|
||||
* @return An authorized Google Analytics Data API
|
||||
* @throws IOException
|
||||
* @throws GeneralSecurityException
|
||||
*/
|
||||
private static AnalyticsReporting initializeAnalyticsReporting(AnalyticsReportCredentials cred)
|
||||
throws GeneralSecurityException, IOException {
|
||||
HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
|
||||
GoogleCredential credential = fromD4SServiceEndpoint(cred).createScoped(AnalyticsReportingScopes.all());
|
||||
|
||||
// Construct the Analytics Reporting service object.
|
||||
return new AnalyticsReporting.Builder(httpTransport, JSON_FACTORY, credential)
|
||||
.setApplicationName(APPLICATION_NAME).build();
|
||||
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
|
||||
return BetaAnalyticsDataSettings.newBuilder()
|
||||
.setCredentialsProvider(FixedCredentialsProvider.create(
|
||||
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Queries the Analytics Reporting API V4.
|
||||
* Queries Analytics Data API service
|
||||
*
|
||||
* @param service An authorized Analytics Reporting API V4 service object.
|
||||
* @return GetReportResponse The Analytics Reporting API V4 response.
|
||||
* @param service Analytics Data API service service settings.
|
||||
* @return Row Analytics Data API service
|
||||
* @throws IOException
|
||||
*/
|
||||
private static HashMap<String,List<GetReportsResponse>> getReportResponses(AnalyticsReporting service,
|
||||
List<String> viewIDs, DateRange dateRange) throws IOException {
|
||||
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
|
||||
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
|
||||
|
||||
HashMap<String,List<GetReportsResponse>> reports = new HashMap<>();
|
||||
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
|
||||
|
||||
// Create the Metrics object.
|
||||
Metric sessions = new Metric().setExpression("ga:pageviews").setAlias("pages");
|
||||
com.google.api.services.analyticsreporting.v4.model.Dimension pageTitle = new com.google.api.services.analyticsreporting.v4.model.Dimension().setName("ga:pagePath");
|
||||
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
|
||||
|
||||
for(String view : viewIDs) {
|
||||
List<GetReportsResponse> gReportResponses = new ArrayList<>();
|
||||
logger.info("Getting data from Google Analytics for viewid: " + view);
|
||||
boolean iterateMorePages = true;
|
||||
String nextPageToken = null;
|
||||
while (iterateMorePages) {
|
||||
// Create the ReportRequest object.
|
||||
ReportRequest request = new ReportRequest().setViewId(view.trim()).setDateRanges(Arrays.asList(dateRange))
|
||||
.setMetrics(Arrays.asList(sessions)).setDimensions(Arrays.asList(pageTitle));
|
||||
request.setPageSize(1000);
|
||||
request.setPageToken(nextPageToken);
|
||||
ArrayList<ReportRequest> requests = new ArrayList<ReportRequest>();
|
||||
requests.add(request);
|
||||
// Create the GetReportsRequest object.
|
||||
GetReportsRequest getReport = new GetReportsRequest().setReportRequests(requests);
|
||||
// Call the batchGet method.
|
||||
GetReportsResponse response = service.reports().batchGet(getReport).execute();
|
||||
nextPageToken = response.getReports().get(0).getNextPageToken();
|
||||
iterateMorePages = (nextPageToken != null);
|
||||
logger.debug("got nextPageToken: "+nextPageToken);
|
||||
for(String propertyId : viewIDs) {
|
||||
List<RunReportResponse> gReportResponses = new ArrayList<>();
|
||||
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
|
||||
RunReportRequest request =
|
||||
RunReportRequest.newBuilder()
|
||||
.setProperty("properties/" + propertyId)
|
||||
.addDimensions(Dimension.newBuilder().setName("pagePath"))
|
||||
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
|
||||
.addDateRanges(dateRangeBuilder)
|
||||
.build();
|
||||
|
||||
// Make the request.
|
||||
RunReportResponse response = analyticsData.runReport(request);
|
||||
gReportResponses.add(response);
|
||||
// Iterate through every row of the API response.
|
||||
// for (Row row : response.getRowsList()) {
|
||||
// System.out.printf(
|
||||
// "%s, %s%n", row.getDimensionValues(0).getValue(), row.getMetricValues(0).getValue());
|
||||
// }
|
||||
reports.put(propertyId, gReportResponses);
|
||||
}
|
||||
reports.put(view, gReportResponses);
|
||||
}
|
||||
// Return the response.
|
||||
return reports;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses and prints the Analytics Reporting API V4 response.
|
||||
* Parses and prints the Analytics Data API service respose
|
||||
*
|
||||
* @param response An Analytics Reporting API V4 response.
|
||||
* @param response An Analytics Data API service response.
|
||||
*/
|
||||
/**
|
||||
* Parses and prints the Analytics Reporting API V4 response.
|
||||
*
|
||||
* @param response An Analytics Reporting API V4 response.
|
||||
*/
|
||||
private static List<VREAccessesReportRow> parseResponse(String viewId, List<GetReportsResponse> responses) {
|
||||
logger.debug("parsing Response for " + viewId);
|
||||
private static List<VREAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses) {
|
||||
logger.debug("parsing Response for propertyID=" + viewId);
|
||||
|
||||
List<VREAccessesReportRow> toReturn = new ArrayList<>();
|
||||
for (GetReportsResponse response : responses) {
|
||||
for (Report report: response.getReports()) {
|
||||
List<ReportRow> rows = report.getData().getRows();
|
||||
if (rows == null) {
|
||||
logger.warn("No data found for " + viewId);
|
||||
}
|
||||
else {
|
||||
for (ReportRow row: rows) {
|
||||
String dimension = row.getDimensions().get(0);
|
||||
DateRangeValues metric = row.getMetrics().get(0);
|
||||
for (RunReportResponse response : responses) {
|
||||
for (Row row: response.getRowsList()) {
|
||||
String dimension = row.getDimensionValues(0).getValue();
|
||||
String metric = row.getMetricValues(0).getValue();
|
||||
VREAccessesReportRow var = new VREAccessesReportRow();
|
||||
boolean validEntry = false;
|
||||
String pagePath = dimension;
|
||||
|
@ -240,72 +211,15 @@ public class VREAccessesHarvester extends BasicHarvester {
|
|||
validEntry = true;
|
||||
}
|
||||
if (validEntry) {
|
||||
var.setVisitNumber(Integer.parseInt(metric.getValues().get(0)));
|
||||
var.setVisitNumber(Integer.parseInt(metric));
|
||||
toReturn.add(var);
|
||||
}
|
||||
}
|
||||
}
|
||||
//System.out.printf("%s, %s%n", row.getDimensionValues(0).getValue(), row.getMetricValues(0).getValue());
|
||||
}
|
||||
}
|
||||
return toReturn;
|
||||
}
|
||||
|
||||
private static GoogleCredential fromD4SServiceEndpoint(AnalyticsReportCredentials cred) throws IOException {
|
||||
|
||||
String clientId = cred.getClientId();
|
||||
String clientEmail = cred.getClientEmail();
|
||||
String privateKeyPem = cred.getPrivateKeyPem();
|
||||
String privateKeyId = cred.getPrivateKeyId();
|
||||
String tokenUri = cred.getTokenUri();
|
||||
String projectId = cred.getProjectId();
|
||||
|
||||
if(clientId == null || clientEmail == null || privateKeyPem == null || privateKeyId == null) {
|
||||
throw new IOException("Error reading service account credential from stream, "
|
||||
+ "expecting 'client_id', 'client_email', 'private_key' and 'private_key_id'.");
|
||||
}
|
||||
|
||||
PrivateKey privateKey = privateKeyFromPkcs8(privateKeyPem);
|
||||
|
||||
Collection<String> emptyScopes = Collections.emptyList();
|
||||
|
||||
Builder credentialBuilder = new GoogleCredential.Builder().setTransport(Utils.getDefaultTransport())
|
||||
.setJsonFactory(Utils.getDefaultJsonFactory()).setServiceAccountId(clientEmail)
|
||||
.setServiceAccountScopes(emptyScopes).setServiceAccountPrivateKey(privateKey)
|
||||
.setServiceAccountPrivateKeyId(privateKeyId);
|
||||
|
||||
if(tokenUri != null) {
|
||||
credentialBuilder.setTokenServerEncodedUrl(tokenUri);
|
||||
}
|
||||
|
||||
if(projectId != null) {
|
||||
credentialBuilder.setServiceAccountProjectId(projectId);
|
||||
}
|
||||
|
||||
// Don't do a refresh at this point, as it will always fail before the scopes are added.
|
||||
return credentialBuilder.build();
|
||||
}
|
||||
|
||||
private static PrivateKey privateKeyFromPkcs8(String privateKeyPem) throws IOException {
|
||||
Reader reader = new StringReader(privateKeyPem);
|
||||
Section section = PemReader.readFirstSectionAndClose(reader, "PRIVATE KEY");
|
||||
if(section == null) {
|
||||
throw new IOException("Invalid PKCS8 data.");
|
||||
}
|
||||
byte[] bytes = section.getBase64DecodedBytes();
|
||||
PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(bytes);
|
||||
Exception unexpectedException = null;
|
||||
try {
|
||||
KeyFactory keyFactory = SecurityUtils.getRsaKeyFactory();
|
||||
PrivateKey privateKey = keyFactory.generatePrivate(keySpec);
|
||||
return privateKey;
|
||||
} catch(NoSuchAlgorithmException exception) {
|
||||
unexpectedException = exception;
|
||||
} catch(InvalidKeySpecException exception) {
|
||||
unexpectedException = exception;
|
||||
}
|
||||
throw new IOException("Unexpected exception reading PKCS data", unexpectedException);
|
||||
}
|
||||
|
||||
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
|
||||
throws Exception {
|
||||
String scope = infrastructureScope;
|
||||
|
@ -321,7 +235,6 @@ public class VREAccessesHarvester extends BasicHarvester {
|
|||
}
|
||||
|
||||
/**
|
||||
* l
|
||||
* @throws Exception
|
||||
*/
|
||||
private static AnalyticsReportCredentials getAuthorisedApplicationInfoFromIs() throws Exception {
|
||||
|
@ -339,24 +252,24 @@ public class VREAccessesHarvester extends BasicHarvester {
|
|||
} else {
|
||||
|
||||
for(ServiceEndpoint res : list) {
|
||||
reportCredentials.setTokenUri(res.profile().runtime().hostedOn());
|
||||
Group<AccessPoint> apGroup = res.profile().accessPoints();
|
||||
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
|
||||
AccessPoint found = accessPoints[0];
|
||||
reportCredentials.setClientEmail(found.address());
|
||||
reportCredentials.setProjectId(found.username());
|
||||
reportCredentials.setPrivateKeyPem(StringEncrypter.getEncrypter().decrypt(found.password()));
|
||||
reportCredentials.setClientEmail(found.username());
|
||||
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
|
||||
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
|
||||
|
||||
for(Property prop : found.properties()) {
|
||||
if(prop.name().compareTo(AP_VIEWS_PROPERTY) == 0) {
|
||||
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
|
||||
String[] views = decryptedValue.split(";");
|
||||
reportCredentials.setViewIds(Arrays.asList(views));
|
||||
}
|
||||
if(prop.name().compareTo(AP_CLIENT_PROPERTY) == 0) {
|
||||
if(prop.name().compareTo(AP_CLIENT_ID) == 0) {
|
||||
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
|
||||
reportCredentials.setClientId(decryptedValue);
|
||||
}
|
||||
if(prop.name().compareTo(AP_PRIVATEKEY_PROPERTY) == 0) {
|
||||
if(prop.name().compareTo(AP_PRIVATEKEY_ID_PROPERTY) == 0) {
|
||||
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
|
||||
reportCredentials.setPrivateKeyId(decryptedValue);
|
||||
}
|
||||
|
@ -374,14 +287,13 @@ public class VREAccessesHarvester extends BasicHarvester {
|
|||
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
|
||||
}
|
||||
|
||||
private static DateRange getDateRangeForAnalytics(Date start, Date end) {
|
||||
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
|
||||
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
|
||||
String startDate = asLocalDate(start).format(formatter);
|
||||
String endDate = asLocalDate(end).format(formatter);
|
||||
DateRange dateRange = new DateRange();// date format `yyyy-MM-dd`
|
||||
dateRange.setStartDate(startDate);
|
||||
dateRange.setEndDate(endDate);
|
||||
return dateRange;
|
||||
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
|
||||
|
||||
return dateRangeBuilder;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,15 +1,14 @@
|
|||
package org.gcube.dataharvest.harvester;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
|
||||
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
|
||||
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
|
||||
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
|
||||
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
|
||||
import org.json.JSONObject;
|
||||
import org.gcube.social_networking.social_networking_client_library.UserClient;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
|
@ -17,7 +16,7 @@ import org.slf4j.LoggerFactory;
|
|||
* @author Eric Perrone (ISTI - CNR)
|
||||
* @author Luca Frosini (ISTI - CNR)
|
||||
*/
|
||||
public class VREUsersHarvester extends SocialNetworkingHarvester {
|
||||
public class VREUsersHarvester extends BasicHarvester {
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(VREUsersHarvester.class);
|
||||
|
||||
|
@ -31,12 +30,13 @@ public class VREUsersHarvester extends SocialNetworkingHarvester {
|
|||
@Override
|
||||
public List<AccountingRecord> getAccountingRecords() throws Exception {
|
||||
try {
|
||||
// String context = Utils.getCurrentContext();
|
||||
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
|
||||
|
||||
int measure = get();
|
||||
|
||||
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
|
||||
|
||||
ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
|
||||
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
|
||||
|
||||
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.USERS), (long) measure);
|
||||
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
|
||||
|
@ -50,17 +50,8 @@ public class VREUsersHarvester extends SocialNetworkingHarvester {
|
|||
}
|
||||
|
||||
private int get() throws Exception {
|
||||
JSONObject jsonObject = getJSONObject(PATH);
|
||||
|
||||
int userNumber = 0;
|
||||
|
||||
Boolean success = (Boolean) jsonObject.get("success");
|
||||
if(success == false) {
|
||||
throw new IOException("Erro while getting VRE Users");
|
||||
}
|
||||
|
||||
userNumber = jsonObject.getJSONArray("result").length();
|
||||
return userNumber;
|
||||
UserClient userClient = new UserClient();
|
||||
return userClient.getAllUsernamesContext().size();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,197 +0,0 @@
|
|||
package org.gcube.dataharvest.harvester.sobigdata;
|
||||
|
||||
import java.text.ParseException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Calendar;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.SortedSet;
|
||||
|
||||
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
|
||||
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
|
||||
import org.gcube.common.storagehub.client.dsl.ContainerType;
|
||||
import org.gcube.common.storagehub.client.dsl.FolderContainer;
|
||||
import org.gcube.common.storagehub.client.dsl.ItemContainer;
|
||||
import org.gcube.common.storagehub.client.dsl.ListResolverTyped;
|
||||
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
|
||||
import org.gcube.common.storagehub.model.items.FolderItem;
|
||||
import org.gcube.common.storagehub.model.items.Item;
|
||||
import org.gcube.common.storagehub.model.items.nodes.Accounting;
|
||||
import org.gcube.common.storagehub.model.items.nodes.accounting.AccountEntry;
|
||||
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
|
||||
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
|
||||
import org.gcube.dataharvest.utils.DateUtils;
|
||||
import org.gcube.dataharvest.utils.Utils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* The Class DataMethodDownloadHarvester.
|
||||
*
|
||||
* @author Eric Perrone (ISTI - CNR)
|
||||
* @author Luca Frosini (ISTI - CNR)
|
||||
* @author Francesco Mangiacrapa (ISTI - CNR)
|
||||
*/
|
||||
public class DataMethodDownloadHarvester extends SoBigDataHarvester {
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(DataMethodDownloadHarvester.class);
|
||||
|
||||
private int count = 0;
|
||||
|
||||
/**
|
||||
* Instantiates a new data method download harvester.
|
||||
*
|
||||
* @param start the start
|
||||
* @param end the end
|
||||
* @param catalogueContext the catalogue context
|
||||
* @param contexts the contexts
|
||||
* @throws ParseException the parse exception
|
||||
*/
|
||||
public DataMethodDownloadHarvester(Date start, Date end, SortedSet<String> contexts) throws Exception {
|
||||
super(start, end, contexts);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.gcube.dataharvest.harvester.BasicHarvester#getData()
|
||||
*/
|
||||
@Override
|
||||
public List<AccountingRecord> getAccountingRecords() throws Exception {
|
||||
String defaultContext = Utils.getCurrentContext();
|
||||
logger.debug("The context is {}", defaultContext);
|
||||
|
||||
try {
|
||||
/*
|
||||
String vreName = getVRENameToHL(defaultContext);
|
||||
logger.debug("Getting VRE Name to HL from context/scope returns {} ", vreName);
|
||||
|
||||
String user = vreName + "-Manager";
|
||||
logger.debug("Using user '{}' to getHome from HL", user);
|
||||
|
||||
//Getting HL instance and home for VRE MANAGER
|
||||
HomeManager manager = HomeLibrary.getHomeManagerFactory().getHomeManager();
|
||||
@SuppressWarnings("deprecation")
|
||||
Home home = manager.getHome(user);
|
||||
JCRWorkspace ws = (JCRWorkspace) home.getWorkspace();
|
||||
|
||||
String path = "/Workspace/MySpecialFolders/" + vreName;
|
||||
logger.debug("Getting item by Path {}", path);
|
||||
JCRWorkspaceItem item = (JCRWorkspaceItem) ws.getItemByPath(path);
|
||||
*/
|
||||
|
||||
StorageHubClient storageHubClient = new StorageHubClient();
|
||||
FolderContainer vreFolderContainer = storageHubClient.openVREFolder();
|
||||
|
||||
FolderItem vreFolderItem = vreFolderContainer.get();
|
||||
|
||||
logger.debug("Analyzing {} in the period [{} to {}] starting from root {}", defaultContext,
|
||||
DateUtils.format(start), DateUtils.format(end), vreFolderItem.getName());
|
||||
|
||||
|
||||
ScopeDescriptor defaultScopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
|
||||
|
||||
|
||||
AccountingRecord defaultHarvesteData = new AccountingRecord(defaultScopeDescriptor, instant, getDimension(HarvestedDataKey.DATA_METHOD_DOWNLOAD), (long) count);
|
||||
logger.debug("{} : {}", defaultHarvesteData.getDimension().getId(), defaultHarvesteData.getMeasure());
|
||||
|
||||
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
|
||||
|
||||
ListResolverTyped listResolverTyped = vreFolderContainer.list();
|
||||
List<ItemContainer<? extends Item>> containers = listResolverTyped.includeHidden().getContainers();
|
||||
for(ItemContainer<? extends Item> itemContainer : containers) {
|
||||
|
||||
|
||||
count = 0; //resettings the counter
|
||||
|
||||
//HarvestedData harvestedData;
|
||||
|
||||
//Getting statistics for folder
|
||||
if(itemContainer.getType() == ContainerType.FOLDER) {
|
||||
Item item = itemContainer.get();
|
||||
logger.debug("Getting statistics for folder {}", item.getName());
|
||||
getStats(itemContainer, start, end);
|
||||
|
||||
String normalizedName = item.getName().replaceAll("[^A-Za-z0-9]", "");
|
||||
String context = mapWsFolderNameToVRE.get(normalizedName);
|
||||
//Checking if it is a VRE name to right accounting...
|
||||
if(context != null && !context.isEmpty()) {
|
||||
logger.debug("Found context '{}' matching with normalized VRE name {} ", context, normalizedName);
|
||||
|
||||
ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor(context);
|
||||
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.DATA_METHOD_DOWNLOAD), (long) count);
|
||||
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
|
||||
accountingRecords.add(ar);
|
||||
|
||||
} else {
|
||||
logger.debug(
|
||||
"No scope found matching the folder name {}, accounting its stats in the default context {}",
|
||||
normalizedName, defaultContext);
|
||||
//INCREASING THE DEFAULT CONTEXT COUNTER...
|
||||
defaultHarvesteData.setMeasure(defaultHarvesteData.getMeasure() + count);
|
||||
logger.trace("Increased default context stats {}", defaultHarvesteData);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
//ADDING DEFAULT ACCOUNTING
|
||||
accountingRecords.add(defaultHarvesteData);
|
||||
|
||||
|
||||
logger.debug("In the period [from {} to {} ] returning workspace accouting data {}", DateUtils.format(start),
|
||||
DateUtils.format(end), accountingRecords);
|
||||
|
||||
return accountingRecords;
|
||||
|
||||
} catch(Exception e) {
|
||||
throw e;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the stats.
|
||||
*
|
||||
* @param baseItem the base item
|
||||
* @param start the start
|
||||
* @param end the end
|
||||
* @return the stats
|
||||
* @throws InternalErrorException the internal error exception
|
||||
*/
|
||||
private void getStats(ItemContainer<? extends Item> itemContainer, Date start, Date end) throws Exception {
|
||||
if(itemContainer.getType() == ContainerType.FOLDER) {
|
||||
|
||||
ListResolverTyped listResolverTyped = ((FolderContainer)itemContainer).list();
|
||||
List<ItemContainer<? extends Item>> containers = listResolverTyped.includeHidden().getContainers();
|
||||
for(ItemContainer<? extends Item> itemCont : containers) {
|
||||
getStats(itemCont , start, end);
|
||||
}
|
||||
|
||||
} else {
|
||||
try {
|
||||
Accounting accounting = itemContainer.get().getAccounting();
|
||||
for(AccountEntry entry : accounting.getEntries()) {
|
||||
|
||||
switch(entry.getType()) {
|
||||
case CREATE:
|
||||
case UPDATE:
|
||||
case READ:
|
||||
Calendar calendar = entry.getDate();
|
||||
if(calendar.after(DateUtils.dateToCalendar(start))
|
||||
&& calendar.before(DateUtils.dateToCalendar(end))) {
|
||||
count++;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
}
|
||||
} catch(Exception e) {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -1,5 +1,11 @@
|
|||
package org.gcube.dataharvest.harvester.sobigdata;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.net.URLEncoder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
|
@ -12,7 +18,7 @@ import java.util.SortedSet;
|
|||
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
|
||||
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
|
||||
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
|
||||
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
|
||||
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
|
||||
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
|
||||
import org.gcube.dataharvest.utils.DateUtils;
|
||||
import org.gcube.dataharvest.utils.Utils;
|
||||
|
@ -45,7 +51,6 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
|
|||
*
|
||||
* @param start the start
|
||||
* @param end the end
|
||||
* @param catalogueContext the catalogue context
|
||||
* @param contexts the contexts. They are the VREs
|
||||
* @throws Exception the exception
|
||||
*/
|
||||
|
@ -153,7 +158,7 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
|
|||
|
||||
logger.trace("The context {} has count ", catalogueContext, catalogueContextCount);
|
||||
|
||||
ScopeDescriptor catalogueScopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor(catalogueContext);
|
||||
ScopeDescriptor catalogueScopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(catalogueContext);
|
||||
Dimension dimension = getDimension(harvestKey);
|
||||
|
||||
AccountingRecord ar = new AccountingRecord(catalogueScopeDescriptor, instant, dimension, (long) catalogueContextCount);
|
||||
|
@ -161,7 +166,7 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
|
|||
|
||||
for(String key : counter.keySet()) {
|
||||
logger.trace("The group {} has count {}", key, counter.get(key));
|
||||
ScopeDescriptor sd = AccountingDataHarvesterPlugin.getScopeDescriptor(mapCatalogueGroupToVRE.get(key));
|
||||
ScopeDescriptor sd = AccountingDashboardHarvesterPlugin.getScopeDescriptor(mapCatalogueGroupToVRE.get(key));
|
||||
AccountingRecord accountingRecord = new AccountingRecord(sd, instant, dimension, (long) counter.get(key));
|
||||
accountingRecords.add(accountingRecord);
|
||||
}
|
||||
|
@ -211,10 +216,27 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
|
|||
query += "q=" + URLEncoder.encode(q, UTF_8_CHARASET) + "&wt=json&indent=true&rows=" + ROWS;
|
||||
query += flValue != null && !flValue.isEmpty() ? "&fl=" + URLEncoder.encode(flValue, UTF_8_CHARASET) : "";
|
||||
logger.debug("\nPerforming query {}", query);
|
||||
String jsonResult = Utils.getJson(query);
|
||||
String jsonResult = requestJson(query);
|
||||
logger.trace("Response is {}", jsonResult);
|
||||
|
||||
return jsonResult;
|
||||
}
|
||||
|
||||
|
||||
public String requestJson(String url) throws MalformedURLException, IOException {
|
||||
URL address = new URL(url);
|
||||
HttpURLConnection connection = (HttpURLConnection) address.openConnection();
|
||||
BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
|
||||
String json = "";
|
||||
String line = "";
|
||||
|
||||
while(line != null) {
|
||||
line = reader.readLine();
|
||||
if(line != null) {
|
||||
json += line.trim();
|
||||
}
|
||||
}
|
||||
return json;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@ import java.util.SortedSet;
|
|||
import java.util.TreeSet;
|
||||
|
||||
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
|
||||
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
|
||||
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
|
||||
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
|
||||
import org.gcube.dataharvest.harvester.BasicHarvester;
|
||||
import org.gcube.dataharvest.utils.Utils;
|
||||
|
@ -52,8 +52,7 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
|
|||
*
|
||||
* @param start the start
|
||||
* @param end the end
|
||||
* @param catalogueContext the catalogue context
|
||||
* @param vreScopes the contexts
|
||||
* @param contexts the contexts. They are the VREs
|
||||
* @throws ParseException the parse exception
|
||||
*/
|
||||
public SoBigDataHarvester(Date start, Date end, SortedSet<String> contexts) throws Exception {
|
||||
|
@ -78,7 +77,7 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
|
|||
* @throws ObjectNotFound
|
||||
*/
|
||||
protected void initMappingMaps() throws ObjectNotFound, Exception {
|
||||
Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
|
||||
Properties properties = AccountingDashboardHarvesterPlugin.getConfigParameters();
|
||||
Set<String> keys = properties.stringPropertyNames();
|
||||
|
||||
mapSystemTypeToDBEntry = new HashMap<String,String>();
|
||||
|
|
|
@ -18,7 +18,7 @@ import org.gcube.accounting.analytics.persistence.AccountingPersistenceQueryFact
|
|||
import org.gcube.accounting.datamodel.AggregatedUsageRecord;
|
||||
import org.gcube.accounting.datamodel.aggregation.AggregatedServiceUsageRecord;
|
||||
import org.gcube.accounting.datamodel.usagerecords.ServiceUsageRecord;
|
||||
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
|
||||
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
|
||||
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
|
||||
import org.gcube.dataharvest.harvester.BasicHarvester;
|
||||
import org.gcube.dataharvest.utils.DateUtils;
|
||||
|
@ -109,7 +109,7 @@ public class TagMeMethodInvocationHarvester extends BasicHarvester {
|
|||
|
||||
}
|
||||
|
||||
ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
|
||||
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
|
||||
Dimension dimension = getDimension(HarvestedDataKey.METHOD_INVOCATIONS);
|
||||
|
||||
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, dimension, numberOfInvocation);
|
||||
|
|
|
@ -1,8 +1,5 @@
|
|||
package org.gcube.dataharvest.utils;
|
||||
|
||||
import static org.gcube.common.authorization.client.Constants.authorizationService;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
|
@ -10,10 +7,15 @@ import java.util.Properties;
|
|||
import java.util.SortedSet;
|
||||
import java.util.TreeSet;
|
||||
|
||||
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
|
||||
import org.gcube.common.authorization.library.provider.UserInfo;
|
||||
import javax.ws.rs.InternalServerErrorException;
|
||||
|
||||
import org.gcube.common.authorization.utils.manager.SecretManagerProvider;
|
||||
import org.gcube.common.authorization.utils.secret.JWTSecret;
|
||||
import org.gcube.common.authorization.utils.secret.Secret;
|
||||
import org.gcube.common.keycloak.KeycloakClientFactory;
|
||||
import org.gcube.common.keycloak.model.TokenResponse;
|
||||
import org.gcube.common.scope.impl.ScopeBean;
|
||||
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
|
||||
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
|
||||
import org.gcube.resourcemanagement.support.server.managers.context.ContextManager;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
@ -25,88 +27,98 @@ public class ContextAuthorization {
|
|||
|
||||
private static Logger logger = LoggerFactory.getLogger(ContextAuthorization.class);
|
||||
|
||||
public static final String USERNAME = "USERNAME";
|
||||
public static final String DEFAULT_USERNAME = "luca.frosini";
|
||||
public static final String CLIENT_ID = "accounting-dashboard-harvester-se-plugin";
|
||||
|
||||
public static final String SERVICE_NAME = "SERVICE_NAME";
|
||||
public static final String DEFAULT_SERVICE_NAME = "accounting-harvester";
|
||||
protected String clientSecret;
|
||||
|
||||
/**
|
||||
* Contains Context full name as key and Token as Value
|
||||
*/
|
||||
protected Map<String,String> contextToToken;
|
||||
protected Map<String,Secret> contextToToken;
|
||||
|
||||
/**
|
||||
* Contains Token as key and Context full name as Value
|
||||
*/
|
||||
protected Map<String,String> tokenToContext;
|
||||
protected Map<Secret,String> tokenToContext;
|
||||
|
||||
protected Properties properties;
|
||||
|
||||
/**
|
||||
* Contains Properties used to generate tokens
|
||||
*/
|
||||
public ContextAuthorization() throws Exception {
|
||||
public ContextAuthorization(Properties properties) throws Exception {
|
||||
this.properties = properties;
|
||||
this.contextToToken = new HashMap<>();
|
||||
this.tokenToContext = new HashMap<>();
|
||||
retrieveContextsAndTokens();
|
||||
}
|
||||
|
||||
public String generateTokenForContext(String context, Properties properties) throws Exception {
|
||||
if(properties==null) {
|
||||
properties = AccountingDataHarvesterPlugin.getProperties().get();
|
||||
/**
|
||||
* Contains Properties used to generate tokens
|
||||
*/
|
||||
public ContextAuthorization() throws Exception {
|
||||
this.properties = AccountingDashboardHarvesterPlugin.getConfigParameters();
|
||||
this.contextToToken = new HashMap<>();
|
||||
this.tokenToContext = new HashMap<>();
|
||||
retrieveContextsAndTokens();
|
||||
}
|
||||
logger.info("Going to generate Token for Context {}", context);
|
||||
UserInfo userInfo = new UserInfo(properties.getProperty(USERNAME, DEFAULT_USERNAME),
|
||||
new ArrayList<>());
|
||||
String userToken = authorizationService().generateUserToken(userInfo, context);
|
||||
SecurityTokenProvider.instance.set(userToken);
|
||||
String generatedToken = authorizationService()
|
||||
.generateExternalServiceToken(properties.getProperty(SERVICE_NAME, DEFAULT_SERVICE_NAME));
|
||||
|
||||
logger.trace("Token for Context {} is {}", context, generatedToken);
|
||||
private String getClientSecret(String context) {
|
||||
try {
|
||||
if(clientSecret==null) {
|
||||
int index = context.indexOf('/', 1);
|
||||
String root = context.substring(0, index == -1 ? context.length() : index);
|
||||
clientSecret = properties.getProperty(root);
|
||||
}
|
||||
return clientSecret;
|
||||
} catch(Exception e) {
|
||||
throw new InternalServerErrorException(
|
||||
"Unable to retrieve Application Token for context " + SecretManagerProvider.instance.get().getContext(), e);
|
||||
}
|
||||
}
|
||||
|
||||
return generatedToken;
|
||||
private TokenResponse getJWTAccessToken(String context) throws Exception {
|
||||
TokenResponse tr = KeycloakClientFactory.newInstance().queryUMAToken(context, CLIENT_ID, getClientSecret(context), context, null);
|
||||
return tr;
|
||||
}
|
||||
|
||||
public Secret getCatalogueSecretForContext(String context) throws Exception {
|
||||
TokenResponse tr = getJWTAccessToken(context);
|
||||
Secret secret = new JWTSecret(tr.getAccessToken());
|
||||
return secret;
|
||||
}
|
||||
|
||||
protected void retrieveContextsAndTokens() throws Exception {
|
||||
|
||||
String initialToken = SecurityTokenProvider.instance.get();
|
||||
|
||||
try {
|
||||
|
||||
Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
|
||||
|
||||
LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
|
||||
|
||||
for(String scope : map.keySet()) {
|
||||
try {
|
||||
String context = map.get(scope).toString();
|
||||
|
||||
String generatedToken = generateTokenForContext(context, properties);
|
||||
Secret secret = getCatalogueSecretForContext(context);
|
||||
|
||||
contextToToken.put(context, generatedToken);
|
||||
tokenToContext.put(generatedToken, context);
|
||||
contextToToken.put(context, secret);
|
||||
tokenToContext.put(secret, context);
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("Error while elaborating {}", scope, e);
|
||||
throw e;
|
||||
} finally {
|
||||
SecurityTokenProvider.instance.reset();
|
||||
// throw e;
|
||||
}
|
||||
|
||||
}
|
||||
} catch(Exception ex) {
|
||||
throw ex;
|
||||
} finally {
|
||||
SecurityTokenProvider.instance.set(initialToken);
|
||||
}
|
||||
}
|
||||
|
||||
public String getTokenForContext(String contextFullName) {
|
||||
return contextToToken.get(contextFullName);
|
||||
public Secret getSecretForContext(String context) {
|
||||
return contextToToken.get(context);
|
||||
}
|
||||
|
||||
public String getContextFromToken(String token) {
|
||||
return tokenToContext.get(token);
|
||||
public String getContextFromSecret(Secret secret) {
|
||||
return tokenToContext.get(secret);
|
||||
}
|
||||
|
||||
public SortedSet<String> getContexts() {
|
||||
|
|
|
@ -91,19 +91,14 @@ public class DateUtils {
|
|||
aggregationStartCalendar.set(Calendar.MINUTE, 0);
|
||||
aggregationStartCalendar.set(Calendar.SECOND, 0);
|
||||
aggregationStartCalendar.set(Calendar.MILLISECOND, 0);
|
||||
|
||||
logger.debug("{}", DEFAULT_DATE_FORMAT.format(aggregationStartCalendar.getTime()));
|
||||
|
||||
// logger.trace("{}", DEFAULT_DATE_FORMAT.format(aggregationStartCalendar.getTime()));
|
||||
return aggregationStartCalendar;
|
||||
}
|
||||
|
||||
public static Date getEndDateFromStartDate(AggregationType aggregationType, Date startDate, int offset, boolean partialHarvesting) {
|
||||
public static Date getEndDateFromStartDate(AggregationType aggregationType, Date startDate, int offset) {
|
||||
Calendar aggregationEndDate = getUTCCalendarInstance();
|
||||
if(!partialHarvesting) {
|
||||
aggregationEndDate.setTimeInMillis(startDate.getTime());
|
||||
aggregationEndDate.add(aggregationType.getCalendarField(), offset);
|
||||
aggregationEndDate.add(Calendar.MILLISECOND, -1);
|
||||
}
|
||||
return aggregationEndDate.getTime();
|
||||
}
|
||||
|
||||
|
@ -113,8 +108,6 @@ public class DateUtils {
|
|||
return calendar;
|
||||
}
|
||||
|
||||
/* OLD functions of Eric Perrone (ISTI - CNR) */
|
||||
|
||||
public static String format(Date date) {
|
||||
return DateUtils.LAUNCH_DATE_FORMAT.format(date);
|
||||
}
|
||||
|
|
|
@ -1,17 +1,9 @@
|
|||
package org.gcube.dataharvest.utils;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
|
||||
import org.gcube.common.authorization.client.Constants;
|
||||
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
|
||||
import org.gcube.common.authorization.library.AuthorizationEntry;
|
||||
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.common.authorization.utils.manager.SecretManager;
|
||||
import org.gcube.common.authorization.utils.manager.SecretManagerProvider;
|
||||
import org.gcube.common.authorization.utils.secret.Secret;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
|
@ -23,36 +15,16 @@ public class Utils {
|
|||
|
||||
private static Logger logger = LoggerFactory.getLogger(Utils.class);
|
||||
|
||||
public static String getJson(String url) throws MalformedURLException, IOException {
|
||||
URL address = new URL(url);
|
||||
HttpURLConnection connection = (HttpURLConnection) address.openConnection();
|
||||
BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
|
||||
String json = "";
|
||||
String line = "";
|
||||
|
||||
while(line != null) {
|
||||
line = reader.readLine();
|
||||
if(line != null) {
|
||||
json += line.trim();
|
||||
}
|
||||
}
|
||||
return json;
|
||||
}
|
||||
|
||||
public static String getCurrentContext() throws ObjectNotFound, Exception {
|
||||
return getCurrentContext(SecurityTokenProvider.instance.get());
|
||||
return SecretManagerProvider.instance.get().getContext();
|
||||
}
|
||||
|
||||
public static String getCurrentContext(String token) throws ObjectNotFound, Exception {
|
||||
AuthorizationEntry authorizationEntry = Constants.authorizationService().get(token);
|
||||
String context = authorizationEntry.getContext();
|
||||
logger.info("Context of token {} is {}", token, context);
|
||||
return context;
|
||||
}
|
||||
|
||||
public static void setContext(String token) throws ObjectNotFound, Exception {
|
||||
SecurityTokenProvider.instance.set(token);
|
||||
ScopeProvider.instance.set(getCurrentContext(token));
|
||||
public static void setContext(Secret secret) throws Exception {
|
||||
SecretManagerProvider.instance.reset();
|
||||
SecretManager secretManager = new SecretManager();
|
||||
SecretManagerProvider.instance.set(secretManager);
|
||||
secretManager.addSecret(secret);
|
||||
secretManager.set();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
org.gcube.dataharvest.AccountingDashboardHarvesterPlugin
|
|
@ -1 +0,0 @@
|
|||
org.gcube.dataharvest.DataHarvestPluginDeclaration
|
|
@ -1,2 +1 @@
|
|||
USERNAME=luca.frosini
|
||||
SERVICE_NAME=accounting-harvester
|
||||
/d4science.research-infrastructures.eu=XXXXXXXXXX
|
|
@ -0,0 +1 @@
|
|||
/TestDateScorro.java
|
|
@ -1,849 +0,0 @@
|
|||
package org.gcube.dataharvest;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Calendar;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
import java.util.Set;
|
||||
import java.util.SortedSet;
|
||||
import java.util.TreeSet;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.gcube.accounting.accounting.summary.access.AccountingDao;
|
||||
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
|
||||
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
|
||||
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
|
||||
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
|
||||
import org.gcube.common.scope.impl.ScopeBean;
|
||||
import org.gcube.common.scope.impl.ScopeBean.Type;
|
||||
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
|
||||
import org.gcube.dataharvest.harvester.MethodInvocationHarvester;
|
||||
import org.gcube.dataharvest.harvester.SocialInteractionsHarvester;
|
||||
import org.gcube.dataharvest.harvester.VREAccessesHarvester;
|
||||
import org.gcube.dataharvest.harvester.VREUsersHarvester;
|
||||
import org.gcube.dataharvest.harvester.sobigdata.DataMethodDownloadHarvester;
|
||||
import org.gcube.dataharvest.harvester.sobigdata.ResourceCatalogueHarvester;
|
||||
import org.gcube.dataharvest.harvester.sobigdata.TagMeMethodInvocationHarvester;
|
||||
import org.gcube.dataharvest.utils.AggregationType;
|
||||
import org.gcube.dataharvest.utils.ContextAuthorization;
|
||||
import org.gcube.dataharvest.utils.ContextTest;
|
||||
import org.gcube.dataharvest.utils.DateUtils;
|
||||
import org.gcube.resourcemanagement.support.server.managers.context.ContextManager;
|
||||
import org.gcube.vremanagement.executor.api.rest.SmartExecutor;
|
||||
import org.gcube.vremanagement.executor.api.types.LaunchParameter;
|
||||
import org.gcube.vremanagement.executor.api.types.Scheduling;
|
||||
import org.gcube.vremanagement.executor.client.SmartExecutorClientFactory;
|
||||
import org.junit.Assert;
|
||||
import org.quartz.CronExpression;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class AccountingDataHarvesterPluginTest extends ContextTest {
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterPluginTest.class);
|
||||
|
||||
public static final String ROOT = "/d4science.research-infrastructures.eu";
|
||||
public static final String SO_BIG_VO = "/d4science.research-infrastructures.eu/SoBigData";
|
||||
|
||||
public static final String TAGME_VRE = "/d4science.research-infrastructures.eu/SoBigData/TagMe";
|
||||
public static final String STOCK_ASSESMENT_VRE = "/d4science.research-infrastructures.eu/gCubeApps/StockAssessment";
|
||||
|
||||
public static SortedSet<String> getContexts() throws Exception {
|
||||
SortedSet<String> contexts = new TreeSet<>();
|
||||
LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
|
||||
for(String scope : map.keySet()) {
|
||||
try {
|
||||
String context = map.get(scope).toString();
|
||||
contexts.add(context);
|
||||
} catch(Exception e) {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
return contexts;
|
||||
}
|
||||
|
||||
// @Test
|
||||
public void getDimensions() {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT);
|
||||
|
||||
AccountingDao dao = AccountingDao.get();
|
||||
|
||||
Set<Dimension> dimensionSet = dao.getDimensions();
|
||||
for(Dimension d : dimensionSet) {
|
||||
logger.debug("{} - {} - {} - {}", d.getId(), d.getGroup(), d.getAggregatedMeasure(), d.getLabel());
|
||||
}
|
||||
|
||||
logger.info("End.");
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
|
||||
// @Test
|
||||
public void launch() {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT);
|
||||
|
||||
DataHarvestPluginDeclaration dataHarvestPluginDeclaration = new DataHarvestPluginDeclaration();
|
||||
|
||||
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(
|
||||
dataHarvestPluginDeclaration);
|
||||
|
||||
Map<String,Object> inputs = new HashMap<>();
|
||||
|
||||
AggregationType aggregationType = AggregationType.MONTHLY;
|
||||
|
||||
inputs.put(AccountingDataHarvesterPlugin.MEASURE_TYPE_INPUT_PARAMETER, aggregationType.name());
|
||||
inputs.put(AccountingDataHarvesterPlugin.GET_VRE_USERS_INPUT_PARAMETER, true);
|
||||
inputs.put(AccountingDataHarvesterPlugin.RERUN_INPUT_PARAMETER, true);
|
||||
inputs.put(AccountingDataHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, false);
|
||||
inputs.put(AccountingDataHarvesterPlugin.PARTIAL_HARVESTING, true);
|
||||
|
||||
/*
|
||||
Calendar from = DateUtils.getStartCalendar(2016, Calendar.SEPTEMBER, 1);
|
||||
String fromDate = DateUtils.LAUNCH_DATE_FORMAT.format(from.getTime());
|
||||
logger.trace("{} is {}", AccountingDataHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
|
||||
inputs.put(AccountingDataHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
|
||||
*/
|
||||
|
||||
accountingDataHarvesterPlugin.launch(inputs);
|
||||
|
||||
logger.info("End.");
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
|
||||
// @Test
|
||||
public void launchPluginOnSmartExecutor() {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT);
|
||||
|
||||
SmartExecutor smartExecutor = SmartExecutorClientFactory.create(DataHarvestPluginDeclaration.NAME);
|
||||
Assert.assertNotNull(smartExecutor);
|
||||
|
||||
Map<String,Object> inputs = new HashMap<>();
|
||||
|
||||
AggregationType aggregationType = AggregationType.MONTHLY;
|
||||
|
||||
inputs.put(AccountingDataHarvesterPlugin.MEASURE_TYPE_INPUT_PARAMETER, aggregationType.name());
|
||||
inputs.put(AccountingDataHarvesterPlugin.GET_VRE_USERS_INPUT_PARAMETER, true);
|
||||
inputs.put(AccountingDataHarvesterPlugin.RERUN_INPUT_PARAMETER, true);
|
||||
inputs.put(AccountingDataHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, false);
|
||||
|
||||
/*
|
||||
Calendar from = DateUtils.getStartCalendar(2016, Calendar.SEPTEMBER, 1);
|
||||
String fromDate = DateUtils.LAUNCH_DATE_FORMAT.format(from.getTime());
|
||||
logger.trace("{} is {}", AccountingDataHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
|
||||
inputs.put(AccountingDataHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
|
||||
*/
|
||||
|
||||
//
|
||||
CronExpression cronExpression = new CronExpression("0 0 10 3 1/1 ? *");
|
||||
Scheduling scheduling = new Scheduling(cronExpression);
|
||||
scheduling.setGlobal(false);
|
||||
LaunchParameter launchParameter = new LaunchParameter(DataHarvestPluginDeclaration.NAME, inputs,
|
||||
scheduling);
|
||||
smartExecutor.launch(launchParameter);
|
||||
|
||||
logger.info("End.");
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
|
||||
// @Test
|
||||
public void launchOldData() {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT);
|
||||
|
||||
DataHarvestPluginDeclaration dataHarvestPluginDeclaration = new DataHarvestPluginDeclaration();
|
||||
|
||||
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(
|
||||
dataHarvestPluginDeclaration);
|
||||
|
||||
Map<String,Object> inputs = new HashMap<>();
|
||||
|
||||
AggregationType aggregationType = AggregationType.MONTHLY;
|
||||
|
||||
inputs.put(AccountingDataHarvesterPlugin.MEASURE_TYPE_INPUT_PARAMETER, aggregationType.name());
|
||||
inputs.put(AccountingDataHarvesterPlugin.GET_VRE_USERS_INPUT_PARAMETER, true);
|
||||
inputs.put(AccountingDataHarvesterPlugin.RERUN_INPUT_PARAMETER, true);
|
||||
inputs.put(AccountingDataHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, false);
|
||||
|
||||
Calendar from = DateUtils.getStartCalendar(2016, Calendar.SEPTEMBER, 1);
|
||||
|
||||
Calendar runbeforeDate = DateUtils.getStartCalendar(2018, Calendar.JUNE, 1);
|
||||
|
||||
while(from.before(runbeforeDate)) {
|
||||
String fromDate = DateUtils.LAUNCH_DATE_FORMAT.format(from.getTime());
|
||||
logger.trace("{} is {}", AccountingDataHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
|
||||
inputs.put(AccountingDataHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
|
||||
accountingDataHarvesterPlugin.launch(inputs);
|
||||
from.add(aggregationType.getCalendarField(), 1);
|
||||
}
|
||||
|
||||
logger.info("End.");
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
|
||||
// @Test
|
||||
public void launchOldDataVREAccessesHarvester() {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT);
|
||||
// AccountingDao dao = AccountingDao.get();
|
||||
|
||||
DataHarvestPluginDeclaration dataHarvestPluginDeclaration = new DataHarvestPluginDeclaration();
|
||||
|
||||
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(
|
||||
dataHarvestPluginDeclaration);
|
||||
Properties properties = accountingDataHarvesterPlugin.getConfigParameters();
|
||||
AccountingDataHarvesterPlugin.getProperties().set(properties);
|
||||
|
||||
ContextAuthorization contextAuthorization = new ContextAuthorization();
|
||||
SortedSet<String> contexts = contextAuthorization.getContexts();
|
||||
|
||||
AggregationType aggregationType = AggregationType.MONTHLY;
|
||||
|
||||
Calendar from = DateUtils.getStartCalendar(2018, Calendar.APRIL, 1);
|
||||
|
||||
Calendar runbeforeDate = DateUtils.getStartCalendar(2018, Calendar.JUNE, 1);
|
||||
|
||||
while(from.before(runbeforeDate)) {
|
||||
Date start = from.getTime();
|
||||
Date end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1, false);
|
||||
|
||||
logger.debug("Harvesting from {} to {}", DateUtils.format(start), DateUtils.format(end));
|
||||
|
||||
ArrayList<AccountingRecord> accountingRecords = new ArrayList<>();
|
||||
|
||||
VREAccessesHarvester vreAccessesHarvester = null;
|
||||
|
||||
for(String context : contexts) {
|
||||
// Setting the token for the context
|
||||
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
|
||||
|
||||
ScopeBean scopeBean = new ScopeBean(context);
|
||||
|
||||
if(vreAccessesHarvester == null) {
|
||||
|
||||
if(scopeBean.is(Type.INFRASTRUCTURE)) {
|
||||
vreAccessesHarvester = new VREAccessesHarvester(start, end);
|
||||
} else {
|
||||
// This code should be never used because the scopes are sorted by fullname
|
||||
|
||||
ScopeBean parent = scopeBean.enclosingScope();
|
||||
while(!parent.is(Type.INFRASTRUCTURE)) {
|
||||
parent = scopeBean.enclosingScope();
|
||||
}
|
||||
|
||||
// Setting back token for the context
|
||||
ContextTest.setContext(contextAuthorization.getTokenForContext(parent.toString()));
|
||||
|
||||
vreAccessesHarvester = new VREAccessesHarvester(start, end);
|
||||
|
||||
// Setting back token for the context
|
||||
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
try {
|
||||
if(context.startsWith(AccountingDataHarvesterPlugin.SO_BIG_DATA_VO)
|
||||
&& start.before(DateUtils.getStartCalendar(2018, Calendar.APRIL, 1).getTime())) {
|
||||
logger.info("Not Harvesting VREs Accesses for {} from {} to {}", context,
|
||||
DateUtils.format(start), DateUtils.format(end));
|
||||
} else {
|
||||
// Collecting Google Analytics Data for VREs Accesses
|
||||
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
|
||||
accountingRecords.addAll(harvested);
|
||||
|
||||
}
|
||||
} catch(Exception e) {
|
||||
logger.error("Error harvesting Social Interactions for {}", context, e);
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end),
|
||||
accountingRecords);
|
||||
ContextTest.setContextByName(ROOT);
|
||||
|
||||
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
|
||||
|
||||
Thread.sleep(TimeUnit.SECONDS.toMillis(10));
|
||||
|
||||
from.add(aggregationType.getCalendarField(), 1);
|
||||
|
||||
}
|
||||
|
||||
ContextTest.setContextByName(ROOT);
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
|
||||
logger.info("End.");
|
||||
|
||||
}
|
||||
|
||||
// @Test
|
||||
public void testScopeBean() throws Exception {
|
||||
ContextTest.setContextByName(ROOT);
|
||||
SortedSet<String> contexts = getContexts();
|
||||
|
||||
AggregationType aggregationType = AggregationType.MONTHLY;
|
||||
|
||||
Date start = DateUtils.getStartCalendar(2018, Calendar.MARCH, 1).getTime();
|
||||
// start = DateUtils.getPreviousPeriod(measureType).getTime();
|
||||
Date end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1, false);
|
||||
|
||||
logger.info("\n\n\n");
|
||||
|
||||
for(String context : contexts) {
|
||||
ScopeBean scopeBean = new ScopeBean(context);
|
||||
// logger.debug("FullName {} - Name {}", scopeBean.toString(), scopeBean.name());
|
||||
|
||||
try {
|
||||
|
||||
if(scopeBean.is(Type.VRE) && start.equals(DateUtils.getPreviousPeriod(aggregationType, false).getTime())) {
|
||||
logger.info("Harvesting (VRE Users) for {} from {} to {}", context, DateUtils.format(start),
|
||||
DateUtils.format(end));
|
||||
} else {
|
||||
logger.info("--- Not Harvesting (VRE Users) for {} from {} to {}", context, DateUtils.format(start),
|
||||
DateUtils.format(end));
|
||||
}
|
||||
|
||||
if((context.startsWith(AccountingDataHarvesterPlugin.SO_BIG_DATA_VO)
|
||||
|| context.startsWith(AccountingDataHarvesterPlugin.SO_BIG_DATA_EU_VRE)
|
||||
|| context.startsWith(AccountingDataHarvesterPlugin.SO_BIG_DATA_IT_VRE))
|
||||
&& start.before(DateUtils.getStartCalendar(2018, Calendar.APRIL, 1).getTime())) {
|
||||
logger.info("--- Not Harvesting (SoBigData Check) for {} from {} to {}", context,
|
||||
DateUtils.format(start), DateUtils.format(end));
|
||||
} else {
|
||||
logger.info("Harvesting (SoBigData Check) for {} from {} to {}", context, DateUtils.format(start),
|
||||
DateUtils.format(end));
|
||||
}
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("Error harvesting Social Interactions for {}", context, e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// @Test
|
||||
public void testVREAccessesHarvester() throws Exception {
|
||||
try {
|
||||
AccountingDao dao = getAccountingDao();
|
||||
|
||||
List<Date> starts = new ArrayList<>();
|
||||
starts.add(DateUtils.getStartCalendar(2018, Calendar.SEPTEMBER, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2018, Calendar.OCTOBER, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2018, Calendar.NOVEMBER, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2018, Calendar.DECEMBER, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2019, Calendar.JANUARY, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2019, Calendar.FEBRUARY, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2019, Calendar.MARCH, 1).getTime());
|
||||
|
||||
AggregationType measureType = AggregationType.MONTHLY;
|
||||
|
||||
String[] contextFullNames = new String[] {"/d4science.research-infrastructures.eu/FARM/GRSF",
|
||||
"/d4science.research-infrastructures.eu/FARM/GRSF_Admin"};
|
||||
|
||||
List<AccountingRecord> accountingRecords = new ArrayList<>();
|
||||
|
||||
for(Date start : starts) {
|
||||
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
|
||||
|
||||
ContextTest.setContextByName(ROOT);
|
||||
VREAccessesHarvester vreAccessesHarvester = new VREAccessesHarvester(start, end);
|
||||
|
||||
for(String contextFullname : contextFullNames) {
|
||||
|
||||
setContextByNameAndScopeDescriptor(contextFullname);
|
||||
|
||||
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
|
||||
accountingRecords.addAll(harvested);
|
||||
|
||||
logger.debug("{} - {}", contextFullname, accountingRecords);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug("{}", accountingRecords);
|
||||
|
||||
ContextTest.setContextByName(ROOT);
|
||||
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("", e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
// @Test
|
||||
public void testVREAccessesHarvesterAll() {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT);
|
||||
|
||||
AggregationType measureType = AggregationType.MONTHLY;
|
||||
|
||||
// Date start = DateUtils.getStartCalendar(2015, Calendar.FEBRUARY, 1).getTime();
|
||||
// Date end = DateUtils.getStartCalendar(2019, Calendar.FEBRUARY, 1).getTime();
|
||||
|
||||
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
|
||||
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
|
||||
|
||||
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
|
||||
accountingDataHarvesterPlugin.getConfigParameters();
|
||||
|
||||
ContextAuthorization contextAuthorization = new ContextAuthorization();
|
||||
SortedSet<String> contexts = contextAuthorization.getContexts();
|
||||
|
||||
VREAccessesHarvester vreAccessesHarvester = null;
|
||||
|
||||
ArrayList<AccountingRecord> accountingRecords = new ArrayList<>();
|
||||
|
||||
for(String context : contexts) {
|
||||
// Setting the token for the context
|
||||
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
|
||||
|
||||
ScopeBean scopeBean = new ScopeBean(context);
|
||||
|
||||
if(vreAccessesHarvester == null) {
|
||||
|
||||
if(scopeBean.is(Type.INFRASTRUCTURE)) {
|
||||
vreAccessesHarvester = new VREAccessesHarvester(start, end);
|
||||
} else {
|
||||
// This code should be never used because the scopes are sorted by fullname
|
||||
|
||||
ScopeBean parent = scopeBean.enclosingScope();
|
||||
while(!parent.is(Type.INFRASTRUCTURE)) {
|
||||
parent = scopeBean.enclosingScope();
|
||||
}
|
||||
|
||||
// Setting back token for the context
|
||||
ContextTest.setContext(contextAuthorization.getTokenForContext(parent.toString()));
|
||||
|
||||
vreAccessesHarvester = new VREAccessesHarvester(start, end);
|
||||
|
||||
// Setting back token for the context
|
||||
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
try {
|
||||
if(context.startsWith(AccountingDataHarvesterPlugin.SO_BIG_DATA_VO)
|
||||
&& start.before(DateUtils.getStartCalendar(2018, Calendar.APRIL, 1).getTime())) {
|
||||
logger.info("Not Harvesting VREs Accesses for {} from {} to {}", context,
|
||||
DateUtils.format(start), DateUtils.format(end));
|
||||
} else {
|
||||
// Collecting Google Analytics Data for VREs Accesses
|
||||
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
|
||||
accountingRecords.addAll(harvested);
|
||||
}
|
||||
} catch(Exception e) {
|
||||
logger.error("Error harvesting Social Interactions for {}", context, e);
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug("{}", accountingRecords);
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
|
||||
// @Test
|
||||
public void testSocialInteraction() {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT);
|
||||
// AccountingDao dao = AccountingDao.get();
|
||||
|
||||
DataHarvestPluginDeclaration dataHarvestPluginDeclaration = new DataHarvestPluginDeclaration();
|
||||
|
||||
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(
|
||||
dataHarvestPluginDeclaration);
|
||||
Properties properties = accountingDataHarvesterPlugin.getConfigParameters();
|
||||
AccountingDataHarvesterPlugin.getProperties().set(properties);
|
||||
|
||||
ContextAuthorization contextAuthorization = new ContextAuthorization();
|
||||
|
||||
SortedSet<String> contexts = new TreeSet<>();
|
||||
contexts.add("/d4science.research-infrastructures.eu/D4Research");
|
||||
contexts.add("/d4science.research-infrastructures.eu/FARM/WECAFC-FIRMS");
|
||||
contexts.add("/d4science.research-infrastructures.eu/gCubeApps/BlueBridgeProject");
|
||||
contexts.add("/d4science.research-infrastructures.eu/gCubeApps/Parthenos");
|
||||
contexts.add("/d4science.research-infrastructures.eu/gCubeApps/ScalableDataMining");
|
||||
contexts.add("/d4science.research-infrastructures.eu/gCubeApps/gCube");
|
||||
|
||||
AggregationType aggregationType = AggregationType.MONTHLY;
|
||||
|
||||
Calendar from = DateUtils.getStartCalendar(2018, Calendar.JUNE, 1);
|
||||
Date start = from.getTime();
|
||||
Date end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1, false);
|
||||
|
||||
logger.debug("Harvesting Social Interaction from {} to {}", DateUtils.format(start), DateUtils.format(end));
|
||||
|
||||
ArrayList<AccountingRecord> accountingRecords = new ArrayList<>();
|
||||
|
||||
for(String context : contexts) {
|
||||
// Setting the token for the context
|
||||
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
|
||||
try {
|
||||
// Collecting info on social (posts, replies and likes)
|
||||
logger.info("Going to harvest Social Interactions for {}", context);
|
||||
SocialInteractionsHarvester socialHarvester = new SocialInteractionsHarvester(start, end);
|
||||
List<AccountingRecord> harvested = socialHarvester.getAccountingRecords();
|
||||
accountingRecords.addAll(harvested);
|
||||
} catch(Exception e) {
|
||||
logger.error("Error harvesting Social Interactions for {}", context, e);
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end),
|
||||
accountingRecords);
|
||||
ContextTest.setContextByName(ROOT);
|
||||
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
|
||||
logger.info("End.");
|
||||
|
||||
}
|
||||
|
||||
// @Test
|
||||
public void testMethodInvocation() {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT);
|
||||
ContextAuthorization contextAuthorization = new ContextAuthorization();
|
||||
String stockAssessmentToken = contextAuthorization.generateTokenForContext(STOCK_ASSESMENT_VRE, null);
|
||||
|
||||
ContextTest.setContext(stockAssessmentToken);
|
||||
|
||||
AggregationType measureType = AggregationType.MONTHLY;
|
||||
|
||||
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
|
||||
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
|
||||
|
||||
MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(start, end);
|
||||
List<AccountingRecord> accountingRecords = methodInvocationHarvester.getAccountingRecords();
|
||||
|
||||
logger.debug("{}", accountingRecords);
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
|
||||
protected AccountingDao getAccountingDao() throws ObjectNotFound, Exception {
|
||||
ContextTest.setContextByName(ROOT);
|
||||
AccountingDao dao = AccountingDao.get();
|
||||
|
||||
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
|
||||
Map<String,ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
|
||||
for(ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
|
||||
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
|
||||
}
|
||||
AccountingDataHarvesterPlugin.scopeDescriptors.set(scopeDescriptorMap);
|
||||
|
||||
Set<Dimension> dimensionSet = dao.getDimensions();
|
||||
Map<String,Dimension> dimensionMap = new HashMap<>();
|
||||
for(Dimension dimension : dimensionSet) {
|
||||
dimensionMap.put(dimension.getId(), dimension);
|
||||
}
|
||||
|
||||
AccountingDataHarvesterPlugin.dimensions.set(dimensionMap);
|
||||
|
||||
return dao;
|
||||
}
|
||||
|
||||
protected void setContextByNameAndScopeDescriptor(String contextFullName) throws ObjectNotFound, Exception {
|
||||
ContextAuthorization contextAuthorization = new ContextAuthorization();
|
||||
String tagMeToken = contextAuthorization.getTokenForContext(contextFullName);
|
||||
|
||||
ContextTest.setContext(tagMeToken);
|
||||
|
||||
ScopeBean scopeBean = new ScopeBean(contextFullName);
|
||||
ScopeDescriptor actualScopeDescriptor = AccountingDataHarvesterPlugin.scopeDescriptors.get()
|
||||
.get(contextFullName);
|
||||
if(actualScopeDescriptor == null) {
|
||||
actualScopeDescriptor = new ScopeDescriptor(scopeBean.name(), contextFullName);
|
||||
}
|
||||
|
||||
AccountingDataHarvesterPlugin.scopeDescriptor.set(actualScopeDescriptor);
|
||||
}
|
||||
|
||||
// @Test
|
||||
public void testTagMeMethodInvocation() throws Exception {
|
||||
try {
|
||||
AccountingDao dao = getAccountingDao();
|
||||
setContextByNameAndScopeDescriptor(TAGME_VRE);
|
||||
|
||||
List<AccountingRecord> accountingRecords = new ArrayList<>();
|
||||
|
||||
AggregationType measureType = AggregationType.MONTHLY;
|
||||
|
||||
List<Date> starts = new ArrayList<>();
|
||||
starts.add(DateUtils.getStartCalendar(2018, Calendar.NOVEMBER, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2018, Calendar.DECEMBER, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2019, Calendar.JANUARY, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2019, Calendar.FEBRUARY, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2019, Calendar.MARCH, 1).getTime());
|
||||
|
||||
for(Date start : starts) {
|
||||
|
||||
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
|
||||
|
||||
TagMeMethodInvocationHarvester methodInvocationHarvester = new TagMeMethodInvocationHarvester(start,
|
||||
end);
|
||||
accountingRecords.addAll(methodInvocationHarvester.getAccountingRecords());
|
||||
|
||||
logger.debug("{}", accountingRecords);
|
||||
}
|
||||
|
||||
ContextTest.setContextByName(ROOT);
|
||||
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("", e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
// @Test
|
||||
public void testGetVREUsersForSpecificVRE() {
|
||||
try {
|
||||
ContextTest.setContextByName(ROOT);
|
||||
|
||||
DataHarvestPluginDeclaration dataHarvestPluginDeclaration = new DataHarvestPluginDeclaration();
|
||||
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(
|
||||
dataHarvestPluginDeclaration);
|
||||
Properties properties = accountingDataHarvesterPlugin.getConfigParameters();
|
||||
AccountingDataHarvesterPlugin.getProperties().set(properties);
|
||||
|
||||
// AccountingDao dao = AccountingDao.get();
|
||||
|
||||
ContextAuthorization contextAuthorization = new ContextAuthorization();
|
||||
ContextTest.setContext(contextAuthorization
|
||||
.getTokenForContext("/d4science.research-infrastructures.eu/SoBigData/SportsDataScience"));
|
||||
|
||||
AggregationType measureType = AggregationType.MONTHLY;
|
||||
|
||||
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
|
||||
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
|
||||
|
||||
VREUsersHarvester vreUsersHarvester = new VREUsersHarvester(start, end);
|
||||
List<AccountingRecord> harvested = vreUsersHarvester.getAccountingRecords();
|
||||
|
||||
logger.info("Harvested Data from {} to {} : {}", DateUtils.format(start), DateUtils.format(end), harvested);
|
||||
|
||||
ContextTest.setContextByName(ROOT);
|
||||
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// @Test
|
||||
public void testFilteringGenericResource() {
|
||||
try {
|
||||
ContextTest.setContextByName(ROOT);
|
||||
//Utils.setContext(RESOURCE_CATALOGUE);
|
||||
|
||||
AggregationType measureType = AggregationType.MONTHLY;
|
||||
|
||||
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
|
||||
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
|
||||
|
||||
SortedSet<String> contexts = getContexts();
|
||||
|
||||
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
|
||||
accountingDataHarvesterPlugin.getConfigParameters();
|
||||
|
||||
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end,
|
||||
contexts);
|
||||
SortedSet<String> validContexts = resourceCatalogueHarvester.getValidContexts(contexts, SO_BIG_VO + "/");
|
||||
logger.info("Valid Contexts {}", validContexts);
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// @Test
|
||||
public void testResourceCatalogueHarvester() {
|
||||
try {
|
||||
|
||||
//Utils.setContext(RESOURCE_CATALOGUE);
|
||||
ContextTest.setContextByName(ROOT);
|
||||
|
||||
AggregationType measureType = AggregationType.MONTHLY;
|
||||
|
||||
// Date start = DateUtils.getStartCalendar(2015, Calendar.FEBRUARY, 1).getTime();
|
||||
// Date end = DateUtils.getStartCalendar(2019, Calendar.FEBRUARY, 1).getTime();
|
||||
|
||||
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
|
||||
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
|
||||
|
||||
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
|
||||
accountingDataHarvesterPlugin.getConfigParameters();
|
||||
|
||||
SortedSet<String> contexts = getContexts();
|
||||
|
||||
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end,
|
||||
contexts);
|
||||
List<AccountingRecord> data = resourceCatalogueHarvester.getAccountingRecords();
|
||||
|
||||
logger.debug("{}", data);
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
|
||||
// @Test
|
||||
public void testDataMethodDownloadHarvester() {
|
||||
try {
|
||||
|
||||
//Utils.setContext(RESOURCE_CATALOGUE);
|
||||
ContextTest.setContextByName(ROOT);
|
||||
|
||||
AggregationType measureType = AggregationType.MONTHLY;
|
||||
|
||||
// Date start = DateUtils.getStartCalendar(2015, Calendar.FEBRUARY, 1).getTime();
|
||||
// Date end = DateUtils.getStartCalendar(2019, Calendar.FEBRUARY, 1).getTime();
|
||||
|
||||
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
|
||||
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
|
||||
|
||||
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
|
||||
accountingDataHarvesterPlugin.getConfigParameters();
|
||||
|
||||
ContextAuthorization contextAuthorization = new ContextAuthorization();
|
||||
SortedSet<String> contexts = contextAuthorization.getContexts();
|
||||
|
||||
for(String context : contexts) {
|
||||
ScopeBean scopeBean = new ScopeBean(context);
|
||||
logger.debug("FullName {} - Name {}", scopeBean.toString(), scopeBean.name());
|
||||
|
||||
if(context.startsWith(AccountingDataHarvesterPlugin.SO_BIG_DATA_VO)) {
|
||||
if(scopeBean.is(Type.VRE)) {
|
||||
if(context.startsWith(TAGME_VRE)) {
|
||||
continue;
|
||||
}
|
||||
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
|
||||
DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start,
|
||||
end, contexts);
|
||||
List<AccountingRecord> data = dataMethodDownloadHarvester.getAccountingRecords();
|
||||
logger.debug("{}", data);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
|
||||
public static final String E_LEARNING_AREA_VRE = "/d4science.research-infrastructures.eu/SoBigData/E-Learning_Area";
|
||||
|
||||
// @Test
|
||||
public void addMissingVREAccesses() {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT);
|
||||
|
||||
DataHarvestPluginDeclaration dataHarvestPluginDeclaration = new DataHarvestPluginDeclaration();
|
||||
AccountingDataHarvesterPlugin adhp = new AccountingDataHarvesterPlugin(dataHarvestPluginDeclaration);
|
||||
Properties properties = adhp.getConfigParameters();
|
||||
AccountingDataHarvesterPlugin.getProperties().set(properties);
|
||||
|
||||
ContextAuthorization contextAuthorization = new ContextAuthorization();
|
||||
|
||||
// DatabaseManager dbaseManager = new DatabaseManager();
|
||||
AccountingDao dao = AccountingDao.get();
|
||||
|
||||
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
|
||||
Map<String,ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
|
||||
for(ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
|
||||
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
|
||||
}
|
||||
AccountingDataHarvesterPlugin.scopeDescriptors.set(scopeDescriptorMap);
|
||||
|
||||
Set<Dimension> dimensionSet = dao.getDimensions();
|
||||
Map<String,Dimension> dimensionMap = new HashMap<>();
|
||||
for(Dimension dimension : dimensionSet) {
|
||||
dimensionMap.put(dimension.getId(), dimension);
|
||||
}
|
||||
|
||||
AccountingDataHarvesterPlugin.dimensions.set(dimensionMap);
|
||||
|
||||
// ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
|
||||
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
|
||||
|
||||
String context = E_LEARNING_AREA_VRE;
|
||||
|
||||
// Setting the token for the context
|
||||
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
|
||||
|
||||
ScopeBean scopeBean = new ScopeBean(context);
|
||||
ScopeDescriptor scopeDescriptor = new ScopeDescriptor(scopeBean.name(), context);
|
||||
|
||||
Dimension dimension = AccountingDataHarvesterPlugin.getDimension(HarvestedDataKey.ACCESSES.getKey());
|
||||
|
||||
Calendar calendar = DateUtils.getStartCalendar(2018, Calendar.JULY, 1);
|
||||
calendar.set(Calendar.DAY_OF_MONTH, 15);
|
||||
|
||||
Map<Integer,Integer> monthValues = new HashMap<>();
|
||||
monthValues.put(Calendar.JULY, 54);
|
||||
monthValues.put(Calendar.AUGUST, 23);
|
||||
monthValues.put(Calendar.SEPTEMBER, 127);
|
||||
monthValues.put(Calendar.OCTOBER, 192);
|
||||
|
||||
for(Integer month : monthValues.keySet()) {
|
||||
calendar.set(Calendar.MONTH, month);
|
||||
Instant instant = calendar.toInstant();
|
||||
|
||||
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, dimension,
|
||||
(long) monthValues.get(month));
|
||||
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
|
||||
accountingRecords.add(ar);
|
||||
}
|
||||
|
||||
logger.trace("{}", accountingRecords);
|
||||
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,173 @@
|
|||
/**
|
||||
*
|
||||
*/
|
||||
package org.gcube.dataharvest;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.Properties;
|
||||
|
||||
import org.gcube.common.authorization.utils.manager.SecretManager;
|
||||
import org.gcube.common.authorization.utils.manager.SecretManagerProvider;
|
||||
import org.gcube.common.authorization.utils.secret.JWTSecret;
|
||||
import org.gcube.common.authorization.utils.secret.Secret;
|
||||
import org.gcube.common.authorization.utils.secret.SecretUtility;
|
||||
import org.gcube.common.keycloak.KeycloakClientFactory;
|
||||
import org.gcube.common.keycloak.KeycloakClientHelper;
|
||||
import org.gcube.common.keycloak.model.TokenResponse;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* @author Luca Frosini (ISTI - CNR)
|
||||
*/
|
||||
public class ContextTest {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(ContextTest.class);
|
||||
|
||||
protected static final String CONFIG_INI_FILENAME = "config.ini";
|
||||
|
||||
public static final String DEFAULT_TEST_SCOPE;
|
||||
|
||||
public static final String GCUBE;
|
||||
public static final String DEVNEXT;
|
||||
public static final String NEXTNEXT;
|
||||
public static final String DEVSEC;
|
||||
public static final String DEVVRE;
|
||||
|
||||
public static final String ROOT_PROD;
|
||||
|
||||
protected static final Properties properties;
|
||||
|
||||
public static final String TYPE_PROPERTY_KEY = "type";
|
||||
public static final String USERNAME_PROPERTY_KEY = "username";
|
||||
public static final String PASSWORD_PROPERTY_KEY = "password";
|
||||
public static final String CLIENT_ID_PROPERTY_KEY = "clientId";
|
||||
|
||||
static {
|
||||
GCUBE = "/gcube";
|
||||
DEVNEXT = GCUBE + "/devNext";
|
||||
NEXTNEXT = DEVNEXT + "/NextNext";
|
||||
DEVSEC = GCUBE + "/devsec";
|
||||
DEVVRE = DEVSEC + "/devVRE";
|
||||
|
||||
ROOT_PROD = "/d4science.research-infrastructures.eu";
|
||||
|
||||
DEFAULT_TEST_SCOPE = GCUBE;
|
||||
|
||||
|
||||
properties = new Properties();
|
||||
InputStream input = ContextTest.class.getClassLoader().getResourceAsStream(CONFIG_INI_FILENAME);
|
||||
try {
|
||||
// load the properties file
|
||||
properties.load(input);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private enum Type{
|
||||
USER, CLIENT_ID
|
||||
};
|
||||
|
||||
public static void set(Secret secret) throws Exception {
|
||||
SecretManagerProvider.instance.reset();
|
||||
SecretManager secretManager = new SecretManager();
|
||||
secretManager.addSecret(secret);
|
||||
SecretManagerProvider.instance.set(secretManager);
|
||||
SecretManagerProvider.instance.get().set();
|
||||
}
|
||||
|
||||
public static void setContextByName(String fullContextName) throws Exception {
|
||||
logger.debug("Going to set credentials for context {}", fullContextName);
|
||||
Secret secret = getSecretByContextName(fullContextName);
|
||||
set(secret);
|
||||
}
|
||||
|
||||
|
||||
private static TokenResponse getJWTAccessToken(String context) throws Exception {
|
||||
Type type = Type.valueOf(properties.get(TYPE_PROPERTY_KEY).toString());
|
||||
|
||||
TokenResponse tr = null;
|
||||
|
||||
int index = context.indexOf('/', 1);
|
||||
String root = context.substring(0, index == -1 ? context.length() : index);
|
||||
|
||||
switch (type) {
|
||||
case CLIENT_ID:
|
||||
String clientId = properties.getProperty(CLIENT_ID_PROPERTY_KEY);
|
||||
String clientSecret = properties.getProperty(root);
|
||||
|
||||
tr = KeycloakClientFactory.newInstance().queryUMAToken(context, clientId, clientSecret, context, null);
|
||||
break;
|
||||
|
||||
case USER:
|
||||
default:
|
||||
String username = properties.getProperty(USERNAME_PROPERTY_KEY);
|
||||
String password = properties.getProperty(PASSWORD_PROPERTY_KEY);
|
||||
|
||||
switch (root) {
|
||||
case "/gcube":
|
||||
default:
|
||||
clientId = "next.d4science.org";
|
||||
break;
|
||||
|
||||
case "/pred4s":
|
||||
clientId = "pre.d4science.org";
|
||||
break;
|
||||
|
||||
case "/d4science.research-infrastructures.eu":
|
||||
clientId = "services.d4science.org";
|
||||
break;
|
||||
}
|
||||
clientSecret = null;
|
||||
|
||||
tr = KeycloakClientHelper.getTokenForUser(context, username, password);
|
||||
break;
|
||||
|
||||
}
|
||||
|
||||
return tr;
|
||||
|
||||
}
|
||||
|
||||
public static Secret getSecretByContextName(String context) throws Exception {
|
||||
TokenResponse tr = getJWTAccessToken(context);
|
||||
Secret secret = new JWTSecret(tr.getAccessToken());
|
||||
return secret;
|
||||
}
|
||||
|
||||
public static void setContext(String token) throws Exception {
|
||||
Secret secret = getSecret(token);
|
||||
set(secret);
|
||||
}
|
||||
|
||||
private static Secret getSecret(String token) throws Exception {
|
||||
Secret secret = SecretUtility.getSecretByTokenString(token);
|
||||
return secret;
|
||||
}
|
||||
|
||||
public static String getUser() {
|
||||
String user = "UNKNOWN";
|
||||
try {
|
||||
user = SecretManagerProvider.instance.get().getUser().getUsername();
|
||||
} catch(Exception e) {
|
||||
logger.error("Unable to retrieve user. {} will be used", user);
|
||||
}
|
||||
return user;
|
||||
}
|
||||
|
||||
@BeforeClass
|
||||
public static void beforeClass() throws Exception {
|
||||
setContextByName(ROOT_PROD);
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void afterClass() throws Exception {
|
||||
SecretManagerProvider.instance.reset();
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,88 @@
|
|||
package org.gcube.dataharvest.harvester;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Calendar;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.SortedSet;
|
||||
|
||||
import org.gcube.accounting.accounting.summary.access.AccountingDao;
|
||||
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
|
||||
import org.gcube.dataharvest.ContextTest;
|
||||
import org.gcube.dataharvest.plugin.AccountingDataHarvesterPluginTest;
|
||||
import org.gcube.dataharvest.utils.AggregationType;
|
||||
import org.gcube.dataharvest.utils.ContextAuthorization;
|
||||
import org.gcube.dataharvest.utils.DateUtils;
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Test;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Giancarlo Panichi (ISTI CNR)
|
||||
*
|
||||
*/
|
||||
public class AccountingDataHarvesterJupyterTest extends AccountingDataHarvesterPluginTest {
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterJupyterTest.class);
|
||||
|
||||
@Ignore
|
||||
@Test
|
||||
public void testJupyterAccessesHarvester() throws Exception {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
AccountingDao dao = getAccountingDao();
|
||||
|
||||
List<Date> starts = new ArrayList<>();
|
||||
starts.add(DateUtils.getStartCalendar(2021, Calendar.JANUARY, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2021, Calendar.FEBRUARY, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2021, Calendar.MARCH, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2021, Calendar.APRIL, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2021, Calendar.MAY, 1).getTime());
|
||||
|
||||
AggregationType measureType = AggregationType.MONTHLY;
|
||||
|
||||
ContextAuthorization contextAuthorization = new ContextAuthorization();
|
||||
SortedSet<String> contexts = contextAuthorization.getContexts();
|
||||
/*
|
||||
SortedSet<String> contexts = new TreeSet<>();
|
||||
contexts.add("/d4science.research-infrastructures.eu/D4OS/Blue-CloudLab");
|
||||
contexts.add("/d4science.research-infrastructures.eu/D4OS/Zoo-Phytoplankton_EOV");
|
||||
contexts.add("/d4science.research-infrastructures.eu/D4OS/MarineEnvironmentalIndicators");
|
||||
*/
|
||||
|
||||
List<AccountingRecord> accountingRecords = new ArrayList<>();
|
||||
|
||||
for (Date start : starts) {
|
||||
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
JupyterAccessesHarvester jupyterAccessesHarvester = new JupyterAccessesHarvester(start, end);
|
||||
|
||||
for(String context : contexts) {
|
||||
ContextTest.setContextByName(context);
|
||||
|
||||
List<AccountingRecord> harvested = jupyterAccessesHarvester.getAccountingRecords();
|
||||
accountingRecords.addAll(harvested);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// logger.debug("{}", accountingRecords);
|
||||
|
||||
logger.debug("Going to insert {}", accountingRecords);
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
|
||||
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
|
||||
|
||||
} catch (Throwable e) {
|
||||
logger.error(e.getLocalizedMessage(), e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,87 @@
|
|||
package org.gcube.dataharvest.harvester;
|
||||
|
||||
import java.time.LocalDate;
|
||||
import java.time.ZoneId;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.SortedSet;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.gcube.accounting.accounting.summary.access.AccountingDao;
|
||||
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
|
||||
import org.gcube.dataharvest.ContextTest;
|
||||
import org.gcube.dataharvest.plugin.AccountingDataHarvesterPluginTest;
|
||||
import org.gcube.dataharvest.utils.AggregationType;
|
||||
import org.gcube.dataharvest.utils.ContextAuthorization;
|
||||
import org.gcube.dataharvest.utils.DateUtils;
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Test;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Giancarlo Panichi (ISTI CNR)
|
||||
*
|
||||
*/
|
||||
public class AccountingDataHarvesterRStudioTest extends AccountingDataHarvesterPluginTest {
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterRStudioTest.class);
|
||||
|
||||
|
||||
@Ignore
|
||||
@Test
|
||||
public void testJupyterAccessesHarvester() throws Exception {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
AccountingDao dao = getAccountingDao();
|
||||
|
||||
List<Date> starts = new ArrayList<>();
|
||||
|
||||
LocalDate sdate = LocalDate.parse("2016-01-01"), edate = LocalDate.parse("2021-06-01");
|
||||
|
||||
Stream.iterate(sdate, date -> date.plusMonths(1)).limit(ChronoUnit.MONTHS.between(sdate, edate) + 1)
|
||||
.forEach(dateToConvert -> starts.add(java.util.Date
|
||||
.from(dateToConvert.atStartOfDay().atZone(ZoneId.systemDefault()).toInstant())));
|
||||
|
||||
AggregationType measureType = AggregationType.MONTHLY;
|
||||
|
||||
ContextAuthorization contextAuthorization = new ContextAuthorization();
|
||||
SortedSet<String> contexts = contextAuthorization.getContexts();
|
||||
|
||||
List<AccountingRecord> accountingRecords = new ArrayList<>();
|
||||
|
||||
|
||||
for (Date start : starts) {
|
||||
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
RStudioAccessesHarvester rstudioAccessesHarvester = new RStudioAccessesHarvester(start, end);
|
||||
|
||||
for(String context : contexts) {
|
||||
ContextTest.setContextByName(context);
|
||||
|
||||
List<AccountingRecord> harvested = rstudioAccessesHarvester.getAccountingRecords();
|
||||
accountingRecords.addAll(harvested);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// logger.debug("{}", accountingRecords);
|
||||
|
||||
logger.debug("Going to insert {}", accountingRecords);
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
|
||||
|
||||
} catch (Throwable e) {
|
||||
logger.error(e.getLocalizedMessage(), e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -2,7 +2,8 @@ package org.gcube.dataharvest.harvester.sobigdata;
|
|||
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.dataharvest.utils.ContextTest;
|
||||
import org.gcube.dataharvest.ContextTest;
|
||||
import org.junit.Ignore;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
|
@ -10,6 +11,7 @@ public class SoBigDataHarvesterTest extends ContextTest {
|
|||
|
||||
private static Logger logger = LoggerFactory.getLogger(SoBigDataHarvesterTest.class);
|
||||
|
||||
@Ignore
|
||||
// @Test
|
||||
public void testGroupList() throws Exception {
|
||||
// ContextTest.setContextByName("/d4science.research-infrastructures.eu/D4Research/AGINFRAplusDev");
|
||||
|
|
|
@ -0,0 +1,868 @@
|
|||
package org.gcube.dataharvest.plugin;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Calendar;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.SortedSet;
|
||||
import java.util.TreeSet;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.gcube.accounting.accounting.summary.access.AccountingDao;
|
||||
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
|
||||
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
|
||||
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
|
||||
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
|
||||
import org.gcube.common.authorization.utils.secret.Secret;
|
||||
import org.gcube.common.scope.impl.ScopeBean;
|
||||
import org.gcube.common.scope.impl.ScopeBean.Type;
|
||||
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
|
||||
import org.gcube.dataharvest.ContextTest;
|
||||
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
|
||||
import org.gcube.dataharvest.harvester.CatalogueAccessesHarvester;
|
||||
import org.gcube.dataharvest.harvester.CoreServicesAccessesHarvester;
|
||||
import org.gcube.dataharvest.harvester.MethodInvocationHarvester;
|
||||
import org.gcube.dataharvest.harvester.SocialInteractionsHarvester;
|
||||
import org.gcube.dataharvest.harvester.VREAccessesHarvester;
|
||||
import org.gcube.dataharvest.harvester.VREUsersHarvester;
|
||||
import org.gcube.dataharvest.harvester.sobigdata.ResourceCatalogueHarvester;
|
||||
import org.gcube.dataharvest.harvester.sobigdata.TagMeMethodInvocationHarvester;
|
||||
import org.gcube.dataharvest.utils.AggregationType;
|
||||
import org.gcube.dataharvest.utils.ContextAuthorization;
|
||||
import org.gcube.dataharvest.utils.DateUtils;
|
||||
import org.gcube.vremanagement.executor.api.types.LaunchParameter;
|
||||
import org.gcube.vremanagement.executor.api.types.Scheduling;
|
||||
import org.gcube.vremanagement.executor.client.SmartExecutorClient;
|
||||
import org.gcube.vremanagement.executor.client.SmartExecutorClientFactory;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Test;
|
||||
import org.quartz.CronExpression;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class AccountingDataHarvesterPluginTest extends ContextTest {
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterPluginTest.class);
|
||||
|
||||
public static final String SO_BIG_VO = "/d4science.research-infrastructures.eu/SoBigData";
|
||||
|
||||
public static final String TAGME_VRE = "/d4science.research-infrastructures.eu/SoBigData/TagMe";
|
||||
public static final String STOCK_ASSESMENT_VRE = "/d4science.research-infrastructures.eu/gCubeApps/StockAssessment";
|
||||
|
||||
public static final String BLUE_CLOUD_LAB = "/d4science.research-infrastructures.eu/D4OS/Blue-CloudLab";
|
||||
|
||||
protected AccountingDao getAccountingDao() throws ObjectNotFound, Exception {
|
||||
AccountingDao dao = AccountingDao.get();
|
||||
|
||||
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
|
||||
Map<String, ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
|
||||
for (ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
|
||||
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
|
||||
}
|
||||
AccountingDashboardHarvesterPlugin.scopeDescriptors.set(scopeDescriptorMap);
|
||||
|
||||
Set<Dimension> dimensionSet = dao.getDimensions();
|
||||
Map<String, Dimension> dimensionMap = new HashMap<>();
|
||||
for (Dimension dimension : dimensionSet) {
|
||||
dimensionMap.put(dimension.getId(), dimension);
|
||||
}
|
||||
|
||||
|
||||
return dao;
|
||||
}
|
||||
|
||||
@Ignore
|
||||
@Test
|
||||
public void getDimensions() {
|
||||
try {
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
|
||||
AccountingDao dao = AccountingDao.get();
|
||||
|
||||
Set<Dimension> dimensionSet = dao.getDimensions();
|
||||
for (Dimension d : dimensionSet) {
|
||||
logger.debug("{} - {} - {} - {}", d.getId(), d.getGroup(), d.getAggregatedMeasure(), d.getLabel());
|
||||
}
|
||||
|
||||
logger.info("End.");
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Ignore
|
||||
// @Test
|
||||
public void launch() {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
|
||||
AccountingDashboardHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDashboardHarvesterPlugin();
|
||||
|
||||
Map<String, Object> inputs = new HashMap<>();
|
||||
|
||||
AggregationType aggregationType = AggregationType.MONTHLY;
|
||||
|
||||
inputs.put(AccountingDashboardHarvesterPlugin.MEASURE_TYPE_INPUT_PARAMETER, aggregationType.name());
|
||||
inputs.put(AccountingDashboardHarvesterPlugin.GET_VRE_USERS_INPUT_PARAMETER, true);
|
||||
inputs.put(AccountingDashboardHarvesterPlugin.RERUN_INPUT_PARAMETER, true);
|
||||
inputs.put(AccountingDashboardHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, true);
|
||||
inputs.put(AccountingDashboardHarvesterPlugin.PARTIAL_HARVESTING, true);
|
||||
|
||||
Calendar from = DateUtils.getStartCalendar(2022, Calendar.SEPTEMBER, 1);
|
||||
String fromDate = DateUtils.LAUNCH_DATE_FORMAT.format(from.getTime());
|
||||
logger.trace("{} is {}", AccountingDashboardHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
|
||||
inputs.put(AccountingDashboardHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
|
||||
|
||||
accountingDataHarvesterPlugin.launch(inputs);
|
||||
|
||||
logger.info("End.");
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Ignore
|
||||
// @Test
|
||||
public void launchPluginOnSmartExecutor() {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
|
||||
String pluginName = new AccountingDashboardHarvesterPlugin().getName();
|
||||
|
||||
SmartExecutorClient smartExecutor = SmartExecutorClientFactory.getClient(pluginName);
|
||||
Assert.assertNotNull(smartExecutor);
|
||||
|
||||
Map<String, Object> inputs = new HashMap<>();
|
||||
|
||||
AggregationType aggregationType = AggregationType.MONTHLY;
|
||||
|
||||
inputs.put(AccountingDashboardHarvesterPlugin.MEASURE_TYPE_INPUT_PARAMETER, aggregationType.name());
|
||||
inputs.put(AccountingDashboardHarvesterPlugin.GET_VRE_USERS_INPUT_PARAMETER, true);
|
||||
inputs.put(AccountingDashboardHarvesterPlugin.RERUN_INPUT_PARAMETER, false);
|
||||
inputs.put(AccountingDashboardHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, false);
|
||||
inputs.put(AccountingDashboardHarvesterPlugin.PARTIAL_HARVESTING, false);
|
||||
|
||||
/*
|
||||
* Calendar from = DateUtils.getStartCalendar(2016, Calendar.SEPTEMBER, 1);
|
||||
* String fromDate = DateUtils.LAUNCH_DATE_FORMAT.format(from.getTime());
|
||||
* logger.trace("{} is {}",
|
||||
* AccountingDataHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
|
||||
* inputs.put(AccountingDataHarvesterPlugin. START_DATE_INPUT_PARAMETER,
|
||||
* fromDate);
|
||||
*/
|
||||
|
||||
// 3rd of the month for MONTHLY Harvesting at 10:00
|
||||
// CronExpression cronExpression = new CronExpression("0 0 10 3 1/1
|
||||
// ? *");
|
||||
|
||||
// Every day at 10:00 for partial harvesting
|
||||
CronExpression cronExpression = new CronExpression("0 0 10 3 1/1 ? *");
|
||||
|
||||
Scheduling scheduling = new Scheduling(cronExpression);
|
||||
scheduling.setGlobal(false);
|
||||
LaunchParameter launchParameter = new LaunchParameter(pluginName, inputs, scheduling);
|
||||
// LaunchParameter launchParameter = new LaunchParameter(pluginName,
|
||||
// inputs);
|
||||
|
||||
smartExecutor.launch(launchParameter);
|
||||
|
||||
logger.info("End.");
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Ignore
|
||||
// @Test
|
||||
public void launchOldData() {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
|
||||
AccountingDashboardHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDashboardHarvesterPlugin();
|
||||
|
||||
Map<String, Object> inputs = new HashMap<>();
|
||||
|
||||
AggregationType aggregationType = AggregationType.MONTHLY;
|
||||
|
||||
inputs.put(AccountingDashboardHarvesterPlugin.MEASURE_TYPE_INPUT_PARAMETER, aggregationType.name());
|
||||
inputs.put(AccountingDashboardHarvesterPlugin.GET_VRE_USERS_INPUT_PARAMETER, true);
|
||||
inputs.put(AccountingDashboardHarvesterPlugin.RERUN_INPUT_PARAMETER, true);
|
||||
inputs.put(AccountingDashboardHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, false);
|
||||
|
||||
Calendar from = DateUtils.getStartCalendar(2016, Calendar.SEPTEMBER, 1);
|
||||
|
||||
Calendar runbeforeDate = DateUtils.getStartCalendar(2018, Calendar.JUNE, 1);
|
||||
|
||||
while (from.before(runbeforeDate)) {
|
||||
String fromDate = DateUtils.LAUNCH_DATE_FORMAT.format(from.getTime());
|
||||
logger.trace("{} is {}", AccountingDashboardHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
|
||||
inputs.put(AccountingDashboardHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
|
||||
accountingDataHarvesterPlugin.launch(inputs);
|
||||
from.add(aggregationType.getCalendarField(), 1);
|
||||
}
|
||||
|
||||
logger.info("End.");
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
|
||||
// @Ignore
|
||||
@Test
|
||||
public void launchOldDataVREAccessesHarvester() {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
|
||||
AccountingDao dao = getAccountingDao();
|
||||
|
||||
ContextAuthorization contextAuthorization = new ContextAuthorization();
|
||||
SortedSet<String> contexts = contextAuthorization.getContexts();
|
||||
|
||||
AggregationType aggregationType = AggregationType.MONTHLY;
|
||||
|
||||
Calendar from = DateUtils.getStartCalendar(2023, Calendar.MAY, 1);
|
||||
|
||||
Calendar runbeforeDate = DateUtils.getStartCalendar(2023, Calendar.JULY, 1);
|
||||
|
||||
while (from.before(runbeforeDate)) {
|
||||
Date start = from.getTime();
|
||||
Date end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1);
|
||||
|
||||
logger.debug("Harvesting from {} to {}", DateUtils.format(start), DateUtils.format(end));
|
||||
|
||||
ArrayList<AccountingRecord> accountingRecords = new ArrayList<>();
|
||||
|
||||
VREAccessesHarvester vreAccessesHarvester = null;
|
||||
//JupyterAccessesHarvester vreAccessesHarvester = null;
|
||||
//RStudioAccessesHarvester vreAccessesHarvester = null;
|
||||
//CoreServicesAccessesHarvester vreAccessesHarvester = null;
|
||||
|
||||
for (String context : contexts) {
|
||||
ContextTest.setContextByName(context);
|
||||
|
||||
ScopeBean scopeBean = new ScopeBean(context);
|
||||
|
||||
if (vreAccessesHarvester == null) {
|
||||
|
||||
if (scopeBean.is(Type.INFRASTRUCTURE)) {
|
||||
vreAccessesHarvester = new VREAccessesHarvester(start, end);
|
||||
} else {
|
||||
// This code should be never used because the scopes
|
||||
// are sorted by fullname
|
||||
|
||||
ScopeBean parent = scopeBean.enclosingScope();
|
||||
while (!parent.is(Type.INFRASTRUCTURE)) {
|
||||
parent = scopeBean.enclosingScope();
|
||||
}
|
||||
|
||||
// Setting back token for the context
|
||||
ContextTest.setContextByName(parent.toString());
|
||||
|
||||
vreAccessesHarvester = new VREAccessesHarvester(start, end);
|
||||
|
||||
// Setting back token for the context
|
||||
ContextTest.setContextByName(context);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
try {
|
||||
if (context.startsWith(AccountingDashboardHarvesterPlugin.SO_BIG_DATA_VO)
|
||||
&& start.before(DateUtils.getStartCalendar(2018, Calendar.APRIL, 1).getTime())) {
|
||||
logger.info("Not Harvesting VREs Accesses for {} from {} to {}", context,
|
||||
DateUtils.format(start), DateUtils.format(end));
|
||||
} else {
|
||||
// Collecting Google Analytics Data for VREs
|
||||
// Accesses
|
||||
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
|
||||
accountingRecords.addAll(harvested);
|
||||
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.error("Error harvesting Social Interactions for {}", context, e);
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end),
|
||||
accountingRecords);
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
|
||||
// dao.insertRecords(accountingRecords.toArray(new
|
||||
// AccountingRecord[1]));
|
||||
|
||||
Thread.sleep(TimeUnit.SECONDS.toMillis(10));
|
||||
|
||||
from.add(aggregationType.getCalendarField(), 1);
|
||||
|
||||
}
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
|
||||
logger.info("End.");
|
||||
|
||||
}
|
||||
|
||||
@Ignore
|
||||
// @Test
|
||||
public void testVREAccessesHarvester() throws Exception {
|
||||
try {
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
|
||||
AccountingDao dao = getAccountingDao();
|
||||
|
||||
List<Date> starts = new ArrayList<>();
|
||||
starts.add(DateUtils.getStartCalendar(2018, Calendar.SEPTEMBER, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2018, Calendar.OCTOBER, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2018, Calendar.NOVEMBER, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2018, Calendar.DECEMBER, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2019, Calendar.JANUARY, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2019, Calendar.FEBRUARY, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2019, Calendar.MARCH, 1).getTime());
|
||||
|
||||
AggregationType measureType = AggregationType.MONTHLY;
|
||||
|
||||
String[] contextFullNames = new String[] { "/d4science.research-infrastructures.eu/FARM/GRSF",
|
||||
"/d4science.research-infrastructures.eu/FARM/GRSF_Admin" };
|
||||
|
||||
List<AccountingRecord> accountingRecords = new ArrayList<>();
|
||||
|
||||
for (Date start : starts) {
|
||||
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
VREAccessesHarvester vreAccessesHarvester = new VREAccessesHarvester(start, end);
|
||||
|
||||
for (String contextFullname : contextFullNames) {
|
||||
|
||||
ContextTest.setContextByName(contextFullname);
|
||||
|
||||
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
|
||||
accountingRecords.addAll(harvested);
|
||||
|
||||
logger.debug("{} - {}", contextFullname, accountingRecords);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug("{}", accountingRecords);
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
// dao.insertRecords(accountingRecords.toArray(new
|
||||
// AccountingRecord[1]));
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("", e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
@Ignore
|
||||
// @Test
|
||||
public void testVREAccessesHarvesterAll() {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
|
||||
AggregationType measureType = AggregationType.MONTHLY;
|
||||
|
||||
// Date start = DateUtils.getStartCalendar(2015, Calendar.FEBRUARY,
|
||||
// 1).getTime();
|
||||
// Date end = DateUtils.getStartCalendar(2019, Calendar.FEBRUARY,
|
||||
// 1).getTime();
|
||||
|
||||
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
|
||||
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
|
||||
|
||||
AccountingDashboardHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDashboardHarvesterPlugin();
|
||||
accountingDataHarvesterPlugin.getConfigParameters();
|
||||
|
||||
ContextAuthorization contextAuthorization = new ContextAuthorization();
|
||||
SortedSet<String> contexts = contextAuthorization.getContexts();
|
||||
|
||||
VREAccessesHarvester vreAccessesHarvester = null;
|
||||
|
||||
ArrayList<AccountingRecord> accountingRecords = new ArrayList<>();
|
||||
|
||||
for (String context : contexts) {
|
||||
// Setting the token for the context
|
||||
ContextTest.setContextByName(context);
|
||||
|
||||
ScopeBean scopeBean = new ScopeBean(context);
|
||||
|
||||
if (vreAccessesHarvester == null) {
|
||||
|
||||
if (scopeBean.is(Type.INFRASTRUCTURE)) {
|
||||
vreAccessesHarvester = new VREAccessesHarvester(start, end);
|
||||
} else {
|
||||
// This code should be never used because the scopes are
|
||||
// sorted by fullname
|
||||
|
||||
ScopeBean parent = scopeBean.enclosingScope();
|
||||
while (!parent.is(Type.INFRASTRUCTURE)) {
|
||||
parent = scopeBean.enclosingScope();
|
||||
}
|
||||
|
||||
ContextTest.setContextByName(parent.toString());
|
||||
|
||||
vreAccessesHarvester = new VREAccessesHarvester(start, end);
|
||||
|
||||
// Setting back token for the context
|
||||
ContextTest.setContextByName(context);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
try {
|
||||
if (context.startsWith(AccountingDashboardHarvesterPlugin.SO_BIG_DATA_VO)
|
||||
&& start.before(DateUtils.getStartCalendar(2018, Calendar.APRIL, 1).getTime())) {
|
||||
logger.info("Not Harvesting VREs Accesses for {} from {} to {}", context,
|
||||
DateUtils.format(start), DateUtils.format(end));
|
||||
} else {
|
||||
// Collecting Google Analytics Data for VREs Accesses
|
||||
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
|
||||
accountingRecords.addAll(harvested);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.error("Error harvesting Social Interactions for {}", context, e);
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug("{}", accountingRecords);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Ignore
|
||||
// @Test
|
||||
public void testSocialInteraction() {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
|
||||
AccountingDao dao = getAccountingDao();
|
||||
|
||||
SortedSet<String> contexts = new TreeSet<>();
|
||||
contexts.add("/d4science.research-infrastructures.eu/D4Research");
|
||||
contexts.add("/d4science.research-infrastructures.eu/FARM/WECAFC-FIRMS");
|
||||
contexts.add("/d4science.research-infrastructures.eu/gCubeApps/BlueBridgeProject");
|
||||
contexts.add("/d4science.research-infrastructures.eu/gCubeApps/Parthenos");
|
||||
contexts.add("/d4science.research-infrastructures.eu/gCubeApps/ScalableDataMining");
|
||||
contexts.add("/d4science.research-infrastructures.eu/gCubeApps/gCube");
|
||||
|
||||
AggregationType aggregationType = AggregationType.MONTHLY;
|
||||
|
||||
Calendar from = DateUtils.getStartCalendar(2018, Calendar.JUNE, 1);
|
||||
Date start = from.getTime();
|
||||
Date end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1);
|
||||
|
||||
logger.debug("Harvesting Social Interaction from {} to {}", DateUtils.format(start), DateUtils.format(end));
|
||||
|
||||
ArrayList<AccountingRecord> accountingRecords = new ArrayList<>();
|
||||
|
||||
for (String context : contexts) {
|
||||
// Setting the token for the context
|
||||
ContextTest.setContextByName(context);
|
||||
try {
|
||||
// Collecting info on social (posts, replies and likes)
|
||||
logger.info("Going to harvest Social Interactions for {}", context);
|
||||
SocialInteractionsHarvester socialHarvester = new SocialInteractionsHarvester(start, end);
|
||||
List<AccountingRecord> harvested = socialHarvester.getAccountingRecords();
|
||||
accountingRecords.addAll(harvested);
|
||||
} catch (Exception e) {
|
||||
logger.error("Error harvesting Social Interactions for {}", context, e);
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end),
|
||||
accountingRecords);
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
// dao.insertRecords(accountingRecords.toArray(new
|
||||
// AccountingRecord[1]));
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
|
||||
logger.info("End.");
|
||||
|
||||
}
|
||||
|
||||
// @Ignore
|
||||
// // @Test
|
||||
// public void testMethodInvocation() {
|
||||
// try {
|
||||
// ContextTest.setContextByName(STOCK_ASSESMENT_VRE);
|
||||
//
|
||||
// AggregationType measureType = AggregationType.MONTHLY;
|
||||
//
|
||||
// Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
|
||||
// Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
|
||||
//
|
||||
// MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(start, end);
|
||||
// List<AccountingRecord> accountingRecords = methodInvocationHarvester.getAccountingRecords();
|
||||
//
|
||||
// logger.debug("{}", accountingRecords);
|
||||
//
|
||||
// } catch (Exception e) {
|
||||
// logger.error("", e);
|
||||
// }
|
||||
// }
|
||||
|
||||
// @Ignore
|
||||
@Test
|
||||
public void testMethodInvocationOldData() {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
AccountingDao dao = getAccountingDao();
|
||||
|
||||
Date start = DateUtils.getStartCalendar(2023, Calendar.JANUARY, 1).getTime();
|
||||
Date last = DateUtils.getStartCalendar(2024, Calendar.FEBRUARY, 1).getTime();
|
||||
|
||||
AggregationType measureType = AggregationType.MONTHLY;
|
||||
|
||||
ContextAuthorization contextAuthorization = new ContextAuthorization();
|
||||
SortedSet<String> contexts = contextAuthorization.getContexts();
|
||||
|
||||
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
|
||||
Map<String, ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
|
||||
for (ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
|
||||
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
|
||||
}
|
||||
|
||||
AccountingDashboardHarvesterPlugin.scopeDescriptors.set(scopeDescriptorMap);
|
||||
|
||||
|
||||
List<AccountingRecord> accountingRecords = new ArrayList<>();
|
||||
|
||||
while(start.before(last)) {
|
||||
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
|
||||
|
||||
|
||||
for (String context : contexts) {
|
||||
// Setting the token for the context
|
||||
Secret s = contextAuthorization.getCatalogueSecretForContext(context);
|
||||
ContextTest.set(s);
|
||||
|
||||
|
||||
if (context.startsWith(AccountingDashboardHarvesterPlugin.TAGME_CONTEXT)) {
|
||||
try {
|
||||
// Collecting info on method invocation
|
||||
logger.info("Going to harvest Method Invocations for {}", context);
|
||||
TagMeMethodInvocationHarvester tagMeMethodInvocationHarvester = new TagMeMethodInvocationHarvester(
|
||||
start, end);
|
||||
|
||||
List<AccountingRecord> harvested = tagMeMethodInvocationHarvester.getAccountingRecords();
|
||||
logger.debug("{} - {}", context, harvested);
|
||||
accountingRecords.addAll(harvested);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error harvesting Method Invocations for {}", context, e);
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
// Collecting info on method invocation
|
||||
logger.info("Going to harvest Method Invocations for {}", context);
|
||||
MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(start, end);
|
||||
|
||||
List<AccountingRecord> harvested = methodInvocationHarvester.getAccountingRecords();
|
||||
logger.debug("{} - {}", context, harvested);
|
||||
accountingRecords.addAll(harvested);
|
||||
} catch (Exception e) {
|
||||
logger.error("Error harvesting Method Invocations for {}", context, e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
start = end;
|
||||
}
|
||||
|
||||
logger.debug("Going to insert {}", accountingRecords);
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@Ignore
|
||||
// @Test
|
||||
public void testTagMeMethodInvocation() throws Exception {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
AccountingDao dao = getAccountingDao();
|
||||
|
||||
ContextTest.setContextByName(TAGME_VRE);
|
||||
|
||||
List<AccountingRecord> accountingRecords = new ArrayList<>();
|
||||
|
||||
AggregationType measureType = AggregationType.MONTHLY;
|
||||
|
||||
List<Date> starts = new ArrayList<>();
|
||||
starts.add(DateUtils.getStartCalendar(2018, Calendar.NOVEMBER, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2018, Calendar.DECEMBER, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2019, Calendar.JANUARY, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2019, Calendar.FEBRUARY, 1).getTime());
|
||||
starts.add(DateUtils.getStartCalendar(2019, Calendar.MARCH, 1).getTime());
|
||||
|
||||
for (Date start : starts) {
|
||||
|
||||
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
|
||||
|
||||
TagMeMethodInvocationHarvester methodInvocationHarvester = new TagMeMethodInvocationHarvester(start,
|
||||
end);
|
||||
accountingRecords.addAll(methodInvocationHarvester.getAccountingRecords());
|
||||
|
||||
logger.debug("{}", accountingRecords);
|
||||
}
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
// dao.insertRecords(accountingRecords.toArray(new
|
||||
// AccountingRecord[1]));
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("", e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
@Ignore
|
||||
// @Test
|
||||
public void testGetVREUsersForSpecificVRE() {
|
||||
try {
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
AccountingDao dao = getAccountingDao();
|
||||
|
||||
ContextTest.setContextByName("/d4science.research-infrastructures.eu/SoBigData/SportsDataScience");
|
||||
|
||||
AggregationType measureType = AggregationType.MONTHLY;
|
||||
|
||||
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
|
||||
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
|
||||
|
||||
VREUsersHarvester vreUsersHarvester = new VREUsersHarvester(start, end);
|
||||
List<AccountingRecord> harvested = vreUsersHarvester.getAccountingRecords();
|
||||
|
||||
logger.info("Harvested Data from {} to {} : {}", DateUtils.format(start), DateUtils.format(end), harvested);
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
// dao.insertRecords(accountingRecords.toArray(new
|
||||
// AccountingRecord[1]));
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Ignore
|
||||
// @Test
|
||||
public void testFilteringGenericResource() {
|
||||
try {
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
// Utils.setContext(RESOURCE_CATALOGUE);
|
||||
|
||||
AggregationType measureType = AggregationType.MONTHLY;
|
||||
|
||||
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
|
||||
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
|
||||
|
||||
ContextAuthorization contextAuthorization = new ContextAuthorization();
|
||||
SortedSet<String> contexts = contextAuthorization.getContexts();
|
||||
|
||||
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end,
|
||||
contexts);
|
||||
SortedSet<String> validContexts = resourceCatalogueHarvester.getValidContexts(contexts, SO_BIG_VO + "/");
|
||||
logger.info("Valid Contexts {}", validContexts);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Ignore
|
||||
// @Test
|
||||
public void testResourceCatalogueHarvester() {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
|
||||
AggregationType measureType = AggregationType.MONTHLY;
|
||||
|
||||
// Date start = DateUtils.getStartCalendar(2015, Calendar.FEBRUARY,
|
||||
// 1).getTime();
|
||||
// Date end = DateUtils.getStartCalendar(2019, Calendar.FEBRUARY,
|
||||
// 1).getTime();
|
||||
|
||||
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
|
||||
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
|
||||
|
||||
ContextAuthorization contextAuthorization = new ContextAuthorization();
|
||||
SortedSet<String> contexts = contextAuthorization.getContexts();
|
||||
|
||||
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end,
|
||||
contexts);
|
||||
List<AccountingRecord> data = resourceCatalogueHarvester.getAccountingRecords();
|
||||
|
||||
logger.debug("{}", data);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
|
||||
// @Ignore
|
||||
@Test
|
||||
public void testCoreServicesHarvester() {
|
||||
try {
|
||||
|
||||
String context = ROOT_PROD;
|
||||
ContextTest.setContextByName(context);
|
||||
|
||||
AccountingDao dao = getAccountingDao();
|
||||
Calendar from = DateUtils.getStartCalendar(2023, Calendar.MAY, 1);
|
||||
Calendar finalEnd = DateUtils.getStartCalendar(2023, Calendar.JULY, 1);
|
||||
|
||||
AggregationType aggregationType = AggregationType.MONTHLY;
|
||||
Date start = from.getTime();
|
||||
Date end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1);
|
||||
|
||||
ScopeBean scopeBean = new ScopeBean(context);
|
||||
logger.debug("FullName {} - Name {}", scopeBean.toString(), scopeBean.name());
|
||||
|
||||
while (from.before(end)) {
|
||||
CoreServicesAccessesHarvester coreServicesHarvester = new CoreServicesAccessesHarvester(start, end);
|
||||
List<AccountingRecord> accountingRecords = coreServicesHarvester.getAccountingRecords();
|
||||
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
|
||||
|
||||
logger.debug("{} -> {} Data Inserted", DateUtils.format(start), DateUtils.format(end));
|
||||
logger.debug("---------------------------------------------------------------------------------------");
|
||||
|
||||
Thread.sleep(TimeUnit.SECONDS.toMillis(90));
|
||||
start = end;
|
||||
end = DateUtils.getEndDateFromStartDate(AggregationType.MONTHLY, start, 1);
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
|
||||
// @Ignore
|
||||
@Test
|
||||
public void testCatalogueHarvester() {
|
||||
try {
|
||||
|
||||
String context = ROOT_PROD;
|
||||
ContextTest.setContextByName(context);
|
||||
|
||||
AccountingDao dao = getAccountingDao();
|
||||
|
||||
Calendar from = DateUtils.getStartCalendar(2023, Calendar.JUNE, 1);
|
||||
Calendar runbeforeDate = DateUtils.getStartCalendar(2023, Calendar.JULY, 1);
|
||||
AggregationType aggregationType = AggregationType.MONTHLY;
|
||||
|
||||
Date start = from.getTime();
|
||||
Date end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1);
|
||||
|
||||
/*
|
||||
* Date start = DateUtils.getPreviousPeriod(measureType, false).getTime(); Date
|
||||
* end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
|
||||
*/
|
||||
|
||||
ScopeBean scopeBean = new ScopeBean(context);
|
||||
logger.debug("FullName {} - Name {}", scopeBean.toString(), scopeBean.name());
|
||||
|
||||
CatalogueAccessesHarvester catalogueHarvester = new CatalogueAccessesHarvester(start, end);
|
||||
List<AccountingRecord> accountingRecords = catalogueHarvester.getAccountingRecords();
|
||||
for (AccountingRecord accountingRecord : accountingRecords) {
|
||||
logger.debug("{}", accountingRecord);
|
||||
}
|
||||
logger.debug("{}", accountingRecords);
|
||||
|
||||
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
|
||||
public static final String E_LEARNING_AREA_VRE = "/d4science.research-infrastructures.eu/SoBigData/E-Learning_Area";
|
||||
|
||||
@Ignore
|
||||
// @Test
|
||||
public void addMissingVREAccesses() {
|
||||
try {
|
||||
|
||||
ContextTest.setContextByName(ROOT_PROD);
|
||||
|
||||
AccountingDao dao = getAccountingDao();
|
||||
|
||||
|
||||
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
|
||||
|
||||
String context = E_LEARNING_AREA_VRE;
|
||||
|
||||
// Setting the token for the context
|
||||
ContextTest.setContextByName(context);
|
||||
|
||||
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
|
||||
Dimension dimension = AccountingDashboardHarvesterPlugin.getDimension(HarvestedDataKey.ACCESSES.getKey());
|
||||
|
||||
Calendar calendar = DateUtils.getStartCalendar(2018, Calendar.JULY, 1);
|
||||
calendar.set(Calendar.DAY_OF_MONTH, 15);
|
||||
|
||||
Map<Integer, Integer> monthValues = new HashMap<>();
|
||||
monthValues.put(Calendar.JULY, 54);
|
||||
monthValues.put(Calendar.AUGUST, 23);
|
||||
monthValues.put(Calendar.SEPTEMBER, 127);
|
||||
monthValues.put(Calendar.OCTOBER, 192);
|
||||
|
||||
for (Integer month : monthValues.keySet()) {
|
||||
calendar.set(Calendar.MONTH, month);
|
||||
Instant instant = calendar.toInstant();
|
||||
|
||||
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, dimension,
|
||||
(long) monthValues.get(month));
|
||||
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
|
||||
accountingRecords.add(ar);
|
||||
}
|
||||
|
||||
logger.trace("{}", accountingRecords);
|
||||
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("", e);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -4,7 +4,9 @@ import java.io.IOException;
|
|||
import java.io.InputStream;
|
||||
import java.util.Properties;
|
||||
|
||||
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
|
||||
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
|
||||
import org.gcube.dataharvest.ContextTest;
|
||||
import org.junit.Ignore;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
|
@ -21,11 +23,11 @@ public class ContextAuthorizationTest extends ContextTest {
|
|||
|
||||
private void getConfigParameters() throws IOException {
|
||||
properties = new Properties();
|
||||
InputStream input = AccountingDataHarvesterPlugin.class.getClassLoader().getResourceAsStream(PROPERTY_FILENAME);
|
||||
InputStream input = AccountingDashboardHarvesterPlugin.class.getClassLoader().getResourceAsStream(PROPERTY_FILENAME);
|
||||
properties.load(input);
|
||||
AccountingDataHarvesterPlugin.getProperties().set(properties);
|
||||
}
|
||||
|
||||
@Ignore
|
||||
// @Test
|
||||
public void testRetrieveContextsAndTokens() throws Exception {
|
||||
try {
|
||||
|
@ -33,7 +35,7 @@ public class ContextAuthorizationTest extends ContextTest {
|
|||
}catch (Exception e) {
|
||||
logger.warn("Unable to load {} file containing configuration properties. AccountingDataHarvesterPlugin will use defaults", PROPERTY_FILENAME);
|
||||
}
|
||||
ContextAuthorization contextAuthorization = new ContextAuthorization();
|
||||
ContextAuthorization contextAuthorization = new ContextAuthorization(properties);
|
||||
contextAuthorization.retrieveContextsAndTokens();
|
||||
}
|
||||
|
||||
|
|
|
@ -1,85 +0,0 @@
|
|||
/**
|
||||
*
|
||||
*/
|
||||
package org.gcube.dataharvest.utils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.Properties;
|
||||
|
||||
import org.gcube.common.authorization.client.Constants;
|
||||
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
|
||||
import org.gcube.common.authorization.library.AuthorizationEntry;
|
||||
import org.gcube.common.authorization.library.provider.AuthorizationProvider;
|
||||
import org.gcube.common.authorization.library.provider.ClientInfo;
|
||||
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
|
||||
import org.gcube.common.authorization.library.utils.Caller;
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* @author Luca Frosini (ISTI - CNR)
|
||||
*
|
||||
*/
|
||||
public class ContextTest {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(ContextTest.class);
|
||||
|
||||
protected static Properties properties;
|
||||
protected static final String PROPERTIES_FILENAME = "token.properties";
|
||||
|
||||
public static final String DEFAULT_TEST_SCOPE_NAME;
|
||||
|
||||
static {
|
||||
properties = new Properties();
|
||||
InputStream input = ContextTest.class.getClassLoader().getResourceAsStream(PROPERTIES_FILENAME);
|
||||
|
||||
try {
|
||||
// load the properties file
|
||||
properties.load(input);
|
||||
} catch(IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
||||
//DEFAULT_TEST_SCOPE_NAME = "/pred4s/preprod/preVRE";
|
||||
DEFAULT_TEST_SCOPE_NAME = "/gcube/devNext/NextNext";
|
||||
}
|
||||
|
||||
public static String getCurrentScope(String token) throws ObjectNotFound, Exception {
|
||||
AuthorizationEntry authorizationEntry = Constants.authorizationService().get(token);
|
||||
String context = authorizationEntry.getContext();
|
||||
logger.info("Context of token {} is {}", token, context);
|
||||
return context;
|
||||
}
|
||||
|
||||
public static void setContextByName(String fullContextName) throws ObjectNotFound, Exception {
|
||||
String token = ContextTest.properties.getProperty(fullContextName);
|
||||
setContext(token);
|
||||
}
|
||||
|
||||
public static void setContext(String token) throws ObjectNotFound, Exception {
|
||||
SecurityTokenProvider.instance.set(token);
|
||||
AuthorizationEntry authorizationEntry = Constants.authorizationService().get(token);
|
||||
ClientInfo clientInfo = authorizationEntry.getClientInfo();
|
||||
logger.debug("User : {} - Type : {}", clientInfo.getId(), clientInfo.getType().name());
|
||||
String qualifier = authorizationEntry.getQualifier();
|
||||
Caller caller = new Caller(clientInfo, qualifier);
|
||||
AuthorizationProvider.instance.set(caller);
|
||||
ScopeProvider.instance.set(getCurrentScope(token));
|
||||
}
|
||||
|
||||
@BeforeClass
|
||||
public static void beforeClass() throws Exception {
|
||||
setContextByName(DEFAULT_TEST_SCOPE_NAME);
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void afterClass() throws Exception {
|
||||
SecurityTokenProvider.instance.reset();
|
||||
ScopeProvider.instance.reset();
|
||||
}
|
||||
|
||||
}
|
|
@ -1,3 +1,6 @@
|
|||
/*.gcubekey
|
||||
/*.key
|
||||
/*.properties
|
||||
/howto.txt
|
||||
/scopedata 2.xml
|
||||
/config.ini
|
||||
|
|
|
@ -0,0 +1,4 @@
|
|||
groupId=org.gcube.accounting
|
||||
artifactId=accounting-dashboard-harvester-se-plugin
|
||||
version=2.0.0-SNAPSHOT
|
||||
description=Accounting Dashboard Harvester Smart Executor Plugin.
|
|
@ -0,0 +1,24 @@
|
|||
<assembly
|
||||
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
|
||||
<id>uberjar</id>
|
||||
<formats>
|
||||
<format>jar</format>
|
||||
</formats>
|
||||
<baseDirectory>${file.separator}</baseDirectory>
|
||||
<fileSets>
|
||||
<fileSet>
|
||||
<directory>target${file.separator}libs</directory>
|
||||
<outputDirectory>${file.separator}</outputDirectory>
|
||||
<useDefaultExcludes>true</useDefaultExcludes>
|
||||
<fileMode>755</fileMode>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>target${file.separator}classes</directory>
|
||||
<outputDirectory>${file.separator}</outputDirectory>
|
||||
<useDefaultExcludes>true</useDefaultExcludes>
|
||||
<fileMode>755</fileMode>
|
||||
</fileSet>
|
||||
</fileSets>
|
||||
</assembly>
|
Loading…
Reference in New Issue