Compare commits

...

116 Commits

Author SHA1 Message Date
Luca Frosini f3c2b6197c Fixed changelog 2024-04-22 15:11:53 +02:00
Luca Frosini 00bce762f3 Removed -SNAPSHOT for release 2024-04-22 15:10:19 +02:00
Luca Frosini 9ef16294ff Fixed CHANGELOG 2024-02-27 12:23:30 +01:00
Luca Frosini fa7be6370a Upgraded maven-parent 2024-02-27 12:20:52 +01:00
Luca Frosini 34db58abe2 Fixed test 2024-02-26 18:04:42 +01:00
Luca Frosini abb35b6e7b Fixed test 2024-02-26 18:04:24 +01:00
Luca Frosini 77cfbf6a8a Improved Method Harvester 2024-02-26 18:03:07 +01:00
luca.frosini e76b1c3af3 Removed -SNAPSHOT to release the component 2023-09-21 10:01:07 +02:00
luca.frosini 009083f335 Fixed method invocation due to change in signature of the used library 2023-09-14 15:00:57 +02:00
luca.frosini dedf11256a Fixing bug 2023-09-06 12:39:55 +02:00
Massimiliano Assante 8c26deb82f ready to release 2023-07-03 17:22:51 +02:00
Massimiliano Assante deb8937a10 updated changelog 2023-07-03 17:19:23 +02:00
Massimiliano Assante 850d6674e5 fixed the test class 2023-07-03 17:12:03 +02:00
Massimiliano Assante 284431ca8d also core services refactored 2023-07-03 17:06:02 +02:00
Massimiliano Assante 4cbdef880e rewritten Jupyter, RStudio and Catalogue harvesters, missing Core Services one 2023-06-30 12:15:01 +02:00
luca.frosini 89e744d769 fixed example 2023-06-30 08:55:06 +02:00
luca.frosini 43865106df Fixed test launch 2023-06-29 17:05:05 +02:00
luca.frosini e0fd599c80 Cleaning code 2023-06-29 16:08:01 +02:00
Massimiliano Assante d3ad4f43ae push everything 2023-06-28 18:20:13 +02:00
luca.frosini 9b27e35676 Ignored macOS file 2023-06-21 11:24:39 +02:00
Luca Frosini bb3e645932 Fixed bom import 2022-11-09 15:21:46 +01:00
Luca Frosini 934545c8cf Removed -SNAPSHOT to release the component 2022-10-25 16:20:17 +02:00
Luca Frosini 524c3a0411 Added -SNAPSHOT 2022-10-25 16:12:04 +02:00
Luca Frosini f0ce3c250c Added -SNAPSHOT to allow deploying the artifact using Jenkins 2022-10-25 16:01:44 +02:00
Luca Frosini ec9d30d886 Removed -SNAPSHOT to release the component 2022-10-19 12:14:55 +02:00
Luca Frosini dbc270a523 Removed unneeded harvester and key to be harvested 2022-09-19 12:40:05 +02:00
Luca Frosini 1fe73af6bc Fixed code 2022-09-15 18:03:55 +02:00
Luca Frosini 120316d1b2 Fixing code 2022-09-12 16:50:35 +02:00
Luca Frosini 3ff630bbcb Fixed pom 2022-09-08 14:47:02 +02:00
Luca Frosini 459a71bc0d Switched code to social-networking-client 2022-09-05 14:49:45 +02:00
Luca Frosini 6bd87cedc4 Removed unneeded whitespace 2022-09-01 18:03:43 +02:00
Luca Frosini ee3a6208a4 Upgraded boms versions 2022-09-01 16:29:34 +02:00
Luca Frosini 523c0d8e34 Removed no more needed constants 2022-09-01 15:14:49 +02:00
Luca Frosini d574e3c797 Porting plugin to use new IAM credentials 2022-09-01 14:08:36 +02:00
Luca Frosini c7a934bd4b Switching security to the new IAM refs #21904 2022-08-31 16:48:11 +02:00
Luca Frosini cc242dee6a Removed -SNAPSHOT for release 2022-05-19 11:06:28 +02:00
Luca Frosini 9849e7f6ee Added -SNAPSHOT to allow Jenkins to build the snapshot version 2022-05-19 11:05:31 +02:00
Luca Frosini f3a61edbdf Removed -SNAPSHOT for release 2022-05-19 11:02:55 +02:00
Luca Frosini cdd875bc47 fixing dependencies version 2022-05-19 10:58:03 +02:00
Luca Frosini 273171704e Enhanced range of storagehub-client-library to 2.0.0,3.0.0-SNAPSHOT 2022-02-09 15:51:47 +01:00
Luca Frosini ab8ad166d8 Fixing end date 2021-07-28 17:07:36 +02:00
Luca Frosini e3c8c42dbe Excluded postgres driver 2021-07-14 20:07:36 +02:00
Luca Frosini ac0fe1c671 Remove -SNAPSHOT for release 2021-06-16 09:59:58 +02:00
Luca Frosini 126034537e Fixed pom 2021-06-11 11:28:03 +02:00
Luca Frosini 40e12c1b85 Added empty line 2021-06-10 17:35:11 +02:00
Luca Frosini 2d312dbf0b Ported to smart-executor 3.0.0 2021-06-10 17:13:33 +02:00
Luca Frosini 759aaf59a3 fixed test 2021-06-04 08:45:38 +02:00
Luca Frosini c567df6a9e fixed filter 2021-06-04 08:45:28 +02:00
Luca Frosini fd641c937f Removed the subtraction of 1 millisecond 2021-06-04 08:45:16 +02:00
Giancarlo Panichi e82971ee29 Updated CHANGELOG.md to support new format 2021-06-03 19:04:51 +02:00
Giancarlo Panichi 4a9ec0a773 Merge pull request 'feature/21557' (#2) from feature/21557 into master
Reviewed-on: #2
2021-06-03 18:58:16 +02:00
Giancarlo Panichi 26b11e96af ref 21557: RStudio sessions to be published into the accounting
Added R Studio harvester
2021-06-03 18:52:07 +02:00
Giancarlo Panichi 87f5594109 ref 21557: RStudio sessions to be published into the accounting
Added R Studio harvester
2021-06-03 18:03:15 +02:00
Luca Frosini 713dee5082 Removed unneeded files 2021-05-31 18:17:09 +02:00
Giancarlo Panichi 619e99f08b Added FUNDING.md 2021-05-31 18:00:41 +02:00
Giancarlo Panichi 81d792162d Merge pull request 'feature/21031' (#1) from feature/21031 into master
Reviewed-on: #1
2021-05-31 17:52:33 +02:00
Giancarlo Panichi 94a558d3c1 Updated to fix Jupyter test 2021-05-31 17:50:23 +02:00
Luca Frosini 77311be1aa fixed test 2021-05-31 17:39:27 +02:00
Luca Frosini 8c7bf2c22b fixed bug on VRE users 2021-05-21 15:39:53 +02:00
Luca Frosini 695bf798f9 fixed tests 2021-05-21 15:24:13 +02:00
Luca Frosini 1b500a2f3d fixed test 2021-04-12 18:11:36 +02:00
Luca Frosini ba158f3187 Fixed test 2021-04-12 18:11:05 +02:00
Luca Frosini 72b7aeccf2 fixed test log 2021-04-12 10:39:08 +02:00
Luca Frosini fff6101491 fixed test log 2021-04-12 10:38:25 +02:00
Luca Frosini ac305c0a32 Improved test 2021-04-12 10:35:08 +02:00
Luca Frosini 42527a425a added property file required for tests 2021-04-12 10:18:19 +02:00
Luca Frosini f2b37893a1 set dry run as default in test for safety 2021-04-12 10:14:53 +02:00
Giancarlo Panichi 9b5d0874ec ref 21031: Add support to Jupyter
Updated Jupyter Accesses Harvester
2021-03-31 15:21:45 +02:00
Giancarlo Panichi 2bd73c2caa ref 21031: Add support to Jupyter
Updated Jupyter Accesses Harvester
2021-03-26 16:35:15 +01:00
Giancarlo Panichi 24f2409df7 ref 21031: Add support to Jupyter
Updated Jupyter Accesses Harvester
2021-03-26 16:04:07 +01:00
Giancarlo Panichi 38ec08e0a3 ref 21031: Add support to Jupyter
Updated Jupyter Accesses Harvester
2021-03-26 13:24:45 +01:00
Giancarlo Panichi 13481c35a5 ref 21031: Add support to Jupyter
Added Jupyter Accesses Harvester
2021-03-25 17:48:55 +01:00
Giancarlo Panichi 7a335cbefd ref 21031: Add support to Jupyter
Added Jupyter Accesses Harvester
2021-03-25 17:40:46 +01:00
Luca Frosini afe8a52e5b Fixing dependency scope 2021-03-22 12:04:51 +01:00
Luca Frosini fa381d7313 Fixed gcat-client dependency 2020-10-12 15:17:57 +02:00
Luca Frosini 1fd086e63d Switching to new smart-executor-api 2020-09-30 12:12:33 +02:00
Luca Frosini c47d0bbd25 Empty rows added 2020-08-04 15:03:54 +02:00
Luca Frosini 559926167a Fixed CHANGELOG according to the new template 2020-08-03 16:17:11 +02:00
Luca Frosini f94288aa53 Fixed CHANGELOG.md 2020-07-30 15:59:10 +02:00
Luca Frosini 553427047f Fixed CHANGELOG.md 2020-07-24 13:08:55 +02:00
Luca Frosini f7e22e3d31 Removed servicearchive creation which is deprecated 2020-07-24 13:08:16 +02:00
Luca Frosini 7b880efe30 Switching to gcube-jackson 2020-07-10 18:29:52 +02:00
Luca Frosini 9865f2a1ae Merge branch 'master' of gitea@code-repo.d4science.org:gCubeSystem/accounting-dashboard-harvester-se-plugin.git 2020-07-03 09:44:01 +02:00
Luca Frosini fc3a042c8f Removed old changelog.xml file 2020-06-11 15:55:22 +02:00
Luca Frosini 108cb1a767 Renamed CHANGELOG file 2020-06-10 15:12:44 +02:00
Luca Frosini fc7ef68845 Update 'CHANGELOG.md' 2020-06-10 12:23:49 +02:00
Luca Frosini 9d2b2e5010 Fixed integration of CoreServicesAccessesHarvester 2020-06-05 10:05:45 +02:00
Luca Frosini a6cf5c09a4 Removed SNAPSHOT for release 4.23.0 2020-05-22 13:13:57 +02:00
Luca Frosini 4a090b4926 Added release date in changelog.md 2020-05-22 11:41:51 +02:00
Luca Frosini 586e0628d9 Fixed changelog.md format 2020-05-22 11:31:24 +02:00
Luca Frosini 5125aba591 Fixing changelog.md 2020-05-22 09:47:30 +02:00
Luca Frosini 8f1bfc55a1 Fixed changelog.md 2020-05-22 09:30:14 +02:00
Luca Frosini 769df038c5 Merged branch feature/19047
Created new changelog.md file. Keeping the old changelog.xml file in this
release for backward compatibility.

Ready for release of version 1.6.0
2020-05-21 15:32:02 +02:00
Massimiliano Assante b041f78e1f feature completed 2020-05-14 15:33:46 +02:00
Massimiliano Assante baab4ba432 alpha version 2020-05-14 09:40:08 +02:00
Massimiliano Assante f953c16195 alpha version ready for the CoreServices accounting 2020-05-13 18:26:55 +02:00
Luca Frosini 0a6d49b22b Upgraded the version and fixed changelog 2020-03-17 18:04:50 +01:00
Luca Frosini 72dff5082b Fixed conflict 2020-03-17 18:00:59 +01:00
Luca Frosini 4fe22a57c6 Merge remote-tracking branch 'origin/feature/18848' into feature/18290
Conflicts:
	src/test/java/org/gcube/dataharvest/AccountingDataHarvesterPluginTest.java
2020-03-17 17:59:40 +01:00
Massimiliano Assante 813531f5ae Feature #18848, Updated Catalogue ENUM in accounting dashboard harvester 2020-03-17 16:17:46 +01:00
Luca Frosini 9ebbb89eb0 Fixed test 2020-02-14 12:11:15 +01:00
Luca Frosini 0069f4f363 Fixed ScopeDescriptor initialization in
AccountingDashboardHarvesterPlugin
2020-02-13 15:30:59 +01:00
Luca Frosini b659c45890 Removed unused variable 2020-02-12 17:31:46 +01:00
Luca Frosini c8a87abd73 Added harvesting of CatalogueHarvesterPlugin 2020-02-12 17:30:04 +01:00
Massimiliano Assante a9ab818768 Task #18290 Google Analytics Plugin for Catalogue pageviews 2020-01-14 17:07:03 +01:00
Luca Frosini 6f7bb6e223 Fixed the uber-jar solution. Using includeScope in place of excludeScope,
which allows leaving test dependencies in the proper scope
2019-12-19 09:29:17 +01:00
Luca Frosini 7b367aa471 Removed eclipse project files. Also creating the service archive 2019-12-18 18:52:01 +01:00
Luca Frosini 368422d254 maven-assembly-plugin creates an uber-jar with provided dependencies.
The pom has been changed to create an uber-jar with the same structure as the original but with no provided dependencies.
Moreover, it creates a tar.gz archive with all dependencies (without provided) and the jar of the artifact itself.
2019-12-18 18:46:45 +01:00
Luca Frosini cc8d8431ca Fixed ignore file 2019-12-13 10:15:16 +01:00
Luca Frosini 36ed2a9abc Removed and ignored eclipse project files 2019-12-10 09:58:12 +01:00
Luca Frosini aae1bf4806 Removed and ignored eclipse project files 2019-12-10 09:40:27 +01:00
Luca Frosini 73ed0a8e6e Fixed citation in README 2019-12-09 17:24:16 +01:00
Luca Frosini df233dfa36 Removed wrong parameters in javadoc 2019-12-09 15:35:04 +01:00
Luca Frosini caa4e3eb64 Fixed distro files and pom 2019-12-06 16:02:26 +01:00
Luca Frosini 45583f32f3 Fixed warning 2019-12-05 12:32:52 +01:00
Luca Frosini 9f6b745c4d Fixed Plugin Name 2019-12-03 16:46:46 +01:00
54 changed files with 3911 additions and 2204 deletions

.classpath (deleted)

@@ -1,36 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="output" path="target/classes"/>
</classpath>

.gitignore

@@ -1 +1,5 @@
/target/
target
.classpath
.project
.settings
/.DS_Store

.project (deleted)

@@ -1,23 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>accounting-dashboard-harvester-se-plugin</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.m2e.core.maven2Builder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.m2e.core.maven2Nature</nature>
</natures>
</projectDescription>

.settings/org.eclipse.core.resources.prefs (deleted)

@@ -1,6 +0,0 @@
eclipse.preferences.version=1
encoding//src/main/java=UTF-8
encoding//src/main/resources=UTF-8
encoding//src/test/java=UTF-8
encoding//src/test/resources=UTF-8
encoding/<project>=UTF-8

.settings/org.eclipse.jdt.core.prefs (deleted)

@@ -1,5 +0,0 @@
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8
org.eclipse.jdt.core.compiler.compliance=1.8
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.source=1.8

.settings/org.eclipse.m2e.core.prefs (deleted)

@@ -1,4 +0,0 @@
activeProfiles=
eclipse.preferences.version=1
resolveWorkspaceProjects=true
version=1

CHANGELOG.md (new file)

@@ -0,0 +1,70 @@
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
# Changelog for "accounting-dashboard-harvester-se-plugin"
## [v2.4.0]
- Removed filter restriction on JobUsageRecord harvesting to get MethodInvocation
- Fixed bug on getting ScopeDescriptor for new scopes.
## [v2.3.0]
- Ported GA harvesters to Analytics Data API (GA4)
## [v2.2.0]
- Switching security to the new IAM [#21904]
## [v2.1.0]
- storagehub-client-library version range now taken from gcube-bom [#22822]
## [v2.0.0]
- Ported plugin to smart-executor APIs 3.0.0 [#21616]
- Added RStudio Harvester [#21557]
- Added Jupyter Harvester [#21031]
- Switched accounting JSON management to gcube-jackson [#19115]
- Switched smart-executor JSON management to gcube-jackson [#19647]
## [v1.6.0] - 2020-05-22
- [#19047] Added core services accesses
## [v1.5.0] - 2020-03-30
- [#18290] Google Analytics Plugin for Catalogue pageviews
- [#18848] Updated Catalogue Dashboard harvester ENUM
## [v1.4.0] - 2019-12-19
- [#17800] Allowed partial harvesting of the current period
## [v1.3.0] - 2019-11-06
- [#17800] Allowed partial harvesting of the current period
## [v1.2.0] - 2019-09-11
- [#17128] Removed Home Library dependency
- [#17128] Removed ckan-util-library dependency
## [v1.1.0] [r4.13.1] - 2019-02-26
- [#12985] Fixed scope of dependencies
## [v1.0.0] [r4.13.1] - 2018-10-10
- First Release

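The v2.3.0 entry above ("Ported GA harvesters to Analytics Data API (GA4)") corresponds to the pom.xml change further down, which swaps google-api-services-analyticsreporting for google-analytics-data. Below is a minimal sketch of a GA4 page-views query with that client, assuming Application Default Credentials are configured; the property ID and date range are placeholders, not values taken from this repository.

```java
// Hedged sketch: propertyId and dates are hypothetical placeholders.
// Requires the google-analytics-data dependency added in pom.xml and
// Application Default Credentials (GOOGLE_APPLICATION_CREDENTIALS).
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;

public class Ga4PageViewsSketch {
    public static void main(String[] args) throws Exception {
        String propertyId = "123456789"; // hypothetical GA4 property ID
        try (BetaAnalyticsDataClient client = BetaAnalyticsDataClient.create()) {
            RunReportRequest request = RunReportRequest.newBuilder()
                    .setProperty("properties/" + propertyId)
                    .addDimensions(Dimension.newBuilder().setName("pagePath"))
                    .addMetrics(Metric.newBuilder().setName("screenPageViews"))
                    .addDateRanges(DateRange.newBuilder()
                            .setStartDate("2023-01-01")
                            .setEndDate("2023-01-31"))
                    .build();
            RunReportResponse response = client.runReport(request);
            // Print page path -> view count for each returned row.
            for (Row row : response.getRowsList()) {
                System.out.println(row.getDimensionValues(0).getValue()
                        + " -> " + row.getMetricValues(0).getValue());
            }
        }
    }
}
```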
FUNDING.md (new file)

@@ -0,0 +1,26 @@
# Acknowledgments
The projects leading to this software have received funding from a series of European Union programmes including:
- the Sixth Framework Programme for Research and Technological Development
- [DILIGENT](https://cordis.europa.eu/project/id/004260) (grant no. 004260).
- the Seventh Framework Programme for research, technological development and demonstration
- [D4Science](https://cordis.europa.eu/project/id/212488) (grant no. 212488);
- [D4Science-II](https://cordis.europa.eu/project/id/239019) (grant no. 239019);
- [ENVRI](https://cordis.europa.eu/project/id/283465) (grant no. 283465);
- [iMarine](https://cordis.europa.eu/project/id/283644) (grant no. 283644);
- [EUBrazilOpenBio](https://cordis.europa.eu/project/id/288754) (grant no. 288754).
- the H2020 research and innovation programme
- [SoBigData](https://cordis.europa.eu/project/id/654024) (grant no. 654024);
- [PARTHENOS](https://cordis.europa.eu/project/id/654119) (grant no. 654119);
- [EGI-Engage](https://cordis.europa.eu/project/id/654142) (grant no. 654142);
- [ENVRI PLUS](https://cordis.europa.eu/project/id/654182) (grant no. 654182);
- [BlueBRIDGE](https://cordis.europa.eu/project/id/675680) (grant no. 675680);
- [PerformFISH](https://cordis.europa.eu/project/id/727610) (grant no. 727610);
- [AGINFRA PLUS](https://cordis.europa.eu/project/id/731001) (grant no. 731001);
- [DESIRA](https://cordis.europa.eu/project/id/818194) (grant no. 818194);
- [ARIADNEplus](https://cordis.europa.eu/project/id/823914) (grant no. 823914);
- [RISIS 2](https://cordis.europa.eu/project/id/824091) (grant no. 824091);
- [EOSC-Pillar](https://cordis.europa.eu/project/id/857650) (grant no. 857650);
- [Blue Cloud](https://cordis.europa.eu/project/id/862409) (grant no. 862409);
- [SoBigData-PlusPlus](https://cordis.europa.eu/project/id/871042) (grant no. 871042);

LICENSE.md (new file)

@@ -0,0 +1,312 @@
# European Union Public Licence V. 1.1
EUPL © the European Community 2007
This European Union Public Licence (the “EUPL”) applies to the Work or Software
(as defined below) which is provided under the terms of this Licence. Any use of
the Work, other than as authorised under this Licence is prohibited (to the
extent such use is covered by a right of the copyright holder of the Work).
The Original Work is provided under the terms of this Licence when the Licensor
(as defined below) has placed the following notice immediately following the
copyright notice for the Original Work:
Licensed under the EUPL V.1.1
or has expressed by any other mean his willingness to license under the EUPL.
## 1. Definitions
In this Licence, the following terms have the following meaning:
- The Licence: this Licence.
- The Original Work or the Software: the software distributed and/or
communicated by the Licensor under this Licence, available as Source Code and
also as Executable Code as the case may be.
- Derivative Works: the works or software that could be created by the Licensee,
based upon the Original Work or modifications thereof. This Licence does not
define the extent of modification or dependence on the Original Work required
in order to classify a work as a Derivative Work; this extent is determined by
copyright law applicable in the country mentioned in Article 15.
- The Work: the Original Work and/or its Derivative Works.
- The Source Code: the human-readable form of the Work which is the most
convenient for people to study and modify.
- The Executable Code: any code which has generally been compiled and which is
meant to be interpreted by a computer as a program.
- The Licensor: the natural or legal person that distributes and/or communicates
the Work under the Licence.
- Contributor(s): any natural or legal person who modifies the Work under the
Licence, or otherwise contributes to the creation of a Derivative Work.
- The Licensee or “You”: any natural or legal person who makes any usage of the
Software under the terms of the Licence.
- Distribution and/or Communication: any act of selling, giving, lending,
renting, distributing, communicating, transmitting, or otherwise making
available, on-line or off-line, copies of the Work or providing access to its
essential functionalities at the disposal of any other natural or legal
person.
## 2. Scope of the rights granted by the Licence
The Licensor hereby grants You a world-wide, royalty-free, non-exclusive,
sub-licensable licence to do the following, for the duration of copyright vested
in the Original Work:
- use the Work in any circumstance and for all usage,
- reproduce the Work,
- modify the Original Work, and make Derivative Works based upon the Work,
- communicate to the public, including the right to make available or display the Work or copies thereof to the public and perform publicly, as the case may be, the Work,
- distribute the Work or copies thereof,
- lend and rent the Work or copies thereof,
- sub-license rights in the Work or copies thereof.
Those rights can be exercised on any media, supports and formats, whether now
known or later invented, as far as the applicable law permits so.
In the countries where moral rights apply, the Licensor waives his right to
exercise his moral right to the extent allowed by law in order to make effective
the licence of the economic rights here above listed.
The Licensor grants to the Licensee royalty-free, non exclusive usage rights to
any patents held by the Licensor, to the extent necessary to make use of the
rights granted on the Work under this Licence.
## 3. Communication of the Source Code
The Licensor may provide the Work either in its Source Code form, or as
Executable Code. If the Work is provided as Executable Code, the Licensor
provides in addition a machine-readable copy of the Source Code of the Work
along with each copy of the Work that the Licensor distributes or indicates, in
a notice following the copyright notice attached to the Work, a repository where
the Source Code is easily and freely accessible for as long as the Licensor
continues to distribute and/or communicate the Work.
## 4. Limitations on copyright
Nothing in this Licence is intended to deprive the Licensee of the benefits from
any exception or limitation to the exclusive rights of the rights owners in the
Original Work or Software, of the exhaustion of those rights or of other
applicable limitations thereto.
## 5. Obligations of the Licensee
The grant of the rights mentioned above is subject to some restrictions and
obligations imposed on the Licensee. Those obligations are the following:
Attribution right: the Licensee shall keep intact all copyright, patent or
trademarks notices and all notices that refer to the Licence and to the
disclaimer of warranties. The Licensee must include a copy of such notices and a
copy of the Licence with every copy of the Work he/she distributes and/or
communicates. The Licensee must cause any Derivative Work to carry prominent
notices stating that the Work has been modified and the date of modification.
Copyleft clause: If the Licensee distributes and/or communicates copies of the
Original Works or Derivative Works based upon the Original Work, this
Distribution and/or Communication will be done under the terms of this Licence
or of a later version of this Licence unless the Original Work is expressly
distributed only under this version of the Licence. The Licensee (becoming
Licensor) cannot offer or impose any additional terms or conditions on the Work
or Derivative Work that alter or restrict the terms of the Licence.
Compatibility clause: If the Licensee Distributes and/or Communicates Derivative
Works or copies thereof based upon both the Original Work and another work
licensed under a Compatible Licence, this Distribution and/or Communication can
be done under the terms of this Compatible Licence. For the sake of this clause,
“Compatible Licence” refers to the licences listed in the appendix attached to
this Licence. Should the Licensees obligations under the Compatible Licence
conflict with his/her obligations under this Licence, the obligations of the
Compatible Licence shall prevail.
Provision of Source Code: When distributing and/or communicating copies of the
Work, the Licensee will provide a machine-readable copy of the Source Code or
indicate a repository where this Source will be easily and freely available for
as long as the Licensee continues to distribute and/or communicate the Work.
Legal Protection: This Licence does not grant permission to use the trade names,
trademarks, service marks, or names of the Licensor, except as required for
reasonable and customary use in describing the origin of the Work and
reproducing the content of the copyright notice.
## 6. Chain of Authorship
The original Licensor warrants that the copyright in the Original Work granted
hereunder is owned by him/her or licensed to him/her and that he/she has the
power and authority to grant the Licence.
Each Contributor warrants that the copyright in the modifications he/she brings
to the Work are owned by him/her or licensed to him/her and that he/she has the
power and authority to grant the Licence.
Each time You accept the Licence, the original Licensor and subsequent
Contributors grant You a licence to their contributions to the Work, under the
terms of this Licence.
## 7. Disclaimer of Warranty
The Work is a work in progress, which is continuously improved by numerous
contributors. It is not a finished work and may therefore contain defects or
“bugs” inherent to this type of software development.
For the above reason, the Work is provided under the Licence on an “as is” basis
and without warranties of any kind concerning the Work, including without
limitation merchantability, fitness for a particular purpose, absence of defects
or errors, accuracy, non-infringement of intellectual property rights other than
copyright as stated in Article 6 of this Licence.
This disclaimer of warranty is an essential part of the Licence and a condition
for the grant of any rights to the Work.
## 8. Disclaimer of Liability
Except in the cases of wilful misconduct or damages directly caused to natural
persons, the Licensor will in no event be liable for any direct or indirect,
material or moral, damages of any kind, arising out of the Licence or of the use
of the Work, including without limitation, damages for loss of goodwill, work
stoppage, computer failure or malfunction, loss of data or any commercial
damage, even if the Licensor has been advised of the possibility of such
damage. However, the Licensor will be liable under statutory product liability
laws as far such laws apply to the Work.
## 9. Additional agreements
While distributing the Original Work or Derivative Works, You may choose to
conclude an additional agreement to offer, and charge a fee for, acceptance of
support, warranty, indemnity, or other liability obligations and/or services
consistent with this Licence. However, in accepting such obligations, You may
act only on your own behalf and on your sole responsibility, not on behalf of
the original Licensor or any other Contributor, and only if You agree to
indemnify, defend, and hold each Contributor harmless for any liability incurred
by, or claims asserted against such Contributor by the fact You have accepted
any such warranty or additional liability.
## 10. Acceptance of the Licence
The provisions of this Licence can be accepted by clicking on an icon “I agree”
placed under the bottom of a window displaying the text of this Licence or by
affirming consent in any other similar way, in accordance with the rules of
applicable law. Clicking on that icon indicates your clear and irrevocable
acceptance of this Licence and all of its terms and conditions.
Similarly, you irrevocably accept this Licence and all of its terms and
conditions by exercising any rights granted to You by Article 2 of this Licence,
such as the use of the Work, the creation by You of a Derivative Work or the
Distribution and/or Communication by You of the Work or copies thereof.
## 11. Information to the public
In case of any Distribution and/or Communication of the Work by means of
electronic communication by You (for example, by offering to download the Work
from a remote location) the distribution channel or media (for example, a
website) must at least provide to the public the information requested by the
applicable law regarding the Licensor, the Licence and the way it may be
accessible, concluded, stored and reproduced by the Licensee.
## 12. Termination of the Licence
The Licence and the rights granted hereunder will terminate automatically upon
any breach by the Licensee of the terms of the Licence.
Such a termination will not terminate the licences of any person who has
received the Work from the Licensee under the Licence, provided such persons
remain in full compliance with the Licence.
## 13. Miscellaneous
Without prejudice of Article 9 above, the Licence represents the complete
agreement between the Parties as to the Work licensed hereunder.
If any provision of the Licence is invalid or unenforceable under applicable
law, this will not affect the validity or enforceability of the Licence as a
whole. Such provision will be construed and/or reformed so as necessary to make
it valid and enforceable.
The European Commission may publish other linguistic versions and/or new
versions of this Licence, so far this is required and reasonable, without
reducing the scope of the rights granted by the Licence. New versions of the
Licence will be published with a unique version number.
All linguistic versions of this Licence, approved by the European Commission,
have identical value. Parties can take advantage of the linguistic version of
their choice.
## 14. Jurisdiction
Any litigation resulting from the interpretation of this License, arising
between the European Commission, as a Licensor, and any Licensee, will be
subject to the jurisdiction of the Court of Justice of the European Communities,
as laid down in article 238 of the Treaty establishing the European Community.
Any litigation arising between Parties, other than the European Commission, and
resulting from the interpretation of this License, will be subject to the
exclusive jurisdiction of the competent court where the Licensor resides or
conducts its primary business.
## 15. Applicable Law
This Licence shall be governed by the law of the European Union country where
the Licensor resides or has his registered office.
This licence shall be governed by the Belgian law if:
- a litigation arises between the European Commission, as a Licensor, and any Licensee;
- the Licensor, other than the European Commission, has no residence or registered office inside a European Union country.
## Appendix
“Compatible Licences” according to article 5 EUPL are:
- GNU General Public License (GNU GPL) v. 2
- Open Software License (OSL) v. 2.1, v. 3.0
- Common Public License v. 1.0
- Eclipse Public License v. 1.0
- Cecill v. 2.0

README.md (new file)

@@ -0,0 +1,53 @@
# Accounting Dashboard Harvester Smart Executor Plugin
The Accounting Dashboard Harvester Smart Executor Plugin harvests accounting
data from different sources, harmonizes it and stores it in a PostgreSQL
database to enable aggregated infrastructure analysis.
## Built With
* [OpenJDK](https://openjdk.java.net/) - The JDK used
* [Maven](https://maven.apache.org/) - Dependency Management
## Documentation
[Accounting Dashboard Harvester Smart Executor Plugin](https://wiki.gcube-system.org/gcube/Accounting)
## Change log
See [Releases](https://code-repo.d4science.org/gCubeSystem/accounting-lib/releases).
## Authors
* **Luca Frosini** ([ORCID](https://orcid.org/0000-0003-3183-2291)) - [ISTI-CNR Infrascience Group](http://nemis.isti.cnr.it/groups/infrascience)
* **Massimiliano Assante** ([ORCID](https://orcid.org/0000-0002-3761-1492)) - [ISTI-CNR Infrascience Group](http://nemis.isti.cnr.it/groups/infrascience)
* **Francesco Mangiacrapa** ([ORCID](https://orcid.org/0000-0002-6528-664X)) - [ISTI-CNR Infrascience Group](http://nemis.isti.cnr.it/groups/infrascience)
## How to Cite this Software
Tell people how to cite this software.
* Cite an associated paper?
* Use a specific BibTeX entry for the software?
@Manual{,
title = {Accounting Dashboard Harvester Smart Executor Plugin},
author = {{Frosini, Luca}, {Assante, Massimiliano}, {Mangiacrapa, Francesco}},
organization = {ISTI - CNR},
address = {Pisa, Italy},
year = 2019,
url = {http://www.gcube-system.org/}
}
## License
This project is licensed under the EUPL V.1.1 License - see the [LICENSE.md](LICENSE.md) file for details.
## About the gCube Framework
This software is part of the [gCubeFramework](https://www.gcube-system.org/ "gCubeFramework"): an
open-source software toolkit used for building and operating Hybrid Data
Infrastructures enabling the dynamic deployment of Virtual Research Environments
by favouring the realisation of reuse oriented policies.
The projects leading to this software have received funding from a series of European Union programmes see [FUNDING.md](FUNDING.md)

distro/LICENSE (deleted)

@@ -1,4 +0,0 @@
gCube System - License
------------------------------------------------------------
${gcube.license}

distro/README (deleted)

@@ -1,68 +0,0 @@
The gCube System - ${name}
--------------------------------------------------
${description}
${gcube.description}
${gcube.funding}
Version
--------------------------------------------------
${version} (${buildDate})
Please see the file named "changelog.xml" in this directory for the release notes.
Authors
--------------------------------------------------
* Luca Frosini (luca.frosini-AT-isti.cnr.it), Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" - CNR, Pisa (Italy).
* Eric Perrone (eric.perrone-AT-isti.cnr.it), Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" - CNR, Pisa (Italy).
* Francesco Mangiacrapa (francesco.mangiacrapa-AT-isti.cnr.it), Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" - CNR, Pisa (Italy).
* Massimiliano Assante (massimiliano.assante-AT-isti.cnr.it), Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" - CNR, Pisa (Italy).
Maintainers
-----------
* Luca Frosini (luca.frosini-AT-isti.cnr.it), Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" - CNR, Pisa (Italy).
Download information
--------------------------------------------------
Source code is available from SVN:
${scm.url}
Binaries can be downloaded from the gCube website:
${gcube.website}
Installation
--------------------------------------------------
Installation documentation is available on-line in the gCube Wiki:
${gcube.wikiRoot}/Accounting_Aggregator
Documentation
--------------------------------------------------
Documentation is available on-line in the gCube Wiki:
${gcube.wikiRoot}/Accounting_Aggregator
Support
--------------------------------------------------
Bugs and support requests can be reported in the gCube issue tracking tool:
${gcube.issueTracking}
Licensing
--------------------------------------------------
This software is licensed under the terms you may find in the file named "LICENSE" in this directory.

distro/changelog.xml (deleted)

@@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE xml>
<ReleaseNotes>
<Changeset component="org.gcube.accounting.accounting-dashboard-harvester-se-plugin.1.3.0" date="${buildDate}">
<Change>Allowed partial harvesting of the current period #17800</Change>
</Changeset>
<Changeset component="org.gcube.accounting.accounting-dashboard-harvester-se-plugin.1.2.0" date="2019-10-04">
<Change>Removed Home Library dependency #17128</Change>
<Change>Removed ckan-util-library dependency #17128</Change>
</Changeset>
<Changeset component="org.gcube.accounting.accounting-dashboard-harvester-se-plugin.1.1.0" date="2019-02-26">
<Change>Fixed scope of dependencies #12985</Change>
</Changeset>
<Changeset component="org.gcube.accounting.accounting-dashboard-harvester-se-plugin.1.0.0" date="2018-10-10">
<Change>First Release</Change>
</Changeset>
</ReleaseNotes>

distro/profile.xml (deleted)

@@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE xml>
<Resource>
<ID />
<Type>Service</Type>
<Profile>
<Description>${description}</Description>
<Class>${serviceClass}</Class>
<Name>${artifactId}</Name>
<Version>1.0.0</Version>
<Packages>
<Software>
<Description>${description}</Description>
<Name>${artifactId}</Name>
<Version>${version}</Version>
<MavenCoordinates>
<groupId>${groupId}</groupId>
<artifactId>${artifactId}</artifactId>
<version>${version}</version>
</MavenCoordinates>
<Type>Plugin</Type>
<Files>
<File>${build.finalName}.${project.packaging}</File>
</Files>
</Software>
</Packages>
</Profile>
</Resource>

(new file)

@@ -0,0 +1,4 @@
groupId=${groupId}
artifactId=${artifactId}
version=${version}
description=${description}

pom.xml

@@ -5,16 +5,20 @@
<parent>
<artifactId>maven-parent</artifactId>
<groupId>org.gcube.tools</groupId>
<version>1.1.0</version>
<version>1.2.0</version>
<relativePath />
</parent>
<modelVersion>4.0.0</modelVersion>
<groupId>org.gcube.accounting</groupId>
<artifactId>accounting-dashboard-harvester-se-plugin</artifactId>
<version>1.3.0</version>
<name>Accounting Dashboard Harvester SmartExecutor Plugin</name>
<description>Accounting Dashboard Harvester SmartExecutor Plugin</description>
<version>2.4.0</version>
<name>Accounting Dashboard Harvester Smart Executor Plugin</name>
<description>
Accounting Dashboard Harvester Smart Executor Plugin harvests accounting
data from different sources, harmonizes it and stores it in a PostgreSQL
database to enable aggregated infrastructure analysis.
</description>
<scm>
<connection>scm:git:https://code-repo.d4science.org/gCubeSystem/${project.artifactId}.git</connection>
@@ -24,23 +28,15 @@
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<distroDirectory>distro</distroDirectory>
<serviceClass>Accounting</serviceClass>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.gcube.distribution</groupId>
<artifactId>gcube-bom</artifactId>
<version>LATEST</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.gcube.distribution</groupId>
<artifactId>gcube-smartgears-bom</artifactId>
<version>LATEST</version>
<groupId>org.gcube.vremanagement</groupId>
<artifactId>smart-executor-bom</artifactId>
<version>3.1.0</version>
<type>pom</type>
<scope>import</scope>
</dependency>
@@ -48,38 +44,9 @@
</dependencyManagement>
<dependencies>
<dependency>
<groupId>org.gcube.vremanagement</groupId>
<artifactId>smart-executor-api</artifactId>
<version>[1.5.0, 2.0.0-SNAPSHOT)</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.vremanagement</groupId>
<artifactId>smart-executor-client</artifactId>
<version>[1.3.0,2.0.0-SNAPSHOT)</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>2.6</version>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-encryption</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.portlets.admin</groupId>
@@ -87,113 +54,103 @@
<version>[2.7.2,3.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>com.google.apis</groupId>
<artifactId>google-api-services-analyticsreporting</artifactId>
<version>v4-rev124-1.23.0</version>
<groupId>com.google.analytics</groupId>
<artifactId>google-analytics-data</artifactId>
<version>0.16.0</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>23.6-jre</version>
</dependency>
<dependency>
<groupId>com.google.auth</groupId>
<artifactId>google-auth-library-oauth2-http</artifactId>
<version>1.12.1</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>2.8.4</version>
</dependency>
<dependency>
<groupId>com.google.http-client</groupId>
<artifactId>google-http-client-gson</artifactId>
<version>1.21.0</version>
</dependency>
<!-- <dependency> -->
<!-- <groupId>org.gcube.common</groupId> -->
<!-- <artifactId>storagehub-client-library</artifactId> -->
<!-- <exclusions> -->
<!-- <exclusion> -->
<!-- <groupId>com.fasterxml.jackson.core</groupId> -->
<!-- <artifactId>jackson-core</artifactId> -->
<!-- </exclusion> -->
<!-- </exclusions> -->
<!-- </dependency> -->
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>storagehub-client-library</artifactId>
<version>[1.0.0, 2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.data-publishing</groupId>
<groupId>org.gcube.data-catalogue</groupId>
<artifactId>gcat-client</artifactId>
<version>[1.0.0, 2.0.0-SNAPSHOT)</version>
<version>[2.0.0, 3.0.0-SNAPSHOT)</version>
</dependency>
<!-- Dependencies forced to provided -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.resources</groupId>
<artifactId>common-gcore-resources</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>authorization-client</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>common-authorization</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-gcore-stubs</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-scope-maps</artifactId>
<scope>provided</scope>
</dependency>
<!-- END Dependencies forced to provided -->
<dependency>
<groupId>org.gcube.resources.discovery</groupId>
<artifactId>ic-client</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.resources</groupId>
<artifactId>registry-publisher</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.accounting</groupId>
<artifactId>accounting-analytics</artifactId>
<version>[2.0.0,3.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.accounting</groupId>
<artifactId>accounting-analytics-persistence-couchbase</artifactId>
<version>[1.0.0,2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.accounting</groupId>
<artifactId>accounting-lib</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-encryption</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.vremanagement</groupId>
<artifactId>smart-executor-api</artifactId>
</dependency>
<dependency>
<groupId>org.gcube.accounting</groupId>
<artifactId>accounting-analytics</artifactId>
<version>[3.0.0,4.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.accounting</groupId>
<artifactId>accounting-analytics-persistence-postgresql</artifactId>
<version>[1.0.0,2.0.0-SNAPSHOT)</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.gcube.accounting</groupId>
<artifactId>accounting-summary-access</artifactId>
<version>[1.0.0,2.0.0-SNAPSHOT)</version>
<exclusions>
<exclusion>
<groupId>org.ancoron.postgresql</groupId>
<artifactId>org.postgresql</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.gcube.social-networking</groupId>
<artifactId>social-service-client</artifactId>
<version>[1.0.0, 2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20171018</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>authorization-utils</artifactId>
<version>[2.2.0, 3.0.0-SNAPSHOT)</version>
</dependency>
<!-- Test Dependencies -->
<!-- Test Dependencies. Setting scope to provided to allow proper creation
of uber-jar -->
<dependency>
<groupId>org.gcube.vremanagement</groupId>
<artifactId>smart-executor-client</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
@@ -203,28 +160,73 @@
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.0.13</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<version>3.1.1</version>
<executions>
<execution>
<id>copy-dependencies</id>
<phase>compile</phase>
<goals>
<goal>copy-dependencies</goal>
</goals>
<configuration>
<includeScope>runtime</includeScope>
<outputDirectory>${basedir}${file.separator}target${file.separator}libs</outputDirectory>
<overWriteReleases>false</overWriteReleases>
<overWriteSnapshots>false</overWriteSnapshots>
<overWriteIfNewer>true</overWriteIfNewer>
<excludeTypes>war</excludeTypes>
<stripVersion>false</stripVersion>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<executions>
<execution>
<id>make-uberjar</id>
<phase>install</phase>
<id>uberjar</id>
<configuration>
<appendAssemblyId>true</appendAssemblyId>
<descriptors>
<descriptor>uberjar.xml</descriptor>
</descriptors>
</configuration>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<configuration>
<additionalparam>-Xdoclint:none</additionalparam>
<additionalJOption>-Xdoclint:none</additionalJOption>
</configuration>
<version>3.1.0</version>
<executions>
<execution>
<id>make-servicearchive</id>
<id>generate-doc</id>
<phase>install</phase>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
</project>

src/main/java/org/gcube/dataharvest/AccountingDashboardHarvesterPlugin.java (renamed from AccountingDataHarvesterPlugin.java)

@@ -16,14 +16,18 @@ import org.gcube.accounting.accounting.summary.access.AccountingDao;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.com.fasterxml.jackson.annotation.JsonIgnore;
import org.gcube.common.authorization.utils.secret.Secret;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.common.scope.impl.ScopeBean.Type;
import org.gcube.dataharvest.harvester.CatalogueAccessesHarvester;
import org.gcube.dataharvest.harvester.CoreServicesAccessesHarvester;
import org.gcube.dataharvest.harvester.JupyterAccessesHarvester;
import org.gcube.dataharvest.harvester.MethodInvocationHarvester;
import org.gcube.dataharvest.harvester.RStudioAccessesHarvester;
import org.gcube.dataharvest.harvester.SocialInteractionsHarvester;
import org.gcube.dataharvest.harvester.VREAccessesHarvester;
import org.gcube.dataharvest.harvester.VREUsersHarvester;
import org.gcube.dataharvest.harvester.sobigdata.DataMethodDownloadHarvester;
import org.gcube.dataharvest.harvester.sobigdata.ResourceCatalogueHarvester;
import org.gcube.dataharvest.harvester.sobigdata.TagMeMethodInvocationHarvester;
import org.gcube.dataharvest.utils.AggregationType;
@@ -38,364 +42,418 @@ import org.slf4j.LoggerFactory;
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDeclaration> {
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterPlugin.class);
public class AccountingDashboardHarvesterPlugin extends Plugin {
private static Logger logger = LoggerFactory.getLogger(AccountingDashboardHarvesterPlugin.class);
private static final String PROPERTY_FILENAME = "config.properties";
public static final String START_DATE_INPUT_PARAMETER = "startDate";
public static final String MEASURE_TYPE_INPUT_PARAMETER = "measureType";
public static final String RERUN_INPUT_PARAMETER = "reRun";
public static final String GET_VRE_USERS_INPUT_PARAMETER = "getVREUsers";
public static final String DRY_RUN_INPUT_PARAMETER = "dryRun";
/**
* Allows partial harvesting of data of the current period.
* This means that in MONTHLY aggregation type the current month is harvested instead of the previous month which
* is done when the month is completed.
* This allow the portlet to display monthly data in the current moth even the data is partial (till the current day).
* Allows partial harvesting of data of the current period. This means that
* in MONTHLY aggregation type the current month is harvested instead of the
* previous month which is done when the month is completed. This allow the
* portlet to display monthly data in the current moth even the data is
* partial (till the current day).
*/
public static final String PARTIAL_HARVESTING = "partialHarvesting";
public static final String SO_BIG_DATA_VO = "/d4science.research-infrastructures.eu/SoBigData";
public static final String SO_BIG_DATA_EU_VRE = "/d4science.research-infrastructures.eu/gCubeApps/SoBigData.eu";
public static final String SO_BIG_DATA_IT_VRE = "/d4science.research-infrastructures.eu/gCubeApps/SoBigData.it";
public static final String SO_BIG_DATA_CATALOGUE_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue";
public static final String TAGME_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData/TagMe";
public static final String TO_BE_SET = "TO BE SET";
protected Date start;
protected Date end;
public AccountingDataHarvesterPlugin(DataHarvestPluginDeclaration pluginDeclaration) {
super(pluginDeclaration);
}
private static final InheritableThreadLocal<Properties> properties = new InheritableThreadLocal<Properties>() {
@Override
protected Properties initialValue() {
return new Properties();
}
};
public static InheritableThreadLocal<Properties> getProperties() {
return properties;
public AccountingDashboardHarvesterPlugin() {
super();
}
public static Dimension getDimension(String key) {
Dimension dimension = dimensions.get().get(key);
if(dimension == null) {
if (dimension == null) {
dimension = new Dimension(key, key, null, key);
}
return dimension;
}
protected static final InheritableThreadLocal<Map<String, Dimension>> dimensions = new InheritableThreadLocal<Map<String, Dimension>>() {
@Override
protected Map<String, Dimension> initialValue() {
return new HashMap<>();
}
};
public static ScopeDescriptor getScopeDescriptor(String context) {
return scopeDescriptors.get().get(context);
}
protected static final InheritableThreadLocal<Map<String, ScopeDescriptor>> scopeDescriptors = new InheritableThreadLocal<Map<String, ScopeDescriptor>>() {
public static final InheritableThreadLocal<Map<String, ScopeDescriptor>> scopeDescriptors = new InheritableThreadLocal<Map<String, ScopeDescriptor>>() {
@Override
protected Map<String, ScopeDescriptor> initialValue() {
return new HashMap<>();
}
};
public static ScopeDescriptor getScopeDescriptor() {
return scopeDescriptor.get();
}
public static final InheritableThreadLocal<ScopeDescriptor> scopeDescriptor = new InheritableThreadLocal<ScopeDescriptor>() {
@Override
protected ScopeDescriptor initialValue() {
return new ScopeDescriptor("","");
}
};
public Properties getConfigParameters() throws IOException {
@JsonIgnore
public static Properties getConfigParameters() throws IOException {
Properties properties = new Properties();
try {
InputStream input = AccountingDataHarvesterPlugin.class.getClassLoader()
InputStream input = AccountingDashboardHarvesterPlugin.class.getClassLoader()
.getResourceAsStream(PROPERTY_FILENAME);
properties.load(input);
return properties;
} catch(Exception e) {
} catch (Exception e) {
logger.warn(
"Unable to load {} file containing configuration properties. AccountingDataHarvesterPlugin will use defaults",
PROPERTY_FILENAME);
}
return properties;
}
/** {@inheritDoc} */
@Override
public void launch(Map<String,Object> inputs) throws Exception {
public void launch(Map<String, Object> inputs) throws Exception {
logger.debug("{} is starting", this.getClass().getSimpleName());
if(inputs == null || inputs.isEmpty()) {
if (inputs == null || inputs.isEmpty()) {
throw new IllegalArgumentException("The can only be launched providing valid input parameters");
}
if(!inputs.containsKey(MEASURE_TYPE_INPUT_PARAMETER)) {
if (!inputs.containsKey(MEASURE_TYPE_INPUT_PARAMETER)) {
throw new IllegalArgumentException("Please set required parameter '" + MEASURE_TYPE_INPUT_PARAMETER + "'");
}
AggregationType aggregationType = AggregationType.valueOf((String) inputs.get(MEASURE_TYPE_INPUT_PARAMETER));
boolean reRun = true;
if(inputs.containsKey(RERUN_INPUT_PARAMETER)) {
if (inputs.containsKey(RERUN_INPUT_PARAMETER)) {
try {
reRun = (boolean) inputs.get(RERUN_INPUT_PARAMETER);
} catch(Exception e) {
} catch (Exception e) {
throw new IllegalArgumentException("'" + RERUN_INPUT_PARAMETER + "' must be a boolean");
}
}
boolean getVREUsers = true;
if(inputs.containsKey(GET_VRE_USERS_INPUT_PARAMETER)) {
if (inputs.containsKey(GET_VRE_USERS_INPUT_PARAMETER)) {
try {
reRun = (boolean) inputs.get(GET_VRE_USERS_INPUT_PARAMETER);
} catch(Exception e) {
getVREUsers = (boolean) inputs.get(GET_VRE_USERS_INPUT_PARAMETER);
} catch (Exception e) {
throw new IllegalArgumentException("'" + GET_VRE_USERS_INPUT_PARAMETER + "' must be a boolean");
}
}
boolean dryRun = true;
if(inputs.containsKey(DRY_RUN_INPUT_PARAMETER)) {
if (inputs.containsKey(DRY_RUN_INPUT_PARAMETER)) {
try {
dryRun = (boolean) inputs.get(DRY_RUN_INPUT_PARAMETER);
} catch(Exception e) {
} catch (Exception e) {
throw new IllegalArgumentException("'" + DRY_RUN_INPUT_PARAMETER + "' must be a boolean");
}
}
boolean partialHarvesting = false;
if(inputs.containsKey(PARTIAL_HARVESTING)) {
if (inputs.containsKey(PARTIAL_HARVESTING)) {
partialHarvesting = (boolean) inputs.get(PARTIAL_HARVESTING);
}
if(inputs.containsKey(START_DATE_INPUT_PARAMETER)) {
if (inputs.containsKey(START_DATE_INPUT_PARAMETER)) {
String startDateString = (String) inputs.get(START_DATE_INPUT_PARAMETER);
start = DateUtils.UTC_DATE_FORMAT.parse(startDateString + " " + DateUtils.UTC);
} else {
start = DateUtils.getPreviousPeriod(aggregationType, partialHarvesting).getTime();
}
end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1, partialHarvesting);
logger.debug("Harvesting from {} to {} (ReRun:{} - GetVREUsers:{} - DryRun:{})",
DateUtils.format(start), DateUtils.format(end), reRun, getVREUsers, dryRun);
Properties properties = getConfigParameters();
getProperties().set(properties);
end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1);
logger.debug("Harvesting from {} to {} (ReRun:{} - GetVREUsers:{} - DryRun:{})", DateUtils.format(start),
DateUtils.format(end), reRun, getVREUsers, dryRun);
ContextAuthorization contextAuthorization = new ContextAuthorization();
// DatabaseManager dbaseManager = new DatabaseManager();
SortedSet<String> contexts = contextAuthorization.getContexts();
String root = contexts.first();
Utils.setContext(contextAuthorization.getSecretForContext(root));
AccountingDao dao = AccountingDao.get();
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
Map<String,ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
for(ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
Map<String, ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
for (ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
}
scopeDescriptors.set(scopeDescriptorMap);
Set<Dimension> dimensionSet = dao.getDimensions();
Map<String,Dimension> dimensionMap = new HashMap<>();
for(Dimension dimension : dimensionSet) {
Map<String, Dimension> dimensionMap = new HashMap<>();
for (Dimension dimension : dimensionSet) {
dimensionMap.put(dimension.getId(), dimension);
}
dimensions.set(dimensionMap);
SortedSet<String> contexts = contextAuthorization.getContexts();
// ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
String initialToken = SecurityTokenProvider.instance.get();
VREAccessesHarvester vreAccessesHarvester = null;
JupyterAccessesHarvester jupyterAccessesHarvester = null;
RStudioAccessesHarvester rstudioAccessesHarvester = null;
Secret rootSecret = null;
for(String context : contexts) {
for (String context : contexts) {
// Setting the token for the context
Utils.setContext(contextAuthorization.getTokenForContext(context));
Secret secret = contextAuthorization.getSecretForContext(context);
Utils.setContext(secret);
ScopeBean scopeBean = new ScopeBean(context);
ScopeDescriptor actualScopeDescriptor = scopeDescriptorMap.get(context);
if(actualScopeDescriptor==null) {
if (actualScopeDescriptor == null) {
actualScopeDescriptor = new ScopeDescriptor(scopeBean.name(), context);
scopeDescriptorMap.put(actualScopeDescriptor.getId(), actualScopeDescriptor);
}
scopeDescriptor.set(actualScopeDescriptor);
if(vreAccessesHarvester == null) {
if(scopeBean.is(Type.INFRASTRUCTURE)) {
if (scopeBean.is(Type.INFRASTRUCTURE)) {
try {
rootSecret = secret;
CatalogueAccessesHarvester catalogueHarvester = new CatalogueAccessesHarvester(start, end);
List<AccountingRecord> harvested = catalogueHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
CoreServicesAccessesHarvester coreServicesHarvester = new CoreServicesAccessesHarvester(start, end);
List<AccountingRecord> records = coreServicesHarvester.getAccountingRecords();
accountingRecords.addAll(records);
} catch (Exception e) {
logger.error("Error harvesting {} for {}", CatalogueAccessesHarvester.class.getSimpleName(),
context, e);
}
}
if (vreAccessesHarvester == null) {
if (scopeBean.is(Type.INFRASTRUCTURE)) {
vreAccessesHarvester = new VREAccessesHarvester(start, end);
} else {
// This code should never be used because the scopes are sorted by fullname
ScopeBean parent = scopeBean.enclosingScope();
while (!parent.is(Type.INFRASTRUCTURE)) {
parent = parent.enclosingScope();
}
// Setting back token for the context
Utils.setContext(contextAuthorization.getSecretForContext(parent.toString()));
vreAccessesHarvester = new VREAccessesHarvester(start, end);
// Setting back token for the context
Utils.setContext(contextAuthorization.getSecretForContext(context));
}
}
if (rstudioAccessesHarvester == null) {
if (scopeBean.is(Type.INFRASTRUCTURE)) {
rstudioAccessesHarvester = new RStudioAccessesHarvester(start, end);
} else {
// This code should be never used because the scopes are
// sorted by fullname
ScopeBean parent = scopeBean.enclosingScope();
while (!parent.is(Type.INFRASTRUCTURE)) {
parent = parent.enclosingScope();
}
// Setting back token for the context
Utils.setContext(contextAuthorization.getSecretForContext(parent.toString()));
rstudioAccessesHarvester = new RStudioAccessesHarvester(start, end);
// Setting back token for the context
Utils.setContext(contextAuthorization.getSecretForContext(context));
}
}
if (jupyterAccessesHarvester == null) {
if (scopeBean.is(Type.INFRASTRUCTURE)) {
jupyterAccessesHarvester = new JupyterAccessesHarvester(start, end);
} else {
// This code should be never used because the scopes are
// sorted by fullname
ScopeBean parent = scopeBean.enclosingScope();
while (!parent.is(Type.INFRASTRUCTURE)) {
parent = parent.enclosingScope();
}
// Setting back token for the context
Utils.setContext(contextAuthorization.getSecretForContext(parent.toString()));
jupyterAccessesHarvester = new JupyterAccessesHarvester(start, end);
// Setting back token for the context
Utils.setContext(contextAuthorization.getSecretForContext(context));
}
}
if ((context.startsWith(SO_BIG_DATA_VO) || context.startsWith(SO_BIG_DATA_EU_VRE)
|| context.startsWith(SO_BIG_DATA_IT_VRE))
&& start.before(DateUtils.getStartCalendar(2018, Calendar.APRIL, 1).getTime())) {
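// Periods before April 2018 are skipped for these SoBigData contexts (presumably no data is expected before then)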
logger.info("Not Harvesting for {} from {} to {}", context, DateUtils.format(start),
DateUtils.format(end));
} else {
try {
// Collecting Google Analytics Data for VREs Accesses
logger.info("Going to harvest VRE Accesses for {}", context);
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
* List<HarvestedData> harvested = vreAccessesHarvester.getData();
* data.addAll(harvested);
*/
} catch (Exception e) {
logger.error("Error harvesting VRE Accesses for {}", context, e);
}
try {
// Collecting Google Analytics Data for R Studio Accesses
logger.info("Going to harvest R Studio Accesses for {}", context);
List<AccountingRecord> harvested = rstudioAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
} catch (Exception e) {
logger.error("Error harvesting R Studio Accesses for {}", context, e);
}
try {
// Collecting Google Analytics Data for Jupyters Accesses
logger.info("Going to harvest Jupyter Accesses for {}", context);
List<AccountingRecord> harvested = jupyterAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
} catch (Exception e) {
logger.error("Error harvesting Jupyeter Accesses for {}", context, e);
}
try {
// Collecting info on social (posts, replies and likes)
logger.info("Going to harvest Social Interactions for {}", context);
SocialInteractionsHarvester socialHarvester = new SocialInteractionsHarvester(start, end);
List<AccountingRecord> harvested = socialHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
List<HarvestedData> harvested = socialHarvester.getData();
data.addAll(harvested);
*/
} catch (Exception e) {
logger.error("Error harvesting Social Interactions for {}", context, e);
}
try {
// Collecting info on VRE users
if (getVREUsers) {
// Harvesting Users only for VREs (not for VO and ROOT
// which is the sum of the children contexts)
// The VREUsers can only be harvested for the last month
if (scopeBean.is(Type.VRE) && start
.equals(DateUtils.getPreviousPeriod(aggregationType, partialHarvesting).getTime())) {
logger.info("Going to harvest Context Users for {}", context);
VREUsersHarvester vreUsersHarvester = new VREUsersHarvester(start, end);
List<AccountingRecord> harvested = vreUsersHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
* List<HarvestedData> harvested = vreUsersHarvester.getData();
* data.addAll(harvested);
*/
}
}
} catch (Exception e) {
logger.error("Error harvesting Context Users for {}", context, e);
}
if (context.startsWith(SO_BIG_DATA_CATALOGUE_CONTEXT)) {
try {
// Collecting info on Resource Catalogue (Dataset,
// Application, Deliverables, Methods)
logger.info("Going to harvest Resource Catalogue Information for {}", context);
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start,
end, contexts);
List<AccountingRecord> harvested = resourceCatalogueHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
* List<HarvestedData> harvested = resourceCatalogueHarvester.getData();
* data.addAll(harvested);
*/
} catch (Exception e) {
logger.error("Error harvesting Resource Catalogue Information for {}", context, e);
}
// try {
// // Collecting info on Data/Method download
// logger.info("Going to harvest Data Method Download for {}", context);
// DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start,
// end, contexts);
//
// List<AccountingRecord> harvested = dataMethodDownloadHarvester.getAccountingRecords();
// accountingRecords.addAll(harvested);
//
// } catch (Exception e) {
// logger.error("Error harvesting Data Method Download for {}", context, e);
// }
}
if (context.startsWith(TAGME_CONTEXT)) {
try {
// Collecting info on method invocation
logger.info("Going to harvest Method Invocations for {}", context);
TagMeMethodInvocationHarvester tagMeMethodInvocationHarvester = new TagMeMethodInvocationHarvester(
start, end);
List<AccountingRecord> harvested = tagMeMethodInvocationHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
* List<HarvestedData> harvested = tagMeMethodInvocationHarvester.getData();
* data.addAll(harvested);
*/
} catch (Exception e) {
logger.error("Error harvesting Method Invocations for {}", context, e);
}
} else {
@@ -403,38 +461,38 @@ public class AccountingDataHarvesterPlugin extends Plugin<DataHarvestPluginDecla
// Collecting info on method invocation
logger.info("Going to harvest Method Invocations for {}", context);
MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(start, end);
List<AccountingRecord> harvested = methodInvocationHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
* List<HarvestedData> harvested = methodInvocationHarvester.getData();
* data.addAll(harvested);
*/
} catch (Exception e) {
logger.error("Error harvesting Method Invocations for {}", context, e);
}
}
}
}
Utils.setContext(rootSecret);
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end),
accountingRecords);
if (!dryRun) {
// Passing an empty array avoids a stray null element when no records were harvested
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[0]));
//dbaseManager.insertMonthlyData(start, end, data, reRun);
} else {
logger.debug("Harvested measures are {}", accountingRecords);
}
}
/** {@inheritDoc} */
@Override
protected void onStop() throws Exception {
logger.debug("{} is stopping", this.getClass().getSimpleName());
}
}


@@ -1,75 +0,0 @@
package org.gcube.dataharvest;
import java.util.HashMap;
import java.util.Map;
import org.gcube.vremanagement.executor.plugin.Plugin;
import org.gcube.vremanagement.executor.plugin.PluginDeclaration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
public class DataHarvestPluginDeclaration implements PluginDeclaration {
private static Logger logger = LoggerFactory.getLogger(DataHarvestPluginDeclaration.class);
public static final String NAME = "AccountingDataHarvester";
public static final String DESCRIPTION = "Data Harvest for Accounting Summary Dashboard";
public static final String VERSION = "1.0.0";
/**{@inheritDoc}*/
@Override
public void init() {
logger.debug(String.format("%s initialized", AccountingDataHarvesterPlugin.class.getSimpleName()));
}
/**{@inheritDoc}*/
@Override
public String getName() {
return NAME;
}
/**{@inheritDoc}*/
@Override
public String getDescription() {
return DESCRIPTION;
}
/**{@inheritDoc}*/
@Override
public String getVersion() {
return VERSION;
}
/**{@inheritDoc}*/
@Override
public Map<String, String> getSupportedCapabilities() {
Map<String, String> discoveredCapabilities = new HashMap<String, String>();
discoveredCapabilities.put("FakeKey", "FakeValue");
return discoveredCapabilities;
}
/**{@inheritDoc}*/
@Override
public Class<? extends Plugin<? extends PluginDeclaration>> getPluginImplementation() {
return AccountingDataHarvesterPlugin.class;
}
@Override
public String toString(){
return String.format("{"
+ "name:%s,"
+ "version:%s,"
+ "description:%s,"
+ "pluginImplementation:%s,"
+ "}",
getName(),
getVersion(),
getDescription(),
getPluginImplementation().getClass().getSimpleName());
}
}


@@ -59,12 +59,12 @@ public class AnalyticsReportCredentials {
/**
* Please note:
* The key is stored in the resource with blanks " " instead of "\n" as it causes issues and
* without the BEGIN and END Delimiters (e.g. -----END PRIVATE KEY-----) which must be re-added
* @param privateKeyPem
*/
public void setPrivateKeyPem(String privateKeyPem) {
this.privateKeyPem = privateKeyPem.replace(" ", "\n");
this.privateKeyPem = "-----BEGIN PRIVATE KEY-----\n"+this.privateKeyPem+"\n-----END PRIVATE KEY-----\n";
}
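// Illustrative example (hypothetical values): a key stored as
// "MIIEvQIBADANBg kqhkiG9w0BAQEF" becomes:
// -----BEGIN PRIVATE KEY-----
// MIIEvQIBADANBg
// kqhkiG9w0BAQEF
// -----END PRIVATE KEY-----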
public String getPrivateKeyId() {


@@ -0,0 +1,44 @@
package org.gcube.dataharvest.datamodel;
public class CatalogueAccessesReportRow {
private String dashboardContext;
private HarvestedDataKey key;
private String pagePath;
private int visitNumber;
public CatalogueAccessesReportRow() {
// TODO Auto-generated constructor stub
}
public HarvestedDataKey getKey() {
return key;
}
public void setKey(HarvestedDataKey key) {
this.key = key;
}
public String getPagePath() {
return pagePath;
}
public void setPagePath(String pagePath) {
this.pagePath = pagePath;
}
public int getVisitNumber() {
return visitNumber;
}
public void setVisitNumber(int visitNumber) {
this.visitNumber = visitNumber;
}
public String getDashboardContext() {
return dashboardContext;
}
public void setDashboardContext(String dashboardContext) {
this.dashboardContext = dashboardContext;
}
@Override
public String toString() {
return "CatalogueAccessesReportRow [dashboardContext=" + dashboardContext + ", key=" + key + ", pagePath="
+ pagePath + ", visitNumber=" + visitNumber + "]";
}
}


@@ -0,0 +1,44 @@
package org.gcube.dataharvest.datamodel;
public class CoreServiceAccessesReportRow {
private String dashboardContext;
private HarvestedDataKey key;
private String pagePath;
private int visitNumber;
public CoreServiceAccessesReportRow() {
// TODO Auto-generated constructor stub
}
public HarvestedDataKey getKey() {
return key;
}
public void setKey(HarvestedDataKey key) {
this.key = key;
}
public String getPagePath() {
return pagePath;
}
public void setPagePath(String pagePath) {
this.pagePath = pagePath;
}
public int getVisitNumber() {
return visitNumber;
}
public void setVisitNumber(int visitNumber) {
this.visitNumber = visitNumber;
}
public String getDashboardContext() {
return dashboardContext;
}
public void setDashboardContext(String dashboardContext) {
this.dashboardContext = dashboardContext;
}
@Override
public String toString() {
return "CoreServiceAccessesReportRow [dashboardContext=" + dashboardContext + ", key=" + key + ", pagePath="
+ pagePath + ", visitNumber=" + visitNumber + "]";
}
}


@@ -5,24 +5,29 @@ package org.gcube.dataharvest.datamodel;
/**
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* May 22, 2018
* @author M. Assante, ISTI-CNR
*/
public enum HarvestedDataKey {
WORKSPACE_ACCESSES("Workspace Accesses"),
MESSAGES_ACCESSES("Messages Accesses"),
NOTIFICATIONS_ACCESSES("Notifications Accesses"),
PROFILE_ACCESSES("Profile Accesses"),
JUPYTER_ACCESSES("Jupyter Accesses"),
RSTUDIO_ACCESSES("R Studio Accesses"),
CATALOGUE_ACCESSES("Catalogue Accesses"),
CATALOGUE_DATASET_LIST_ACCESSES("Item List"),
CATALOGUE_DATASET_ACCESSES("Item Metadata"),
CATALOGUE_RESOURCE_ACCESSES("Item Resource"),
ACCESSES("VRE Accesses"),
USERS("VRE Users"),
DATA_METHOD_DOWNLOAD("Data/Method download"),
NEW_CATALOGUE_METHODS("New Catalogue Methods"),
NEW_CATALOGUE_DATASETS("New Catalogue Datasets"),
NEW_CATALOGUE_DELIVERABLES("New Catalogue Deliverables"),
NEW_CATALOGUE_APPLICATIONS("New Catalogue Applications"),
SOCIAL_POSTS("VRE Social Interations Posts"),
SOCIAL_REPLIES("VRE Social Interations Replies"),
SOCIAL_LIKES("VRE Social Interations Likes"),
METHOD_INVOCATIONS("VRE Methods Invocation"),
VISUAL_TOOLS("VRE Visual Tools");
METHOD_INVOCATIONS("VRE Methods Invocation");
private String key;


@@ -12,7 +12,7 @@ import org.gcube.common.authorization.client.Constants;
import org.gcube.common.authorization.library.AuthorizationEntry;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.utils.DateUtils;
import org.slf4j.Logger;
@@ -37,27 +37,10 @@ public abstract class BasicHarvester {
logger.debug("Creating {} for the period {} {} ", this.getClass().getSimpleName(), DateUtils.format(start), DateUtils.format(end));
}
public static String getCurrentContext(String token) throws Exception {
AuthorizationEntry authorizationEntry = Constants.authorizationService().get(token);
String context = authorizationEntry.getContext();
logger.info("Context of token {} is {}", token, context);
return context;
}
public static void setContext(String token) throws Exception {
SecurityTokenProvider.instance.set(token);
ScopeProvider.instance.set(getCurrentContext(token));
}
public static String getCurrentContext() throws Exception {
String token = SecurityTokenProvider.instance.get();
return getCurrentContext(token);
}
public abstract List<AccountingRecord> getAccountingRecords() throws Exception;
public Dimension getDimension(HarvestedDataKey harvestedDataKey) {
return AccountingDashboardHarvesterPlugin.getDimension(harvestedDataKey.getKey());
}


@@ -0,0 +1,395 @@
package org.gcube.dataharvest.harvester;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.IOException;
import java.io.StringReader;
import java.security.GeneralSecurityException;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.common.encryption.encrypter.StringEncrypter;
import org.gcube.common.resources.gcore.GenericResource;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
import org.gcube.common.resources.gcore.utils.Group;
import org.gcube.common.resources.gcore.utils.XPathHelper;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.dataharvest.datamodel.AnalyticsReportCredentials;
import org.gcube.dataharvest.datamodel.CatalogueAccessesReportRow;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.DateRange.Builder;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.ServiceAccountCredentials;
public class CatalogueAccessesHarvester extends BasicHarvester {
private static Logger logger = LoggerFactory.getLogger(CatalogueAccessesHarvester.class);
private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
private static final String MAPPING_RESOURCE_CATEGORY = "BigGAnalyticsMapping";
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
private static final String AP_CATALOGUE_PAGEVIEWS_PROPERTY = "catalogue-pageviews";
private static final String AP_CLIENT_PROPERTY = "client_id";
private static final String AP_PRIVATEKEY_PROPERTY = "private_key_id";
private static final String REGEX_CATALOGUE_ACCESSES = "^\\/$";
private static final String REGEX_CATALOGUE_DATASET_LIST_ACCESSES = "^\\/dataset(\\?([a-zA-Z0-9_.-]*.+))*";
private static final String REGEX_CATALOGUE_DATASET_ACCESSES = "^\\/dataset\\/[a-zA-Z0-9_.-]+$";
private static final String REGEX_CATALOGUE_RESOURCE_ACCESSES = "^\\/dataset\\/[a-zA-Z0-9_.-]+\\/resource\\/[a-zA-Z0-9_.-]+$";
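// Illustrative page paths and how the patterns above classify them (hypothetical examples):
// "/"                                    -> CATALOGUE_ACCESSES
// "/dataset?q=fish"                      -> CATALOGUE_DATASET_LIST_ACCESSES
// "/dataset/my-dataset"                  -> CATALOGUE_DATASET_ACCESSES
// "/dataset/my-dataset/resource/abc-123" -> CATALOGUE_RESOURCE_ACCESSES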
private HashMap<String, List<CatalogueAccessesReportRow>> catalogueAccesses;
public CatalogueAccessesHarvester(Date start, Date end) throws Exception {
super(start, end);
catalogueAccesses = getAllAccesses(start, end);
}
@Override
public List<AccountingRecord> getAccountingRecords() throws Exception {
try {
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
for (String dashboardContext : catalogueAccesses.keySet()) {
int catalogueTotalAccesses = 0;
int catalogueDatasetListAccesses = 0;
int catalogueDatasetAccesses = 0;
int catalogueResourceAccesses = 0;
logger.debug("Catalogue accesses for {} ", dashboardContext);
for(CatalogueAccessesReportRow row : catalogueAccesses.get(dashboardContext)) {
// String pagePath = row.getPagePath();
switch (row.getKey()) {
case CATALOGUE_ACCESSES:
catalogueTotalAccesses += row.getVisitNumber();
break;
case CATALOGUE_DATASET_LIST_ACCESSES:
catalogueDatasetListAccesses += row.getVisitNumber();
break;
case CATALOGUE_DATASET_ACCESSES:
catalogueDatasetAccesses += row.getVisitNumber();
break;
case CATALOGUE_RESOURCE_ACCESSES:
catalogueResourceAccesses += row.getVisitNumber();
break;
default:
break;
}
}
ScopeDescriptor scopeDescriptor = new ScopeDescriptor();
try {
ScopeBean scopeBean = new ScopeBean(dashboardContext);
scopeDescriptor.setId(dashboardContext);
scopeDescriptor.setName(scopeBean.name());
AccountingRecord ar1 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_ACCESSES), (long) catalogueTotalAccesses);
AccountingRecord ar2 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_DATASET_LIST_ACCESSES), (long) catalogueDatasetListAccesses);
AccountingRecord ar3 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_DATASET_ACCESSES), (long) catalogueDatasetAccesses);
AccountingRecord ar4 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_RESOURCE_ACCESSES), (long) catalogueResourceAccesses);
logger.debug("{} : {}", ar1.getDimension().getId(), ar1.getMeasure());
accountingRecords.add(ar1);
logger.debug("{} : {}", ar2.getDimension().getId(), ar2.getMeasure());
accountingRecords.add(ar2);
logger.debug("{} : {}", ar3.getDimension().getId(), ar3.getMeasure());
accountingRecords.add(ar3);
logger.debug("{} : {}", ar4.getDimension().getId(), ar4.getMeasure());
accountingRecords.add(ar4);
} catch (NullPointerException e) {
logger.warn("I found no correspondance in the Genereric Resource for a PropertyId you should check this, type: BigGAnalyticsMapping name: AccountingDashboardMapping");
e.printStackTrace();
}
}
logger.debug("Returning {} accountingRecords ", accountingRecords.size());
return accountingRecords;
} catch(Exception e) {
throw e;
}
}
/**
* Collects catalogue page views per GA4 property and groups them by dashboard context.
*/
private static HashMap<String, List<CatalogueAccessesReportRow>> getAllAccesses(Date start, Date end) throws Exception {
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
logger.debug("Getting Catalogue accesses in this time range {}", dateRangeBuilder.toString());
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
logger.debug("Getting credentials credentialsFromD4S");
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
logger.debug("initializeAnalyticsReporting service settings");
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
HashMap<String, List<CatalogueAccessesReportRow>> toReturn = new HashMap<>();
for(String view : responses.keySet()) {
String dashboardContext = getAccountingDashboardContextGivenGAViewID(view);
logger.info("\n\n**************** Parsing responses for this catalogue view, which corresponds to Dashboard Context: " + dashboardContext);
List<CatalogueAccessesReportRow> viewReport = parseResponse(view, responses.get(view), dashboardContext);
logger.trace("Got {} entries from view id={}", viewReport.size(), view);
toReturn.put(dashboardContext, viewReport);
}
return toReturn;
}
/**
* Initializes a Google Analytics Data API service object.
*
* @return An authorized Google Analytics Data API
* @throws IOException
* @throws GeneralSecurityException
*/
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
return BetaAnalyticsDataSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
.build();
}
/**
* Queries Analytics Data API service
*
* @param betaAnalyticsDataSettings Analytics Data API service settings
* @return the report responses keyed by property id
* @throws IOException
*/
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
for(String propertyId : viewIDs) {
List<RunReportResponse> gReportResponses = new ArrayList<>();
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
RunReportRequest request =
RunReportRequest.newBuilder()
.setProperty("properties/" + propertyId)
.addDimensions(Dimension.newBuilder().setName("pagePath"))
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
.addDateRanges(dateRangeBuilder)
.build();
// Make the request.
RunReportResponse response = analyticsData.runReport(request);
gReportResponses.add(response);
// Iterate through every row of the API response.
// for (Row row : response.getRowsList()) {
// System.out.printf(
// "%s, %s%n", row.getDimensionValues(0).getValue(), row.getMetricValues(0).getValue());
// }
reports.put(propertyId, gReportResponses);
}
}
return reports;
}
/**
* Parses and prints the Analytics Data API service response
*
* @param dashboardContext
*/
private static List<CatalogueAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses, String dashboardContext) {
logger.debug("parsing Response for " + viewId);
List<CatalogueAccessesReportRow> toReturn = new ArrayList<>();
for (RunReportResponse response : responses) {
for (Row row: response.getRowsList()) {
String dimension = row.getDimensionValues(0).getValue();
String metric = row.getMetricValues(0).getValue();
CatalogueAccessesReportRow var = new CatalogueAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
logger.trace("parsing pagepath {}: value: {}", pagePath, Integer.parseInt(metric));
if (pagePath.matches(REGEX_CATALOGUE_RESOURCE_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_RESOURCE_ACCESSES);
validEntry = true;
}
else if (pagePath.matches(REGEX_CATALOGUE_DATASET_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_DATASET_ACCESSES);
validEntry = true;
}
else if (pagePath.matches(REGEX_CATALOGUE_DATASET_LIST_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_DATASET_LIST_ACCESSES);
validEntry = true;
}
else if (pagePath.matches(REGEX_CATALOGUE_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_ACCESSES);
validEntry = true;
}
if (validEntry) {
var.setDashboardContext(dashboardContext);
var.setPagePath(dimension);
var.setVisitNumber(Integer.parseInt(metric));
toReturn.add(var);
}
}
}
return toReturn;
}
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
throws Exception {
String scope = infrastructureScope;
String currScope = ScopeProvider.instance.get();
ScopeProvider.instance.set(scope);
SimpleQuery query = queryFor(ServiceEndpoint.class);
query.addCondition("$resource/Profile/Category/text() eq '" + SERVICE_ENDPOINT_CATEGORY + "'");
query.addCondition("$resource/Profile/Name/text() eq '" + SERVICE_ENDPOINT_NAME + "'");
DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
List<ServiceEndpoint> toReturn = client.submit(query);
ScopeProvider.instance.set(currScope);
return toReturn;
}
/**
* This method looks up in the IS the Gateway which corresponds to a given Google Analytics viewId
* @param viewID
* @return the gateway name, e.g. "Blue-Cloud Gateway", or null if no correspondence was found
* @throws Exception
* @throws ObjectNotFound
*/
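// Sketch of the mapping resource body this method expects, inferred from the XPath queries below
// (viewID and DashboardContext values are hypothetical):
// <Property>
//   <viewID>123456789</viewID>
//   <DashboardContext>/d4science.research-infrastructures.eu/SomeGateway</DashboardContext>
// </Property>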
private static String getAccountingDashboardContextGivenGAViewID(String viewID) throws ObjectNotFound, Exception {
String toReturn = null;
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
String currScope = ScopeProvider.instance.get();
ScopeProvider.instance.set(context);
SimpleQuery query = queryFor(GenericResource.class);
query.addCondition("$resource/Profile/SecondaryType/text() eq '" + MAPPING_RESOURCE_CATEGORY + "'");
query.addCondition("$resource/Profile/Body/Property/viewID/text() eq '" + viewID + "'");
DiscoveryClient<GenericResource> client = clientFor(GenericResource.class);
List<GenericResource> list = client.submit(query);
if(list.size() > 1) {
logger.error("Too many Generic Resources having GA viewID " + viewID
+ " in this scope having SecondaryType " + MAPPING_RESOURCE_CATEGORY);
} else if(list.size() == 0) {
logger.warn("There is no Generic Resources having GA viewID " + viewID + " and SecondaryType "
+ MAPPING_RESOURCE_CATEGORY + " in this context: " + context);
} else {
GenericResource found = list.get(0);
String elem = new StringBuilder("<body>").append(found.profile().bodyAsString()).append("</body>").toString();
DocumentBuilder docBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
Node node = docBuilder.parse(new InputSource(new StringReader(elem))).getDocumentElement();
XPathHelper helper = new XPathHelper(node);
List<String> currValue = helper.evaluate("//Property/viewID/text()");
if (currValue != null && currValue.size() > 0) {
List<String> contexts = currValue;
for (int i = 0; i < contexts.size(); i++) {
if (currValue.get(i).trim().compareTo(viewID) == 0) {
toReturn = helper.evaluate("//Property/DashboardContext/text()").get(i);
break;
}
}
}
logger.debug("Found DashboardContext for viewId {} : {} ", viewID, toReturn);
}
ScopeProvider.instance.set(currScope);
return toReturn;
}
/**
* Reads the Analytics reporting credentials from the GA4AnalyticsDataService ServiceEndpoint in the IS.
* @throws Exception
*/
private static AnalyticsReportCredentials getAuthorisedApplicationInfoFromIs() throws Exception {
AnalyticsReportCredentials reportCredentials = new AnalyticsReportCredentials();
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
try {
List<ServiceEndpoint> list = getAnalyticsReportingConfigurationFromIS(context);
if(list.size() > 1) {
logger.error("Too many Service Endpoints having name " + SERVICE_ENDPOINT_NAME
+ " in this scope having Category " + SERVICE_ENDPOINT_CATEGORY);
} else if(list.size() == 0) {
logger.warn("There is no Service Endpoint having name " + SERVICE_ENDPOINT_NAME + " and Category "
+ SERVICE_ENDPOINT_CATEGORY + " in this context: " + context);
} else {
for(ServiceEndpoint res : list) {
Group<AccessPoint> apGroup = res.profile().accessPoints();
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
AccessPoint found = accessPoints[0];
reportCredentials.setClientEmail(found.username());
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
for(Property prop : found.properties()) {
if(prop.name().compareTo(AP_CATALOGUE_PAGEVIEWS_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
String[] views = decryptedValue.split(";");
reportCredentials.setViewIds(Arrays.asList(views));
}
if(prop.name().compareTo(AP_CLIENT_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setClientId(decryptedValue);
}
if(prop.name().compareTo(AP_PRIVATEKEY_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setPrivateKeyId(decryptedValue);
}
}
}
}
} catch(Exception e) {
e.printStackTrace();
return null;
}
return reportCredentials;
}
private static LocalDate asLocalDate(Date date) {
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
}
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
String startDate = asLocalDate(start).format(formatter);
String endDate = asLocalDate(end).format(formatter);
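// e.g. a monthly window becomes startDate="2024-04-01", endDate="2024-04-30" (illustrative values)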
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
return dateRangeBuilder;
}
}


@@ -0,0 +1,397 @@
package org.gcube.dataharvest.harvester;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.IOException;
import java.io.StringReader;
import java.security.GeneralSecurityException;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.common.encryption.encrypter.StringEncrypter;
import org.gcube.common.resources.gcore.GenericResource;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
import org.gcube.common.resources.gcore.utils.Group;
import org.gcube.common.resources.gcore.utils.XPathHelper;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.dataharvest.datamodel.AnalyticsReportCredentials;
import org.gcube.dataharvest.datamodel.CoreServiceAccessesReportRow;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.DateRange.Builder;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.ServiceAccountCredentials;
/**
* @author Massimiliano Assante (ISTI - CNR)
*/
public class CoreServicesAccessesHarvester extends BasicHarvester {
private static Logger logger = LoggerFactory.getLogger(CoreServicesAccessesHarvester.class);
private static final String MAPPING_RESOURCE_CATEGORY = "BigGAnalyticsMapping";
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
private static final String AP_VIEWS_PROPERTY = "views";
private static final String AP_CLIENT_ID = "client_id";
private static final String AP_PRIVATEKEY_ID_PROPERTY = "private_key_id";
private static final String PAGE_WORKSPACE_ACCESSES = "/workspace";
private static final String PAGE_MESSAGES_ACCESSES = "/messages";
private static final String PAGE_PROFILE_ACCESSES = "/profile";
private static final String PAGE_NOTIFICATION_ACCESSES = "/notifications";
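// These PAGE_ constants are matched as substrings of the GA pagePath; e.g. a path such as
// "/group/somevre/workspace" (hypothetical) is counted as WORKSPACE_ACCESSES, see parseResponse below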
private HashMap<String, List<CoreServiceAccessesReportRow>> coreServicesAccesses;
public CoreServicesAccessesHarvester(Date start, Date end) throws Exception {
super(start, end);
coreServicesAccesses = getAllAccesses(start, end);
}
@Override
public List<AccountingRecord> getAccountingRecords() throws Exception {
try {
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
for (String dashboardContext : coreServicesAccesses.keySet()) {
int workspaceAccesses = 0;
int messagesAccesses = 0;
int notificationsAccesses = 0;
int profileAccesses = 0;
logger.debug("{};", dashboardContext);
for(CoreServiceAccessesReportRow row : coreServicesAccesses.get(dashboardContext)) {
// String pagePath = row.getPagePath();
switch (row.getKey()) {
case WORKSPACE_ACCESSES:
workspaceAccesses += row.getVisitNumber();
break;
case MESSAGES_ACCESSES:
messagesAccesses += row.getVisitNumber();
break;
case NOTIFICATIONS_ACCESSES:
notificationsAccesses += row.getVisitNumber();
break;
case PROFILE_ACCESSES:
profileAccesses += row.getVisitNumber();
break;
default:
break;
}
}
ScopeDescriptor scopeDescriptor = new ScopeDescriptor();
ScopeBean scopeBean = new ScopeBean(dashboardContext);
scopeDescriptor.setId(dashboardContext);
scopeDescriptor.setName(scopeBean.name());
AccountingRecord ar1 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.WORKSPACE_ACCESSES), (long) workspaceAccesses);
AccountingRecord ar2 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.MESSAGES_ACCESSES), (long) messagesAccesses);
AccountingRecord ar3 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.NOTIFICATIONS_ACCESSES), (long) notificationsAccesses);
AccountingRecord ar4 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.PROFILE_ACCESSES), (long) profileAccesses);
logger.debug("{};{}", ar1.getDimension().getId(), ar1.getMeasure());
accountingRecords.add(ar1);
logger.debug("{};{}", ar2.getDimension().getId(), ar2.getMeasure());
accountingRecords.add(ar2);
logger.debug("{};{}", ar3.getDimension().getId(), ar3.getMeasure());
accountingRecords.add(ar3);
logger.debug("{};{}", ar4.getDimension().getId(), ar4.getMeasure());
accountingRecords.add(ar4);
}
logger.debug("Returning {} accountingRecords ", accountingRecords.size());
return accountingRecords;
} catch(Exception e) {
throw e;
}
}
/**
*
*/
private static HashMap<String, List<CoreServiceAccessesReportRow>> getAllAccesses(Date start, Date end) throws Exception {
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
logger.trace("Getting core services accesses in this time range {}", dateRangeBuilder.toString());
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
logger.trace("gotten credentialsFromD4S id = {}", credentialsFromD4S.getClientId());
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
logger.trace("gotten credentialsFromD4S viewIds= {}", credentialsFromD4S.getViewIds().toString());
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
HashMap<String, List<CoreServiceAccessesReportRow>> toReturn = new HashMap<>();
int i = 1;
for(String view : responses.keySet()) {
String dashboardContext = getAccountingDashboardContextGivenGAViewID(view);
if (dashboardContext != null ) {
logger.trace("\n ({}) *** Parsing responses for this Gateway view, which corresponds to Dashboard Context: {} \n", i, dashboardContext );
List<CoreServiceAccessesReportRow> viewReport = parseResponse(view, responses.get(view), dashboardContext);
logger.trace("Got {} entries from view id={}", viewReport.size(), view);
toReturn.put(dashboardContext, viewReport);
} else {
logger.warn("Got entries from view id={} but cannot find Dashboard Context correspondance, I think you need to update the Generic Resource of the Mappings", view);
}
i++;
}
return toReturn;
}
/**
* Initializes a Google Analytics Data API service object.
*
* @return An authorized Google Analytics Data API
* @throws IOException
* @throws GeneralSecurityException
*/
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
return BetaAnalyticsDataSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
.build();
}
/**
* Queries Analytics Data API service
*
* @param betaAnalyticsDataSettings Analytics Data API service settings
* @return the report responses keyed by property id
* @throws IOException
*/
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
for(String propertyId : viewIDs) {
List<RunReportResponse> gReportResponses = new ArrayList<>();
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
RunReportRequest request =
RunReportRequest.newBuilder()
.setProperty("properties/" + propertyId)
.addDimensions(Dimension.newBuilder().setName("pagePath"))
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
.addDateRanges(dateRangeBuilder)
.build();
// Make the request.
RunReportResponse response = analyticsData.runReport(request);
gReportResponses.add(response);
reports.put(propertyId, gReportResponses);
}
}
return reports;
}
/**
* Parses and prints the Analytics Data API service response
*
* @param response An Analytics Data API service response.
*/
private static List<CoreServiceAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses, String dashboardContext) {
logger.debug("parsing Response for propertyID=" + viewId);
List<CoreServiceAccessesReportRow> toReturn = new ArrayList<>();
for (RunReportResponse response : responses) {
for (Row row: response.getRowsList()) {
String dimension = row.getDimensionValues(0).getValue();
String metric = row.getMetricValues(0).getValue();
CoreServiceAccessesReportRow var = new CoreServiceAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
logger.trace("parsing pagepath {}: value: {}", pagePath, Integer.parseInt(metric));
if (!pagePath.contains("_redirect=/group")) {
if ( pagePath.contains(PAGE_WORKSPACE_ACCESSES)) {
var.setKey(HarvestedDataKey.WORKSPACE_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
else if ( pagePath.contains(PAGE_MESSAGES_ACCESSES)) {
var.setKey(HarvestedDataKey.MESSAGES_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
else if ( pagePath.contains(PAGE_PROFILE_ACCESSES)) {
var.setKey(HarvestedDataKey.PROFILE_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
else if ( pagePath.contains(PAGE_NOTIFICATION_ACCESSES)) {
var.setKey(HarvestedDataKey.NOTIFICATIONS_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
}
if (validEntry) {
var.setDashboardContext(dashboardContext);
var.setPagePath(dimension);
var.setVisitNumber(Integer.parseInt(metric));
toReturn.add(var);
}
}
}
return toReturn;
}
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
throws Exception {
String scope = infrastructureScope;
String currScope = ScopeProvider.instance.get();
ScopeProvider.instance.set(scope);
SimpleQuery query = queryFor(ServiceEndpoint.class);
query.addCondition("$resource/Profile/Category/text() eq '" + SERVICE_ENDPOINT_CATEGORY + "'");
query.addCondition("$resource/Profile/Name/text() eq '" + SERVICE_ENDPOINT_NAME + "'");
DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
List<ServiceEndpoint> toReturn = client.submit(query);
ScopeProvider.instance.set(currScope);
return toReturn;
}
/**
* This method looks up in the IS the Gateway which corresponds to a given Google Analytics viewId
* @param viewID
* @return the gateway name, e.g. "Blue-Cloud Gateway", or null if no correspondence was found
* @throws Exception
* @throws ObjectNotFound
*/
private static String getAccountingDashboardContextGivenGAViewID(String viewID) throws ObjectNotFound, Exception {
String toReturn = null;
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
String currScope = ScopeProvider.instance.get();
ScopeProvider.instance.set(context);
SimpleQuery query = queryFor(GenericResource.class);
query.addCondition("$resource/Profile/SecondaryType/text() eq '" + MAPPING_RESOURCE_CATEGORY + "'");
query.addCondition("$resource/Profile/Body/Property/viewID/text() eq '" + viewID + "'");
DiscoveryClient<GenericResource> client = clientFor(GenericResource.class);
List<GenericResource> list = client.submit(query);
if(list.size() > 1) {
logger.error("Too many Generic Resources having GA viewID " + viewID
+ " in this scope having SecondaryType " + MAPPING_RESOURCE_CATEGORY);
} else if(list.size() == 0) {
logger.warn("There is no Generic Resources having GA viewID " + viewID + " and SecondaryType "
+ MAPPING_RESOURCE_CATEGORY + " in this context: " + context);
} else {
GenericResource found = list.get(0);
String elem = new StringBuilder("<body>").append(found.profile().bodyAsString()).append("</body>").toString();
DocumentBuilder docBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
Node node = docBuilder.parse(new InputSource(new StringReader(elem))).getDocumentElement();
XPathHelper helper = new XPathHelper(node);
List<String> currValue = helper.evaluate("//Property/viewID/text()");
if (currValue != null && currValue.size() > 0) {
List<String> contexts = currValue;
for (int i = 0; i < contexts.size(); i++) {
if (currValue.get(i).trim().compareTo(viewID) == 0) {
toReturn = helper.evaluate("//Property/DashboardContext/text()").get(i);
break;
}
}
}
logger.debug("Found DashboardContext for viewId {} : {} ", viewID, toReturn);
}
ScopeProvider.instance.set(currScope);
return toReturn;
}
/**
* Reads the Analytics reporting credentials from the GA4AnalyticsDataService ServiceEndpoint in the IS.
* @throws Exception
*/
private static AnalyticsReportCredentials getAuthorisedApplicationInfoFromIs() throws Exception {
AnalyticsReportCredentials reportCredentials = new AnalyticsReportCredentials();
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
try {
List<ServiceEndpoint> list = getAnalyticsReportingConfigurationFromIS(context);
if(list.size() > 1) {
logger.error("Too many Service Endpoints having name " + SERVICE_ENDPOINT_NAME
+ " in this scope having Category " + SERVICE_ENDPOINT_CATEGORY);
} else if(list.size() == 0) {
logger.warn("There is no Service Endpoint having name " + SERVICE_ENDPOINT_NAME + " and Category "
+ SERVICE_ENDPOINT_CATEGORY + " in this context: " + context);
} else {
for(ServiceEndpoint res : list) {
Group<AccessPoint> apGroup = res.profile().accessPoints();
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
AccessPoint found = accessPoints[0];
reportCredentials.setClientEmail(found.username());
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
for(Property prop : found.properties()) {
if(prop.name().compareTo(AP_VIEWS_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
String[] views = decryptedValue.split(";");
reportCredentials.setViewIds(Arrays.asList(views));
}
if(prop.name().compareTo(AP_CLIENT_ID) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setClientId(decryptedValue);
}
if(prop.name().compareTo(AP_PRIVATEKEY_ID_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setPrivateKeyId(decryptedValue);
}
}
}
}
} catch(Exception e) {
e.printStackTrace();
return null;
}
return reportCredentials;
}
private static LocalDate asLocalDate(Date date) {
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
}
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
String startDate = asLocalDate(start).format(formatter);
String endDate = asLocalDate(end).format(formatter);
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
return dateRangeBuilder;
}
}


@@ -0,0 +1,312 @@
package org.gcube.dataharvest.harvester;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.encryption.encrypter.StringEncrypter;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
import org.gcube.common.resources.gcore.utils.Group;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.AnalyticsReportCredentials;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.datamodel.VREAccessesReportRow;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.DateRange.Builder;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.ServiceAccountCredentials;
/**
*
* @author Giancarlo Panichi (ISTI CNR)
*
*/
public class JupyterAccessesHarvester extends BasicHarvester {
private static Logger logger = LoggerFactory.getLogger(JupyterAccessesHarvester.class);
private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
private static final String AP_VIEWS_PROPERTY = "views";
private static final String AP_CLIENT_ID = "client_id";
private static final String AP_PRIVATEKEY_ID_PROPERTY = "private_key_id";
private List<VREAccessesReportRow> vreAccesses;
public JupyterAccessesHarvester(Date start, Date end) throws Exception {
super(start, end);
logger.debug("JupyerAccessHArvester: {}, {}", start, end);
vreAccesses = getAllAccesses(start, end);
}
@Override
public List<AccountingRecord> getAccountingRecords() throws Exception {
try {
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
int measure = 0;
ScopeBean scopeBean = new ScopeBean(context);
String lowerCasedContext = scopeBean.name().toLowerCase();
logger.debug("JupyerAccessHArvester lowerCasedContext: {}", lowerCasedContext);
for (VREAccessesReportRow row : vreAccesses) {
String pagePath = row.getPagePath().toLowerCase();
if (pagePath != null && !pagePath.isEmpty()) {
if (pagePath.contains(lowerCasedContext)) {
if (!pagePath.contains("catalogue")) {
if (pagePath.contains("jupyter") || pagePath.contains("jupiter")) {
logger.trace("Matched jupyter or jupiter ({}) : {}", lowerCasedContext, pagePath);
measure += row.getVisitNumber();
}
}
}
}
}
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
if (measure > 0) {
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant,
getDimension(HarvestedDataKey.JUPYTER_ACCESSES), (long) measure);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
accountingRecords.add(ar);
}
return accountingRecords;
} catch (Exception e) {
throw e;
}
}
/**
*
* @return a list of {@link VREAccessesReportRow} objects containing the
* pagePath and the visit number e.g. VREAccessesReportRow
* [pagePath=/group/agroclimaticmodelling/add-new-users,
* visitNumber=1] VREAccessesReportRow
* [pagePath=/group/agroclimaticmodelling/administration,
* visitNumber=2] VREAccessesReportRow
* [pagePath=/group/agroclimaticmodelling/agroclimaticmodelling,
* visitNumber=39]
*/
private static List<VREAccessesReportRow> getAllAccesses(Date start, Date end) throws Exception {
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
logger.debug("Getting accesses in this time range {}", dateRangeBuilder.toString());
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
logger.debug("Getting credentials credentialsFromD4S");
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
logger.debug("initializeAnalyticsReporting service settings");
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
List<VREAccessesReportRow> totalAccesses = new ArrayList<>();
for(String view : responses.keySet()) {
List<VREAccessesReportRow> viewReport = parseResponse(view, responses.get(view));
logger.debug("Got {} entries from view id={}", viewReport.size(), view);
totalAccesses.addAll(viewReport);
}
logger.debug("Merged in {} total entries from all views", totalAccesses.size());
return totalAccesses;
}
/**
* Initializes a Google Analytics Data API service object.
*
* @return An authorized Google Analytics Data API
* @throws IOException
* @throws GeneralSecurityException
*/
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
return BetaAnalyticsDataSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
.build();
}
/**
* Queries Analytics Data API service
*
* @param betaAnalyticsDataSettings Analytics Data API service settings
* @return the report responses keyed by property id
* @throws IOException
*/
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
for(String propertyId : viewIDs) {
List<RunReportResponse> gReportResponses = new ArrayList<>();
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
RunReportRequest request =
RunReportRequest.newBuilder()
.setProperty("properties/" + propertyId)
.addDimensions(Dimension.newBuilder().setName("pagePath"))
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
.addDateRanges(dateRangeBuilder)
.build();
// Make the request.
RunReportResponse response = analyticsData.runReport(request);
gReportResponses.add(response);
reports.put(propertyId, gReportResponses);
}
}
return reports;
}
/**
* Parses the Analytics Data API service responses.
*
* @param viewId the GA4 property id the responses belong to.
* @param responses the Analytics Data API service responses.
*/
private static List<VREAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses) {
logger.debug("parsing Response for propertyID=" + viewId);
List<VREAccessesReportRow> toReturn = new ArrayList<>();
for (RunReportResponse response : responses) {
for (Row row: response.getRowsList()) {
String dimension = row.getDimensionValues(0).getValue();
String metric = row.getMetricValues(0).getValue();
VREAccessesReportRow var = new VREAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
if (pagePath.startsWith("/group") || pagePath.startsWith("/web")) {
var.setPagePath(dimension);
validEntry = true;
}
if (validEntry) {
var.setVisitNumber(Integer.parseInt(metric));
toReturn.add(var);
}
}
}
return toReturn;
}
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
throws Exception {
String scope = infrastructureScope;
String currScope = ScopeProvider.instance.get();
ScopeProvider.instance.set(scope);
SimpleQuery query = queryFor(ServiceEndpoint.class);
query.addCondition("$resource/Profile/Category/text() eq '" + SERVICE_ENDPOINT_CATEGORY + "'");
query.addCondition("$resource/Profile/Name/text() eq '" + SERVICE_ENDPOINT_NAME + "'");
DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
List<ServiceEndpoint> toReturn = client.submit(query);
ScopeProvider.instance.set(currScope);
return toReturn;
}
/**
* Reads the Google Analytics service account credentials from the GA4AnalyticsDataService Service Endpoint in the IS.
*
* @throws Exception
*/
private static AnalyticsReportCredentials getAuthorisedApplicationInfoFromIs() throws Exception {
AnalyticsReportCredentials reportCredentials = new AnalyticsReportCredentials();
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
try {
List<ServiceEndpoint> list = getAnalyticsReportingConfigurationFromIS(context);
if(list.size() > 1) {
logger.error("Too many Service Endpoints having name " + SERVICE_ENDPOINT_NAME
+ " in this scope having Category " + SERVICE_ENDPOINT_CATEGORY);
} else if(list.size() == 0) {
logger.warn("There is no Service Endpoint having name " + SERVICE_ENDPOINT_NAME + " and Category "
+ SERVICE_ENDPOINT_CATEGORY + " in this context: " + context);
} else {
for(ServiceEndpoint res : list) {
Group<AccessPoint> apGroup = res.profile().accessPoints();
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
AccessPoint found = accessPoints[0];
reportCredentials.setClientEmail(found.username());
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
for(Property prop : found.properties()) {
if(prop.name().compareTo(AP_VIEWS_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
String[] views = decryptedValue.split(";");
reportCredentials.setViewIds(Arrays.asList(views));
}
if(prop.name().compareTo(AP_CLIENT_ID) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setClientId(decryptedValue);
}
if(prop.name().compareTo(AP_PRIVATEKEY_ID_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setPrivateKeyId(decryptedValue);
}
}
}
}
} catch(Exception e) {
logger.error("Unable to read the Analytics credentials from the IS", e);
return null;
}
return reportCredentials;
}
private static LocalDate asLocalDate(Date date) {
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
}
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
String startDate = asLocalDate(start).format(formatter);
String endDate = asLocalDate(end).format(formatter);
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
return dateRangeBuilder;
}
}
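
A minimal driver sketch for the harvester above (illustration only, not part of the repository): it assumes the class is named JupyterAccessesHarvester, that a valid gCube context has already been set by the plugin, and that the GA4AnalyticsDataService Service Endpoint is registered in the IS.

import java.util.Calendar;
import java.util.Date;
import java.util.List;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;

public class JupyterAccessesHarvesterDriver {
    public static void main(String[] args) throws Exception {
        // Harvest the last month; the plugin normally drives this with its own aggregation window.
        Calendar calendar = Calendar.getInstance();
        Date end = calendar.getTime();
        calendar.add(Calendar.MONTH, -1);
        Date start = calendar.getTime();
        // The constructor eagerly fetches all page accesses from the GA4 Data API (class name assumed).
        JupyterAccessesHarvester harvester = new JupyterAccessesHarvester(start, end);
        // Records are filtered against the currently set gCube context.
        List<AccountingRecord> records = harvester.getAccountingRecords();
        for (AccountingRecord record : records) {
            System.out.println(record.getDimension().getId() + " : " + record.getMeasure());
        }
    }
}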

View File

@ -19,7 +19,7 @@ import org.gcube.accounting.datamodel.AggregatedUsageRecord;
import org.gcube.accounting.datamodel.aggregation.AggregatedJobUsageRecord;
import org.gcube.accounting.datamodel.aggregation.AggregatedServiceUsageRecord;
import org.gcube.accounting.datamodel.usagerecords.ServiceUsageRecord;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.utils.DateUtils;
import org.gcube.dataharvest.utils.Utils;
@ -28,7 +28,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
public class MethodInvocationHarvester extends BasicHarvester {
@ -72,7 +71,7 @@ public class MethodInvocationHarvester extends BasicHarvester {
AggregatedServiceUsageRecord.class, temporalConstraint, filters, contexts, true);
}
ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
Dimension dimension = getDimension(HarvestedDataKey.METHOD_INVOCATIONS);
if(result != null) {

View File

@ -0,0 +1,313 @@
package org.gcube.dataharvest.harvester;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.encryption.encrypter.StringEncrypter;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
import org.gcube.common.resources.gcore.utils.Group;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.AnalyticsReportCredentials;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.datamodel.VREAccessesReportRow;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.DateRange.Builder;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.ServiceAccountCredentials;
/**
*
* @author Giancarlo Panichi (ISTI CNR)
*
*/
public class RStudioAccessesHarvester extends BasicHarvester {
private static Logger logger = LoggerFactory.getLogger(RStudioAccessesHarvester.class);
private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
private static final String AP_VIEWS_PROPERTY = "views";
private static final String AP_CLIENT_ID = "client_id";
private static final String AP_PRIVATEKEY_ID_PROPERTY = "private_key_id";
private List<VREAccessesReportRow> vreAccesses;
public RStudioAccessesHarvester(Date start, Date end) throws Exception {
super(start, end);
logger.debug("RStudioAccessHArvester: {}, {}", start, end);
vreAccesses = getAllAccesses(start, end);
}
@Override
public List<AccountingRecord> getAccountingRecords() throws Exception {
try {
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
int measure = 0;
ScopeBean scopeBean = new ScopeBean(context);
String lowerCasedContext = scopeBean.name().toLowerCase();
logger.debug("RStudioAccessHArvester lowerCasedContext: {}", lowerCasedContext);
for (VREAccessesReportRow row : vreAccesses) {
String pagePath = row.getPagePath() == null ? "" : row.getPagePath().toLowerCase();
if (!pagePath.isEmpty()) {
if (pagePath.contains(lowerCasedContext)) {
if (!pagePath.contains("catalogue")) {
if (pagePath.contains("rstudio") || pagePath.contains("r-studio")) {
logger.trace("Matched rstudio or rstudio ({}) : {}", lowerCasedContext, pagePath);
measure += row.getVisitNumber();
}
}
}
}
}
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
if (measure > 0) {
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant,
getDimension(HarvestedDataKey.RSTUDIO_ACCESSES), (long) measure);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
accountingRecords.add(ar);
}
return accountingRecords;
} catch (Exception e) {
throw e;
}
}
/**
*
* @return a list of {@link VREAccessesReportRow} objects containing the
* pagePath and the visit number e.g. VREAccessesReportRow
* [pagePath=/group/agroclimaticmodelling/add-new-users,
* visitNumber=1] VREAccessesReportRow
* [pagePath=/group/agroclimaticmodelling/administration,
* visitNumber=2] VREAccessesReportRow
* [pagePath=/group/agroclimaticmodelling/agroclimaticmodelling,
* visitNumber=39]
*/
private static List<VREAccessesReportRow> getAllAccesses(Date start, Date end) throws Exception {
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
logger.debug("Getting accesses in this time range {}", dateRangeBuilder.toString());
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
logger.debug("Getting credentials credentialsFromD4S");
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
logger.debug("initializeAnalyticsReporting service settings");
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
List<VREAccessesReportRow> totalAccesses = new ArrayList<>();
for(String view : responses.keySet()) {
List<VREAccessesReportRow> viewReport = parseResponse(view, responses.get(view));
logger.debug("Got {} entries from view id={}", viewReport.size(), view);
totalAccesses.addAll(viewReport);
}
logger.debug("Merged in {} total entries from all views", totalAccesses.size());
return totalAccesses;
}
/**
* Initializes a Google Analytics Data API settings object.
*
* @return the settings for an authorized Google Analytics Data API client
* @throws IOException
*/
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
return BetaAnalyticsDataSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
.build();
}
/**
* Queries the Analytics Data API service.
*
* @param betaAnalyticsDataSettings the Analytics Data API service settings.
* @param viewIDs the GA4 property ids to query.
* @param dateRangeBuilder the date range to report on.
* @return a map from property id to the report responses for that property.
* @throws IOException
*/
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
for(String propertyId : viewIDs) {
List<RunReportResponse> gReportResponses = new ArrayList<>();
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
RunReportRequest request =
RunReportRequest.newBuilder()
.setProperty("properties/" + propertyId)
.addDimensions(Dimension.newBuilder().setName("pagePath"))
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
.addDateRanges(dateRangeBuilder)
.build();
// Make the request.
RunReportResponse response = analyticsData.runReport(request);
gReportResponses.add(response);
reports.put(propertyId, gReportResponses);
}
}
return reports;
}
/**
* Parses the Analytics Data API service responses.
*
* @param viewId the GA4 property id the responses belong to.
* @param responses the Analytics Data API service responses.
*/
private static List<VREAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses) {
logger.debug("parsing Response for propertyID=" + viewId);
List<VREAccessesReportRow> toReturn = new ArrayList<>();
for (RunReportResponse response : responses) {
for (Row row: response.getRowsList()) {
String dimension = row.getDimensionValues(0).getValue();
String metric = row.getMetricValues(0).getValue();
VREAccessesReportRow var = new VREAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
if (pagePath.startsWith("/group") || pagePath.startsWith("/web")) {
var.setPagePath(dimension);
validEntry = true;
}
if (validEntry) {
var.setVisitNumber(Integer.parseInt(metric));
toReturn.add(var);
}
}
}
return toReturn;
}
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
throws Exception {
String scope = infrastructureScope;
String currScope = ScopeProvider.instance.get();
ScopeProvider.instance.set(scope);
SimpleQuery query = queryFor(ServiceEndpoint.class);
query.addCondition("$resource/Profile/Category/text() eq '" + SERVICE_ENDPOINT_CATEGORY + "'");
query.addCondition("$resource/Profile/Name/text() eq '" + SERVICE_ENDPOINT_NAME + "'");
DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
List<ServiceEndpoint> toReturn = client.submit(query);
ScopeProvider.instance.set(currScope);
return toReturn;
}
/**
* Reads the Google Analytics service account credentials from the GA4AnalyticsDataService Service Endpoint in the IS.
*
* @throws Exception
*/
private static AnalyticsReportCredentials getAuthorisedApplicationInfoFromIs() throws Exception {
AnalyticsReportCredentials reportCredentials = new AnalyticsReportCredentials();
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
try {
List<ServiceEndpoint> list = getAnalyticsReportingConfigurationFromIS(context);
if(list.size() > 1) {
logger.error("Too many Service Endpoints having name " + SERVICE_ENDPOINT_NAME
+ " in this scope having Category " + SERVICE_ENDPOINT_CATEGORY);
} else if(list.size() == 0) {
logger.warn("There is no Service Endpoint having name " + SERVICE_ENDPOINT_NAME + " and Category "
+ SERVICE_ENDPOINT_CATEGORY + " in this context: " + context);
} else {
for(ServiceEndpoint res : list) {
Group<AccessPoint> apGroup = res.profile().accessPoints();
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
AccessPoint found = accessPoints[0];
reportCredentials.setClientEmail(found.username());
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
for(Property prop : found.properties()) {
if(prop.name().compareTo(AP_VIEWS_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
String[] views = decryptedValue.split(";");
reportCredentials.setViewIds(Arrays.asList(views));
}
if(prop.name().compareTo(AP_CLIENT_ID) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setClientId(decryptedValue);
}
if(prop.name().compareTo(AP_PRIVATEKEY_ID_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setPrivateKeyId(decryptedValue);
}
}
}
}
} catch(Exception e) {
logger.error("Unable to read the Analytics credentials from the IS", e);
return null;
}
return reportCredentials;
}
private static LocalDate asLocalDate(Date date) {
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
}
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
String startDate = asLocalDate(start).format(formatter);
String endDate = asLocalDate(end).format(formatter);
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
return dateRangeBuilder;
}
}
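
The Jupyter and RStudio harvesters above share the same three-step page-path filter: the path must mention the lower-cased VRE name, must not be a catalogue page, and must mention one of the service keywords. A pure-function sketch of that predicate, extracted here only for illustration:

// Illustration only: the filtering rule the two harvesters above apply inline.
static boolean matchesServiceAccess(String pagePath, String lowerCasedContext, String... serviceKeywords) {
    if (pagePath == null || pagePath.isEmpty()) {
        return false;
    }
    String path = pagePath.toLowerCase();
    // The access must belong to the VRE and must not be one of its catalogue pages.
    if (!path.contains(lowerCasedContext) || path.contains("catalogue")) {
        return false;
    }
    // Any of the service spellings counts, e.g. "jupyter"/"jupiter" or "rstudio"/"r-studio".
    for (String keyword : serviceKeywords) {
        if (path.contains(keyword)) {
            return true;
        }
    }
    return false;
}

// matchesServiceAccess("/group/myvre/jupyter-lab", "myvre", "jupyter", "jupiter") -> true
// matchesServiceAccess("/group/myvre/catalogue", "myvre", "jupyter", "jupiter")   -> false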

View File

@ -1,17 +1,16 @@
package org.gcube.dataharvest.harvester;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.utils.Utils;
import org.json.JSONArray;
import org.json.JSONObject;
import org.gcube.portal.databook.shared.Feed;
import org.gcube.social_networking.social_networking_client_library.PostClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -19,7 +18,7 @@ import org.slf4j.LoggerFactory;
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
public class SocialInteractionsHarvester extends BasicHarvester {
private static Logger logger = LoggerFactory.getLogger(SocialInteractionsHarvester.class);
@ -27,7 +26,7 @@ public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
private int replies;
private int posts;
public static final String PATH = "/2/posts/get-posts-vre?gcube-token=";
// public static final String PATH = "/2/posts/get-posts-vre?gcube-token=";
public SocialInteractionsHarvester(Date start, Date end) throws Exception {
super(start, end);
@ -44,7 +43,7 @@ public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
getJson();
ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
AccountingRecord likesAR = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.SOCIAL_LIKES), (long) likes);
logger.debug("{} : {}", likesAR.getDimension().getId(), likesAR.getMeasure());
@ -66,30 +65,20 @@ public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
}
private void getJson() throws Exception {
JSONObject jsonObject = getJSONObject(PATH);
Boolean success = (Boolean) jsonObject.get("success");
if(success == false) {
throw new IOException("Erro while getting posts");
}
JSONArray res = jsonObject.getJSONArray("result");
int len = res.length();
PostClient postClient = new PostClient();
List<Feed> vrePosts = postClient.getPostsVRE();
likes = replies = posts = 0;
for(int i = 0; i < len; i++) {
for(Feed feed : vrePosts) {
JSONObject item = res.getJSONObject(i);
long time = item.getLong("time");
long time = feed.getTime().getTime();
if(start.getTime() <= time && time <= end.getTime()) {
posts++;
replies += item.getInt("comments_no");
likes += item.getInt("likes_no");
replies += Integer.valueOf(feed.getCommentsNo());
likes += Integer.valueOf(feed.getLikesNo());
}
}
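
Collapsing the interleaved old and new lines of this hunk, the rewritten getJson() reduces to the loop below: the hand-built REST call and JSON parsing are replaced by the typed social-networking-client-library (sketch; start and end are the harvester's window, and the counter fields are those of this class).

PostClient postClient = new PostClient();
List<Feed> vrePosts = postClient.getPostsVRE();
likes = replies = posts = 0;
for (Feed feed : vrePosts) {
    long time = feed.getTime().getTime();
    // Count only the feeds falling inside the harvesting window.
    if (start.getTime() <= time && time <= end.getTime()) {
        posts++;
        replies += Integer.valueOf(feed.getCommentsNo());
        likes += Integer.valueOf(feed.getLikesNo());
    }
}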

View File

@ -1,58 +0,0 @@
package org.gcube.dataharvest.harvester;
import java.util.Date;
import java.util.List;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.clients.exceptions.DiscoveryException;
import org.gcube.common.resources.gcore.GCoreEndpoint;
import org.gcube.dataharvest.utils.Utils;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.gcube.resources.discovery.icclient.ICFactory;
import org.json.JSONObject;
public abstract class SocialNetworkingHarvester extends BasicHarvester {
public SocialNetworkingHarvester(Date start, Date end) throws Exception {
super(start, end);
}
public static String CLASS_FORMAT = "$resource/Profile/ServiceClass/text() eq '%1s'";
public static String NAME_FORMAT = "$resource/Profile/ServiceName/text() eq '%1s'";
public static String STATUS_FORMAT = "$resource/Profile/DeploymentData/Status/text() eq 'ready'";
public static String CONTAINS_FORMAT = "$entry/@EntryName eq '%1s'";
public static String SERVICE_CLASS = "Portal";
public static String SERVICE_NAME = "SocialNetworking";
public static String ENTRY_NAME = "jersey-servlet";
protected SimpleQuery getGCoreEndpointQuery() {
return ICFactory.queryFor(GCoreEndpoint.class)
.addCondition(String.format(CLASS_FORMAT, SERVICE_CLASS))
.addCondition(String.format(NAME_FORMAT, SERVICE_NAME))
.addCondition(String.format(STATUS_FORMAT))
.addVariable("$entry", "$resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint")
.addCondition(String.format(CONTAINS_FORMAT, ENTRY_NAME))
.setResult("$entry/text()");
}
protected String getAddress() {
SimpleQuery gCoreEndpointQuery = getGCoreEndpointQuery();
List<String> addresses = ICFactory.client().submit(gCoreEndpointQuery);
if(addresses.size()==0) {
throw new DiscoveryException("No running Social Networking Service");
}
return addresses.get(0);
}
protected JSONObject getJSONObject(String path) throws Exception {
String token = SecurityTokenProvider.instance.get();
String baseAddress = getAddress();
StringBuffer sb = new StringBuffer(baseAddress);
sb.append(path);
sb.append(token);
return new JSONObject(Utils.getJson(sb.toString()));
}
}

View File

@ -4,22 +4,13 @@ import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.security.GeneralSecurityException;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@ -33,7 +24,7 @@ import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
import org.gcube.common.resources.gcore.utils.Group;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.AnalyticsReportCredentials;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.datamodel.VREAccessesReportRow;
@ -42,56 +33,48 @@ import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential.Builder;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.googleapis.util.Utils;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.client.util.PemReader;
import com.google.api.client.util.PemReader.Section;
import com.google.api.client.util.SecurityUtils;
import com.google.api.services.analyticsreporting.v4.AnalyticsReporting;
import com.google.api.services.analyticsreporting.v4.AnalyticsReportingScopes;
import com.google.api.services.analyticsreporting.v4.model.DateRange;
import com.google.api.services.analyticsreporting.v4.model.DateRangeValues;
import com.google.api.services.analyticsreporting.v4.model.GetReportsRequest;
import com.google.api.services.analyticsreporting.v4.model.GetReportsResponse;
import com.google.api.services.analyticsreporting.v4.model.Metric;
import com.google.api.services.analyticsreporting.v4.model.Report;
import com.google.api.services.analyticsreporting.v4.model.ReportRequest;
import com.google.api.services.analyticsreporting.v4.model.ReportRow;
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.DateRange.Builder;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.ServiceAccountCredentials;
public class VREAccessesHarvester extends BasicHarvester {
private static Logger logger = LoggerFactory.getLogger(VREAccessesHarvester.class);
private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
// private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
private static final String SERVICE_ENDPOINT_NAME = "BigGAnalyticsReportService";
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
private static final String AP_VIEWS_PROPERTY = "views";
private static final String AP_CLIENT_PROPERTY = "clientId";
private static final String AP_PRIVATEKEY_PROPERTY = "privateKeyId";
private static final String APPLICATION_NAME = "Analytics Reporting";
private static final String AP_CLIENT_ID = "client_id";
private static final String AP_PRIVATEKEY_ID_PROPERTY = "private_key_id";
private List<VREAccessesReportRow> vreAccesses;
public VREAccessesHarvester(Date start, Date end) throws Exception {
super(start, end);
vreAccesses = getAllAccesses(start, end);
}
@Override
public List<AccountingRecord> getAccountingRecords() throws Exception {
try {
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
int measure = 0;
ScopeBean scopeBean = new ScopeBean(context);
String lowerCasedContext = scopeBean.name().toLowerCase();
String case1 = lowerCasedContext + "/";
@ -100,28 +83,28 @@ public class VREAccessesHarvester extends BasicHarvester {
String pagePath = row.getPagePath();
if (!pagePath.contains("_redirect=/group") && !pagePath.contains("workspace")) {
if(pagePath.endsWith(lowerCasedContext)) {
logger.trace("Matched endsWith({}) : {}", lowerCasedContext, pagePath);
logger.debug("Matched endsWith({}) : {}", lowerCasedContext, pagePath);
measure += row.getVisitNumber();
} else if(pagePath.contains(case1) || pagePath.contains(case2)) {
logger.trace("Matched contains({}) || contains({}) : {}", case1, case2, pagePath);
logger.debug("Matched contains({}) || contains({}) : {}", case1, case2, pagePath);
measure += row.getVisitNumber();
}
}
}
ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.ACCESSES), (long) measure);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
accountingRecords.add(ar);
return accountingRecords;
} catch(Exception e) {
throw e;
}
}
/**
*
* @return a list of {@link VREAccessesReportRow} objects containing the pagePath and the visit number e.g.
@ -130,182 +113,113 @@ public class VREAccessesHarvester extends BasicHarvester {
* VREAccessesReportRow [pagePath=/group/agroclimaticmodelling/agroclimaticmodelling, visitNumber=39]
*/
private static List<VREAccessesReportRow> getAllAccesses(Date start, Date end) throws Exception {
DateRange dateRange = getDateRangeForAnalytics(start, end);
logger.trace("Getting accesses in this time range {}", dateRange.toPrettyString());
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
logger.debug("Getting accesses in this time range {}", dateRangeBuilder.toString());
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
AnalyticsReporting service = initializeAnalyticsReporting(credentialsFromD4S);
HashMap<String,List<GetReportsResponse>> responses = getReportResponses(service, credentialsFromD4S.getViewIds(), dateRange);
List<VREAccessesReportRow> totalAccesses = new ArrayList<>();
logger.debug("Getting credentials credentialsFromD4S");
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
logger.debug("initializeAnalyticsReporting service settings");
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
List<VREAccessesReportRow> totalAccesses = new ArrayList<>();
for(String view : responses.keySet()) {
List<VREAccessesReportRow> viewReport = parseResponse(view, responses.get(view));
logger.trace("Got {} entries from view id={}", viewReport.size(), view);
logger.debug("Got {} entries from view id={}", viewReport.size(), view);
totalAccesses.addAll(viewReport);
}
logger.trace("Merged in {} total entries from all views", totalAccesses.size());
logger.debug("Merged in {} total entries from all views", totalAccesses.size());
return totalAccesses;
}
/**
* Initializes an Analytics Reporting API V4 service object.
* Initializes a Google Analytics Data API service object.
*
* @return An authorized Analytics Reporting API V4 service object.
* @return An authorized Google Analytics Data API
* @throws IOException
* @throws GeneralSecurityException
*/
private static AnalyticsReporting initializeAnalyticsReporting(AnalyticsReportCredentials cred)
throws GeneralSecurityException, IOException {
HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
GoogleCredential credential = fromD4SServiceEndpoint(cred).createScoped(AnalyticsReportingScopes.all());
// Construct the Analytics Reporting service object.
return new AnalyticsReporting.Builder(httpTransport, JSON_FACTORY, credential)
.setApplicationName(APPLICATION_NAME).build();
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
return BetaAnalyticsDataSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
.build();
}
/**
* Queries the Analytics Reporting API V4.
* Queries Analytics Data API service
*
* @param service An authorized Analytics Reporting API V4 service object.
* @return GetReportResponse The Analytics Reporting API V4 response.
* @param betaAnalyticsDataSettings the Analytics Data API service settings.
* @return a map from property id to the report responses for that property.
* @throws IOException
*/
private static HashMap<String,List<GetReportsResponse>> getReportResponses(AnalyticsReporting service,
List<String> viewIDs, DateRange dateRange) throws IOException {
HashMap<String,List<GetReportsResponse>> reports = new HashMap<>();
// Create the Metrics object.
Metric sessions = new Metric().setExpression("ga:pageviews").setAlias("pages");
com.google.api.services.analyticsreporting.v4.model.Dimension pageTitle = new com.google.api.services.analyticsreporting.v4.model.Dimension().setName("ga:pagePath");
for(String view : viewIDs) {
List<GetReportsResponse> gReportResponses = new ArrayList<>();
logger.info("Getting data from Google Analytics for viewid: " + view);
boolean iterateMorePages = true;
String nextPageToken = null;
while (iterateMorePages) {
// Create the ReportRequest object.
ReportRequest request = new ReportRequest().setViewId(view.trim()).setDateRanges(Arrays.asList(dateRange))
.setMetrics(Arrays.asList(sessions)).setDimensions(Arrays.asList(pageTitle));
request.setPageSize(1000);
request.setPageToken(nextPageToken);
ArrayList<ReportRequest> requests = new ArrayList<ReportRequest>();
requests.add(request);
// Create the GetReportsRequest object.
GetReportsRequest getReport = new GetReportsRequest().setReportRequests(requests);
// Call the batchGet method.
GetReportsResponse response = service.reports().batchGet(getReport).execute();
nextPageToken = response.getReports().get(0).getNextPageToken();
iterateMorePages = (nextPageToken != null);
logger.debug("got nextPageToken: "+nextPageToken);
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
for(String propertyId : viewIDs) {
List<RunReportResponse> gReportResponses = new ArrayList<>();
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
RunReportRequest request =
RunReportRequest.newBuilder()
.setProperty("properties/" + propertyId)
.addDimensions(Dimension.newBuilder().setName("pagePath"))
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
.addDateRanges(dateRangeBuilder)
.build();
// Make the request.
RunReportResponse response = analyticsData.runReport(request);
gReportResponses.add(response);
// Iterate through every row of the API response.
// for (Row row : response.getRowsList()) {
// System.out.printf(
// "%s, %s%n", row.getDimensionValues(0).getValue(), row.getMetricValues(0).getValue());
// }
reports.put(propertyId, gReportResponses);
}
reports.put(view, gReportResponses);
}
// Return the response.
return reports;
}
/**
* Parses and prints the Analytics Reporting API V4 response.
* Parses the Analytics Data API service responses.
*
* @param response An Analytics Reporting API V4 response.
* @param response An Analytics Data API service response.
*/
/**
* Parses and prints the Analytics Reporting API V4 response.
*
* @param response An Analytics Reporting API V4 response.
*/
private static List<VREAccessesReportRow> parseResponse(String viewId, List<GetReportsResponse> responses) {
logger.debug("parsing Response for " + viewId);
private static List<VREAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses) {
logger.debug("parsing Response for propertyID=" + viewId);
List<VREAccessesReportRow> toReturn = new ArrayList<>();
for (GetReportsResponse response : responses) {
for (Report report: response.getReports()) {
List<ReportRow> rows = report.getData().getRows();
if (rows == null) {
logger.warn("No data found for " + viewId);
for (RunReportResponse response : responses) {
for (Row row: response.getRowsList()) {
String dimension = row.getDimensionValues(0).getValue();
String metric = row.getMetricValues(0).getValue();
VREAccessesReportRow var = new VREAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
if (pagePath.startsWith("/group") || pagePath.startsWith("/web")) {
var.setPagePath(dimension);
validEntry = true;
}
else {
for (ReportRow row: rows) {
String dimension = row.getDimensions().get(0);
DateRangeValues metric = row.getMetrics().get(0);
VREAccessesReportRow var = new VREAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
if (pagePath.startsWith("/group") || pagePath.startsWith("/web")) {
var.setPagePath(dimension);
validEntry = true;
}
if (validEntry) {
var.setVisitNumber(Integer.parseInt(metric.getValues().get(0)));
toReturn.add(var);
}
}
if (validEntry) {
var.setVisitNumber(Integer.parseInt(metric));
toReturn.add(var);
}
//System.out.printf("%s, %s%n", row.getDimensionValues(0).getValue(), row.getMetricValues(0).getValue());
}
}
return toReturn;
}
private static GoogleCredential fromD4SServiceEndpoint(AnalyticsReportCredentials cred) throws IOException {
String clientId = cred.getClientId();
String clientEmail = cred.getClientEmail();
String privateKeyPem = cred.getPrivateKeyPem();
String privateKeyId = cred.getPrivateKeyId();
String tokenUri = cred.getTokenUri();
String projectId = cred.getProjectId();
if(clientId == null || clientEmail == null || privateKeyPem == null || privateKeyId == null) {
throw new IOException("Error reading service account credential from stream, "
+ "expecting 'client_id', 'client_email', 'private_key' and 'private_key_id'.");
}
PrivateKey privateKey = privateKeyFromPkcs8(privateKeyPem);
Collection<String> emptyScopes = Collections.emptyList();
Builder credentialBuilder = new GoogleCredential.Builder().setTransport(Utils.getDefaultTransport())
.setJsonFactory(Utils.getDefaultJsonFactory()).setServiceAccountId(clientEmail)
.setServiceAccountScopes(emptyScopes).setServiceAccountPrivateKey(privateKey)
.setServiceAccountPrivateKeyId(privateKeyId);
if(tokenUri != null) {
credentialBuilder.setTokenServerEncodedUrl(tokenUri);
}
if(projectId != null) {
credentialBuilder.setServiceAccountProjectId(projectId);
}
// Don't do a refresh at this point, as it will always fail before the scopes are added.
return credentialBuilder.build();
}
private static PrivateKey privateKeyFromPkcs8(String privateKeyPem) throws IOException {
Reader reader = new StringReader(privateKeyPem);
Section section = PemReader.readFirstSectionAndClose(reader, "PRIVATE KEY");
if(section == null) {
throw new IOException("Invalid PKCS8 data.");
}
byte[] bytes = section.getBase64DecodedBytes();
PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(bytes);
Exception unexpectedException = null;
try {
KeyFactory keyFactory = SecurityUtils.getRsaKeyFactory();
PrivateKey privateKey = keyFactory.generatePrivate(keySpec);
return privateKey;
} catch(NoSuchAlgorithmException exception) {
unexpectedException = exception;
} catch(InvalidKeySpecException exception) {
unexpectedException = exception;
}
throw new IOException("Unexpected exception reading PKCS data", unexpectedException);
}
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
throws Exception {
String scope = infrastructureScope;
@ -319,14 +233,13 @@ public class VREAccessesHarvester extends BasicHarvester {
ScopeProvider.instance.set(currScope);
return toReturn;
}
/**
* l
* @throws Exception
*/
private static AnalyticsReportCredentials getAuthorisedApplicationInfoFromIs() throws Exception {
AnalyticsReportCredentials reportCredentials = new AnalyticsReportCredentials();
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
try {
List<ServiceEndpoint> list = getAnalyticsReportingConfigurationFromIS(context);
@ -337,26 +250,26 @@ public class VREAccessesHarvester extends BasicHarvester {
logger.warn("There is no Service Endpoint having name " + SERVICE_ENDPOINT_NAME + " and Category "
+ SERVICE_ENDPOINT_CATEGORY + " in this context: " + context);
} else {
for(ServiceEndpoint res : list) {
reportCredentials.setTokenUri(res.profile().runtime().hostedOn());
Group<AccessPoint> apGroup = res.profile().accessPoints();
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
AccessPoint found = accessPoints[0];
reportCredentials.setClientEmail(found.address());
reportCredentials.setProjectId(found.username());
reportCredentials.setPrivateKeyPem(StringEncrypter.getEncrypter().decrypt(found.password()));
reportCredentials.setClientEmail(found.username());
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
for(Property prop : found.properties()) {
if(prop.name().compareTo(AP_VIEWS_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
String[] views = decryptedValue.split(";");
reportCredentials.setViewIds(Arrays.asList(views));
}
if(prop.name().compareTo(AP_CLIENT_PROPERTY) == 0) {
if(prop.name().compareTo(AP_CLIENT_ID) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setClientId(decryptedValue);
}
if(prop.name().compareTo(AP_PRIVATEKEY_PROPERTY) == 0) {
if(prop.name().compareTo(AP_PRIVATEKEY_ID_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setPrivateKeyId(decryptedValue);
}
@ -369,19 +282,18 @@ public class VREAccessesHarvester extends BasicHarvester {
}
return reportCredentials;
}
private static LocalDate asLocalDate(Date date) {
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
}
private static DateRange getDateRangeForAnalytics(Date start, Date end) {
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
String startDate = asLocalDate(start).format(formatter);
String endDate = asLocalDate(end).format(formatter);
DateRange dateRange = new DateRange();// date format `yyyy-MM-dd`
dateRange.setStartDate(startDate);
dateRange.setEndDate(endDate);
return dateRange;
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
return dateRangeBuilder;
}
}
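
This hunk is the heart of the migration from the Analytics Reporting API v4 to the GA4 Data API. The essential mapping, sketched in isolation (assumes a configured BetaAnalyticsDataSettings named settings and a GA4 propertyId; the explicit page-token loop of the old API is dropped, as in the code above):

try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(settings)) {
    RunReportRequest request = RunReportRequest.newBuilder()
            .setProperty("properties/" + propertyId)                     // was: ReportRequest.setViewId(viewId)
            .addDimensions(Dimension.newBuilder().setName("pagePath"))   // was: ga:pagePath
            .addMetrics(Metric.newBuilder().setName("screenPageViews"))  // was: ga:pageviews
            .addDateRanges(DateRange.newBuilder().setStartDate("2024-01-01").setEndDate("2024-01-31"))
            .build();
    RunReportResponse response = analyticsData.runReport(request);
}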

View File

@ -1,15 +1,14 @@
package org.gcube.dataharvest.harvester;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.json.JSONObject;
import org.gcube.social_networking.social_networking_client_library.UserClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -17,7 +16,7 @@ import org.slf4j.LoggerFactory;
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
public class VREUsersHarvester extends SocialNetworkingHarvester {
public class VREUsersHarvester extends BasicHarvester {
private static Logger logger = LoggerFactory.getLogger(VREUsersHarvester.class);
@ -31,12 +30,13 @@ public class VREUsersHarvester extends SocialNetworkingHarvester {
@Override
public List<AccountingRecord> getAccountingRecords() throws Exception {
try {
// String context = Utils.getCurrentContext();
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
int measure = get();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.USERS), (long) measure);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
@ -50,17 +50,8 @@ public class VREUsersHarvester extends SocialNetworkingHarvester {
}
private int get() throws Exception {
JSONObject jsonObject = getJSONObject(PATH);
int userNumber = 0;
Boolean success = (Boolean) jsonObject.get("success");
if(success == false) {
throw new IOException("Erro while getting VRE Users");
}
userNumber = jsonObject.getJSONArray("result").length();
return userNumber;
UserClient userClient = new UserClient();
return userClient.getAllUsernamesContext().size();
}
}
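
The same library swap as in SocialInteractionsHarvester: the VRE user count now comes from the typed client rather than a hand-built REST call. In isolation (sketch; the current gCube context identifies the VRE):

UserClient userClient = new UserClient();
// Number of usernames registered in the current context (VRE).
int userNumber = userClient.getAllUsernamesContext().size();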

View File

@ -1,197 +0,0 @@
package org.gcube.dataharvest.harvester.sobigdata;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.SortedSet;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.storagehub.client.dsl.ContainerType;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.client.dsl.ItemContainer;
import org.gcube.common.storagehub.client.dsl.ListResolverTyped;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import org.gcube.common.storagehub.model.items.FolderItem;
import org.gcube.common.storagehub.model.items.Item;
import org.gcube.common.storagehub.model.items.nodes.Accounting;
import org.gcube.common.storagehub.model.items.nodes.accounting.AccountEntry;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.utils.DateUtils;
import org.gcube.dataharvest.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The Class DataMethodDownloadHarvester.
*
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
* @author Francesco Mangiacrapa (ISTI - CNR)
*/
public class DataMethodDownloadHarvester extends SoBigDataHarvester {
private static Logger logger = LoggerFactory.getLogger(DataMethodDownloadHarvester.class);
private int count = 0;
/**
* Instantiates a new data method download harvester.
*
* @param start the start
* @param end the end
* @param catalogueContext the catalogue context
* @param contexts the contexts
* @throws ParseException the parse exception
*/
public DataMethodDownloadHarvester(Date start, Date end, SortedSet<String> contexts) throws Exception {
super(start, end, contexts);
}
/* (non-Javadoc)
* @see org.gcube.dataharvest.harvester.BasicHarvester#getData()
*/
@Override
public List<AccountingRecord> getAccountingRecords() throws Exception {
String defaultContext = Utils.getCurrentContext();
logger.debug("The context is {}", defaultContext);
try {
/*
String vreName = getVRENameToHL(defaultContext);
logger.debug("Getting VRE Name to HL from context/scope returns {} ", vreName);
String user = vreName + "-Manager";
logger.debug("Using user '{}' to getHome from HL", user);
//Getting HL instance and home for VRE MANAGER
HomeManager manager = HomeLibrary.getHomeManagerFactory().getHomeManager();
@SuppressWarnings("deprecation")
Home home = manager.getHome(user);
JCRWorkspace ws = (JCRWorkspace) home.getWorkspace();
String path = "/Workspace/MySpecialFolders/" + vreName;
logger.debug("Getting item by Path {}", path);
JCRWorkspaceItem item = (JCRWorkspaceItem) ws.getItemByPath(path);
*/
StorageHubClient storageHubClient = new StorageHubClient();
FolderContainer vreFolderContainer = storageHubClient.openVREFolder();
FolderItem vreFolderItem = vreFolderContainer.get();
logger.debug("Analyzing {} in the period [{} to {}] starting from root {}", defaultContext,
DateUtils.format(start), DateUtils.format(end), vreFolderItem.getName());
ScopeDescriptor defaultScopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
AccountingRecord defaultHarvesteData = new AccountingRecord(defaultScopeDescriptor, instant, getDimension(HarvestedDataKey.DATA_METHOD_DOWNLOAD), (long) count);
logger.debug("{} : {}", defaultHarvesteData.getDimension().getId(), defaultHarvesteData.getMeasure());
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
ListResolverTyped listResolverTyped = vreFolderContainer.list();
List<ItemContainer<? extends Item>> containers = listResolverTyped.includeHidden().getContainers();
for(ItemContainer<? extends Item> itemContainer : containers) {
count = 0; //resettings the counter
//HarvestedData harvestedData;
//Getting statistics for folder
if(itemContainer.getType() == ContainerType.FOLDER) {
Item item = itemContainer.get();
logger.debug("Getting statistics for folder {}", item.getName());
getStats(itemContainer, start, end);
String normalizedName = item.getName().replaceAll("[^A-Za-z0-9]", "");
String context = mapWsFolderNameToVRE.get(normalizedName);
//Checking if it is a VRE name to right accounting...
if(context != null && !context.isEmpty()) {
logger.debug("Found context '{}' matching with normalized VRE name {} ", context, normalizedName);
ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor(context);
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.DATA_METHOD_DOWNLOAD), (long) count);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
accountingRecords.add(ar);
} else {
logger.debug(
"No scope found matching the folder name {}, accounting its stats in the default context {}",
normalizedName, defaultContext);
//INCREASING THE DEFAULT CONTEXT COUNTER...
defaultHarvesteData.setMeasure(defaultHarvesteData.getMeasure() + count);
logger.trace("Increased default context stats {}", defaultHarvesteData);
}
}
}
//ADDING DEFAULT ACCOUNTING
accountingRecords.add(defaultHarvesteData);
logger.debug("In the period [from {} to {} ] returning workspace accouting data {}", DateUtils.format(start),
DateUtils.format(end), accountingRecords);
return accountingRecords;
} catch(Exception e) {
throw e;
}
}
/**
* Gets the stats.
*
* @param baseItem the base item
* @param start the start
* @param end the end
* @return the stats
* @throws InternalErrorException the internal error exception
*/
private void getStats(ItemContainer<? extends Item> itemContainer, Date start, Date end) throws Exception {
if(itemContainer.getType() == ContainerType.FOLDER) {
ListResolverTyped listResolverTyped = ((FolderContainer)itemContainer).list();
List<ItemContainer<? extends Item>> containers = listResolverTyped.includeHidden().getContainers();
for(ItemContainer<? extends Item> itemCont : containers) {
getStats(itemCont , start, end);
}
} else {
try {
Accounting accounting = itemContainer.get().getAccounting();
for(AccountEntry entry : accounting.getEntries()) {
switch(entry.getType()) {
case CREATE:
case UPDATE:
case READ:
Calendar calendar = entry.getDate();
if(calendar.after(DateUtils.dateToCalendar(start))
&& calendar.before(DateUtils.dateToCalendar(end))) {
count++;
}
break;
default:
break;
}
}
} catch(Exception e) {
throw e;
}
}
}
}

View File

@ -1,5 +1,11 @@
package org.gcube.dataharvest.harvester.sobigdata;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Date;
@ -12,7 +18,7 @@ import java.util.SortedSet;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.utils.DateUtils;
import org.gcube.dataharvest.utils.Utils;
@ -45,7 +51,6 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
*
* @param start the start
* @param end the end
* @param catalogueContext the catalogue context
* @param contexts the contexts. They are the VREs
* @throws Exception the exception
*/
@ -153,7 +158,7 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
logger.trace("The context {} has count ", catalogueContext, catalogueContextCount);
ScopeDescriptor catalogueScopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor(catalogueContext);
ScopeDescriptor catalogueScopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(catalogueContext);
Dimension dimension = getDimension(harvestKey);
AccountingRecord ar = new AccountingRecord(catalogueScopeDescriptor, instant, dimension, (long) catalogueContextCount);
@ -161,7 +166,7 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
for(String key : counter.keySet()) {
logger.trace("The group {} has count {}", key, counter.get(key));
ScopeDescriptor sd = AccountingDataHarvesterPlugin.getScopeDescriptor(mapCatalogueGroupToVRE.get(key));
ScopeDescriptor sd = AccountingDashboardHarvesterPlugin.getScopeDescriptor(mapCatalogueGroupToVRE.get(key));
AccountingRecord accountingRecord = new AccountingRecord(sd, instant, dimension, (long) counter.get(key));
accountingRecords.add(accountingRecord);
}
@ -211,10 +216,27 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
query += "q=" + URLEncoder.encode(q, UTF_8_CHARASET) + "&wt=json&indent=true&rows=" + ROWS;
query += flValue != null && !flValue.isEmpty() ? "&fl=" + URLEncoder.encode(flValue, UTF_8_CHARASET) : "";
logger.debug("\nPerforming query {}", query);
String jsonResult = Utils.getJson(query);
String jsonResult = requestJson(query);
logger.trace("Response is {}", jsonResult);
return jsonResult;
}
public String requestJson(String url) throws MalformedURLException, IOException {
URL address = new URL(url);
HttpURLConnection connection = (HttpURLConnection) address.openConnection();
StringBuilder json = new StringBuilder();
// Read and trim every line of the response body, as the previous string-concatenation loop did.
try (BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream(), UTF_8_CHARASET))) {
String line;
while((line = reader.readLine()) != null) {
json.append(line.trim());
}
} finally {
connection.disconnect();
}
return json.toString();
}
}
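
For reference, a sketch of how the requestJson(String) helper is driven by getJsonResult() above: a Solr select URL with an encoded q parameter (the base URL and filter below are hypothetical; UTF_8_CHARASET and ROWS are the class constants used above):

String baseUrl = "https://solr.example.org/solr/catalogue/select?";  // hypothetical endpoint
String q = "groups:\"myvre-group\"";                                 // hypothetical filter
String query = baseUrl + "q=" + URLEncoder.encode(q, UTF_8_CHARASET) + "&wt=json&indent=true&rows=" + ROWS;
String jsonResult = requestJson(query);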

View File

@ -12,7 +12,7 @@ import java.util.SortedSet;
import java.util.TreeSet;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.harvester.BasicHarvester;
import org.gcube.dataharvest.utils.Utils;
@ -52,8 +52,7 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
*
* @param start the start
* @param end the end
* @param catalogueContext the catalogue context
* @param vreScopes the contexts
* @param contexts the contexts. They are the VREs
* @throws ParseException the parse exception
*/
public SoBigDataHarvester(Date start, Date end, SortedSet<String> contexts) throws Exception {
@ -78,7 +77,7 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
* @throws ObjectNotFound
*/
protected void initMappingMaps() throws ObjectNotFound, Exception {
Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
Properties properties = AccountingDashboardHarvesterPlugin.getConfigParameters();
Set<String> keys = properties.stringPropertyNames();
mapSystemTypeToDBEntry = new HashMap<String,String>();

View File

@ -18,7 +18,7 @@ import org.gcube.accounting.analytics.persistence.AccountingPersistenceQueryFact
import org.gcube.accounting.datamodel.AggregatedUsageRecord;
import org.gcube.accounting.datamodel.aggregation.AggregatedServiceUsageRecord;
import org.gcube.accounting.datamodel.usagerecords.ServiceUsageRecord;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.harvester.BasicHarvester;
import org.gcube.dataharvest.utils.DateUtils;
@ -109,7 +109,7 @@ public class TagMeMethodInvocationHarvester extends BasicHarvester {
}
ScopeDescriptor scopeDescriptor = AccountingDataHarvesterPlugin.getScopeDescriptor();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
Dimension dimension = getDimension(HarvestedDataKey.METHOD_INVOCATIONS);
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, dimension, numberOfInvocation);

View File

@ -1,8 +1,5 @@
package org.gcube.dataharvest.utils;
import static org.gcube.common.authorization.client.Constants.authorizationService;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
@@ -10,10 +7,15 @@ import java.util.Properties;
import java.util.SortedSet;
import java.util.TreeSet;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.authorization.library.provider.UserInfo;
import javax.ws.rs.InternalServerErrorException;
import org.gcube.common.authorization.utils.manager.SecretManagerProvider;
import org.gcube.common.authorization.utils.secret.JWTSecret;
import org.gcube.common.authorization.utils.secret.Secret;
import org.gcube.common.keycloak.KeycloakClientFactory;
import org.gcube.common.keycloak.model.TokenResponse;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.resourcemanagement.support.server.managers.context.ContextManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -25,88 +27,98 @@ public class ContextAuthorization {
private static Logger logger = LoggerFactory.getLogger(ContextAuthorization.class);
public static final String USERNAME = "USERNAME";
public static final String DEFAULT_USERNAME = "luca.frosini";
public static final String CLIENT_ID = "accounting-dashboard-harvester-se-plugin";
public static final String SERVICE_NAME = "SERVICE_NAME";
public static final String DEFAULT_SERVICE_NAME = "accounting-harvester";
protected String clientSecret;
/**
* Contains Context full name as key and Token as Value
*/
protected Map<String,String> contextToToken;
protected Map<String,Secret> contextToToken;
/**
* Contains Token as key and Context full name as Value
*/
protected Map<String,String> tokenToContext;
protected Map<Secret,String> tokenToContext;
protected Properties properties;
/**
* Contains Properties used to generate tokens
*/
public ContextAuthorization() throws Exception {
public ContextAuthorization(Properties properties) throws Exception {
this.properties = properties;
this.contextToToken = new HashMap<>();
this.tokenToContext = new HashMap<>();
retrieveContextsAndTokens();
}
public String generateTokenForContext(String context, Properties properties) throws Exception {
if(properties==null) {
properties = AccountingDataHarvesterPlugin.getProperties().get();
/**
* Contains Properties used to generate tokens
*/
public ContextAuthorization() throws Exception {
this.properties = AccountingDashboardHarvesterPlugin.getConfigParameters();
this.contextToToken = new HashMap<>();
this.tokenToContext = new HashMap<>();
retrieveContextsAndTokens();
}
private String getClientSecret(String context) {
try {
if(clientSecret==null) {
int index = context.indexOf('/', 1);
String root = context.substring(0, index == -1 ? context.length() : index);
clientSecret = properties.getProperty(root);
}
return clientSecret;
} catch(Exception e) {
throw new InternalServerErrorException(
"Unable to retrieve Application Token for context " + SecretManagerProvider.instance.get().getContext(), e);
}
logger.info("Going to generate Token for Context {}", context);
UserInfo userInfo = new UserInfo(properties.getProperty(USERNAME, DEFAULT_USERNAME),
new ArrayList<>());
String userToken = authorizationService().generateUserToken(userInfo, context);
SecurityTokenProvider.instance.set(userToken);
String generatedToken = authorizationService()
.generateExternalServiceToken(properties.getProperty(SERVICE_NAME, DEFAULT_SERVICE_NAME));
logger.trace("Token for Context {} is {}", context, generatedToken);
return generatedToken;
}
private TokenResponse getJWTAccessToken(String context) throws Exception {
TokenResponse tr = KeycloakClientFactory.newInstance().queryUMAToken(context, CLIENT_ID, getClientSecret(context), context, null);
return tr;
}
public Secret getCatalogueSecretForContext(String context) throws Exception {
TokenResponse tr = getJWTAccessToken(context);
Secret secret = new JWTSecret(tr.getAccessToken());
return secret;
}
protected void retrieveContextsAndTokens() throws Exception {
String initialToken = SecurityTokenProvider.instance.get();
try {
Properties properties = AccountingDataHarvesterPlugin.getProperties().get();
LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
for(String scope : map.keySet()) {
try {
String context = map.get(scope).toString();
String generatedToken = generateTokenForContext(context, properties);
Secret secret = getCatalogueSecretForContext(context);
contextToToken.put(context, generatedToken);
tokenToContext.put(generatedToken, context);
contextToToken.put(context, secret);
tokenToContext.put(secret, context);
} catch(Exception e) {
logger.error("Error while elaborating {}", scope, e);
throw e;
} finally {
SecurityTokenProvider.instance.reset();
}
// throw e;
}
}
} catch(Exception ex) {
throw ex;
} finally {
SecurityTokenProvider.instance.set(initialToken);
}
}
}
public String getTokenForContext(String contextFullName) {
return contextToToken.get(contextFullName);
public Secret getSecretForContext(String context) {
return contextToToken.get(context);
}
public String getContextFromToken(String token) {
return tokenToContext.get(token);
public String getContextFromSecret(Secret secret) {
return tokenToContext.get(secret);
}
public SortedSet<String> getContexts() {

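Taken together, these changes replace the legacy user-token generation with Keycloak-issued JWT secrets keyed by the client secret of the infrastructure root. A hedged usage sketch built only from the methods shown in this diff (the harvesting body is a placeholder):

ContextAuthorization contextAuthorization = new ContextAuthorization();
for (String context : contextAuthorization.getContexts()) {
    Secret secret = contextAuthorization.getSecretForContext(context);
    Utils.setContext(secret); // installs the secret via SecretManagerProvider
    // ... run the harvesters for this context ...
}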
View File

@@ -91,19 +91,14 @@ public class DateUtils {
aggregationStartCalendar.set(Calendar.MINUTE, 0);
aggregationStartCalendar.set(Calendar.SECOND, 0);
aggregationStartCalendar.set(Calendar.MILLISECOND, 0);
logger.debug("{}", DEFAULT_DATE_FORMAT.format(aggregationStartCalendar.getTime()));
// logger.trace("{}", DEFAULT_DATE_FORMAT.format(aggregationStartCalendar.getTime()));
return aggregationStartCalendar;
}
public static Date getEndDateFromStartDate(AggregationType aggregationType, Date startDate, int offset, boolean partialHarvesting) {
public static Date getEndDateFromStartDate(AggregationType aggregationType, Date startDate, int offset) {
Calendar aggregationEndDate = getUTCCalendarInstance();
if(!partialHarvesting) {
aggregationEndDate.setTimeInMillis(startDate.getTime());
aggregationEndDate.add(aggregationType.getCalendarField(), offset);
aggregationEndDate.add(Calendar.MILLISECOND, -1);
}
aggregationEndDate.setTimeInMillis(startDate.getTime());
aggregationEndDate.add(aggregationType.getCalendarField(), offset);
return aggregationEndDate.getTime();
}
@@ -113,8 +108,6 @@
return calendar;
}
/* OLD functions of Eric Perrone (ISTI - CNR) */
public static String format(Date date) {
return DateUtils.LAUNCH_DATE_FORMAT.format(date);
}
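With the partialHarvesting flag removed, getEndDateFromStartDate no longer subtracts one millisecond: the returned date is exactly offset aggregation periods after the start, so callers now treat it as an exclusive upper bound. A hypothetical call, mirroring how the updated tests invoke it:

// end is exactly one month after start; no -1 ms adjustment anymore
Date start = DateUtils.getStartCalendar(2023, Calendar.MAY, 1).getTime();
Date end = DateUtils.getEndDateFromStartDate(AggregationType.MONTHLY, start, 1);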

View File

@@ -1,17 +1,9 @@
package org.gcube.dataharvest.utils;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import org.gcube.common.authorization.client.Constants;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.common.authorization.library.AuthorizationEntry;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.authorization.utils.manager.SecretManager;
import org.gcube.common.authorization.utils.manager.SecretManagerProvider;
import org.gcube.common.authorization.utils.secret.Secret;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -23,36 +15,16 @@ public class Utils {
private static Logger logger = LoggerFactory.getLogger(Utils.class);
public static String getJson(String url) throws MalformedURLException, IOException {
URL address = new URL(url);
HttpURLConnection connection = (HttpURLConnection) address.openConnection();
BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
String json = "";
String line = "";
while(line != null) {
line = reader.readLine();
if(line != null) {
json += line.trim();
}
}
return json;
}
public static String getCurrentContext() throws ObjectNotFound, Exception {
return getCurrentContext(SecurityTokenProvider.instance.get());
return SecretManagerProvider.instance.get().getContext();
}
public static String getCurrentContext(String token) throws ObjectNotFound, Exception {
AuthorizationEntry authorizationEntry = Constants.authorizationService().get(token);
String context = authorizationEntry.getContext();
logger.info("Context of token {} is {}", token, context);
return context;
}
public static void setContext(String token) throws ObjectNotFound, Exception {
SecurityTokenProvider.instance.set(token);
ScopeProvider.instance.set(getCurrentContext(token));
public static void setContext(Secret secret) throws Exception {
SecretManagerProvider.instance.reset();
SecretManager secretManager = new SecretManager();
SecretManagerProvider.instance.set(secretManager);
secretManager.addSecret(secret);
secretManager.set();
}
}
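For completeness, a context is now set from a Secret rather than from a token string; a minimal sketch assuming a JWT string obtained elsewhere (jwtAccessToken is a hypothetical variable):

Secret secret = new JWTSecret(jwtAccessToken); // jwtAccessToken is a placeholder
Utils.setContext(secret);
String context = Utils.getCurrentContext(); // resolved through SecretManagerProvider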

View File

@@ -0,0 +1 @@
org.gcube.dataharvest.AccountingDashboardHarvesterPlugin

View File

@@ -1 +0,0 @@
org.gcube.dataharvest.DataHarvestPluginDeclaration

View File

@@ -1,2 +1 @@
USERNAME=luca.frosini
SERVICE_NAME=accounting-harvester
/d4science.research-infrastructures.eu=XXXXXXXXXX

View File

@@ -0,0 +1 @@
/TestDateScorro.java

View File

@@ -1,849 +0,0 @@
package org.gcube.dataharvest;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.TimeUnit;
import org.gcube.accounting.accounting.summary.access.AccountingDao;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.common.scope.impl.ScopeBean.Type;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.harvester.MethodInvocationHarvester;
import org.gcube.dataharvest.harvester.SocialInteractionsHarvester;
import org.gcube.dataharvest.harvester.VREAccessesHarvester;
import org.gcube.dataharvest.harvester.VREUsersHarvester;
import org.gcube.dataharvest.harvester.sobigdata.DataMethodDownloadHarvester;
import org.gcube.dataharvest.harvester.sobigdata.ResourceCatalogueHarvester;
import org.gcube.dataharvest.harvester.sobigdata.TagMeMethodInvocationHarvester;
import org.gcube.dataharvest.utils.AggregationType;
import org.gcube.dataharvest.utils.ContextAuthorization;
import org.gcube.dataharvest.utils.ContextTest;
import org.gcube.dataharvest.utils.DateUtils;
import org.gcube.resourcemanagement.support.server.managers.context.ContextManager;
import org.gcube.vremanagement.executor.api.rest.SmartExecutor;
import org.gcube.vremanagement.executor.api.types.LaunchParameter;
import org.gcube.vremanagement.executor.api.types.Scheduling;
import org.gcube.vremanagement.executor.client.SmartExecutorClientFactory;
import org.junit.Assert;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AccountingDataHarvesterPluginTest extends ContextTest {
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterPluginTest.class);
public static final String ROOT = "/d4science.research-infrastructures.eu";
public static final String SO_BIG_VO = "/d4science.research-infrastructures.eu/SoBigData";
public static final String TAGME_VRE = "/d4science.research-infrastructures.eu/SoBigData/TagMe";
public static final String STOCK_ASSESMENT_VRE = "/d4science.research-infrastructures.eu/gCubeApps/StockAssessment";
public static SortedSet<String> getContexts() throws Exception {
SortedSet<String> contexts = new TreeSet<>();
LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
for(String scope : map.keySet()) {
try {
String context = map.get(scope).toString();
contexts.add(context);
} catch(Exception e) {
throw e;
}
}
return contexts;
}
// @Test
public void getDimensions() {
try {
ContextTest.setContextByName(ROOT);
AccountingDao dao = AccountingDao.get();
Set<Dimension> dimensionSet = dao.getDimensions();
for(Dimension d : dimensionSet) {
logger.debug("{} - {} - {} - {}", d.getId(), d.getGroup(), d.getAggregatedMeasure(), d.getLabel());
}
logger.info("End.");
} catch(Exception e) {
logger.error("", e);
}
}
// @Test
public void launch() {
try {
ContextTest.setContextByName(ROOT);
DataHarvestPluginDeclaration dataHarvestPluginDeclaration = new DataHarvestPluginDeclaration();
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(
dataHarvestPluginDeclaration);
Map<String,Object> inputs = new HashMap<>();
AggregationType aggregationType = AggregationType.MONTHLY;
inputs.put(AccountingDataHarvesterPlugin.MEASURE_TYPE_INPUT_PARAMETER, aggregationType.name());
inputs.put(AccountingDataHarvesterPlugin.GET_VRE_USERS_INPUT_PARAMETER, true);
inputs.put(AccountingDataHarvesterPlugin.RERUN_INPUT_PARAMETER, true);
inputs.put(AccountingDataHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, false);
inputs.put(AccountingDataHarvesterPlugin.PARTIAL_HARVESTING, true);
/*
Calendar from = DateUtils.getStartCalendar(2016, Calendar.SEPTEMBER, 1);
String fromDate = DateUtils.LAUNCH_DATE_FORMAT.format(from.getTime());
logger.trace("{} is {}", AccountingDataHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
inputs.put(AccountingDataHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
*/
accountingDataHarvesterPlugin.launch(inputs);
logger.info("End.");
} catch(Exception e) {
logger.error("", e);
}
}
// @Test
public void launchPluginOnSmartExecutor() {
try {
ContextTest.setContextByName(ROOT);
SmartExecutor smartExecutor = SmartExecutorClientFactory.create(DataHarvestPluginDeclaration.NAME);
Assert.assertNotNull(smartExecutor);
Map<String,Object> inputs = new HashMap<>();
AggregationType aggregationType = AggregationType.MONTHLY;
inputs.put(AccountingDataHarvesterPlugin.MEASURE_TYPE_INPUT_PARAMETER, aggregationType.name());
inputs.put(AccountingDataHarvesterPlugin.GET_VRE_USERS_INPUT_PARAMETER, true);
inputs.put(AccountingDataHarvesterPlugin.RERUN_INPUT_PARAMETER, true);
inputs.put(AccountingDataHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, false);
/*
Calendar from = DateUtils.getStartCalendar(2016, Calendar.SEPTEMBER, 1);
String fromDate = DateUtils.LAUNCH_DATE_FORMAT.format(from.getTime());
logger.trace("{} is {}", AccountingDataHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
inputs.put(AccountingDataHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
*/
//
CronExpression cronExpression = new CronExpression("0 0 10 3 1/1 ? *");
Scheduling scheduling = new Scheduling(cronExpression);
scheduling.setGlobal(false);
LaunchParameter launchParameter = new LaunchParameter(DataHarvestPluginDeclaration.NAME, inputs,
scheduling);
smartExecutor.launch(launchParameter);
logger.info("End.");
} catch(Exception e) {
logger.error("", e);
}
}
// @Test
public void launchOldData() {
try {
ContextTest.setContextByName(ROOT);
DataHarvestPluginDeclaration dataHarvestPluginDeclaration = new DataHarvestPluginDeclaration();
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(
dataHarvestPluginDeclaration);
Map<String,Object> inputs = new HashMap<>();
AggregationType aggregationType = AggregationType.MONTHLY;
inputs.put(AccountingDataHarvesterPlugin.MEASURE_TYPE_INPUT_PARAMETER, aggregationType.name());
inputs.put(AccountingDataHarvesterPlugin.GET_VRE_USERS_INPUT_PARAMETER, true);
inputs.put(AccountingDataHarvesterPlugin.RERUN_INPUT_PARAMETER, true);
inputs.put(AccountingDataHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, false);
Calendar from = DateUtils.getStartCalendar(2016, Calendar.SEPTEMBER, 1);
Calendar runbeforeDate = DateUtils.getStartCalendar(2018, Calendar.JUNE, 1);
while(from.before(runbeforeDate)) {
String fromDate = DateUtils.LAUNCH_DATE_FORMAT.format(from.getTime());
logger.trace("{} is {}", AccountingDataHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
inputs.put(AccountingDataHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
accountingDataHarvesterPlugin.launch(inputs);
from.add(aggregationType.getCalendarField(), 1);
}
logger.info("End.");
} catch(Exception e) {
logger.error("", e);
}
}
// @Test
public void launchOldDataVREAccessesHarvester() {
try {
ContextTest.setContextByName(ROOT);
// AccountingDao dao = AccountingDao.get();
DataHarvestPluginDeclaration dataHarvestPluginDeclaration = new DataHarvestPluginDeclaration();
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(
dataHarvestPluginDeclaration);
Properties properties = accountingDataHarvesterPlugin.getConfigParameters();
AccountingDataHarvesterPlugin.getProperties().set(properties);
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
AggregationType aggregationType = AggregationType.MONTHLY;
Calendar from = DateUtils.getStartCalendar(2018, Calendar.APRIL, 1);
Calendar runbeforeDate = DateUtils.getStartCalendar(2018, Calendar.JUNE, 1);
while(from.before(runbeforeDate)) {
Date start = from.getTime();
Date end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1, false);
logger.debug("Harvesting from {} to {}", DateUtils.format(start), DateUtils.format(end));
ArrayList<AccountingRecord> accountingRecords = new ArrayList<>();
VREAccessesHarvester vreAccessesHarvester = null;
for(String context : contexts) {
// Setting the token for the context
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
ScopeBean scopeBean = new ScopeBean(context);
if(vreAccessesHarvester == null) {
if(scopeBean.is(Type.INFRASTRUCTURE)) {
vreAccessesHarvester = new VREAccessesHarvester(start, end);
} else {
// This code should never be used because the scopes are sorted by fullname
ScopeBean parent = scopeBean.enclosingScope();
while(!parent.is(Type.INFRASTRUCTURE)) {
parent = parent.enclosingScope();
}
// Setting back token for the context
ContextTest.setContext(contextAuthorization.getTokenForContext(parent.toString()));
vreAccessesHarvester = new VREAccessesHarvester(start, end);
// Setting back token for the context
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
}
}
try {
if(context.startsWith(AccountingDataHarvesterPlugin.SO_BIG_DATA_VO)
&& start.before(DateUtils.getStartCalendar(2018, Calendar.APRIL, 1).getTime())) {
logger.info("Not Harvesting VREs Accesses for {} from {} to {}", context,
DateUtils.format(start), DateUtils.format(end));
} else {
// Collecting Google Analytics Data for VREs Accesses
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
}
} catch(Exception e) {
logger.error("Error harvesting Social Interactions for {}", context, e);
}
}
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end),
accountingRecords);
ContextTest.setContextByName(ROOT);
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
Thread.sleep(TimeUnit.SECONDS.toMillis(10));
from.add(aggregationType.getCalendarField(), 1);
}
ContextTest.setContextByName(ROOT);
} catch(Exception e) {
logger.error("", e);
}
logger.info("End.");
}
// @Test
public void testScopeBean() throws Exception {
ContextTest.setContextByName(ROOT);
SortedSet<String> contexts = getContexts();
AggregationType aggregationType = AggregationType.MONTHLY;
Date start = DateUtils.getStartCalendar(2018, Calendar.MARCH, 1).getTime();
// start = DateUtils.getPreviousPeriod(measureType).getTime();
Date end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1, false);
logger.info("\n\n\n");
for(String context : contexts) {
ScopeBean scopeBean = new ScopeBean(context);
// logger.debug("FullName {} - Name {}", scopeBean.toString(), scopeBean.name());
try {
if(scopeBean.is(Type.VRE) && start.equals(DateUtils.getPreviousPeriod(aggregationType, false).getTime())) {
logger.info("Harvesting (VRE Users) for {} from {} to {}", context, DateUtils.format(start),
DateUtils.format(end));
} else {
logger.info("--- Not Harvesting (VRE Users) for {} from {} to {}", context, DateUtils.format(start),
DateUtils.format(end));
}
if((context.startsWith(AccountingDataHarvesterPlugin.SO_BIG_DATA_VO)
|| context.startsWith(AccountingDataHarvesterPlugin.SO_BIG_DATA_EU_VRE)
|| context.startsWith(AccountingDataHarvesterPlugin.SO_BIG_DATA_IT_VRE))
&& start.before(DateUtils.getStartCalendar(2018, Calendar.APRIL, 1).getTime())) {
logger.info("--- Not Harvesting (SoBigData Check) for {} from {} to {}", context,
DateUtils.format(start), DateUtils.format(end));
} else {
logger.info("Harvesting (SoBigData Check) for {} from {} to {}", context, DateUtils.format(start),
DateUtils.format(end));
}
} catch(Exception e) {
logger.error("Error harvesting Social Interactions for {}", context, e);
}
}
}
// @Test
public void testVREAccessesHarvester() throws Exception {
try {
AccountingDao dao = getAccountingDao();
List<Date> starts = new ArrayList<>();
starts.add(DateUtils.getStartCalendar(2018, Calendar.SEPTEMBER, 1).getTime());
starts.add(DateUtils.getStartCalendar(2018, Calendar.OCTOBER, 1).getTime());
starts.add(DateUtils.getStartCalendar(2018, Calendar.NOVEMBER, 1).getTime());
starts.add(DateUtils.getStartCalendar(2018, Calendar.DECEMBER, 1).getTime());
starts.add(DateUtils.getStartCalendar(2019, Calendar.JANUARY, 1).getTime());
starts.add(DateUtils.getStartCalendar(2019, Calendar.FEBRUARY, 1).getTime());
starts.add(DateUtils.getStartCalendar(2019, Calendar.MARCH, 1).getTime());
AggregationType measureType = AggregationType.MONTHLY;
String[] contextFullNames = new String[] {"/d4science.research-infrastructures.eu/FARM/GRSF",
"/d4science.research-infrastructures.eu/FARM/GRSF_Admin"};
List<AccountingRecord> accountingRecords = new ArrayList<>();
for(Date start : starts) {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
ContextTest.setContextByName(ROOT);
VREAccessesHarvester vreAccessesHarvester = new VREAccessesHarvester(start, end);
for(String contextFullname : contextFullNames) {
setContextByNameAndScopeDescriptor(contextFullname);
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
logger.debug("{} - {}", contextFullname, accountingRecords);
}
}
logger.debug("{}", accountingRecords);
ContextTest.setContextByName(ROOT);
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch(Exception e) {
logger.error("", e);
throw e;
}
}
// @Test
public void testVREAccessesHarvesterAll() {
try {
ContextTest.setContextByName(ROOT);
AggregationType measureType = AggregationType.MONTHLY;
// Date start = DateUtils.getStartCalendar(2015, Calendar.FEBRUARY, 1).getTime();
// Date end = DateUtils.getStartCalendar(2019, Calendar.FEBRUARY, 1).getTime();
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
accountingDataHarvesterPlugin.getConfigParameters();
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
VREAccessesHarvester vreAccessesHarvester = null;
ArrayList<AccountingRecord> accountingRecords = new ArrayList<>();
for(String context : contexts) {
// Setting the token for the context
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
ScopeBean scopeBean = new ScopeBean(context);
if(vreAccessesHarvester == null) {
if(scopeBean.is(Type.INFRASTRUCTURE)) {
vreAccessesHarvester = new VREAccessesHarvester(start, end);
} else {
// This code should never be used because the scopes are sorted by fullname
ScopeBean parent = scopeBean.enclosingScope();
while(!parent.is(Type.INFRASTRUCTURE)) {
parent = parent.enclosingScope();
}
// Setting back token for the context
ContextTest.setContext(contextAuthorization.getTokenForContext(parent.toString()));
vreAccessesHarvester = new VREAccessesHarvester(start, end);
// Setting back token for the context
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
}
}
try {
if(context.startsWith(AccountingDataHarvesterPlugin.SO_BIG_DATA_VO)
&& start.before(DateUtils.getStartCalendar(2018, Calendar.APRIL, 1).getTime())) {
logger.info("Not Harvesting VREs Accesses for {} from {} to {}", context,
DateUtils.format(start), DateUtils.format(end));
} else {
// Collecting Google Analytics Data for VREs Accesses
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
}
} catch(Exception e) {
logger.error("Error harvesting Social Interactions for {}", context, e);
}
}
logger.debug("{}", accountingRecords);
} catch(Exception e) {
logger.error("", e);
}
}
// @Test
public void testSocialInteraction() {
try {
ContextTest.setContextByName(ROOT);
// AccountingDao dao = AccountingDao.get();
DataHarvestPluginDeclaration dataHarvestPluginDeclaration = new DataHarvestPluginDeclaration();
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(
dataHarvestPluginDeclaration);
Properties properties = accountingDataHarvesterPlugin.getConfigParameters();
AccountingDataHarvesterPlugin.getProperties().set(properties);
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = new TreeSet<>();
contexts.add("/d4science.research-infrastructures.eu/D4Research");
contexts.add("/d4science.research-infrastructures.eu/FARM/WECAFC-FIRMS");
contexts.add("/d4science.research-infrastructures.eu/gCubeApps/BlueBridgeProject");
contexts.add("/d4science.research-infrastructures.eu/gCubeApps/Parthenos");
contexts.add("/d4science.research-infrastructures.eu/gCubeApps/ScalableDataMining");
contexts.add("/d4science.research-infrastructures.eu/gCubeApps/gCube");
AggregationType aggregationType = AggregationType.MONTHLY;
Calendar from = DateUtils.getStartCalendar(2018, Calendar.JUNE, 1);
Date start = from.getTime();
Date end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1, false);
logger.debug("Harvesting Social Interaction from {} to {}", DateUtils.format(start), DateUtils.format(end));
ArrayList<AccountingRecord> accountingRecords = new ArrayList<>();
for(String context : contexts) {
// Setting the token for the context
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
try {
// Collecting info on social (posts, replies and likes)
logger.info("Going to harvest Social Interactions for {}", context);
SocialInteractionsHarvester socialHarvester = new SocialInteractionsHarvester(start, end);
List<AccountingRecord> harvested = socialHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
} catch(Exception e) {
logger.error("Error harvesting Social Interactions for {}", context, e);
}
}
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end),
accountingRecords);
ContextTest.setContextByName(ROOT);
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch(Exception e) {
logger.error("", e);
}
logger.info("End.");
}
// @Test
public void testMethodInvocation() {
try {
ContextTest.setContextByName(ROOT);
ContextAuthorization contextAuthorization = new ContextAuthorization();
String stockAssessmentToken = contextAuthorization.generateTokenForContext(STOCK_ASSESMENT_VRE, null);
ContextTest.setContext(stockAssessmentToken);
AggregationType measureType = AggregationType.MONTHLY;
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(start, end);
List<AccountingRecord> accountingRecords = methodInvocationHarvester.getAccountingRecords();
logger.debug("{}", accountingRecords);
} catch(Exception e) {
logger.error("", e);
}
}
protected AccountingDao getAccountingDao() throws ObjectNotFound, Exception {
ContextTest.setContextByName(ROOT);
AccountingDao dao = AccountingDao.get();
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
Map<String,ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
for(ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
}
AccountingDataHarvesterPlugin.scopeDescriptors.set(scopeDescriptorMap);
Set<Dimension> dimensionSet = dao.getDimensions();
Map<String,Dimension> dimensionMap = new HashMap<>();
for(Dimension dimension : dimensionSet) {
dimensionMap.put(dimension.getId(), dimension);
}
AccountingDataHarvesterPlugin.dimensions.set(dimensionMap);
return dao;
}
protected void setContextByNameAndScopeDescriptor(String contextFullName) throws ObjectNotFound, Exception {
ContextAuthorization contextAuthorization = new ContextAuthorization();
String tagMeToken = contextAuthorization.getTokenForContext(contextFullName);
ContextTest.setContext(tagMeToken);
ScopeBean scopeBean = new ScopeBean(contextFullName);
ScopeDescriptor actualScopeDescriptor = AccountingDataHarvesterPlugin.scopeDescriptors.get()
.get(contextFullName);
if(actualScopeDescriptor == null) {
actualScopeDescriptor = new ScopeDescriptor(scopeBean.name(), contextFullName);
}
AccountingDataHarvesterPlugin.scopeDescriptor.set(actualScopeDescriptor);
}
// @Test
public void testTagMeMethodInvocation() throws Exception {
try {
AccountingDao dao = getAccountingDao();
setContextByNameAndScopeDescriptor(TAGME_VRE);
List<AccountingRecord> accountingRecords = new ArrayList<>();
AggregationType measureType = AggregationType.MONTHLY;
List<Date> starts = new ArrayList<>();
starts.add(DateUtils.getStartCalendar(2018, Calendar.NOVEMBER, 1).getTime());
starts.add(DateUtils.getStartCalendar(2018, Calendar.DECEMBER, 1).getTime());
starts.add(DateUtils.getStartCalendar(2019, Calendar.JANUARY, 1).getTime());
starts.add(DateUtils.getStartCalendar(2019, Calendar.FEBRUARY, 1).getTime());
starts.add(DateUtils.getStartCalendar(2019, Calendar.MARCH, 1).getTime());
for(Date start : starts) {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
TagMeMethodInvocationHarvester methodInvocationHarvester = new TagMeMethodInvocationHarvester(start,
end);
accountingRecords.addAll(methodInvocationHarvester.getAccountingRecords());
logger.debug("{}", accountingRecords);
}
ContextTest.setContextByName(ROOT);
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch(Exception e) {
logger.error("", e);
throw e;
}
}
// @Test
public void testGetVREUsersForSpecificVRE() {
try {
ContextTest.setContextByName(ROOT);
DataHarvestPluginDeclaration dataHarvestPluginDeclaration = new DataHarvestPluginDeclaration();
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(
dataHarvestPluginDeclaration);
Properties properties = accountingDataHarvesterPlugin.getConfigParameters();
AccountingDataHarvesterPlugin.getProperties().set(properties);
// AccountingDao dao = AccountingDao.get();
ContextAuthorization contextAuthorization = new ContextAuthorization();
ContextTest.setContext(contextAuthorization
.getTokenForContext("/d4science.research-infrastructures.eu/SoBigData/SportsDataScience"));
AggregationType measureType = AggregationType.MONTHLY;
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
VREUsersHarvester vreUsersHarvester = new VREUsersHarvester(start, end);
List<AccountingRecord> harvested = vreUsersHarvester.getAccountingRecords();
logger.info("Harvested Data from {} to {} : {}", DateUtils.format(start), DateUtils.format(end), harvested);
ContextTest.setContextByName(ROOT);
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch(Exception e) {
logger.error("", e);
}
}
// @Test
public void testFilteringGenericResource() {
try {
ContextTest.setContextByName(ROOT);
//Utils.setContext(RESOURCE_CATALOGUE);
AggregationType measureType = AggregationType.MONTHLY;
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
SortedSet<String> contexts = getContexts();
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
accountingDataHarvesterPlugin.getConfigParameters();
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end,
contexts);
SortedSet<String> validContexts = resourceCatalogueHarvester.getValidContexts(contexts, SO_BIG_VO + "/");
logger.info("Valid Contexts {}", validContexts);
} catch(Exception e) {
logger.error("", e);
}
}
// @Test
public void testResourceCatalogueHarvester() {
try {
//Utils.setContext(RESOURCE_CATALOGUE);
ContextTest.setContextByName(ROOT);
AggregationType measureType = AggregationType.MONTHLY;
// Date start = DateUtils.getStartCalendar(2015, Calendar.FEBRUARY, 1).getTime();
// Date end = DateUtils.getStartCalendar(2019, Calendar.FEBRUARY, 1).getTime();
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
accountingDataHarvesterPlugin.getConfigParameters();
SortedSet<String> contexts = getContexts();
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end,
contexts);
List<AccountingRecord> data = resourceCatalogueHarvester.getAccountingRecords();
logger.debug("{}", data);
} catch(Exception e) {
logger.error("", e);
}
}
// @Test
public void testDataMethodDownloadHarvester() {
try {
//Utils.setContext(RESOURCE_CATALOGUE);
ContextTest.setContextByName(ROOT);
AggregationType measureType = AggregationType.MONTHLY;
// Date start = DateUtils.getStartCalendar(2015, Calendar.FEBRUARY, 1).getTime();
// Date end = DateUtils.getStartCalendar(2019, Calendar.FEBRUARY, 1).getTime();
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
AccountingDataHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDataHarvesterPlugin(null);
accountingDataHarvesterPlugin.getConfigParameters();
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
for(String context : contexts) {
ScopeBean scopeBean = new ScopeBean(context);
logger.debug("FullName {} - Name {}", scopeBean.toString(), scopeBean.name());
if(context.startsWith(AccountingDataHarvesterPlugin.SO_BIG_DATA_VO)) {
if(scopeBean.is(Type.VRE)) {
if(context.startsWith(TAGME_VRE)) {
continue;
}
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start,
end, contexts);
List<AccountingRecord> data = dataMethodDownloadHarvester.getAccountingRecords();
logger.debug("{}", data);
}
}
}
} catch(Exception e) {
logger.error("", e);
}
}
public static final String E_LEARNING_AREA_VRE = "/d4science.research-infrastructures.eu/SoBigData/E-Learning_Area";
// @Test
public void addMissingVREAccesses() {
try {
ContextTest.setContextByName(ROOT);
DataHarvestPluginDeclaration dataHarvestPluginDeclaration = new DataHarvestPluginDeclaration();
AccountingDataHarvesterPlugin adhp = new AccountingDataHarvesterPlugin(dataHarvestPluginDeclaration);
Properties properties = adhp.getConfigParameters();
AccountingDataHarvesterPlugin.getProperties().set(properties);
ContextAuthorization contextAuthorization = new ContextAuthorization();
// DatabaseManager dbaseManager = new DatabaseManager();
AccountingDao dao = AccountingDao.get();
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
Map<String,ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
for(ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
}
AccountingDataHarvesterPlugin.scopeDescriptors.set(scopeDescriptorMap);
Set<Dimension> dimensionSet = dao.getDimensions();
Map<String,Dimension> dimensionMap = new HashMap<>();
for(Dimension dimension : dimensionSet) {
dimensionMap.put(dimension.getId(), dimension);
}
AccountingDataHarvesterPlugin.dimensions.set(dimensionMap);
// ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
String context = E_LEARNING_AREA_VRE;
// Setting the token for the context
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
ScopeBean scopeBean = new ScopeBean(context);
ScopeDescriptor scopeDescriptor = new ScopeDescriptor(scopeBean.name(), context);
Dimension dimension = AccountingDataHarvesterPlugin.getDimension(HarvestedDataKey.ACCESSES.getKey());
Calendar calendar = DateUtils.getStartCalendar(2018, Calendar.JULY, 1);
calendar.set(Calendar.DAY_OF_MONTH, 15);
Map<Integer,Integer> monthValues = new HashMap<>();
monthValues.put(Calendar.JULY, 54);
monthValues.put(Calendar.AUGUST, 23);
monthValues.put(Calendar.SEPTEMBER, 127);
monthValues.put(Calendar.OCTOBER, 192);
for(Integer month : monthValues.keySet()) {
calendar.set(Calendar.MONTH, month);
Instant instant = calendar.toInstant();
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, dimension,
(long) monthValues.get(month));
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
accountingRecords.add(ar);
}
logger.trace("{}", accountingRecords);
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch(Exception e) {
logger.error("", e);
}
}
}

View File

@@ -0,0 +1,173 @@
/**
*
*/
package org.gcube.dataharvest;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.gcube.common.authorization.utils.manager.SecretManager;
import org.gcube.common.authorization.utils.manager.SecretManagerProvider;
import org.gcube.common.authorization.utils.secret.JWTSecret;
import org.gcube.common.authorization.utils.secret.Secret;
import org.gcube.common.authorization.utils.secret.SecretUtility;
import org.gcube.common.keycloak.KeycloakClientFactory;
import org.gcube.common.keycloak.KeycloakClientHelper;
import org.gcube.common.keycloak.model.TokenResponse;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR)
*/
public class ContextTest {
private static final Logger logger = LoggerFactory.getLogger(ContextTest.class);
protected static final String CONFIG_INI_FILENAME = "config.ini";
public static final String DEFAULT_TEST_SCOPE;
public static final String GCUBE;
public static final String DEVNEXT;
public static final String NEXTNEXT;
public static final String DEVSEC;
public static final String DEVVRE;
public static final String ROOT_PROD;
protected static final Properties properties;
public static final String TYPE_PROPERTY_KEY = "type";
public static final String USERNAME_PROPERTY_KEY = "username";
public static final String PASSWORD_PROPERTY_KEY = "password";
public static final String CLIENT_ID_PROPERTY_KEY = "clientId";
static {
GCUBE = "/gcube";
DEVNEXT = GCUBE + "/devNext";
NEXTNEXT = DEVNEXT + "/NextNext";
DEVSEC = GCUBE + "/devsec";
DEVVRE = DEVSEC + "/devVRE";
ROOT_PROD = "/d4science.research-infrastructures.eu";
DEFAULT_TEST_SCOPE = GCUBE;
properties = new Properties();
InputStream input = ContextTest.class.getClassLoader().getResourceAsStream(CONFIG_INI_FILENAME);
try {
// load the properties file
properties.load(input);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private enum Type{
USER, CLIENT_ID
};
public static void set(Secret secret) throws Exception {
SecretManagerProvider.instance.reset();
SecretManager secretManager = new SecretManager();
secretManager.addSecret(secret);
SecretManagerProvider.instance.set(secretManager);
SecretManagerProvider.instance.get().set();
}
public static void setContextByName(String fullContextName) throws Exception {
logger.debug("Going to set credentials for context {}", fullContextName);
Secret secret = getSecretByContextName(fullContextName);
set(secret);
}
private static TokenResponse getJWTAccessToken(String context) throws Exception {
Type type = Type.valueOf(properties.get(TYPE_PROPERTY_KEY).toString());
TokenResponse tr = null;
int index = context.indexOf('/', 1);
String root = context.substring(0, index == -1 ? context.length() : index);
switch (type) {
case CLIENT_ID:
String clientId = properties.getProperty(CLIENT_ID_PROPERTY_KEY);
String clientSecret = properties.getProperty(root);
tr = KeycloakClientFactory.newInstance().queryUMAToken(context, clientId, clientSecret, context, null);
break;
case USER:
default:
String username = properties.getProperty(USERNAME_PROPERTY_KEY);
String password = properties.getProperty(PASSWORD_PROPERTY_KEY);
switch (root) {
case "/gcube":
default:
clientId = "next.d4science.org";
break;
case "/pred4s":
clientId = "pre.d4science.org";
break;
case "/d4science.research-infrastructures.eu":
clientId = "services.d4science.org";
break;
}
clientSecret = null;
tr = KeycloakClientHelper.getTokenForUser(context, username, password);
break;
}
return tr;
}
public static Secret getSecretByContextName(String context) throws Exception {
TokenResponse tr = getJWTAccessToken(context);
Secret secret = new JWTSecret(tr.getAccessToken());
return secret;
}
public static void setContext(String token) throws Exception {
Secret secret = getSecret(token);
set(secret);
}
private static Secret getSecret(String token) throws Exception {
Secret secret = SecretUtility.getSecretByTokenString(token);
return secret;
}
public static String getUser() {
String user = "UNKNOWN";
try {
user = SecretManagerProvider.instance.get().getUser().getUsername();
} catch(Exception e) {
logger.error("Unable to retrieve user. {} will be used", user);
}
return user;
}
@BeforeClass
public static void beforeClass() throws Exception {
setContextByName(ROOT_PROD);
}
@AfterClass
public static void afterClass() throws Exception {
SecretManagerProvider.instance.reset();
}
}
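ContextTest resolves its credentials from a config.ini on the test classpath. A hypothetical example using the property keys defined above (all values are placeholders; in CLIENT_ID mode each infrastructure root maps to its client secret, matching the test properties file shown earlier):

type=CLIENT_ID
clientId=accounting-dashboard-harvester-se-plugin
/d4science.research-infrastructures.eu=XXXXXXXXXX
# USER mode alternative:
# type=USER
# username=some.user
# password=********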

View File

@@ -0,0 +1,88 @@
package org.gcube.dataharvest.harvester;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.SortedSet;
import org.gcube.accounting.accounting.summary.access.AccountingDao;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.dataharvest.ContextTest;
import org.gcube.dataharvest.plugin.AccountingDataHarvesterPluginTest;
import org.gcube.dataharvest.utils.AggregationType;
import org.gcube.dataharvest.utils.ContextAuthorization;
import org.gcube.dataharvest.utils.DateUtils;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author Giancarlo Panichi (ISTI CNR)
*
*/
public class AccountingDataHarvesterJupyterTest extends AccountingDataHarvesterPluginTest {
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterJupyterTest.class);
@Ignore
@Test
public void testJupyterAccessesHarvester() throws Exception {
try {
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
List<Date> starts = new ArrayList<>();
starts.add(DateUtils.getStartCalendar(2021, Calendar.JANUARY, 1).getTime());
starts.add(DateUtils.getStartCalendar(2021, Calendar.FEBRUARY, 1).getTime());
starts.add(DateUtils.getStartCalendar(2021, Calendar.MARCH, 1).getTime());
starts.add(DateUtils.getStartCalendar(2021, Calendar.APRIL, 1).getTime());
starts.add(DateUtils.getStartCalendar(2021, Calendar.MAY, 1).getTime());
AggregationType measureType = AggregationType.MONTHLY;
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
/*
SortedSet<String> contexts = new TreeSet<>();
contexts.add("/d4science.research-infrastructures.eu/D4OS/Blue-CloudLab");
contexts.add("/d4science.research-infrastructures.eu/D4OS/Zoo-Phytoplankton_EOV");
contexts.add("/d4science.research-infrastructures.eu/D4OS/MarineEnvironmentalIndicators");
*/
List<AccountingRecord> accountingRecords = new ArrayList<>();
for (Date start : starts) {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
ContextTest.setContextByName(ROOT_PROD);
JupyterAccessesHarvester jupyterAccessesHarvester = new JupyterAccessesHarvester(start, end);
for(String context : contexts) {
ContextTest.setContextByName(context);
List<AccountingRecord> harvested = jupyterAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
}
}
// logger.debug("{}", accountingRecords);
logger.debug("Going to insert {}", accountingRecords);
ContextTest.setContextByName(ROOT_PROD);
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch (Throwable e) {
logger.error(e.getLocalizedMessage(), e);
throw e;
}
}
}

View File

@@ -0,0 +1,87 @@
package org.gcube.dataharvest.harvester;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.SortedSet;
import java.util.stream.Stream;
import org.gcube.accounting.accounting.summary.access.AccountingDao;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.dataharvest.ContextTest;
import org.gcube.dataharvest.plugin.AccountingDataHarvesterPluginTest;
import org.gcube.dataharvest.utils.AggregationType;
import org.gcube.dataharvest.utils.ContextAuthorization;
import org.gcube.dataharvest.utils.DateUtils;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author Giancarlo Panichi (ISTI CNR)
*
*/
public class AccountingDataHarvesterRStudioTest extends AccountingDataHarvesterPluginTest {
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterRStudioTest.class);
@Ignore
@Test
public void testRStudioAccessesHarvester() throws Exception {
try {
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
List<Date> starts = new ArrayList<>();
LocalDate sdate = LocalDate.parse("2016-01-01"), edate = LocalDate.parse("2021-06-01");
Stream.iterate(sdate, date -> date.plusMonths(1)).limit(ChronoUnit.MONTHS.between(sdate, edate) + 1)
.forEach(dateToConvert -> starts.add(java.util.Date
.from(dateToConvert.atStartOfDay().atZone(ZoneId.systemDefault()).toInstant())));
AggregationType measureType = AggregationType.MONTHLY;
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
List<AccountingRecord> accountingRecords = new ArrayList<>();
for (Date start : starts) {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
ContextTest.setContextByName(ROOT_PROD);
RStudioAccessesHarvester rstudioAccessesHarvester = new RStudioAccessesHarvester(start, end);
for(String context : contexts) {
ContextTest.setContextByName(context);
List<AccountingRecord> harvested = rstudioAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
}
}
// logger.debug("{}", accountingRecords);
logger.debug("Going to insert {}", accountingRecords);
ContextTest.setContextByName(ROOT_PROD);
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch (Throwable e) {
logger.error(e.getLocalizedMessage(), e);
throw e;
}
}
}

View File

@@ -2,7 +2,8 @@ package org.gcube.dataharvest.harvester.sobigdata;
import java.util.List;
import org.gcube.dataharvest.utils.ContextTest;
import org.gcube.dataharvest.ContextTest;
import org.junit.Ignore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -10,6 +11,7 @@ public class SoBigDataHarvesterTest extends ContextTest {
private static Logger logger = LoggerFactory.getLogger(SoBigDataHarvesterTest.class);
@Ignore
// @Test
public void testGroupList() throws Exception {
// ContextTest.setContextByName("/d4science.research-infrastructures.eu/D4Research/AGINFRAplusDev");

View File

@@ -0,0 +1,868 @@
package org.gcube.dataharvest.plugin;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.TimeUnit;
import org.gcube.accounting.accounting.summary.access.AccountingDao;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.common.authorization.utils.secret.Secret;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.common.scope.impl.ScopeBean.Type;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.ContextTest;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.harvester.CatalogueAccessesHarvester;
import org.gcube.dataharvest.harvester.CoreServicesAccessesHarvester;
import org.gcube.dataharvest.harvester.MethodInvocationHarvester;
import org.gcube.dataharvest.harvester.SocialInteractionsHarvester;
import org.gcube.dataharvest.harvester.VREAccessesHarvester;
import org.gcube.dataharvest.harvester.VREUsersHarvester;
import org.gcube.dataharvest.harvester.sobigdata.ResourceCatalogueHarvester;
import org.gcube.dataharvest.harvester.sobigdata.TagMeMethodInvocationHarvester;
import org.gcube.dataharvest.utils.AggregationType;
import org.gcube.dataharvest.utils.ContextAuthorization;
import org.gcube.dataharvest.utils.DateUtils;
import org.gcube.vremanagement.executor.api.types.LaunchParameter;
import org.gcube.vremanagement.executor.api.types.Scheduling;
import org.gcube.vremanagement.executor.client.SmartExecutorClient;
import org.gcube.vremanagement.executor.client.SmartExecutorClientFactory;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AccountingDataHarvesterPluginTest extends ContextTest {
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterPluginTest.class);
public static final String SO_BIG_VO = "/d4science.research-infrastructures.eu/SoBigData";
public static final String TAGME_VRE = "/d4science.research-infrastructures.eu/SoBigData/TagMe";
public static final String STOCK_ASSESMENT_VRE = "/d4science.research-infrastructures.eu/gCubeApps/StockAssessment";
public static final String BLUE_CLOUD_LAB = "/d4science.research-infrastructures.eu/D4OS/Blue-CloudLab";
protected AccountingDao getAccountingDao() throws ObjectNotFound, Exception {
AccountingDao dao = AccountingDao.get();
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
Map<String, ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
for (ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
}
AccountingDashboardHarvesterPlugin.scopeDescriptors.set(scopeDescriptorMap);
Set<Dimension> dimensionSet = dao.getDimensions();
Map<String, Dimension> dimensionMap = new HashMap<>();
for (Dimension dimension : dimensionSet) {
dimensionMap.put(dimension.getId(), dimension);
}
return dao;
}
@Ignore
@Test
public void getDimensions() {
try {
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = AccountingDao.get();
Set<Dimension> dimensionSet = dao.getDimensions();
for (Dimension d : dimensionSet) {
logger.debug("{} - {} - {} - {}", d.getId(), d.getGroup(), d.getAggregatedMeasure(), d.getLabel());
}
logger.info("End.");
} catch (Exception e) {
logger.error("", e);
}
}
@Ignore
// @Test
public void launch() {
try {
ContextTest.setContextByName(ROOT_PROD);
AccountingDashboardHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDashboardHarvesterPlugin();
Map<String, Object> inputs = new HashMap<>();
AggregationType aggregationType = AggregationType.MONTHLY;
inputs.put(AccountingDashboardHarvesterPlugin.MEASURE_TYPE_INPUT_PARAMETER, aggregationType.name());
inputs.put(AccountingDashboardHarvesterPlugin.GET_VRE_USERS_INPUT_PARAMETER, true);
inputs.put(AccountingDashboardHarvesterPlugin.RERUN_INPUT_PARAMETER, true);
inputs.put(AccountingDashboardHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, true);
inputs.put(AccountingDashboardHarvesterPlugin.PARTIAL_HARVESTING, true);
Calendar from = DateUtils.getStartCalendar(2022, Calendar.SEPTEMBER, 1);
String fromDate = DateUtils.LAUNCH_DATE_FORMAT.format(from.getTime());
logger.trace("{} is {}", AccountingDashboardHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
inputs.put(AccountingDashboardHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
accountingDataHarvesterPlugin.launch(inputs);
logger.info("End.");
} catch (Exception e) {
logger.error("", e);
}
}
@Ignore
// @Test
public void launchPluginOnSmartExecutor() {
try {
ContextTest.setContextByName(ROOT_PROD);
String pluginName = new AccountingDashboardHarvesterPlugin().getName();
SmartExecutorClient smartExecutor = SmartExecutorClientFactory.getClient(pluginName);
Assert.assertNotNull(smartExecutor);
Map<String, Object> inputs = new HashMap<>();
AggregationType aggregationType = AggregationType.MONTHLY;
inputs.put(AccountingDashboardHarvesterPlugin.MEASURE_TYPE_INPUT_PARAMETER, aggregationType.name());
inputs.put(AccountingDashboardHarvesterPlugin.GET_VRE_USERS_INPUT_PARAMETER, true);
inputs.put(AccountingDashboardHarvesterPlugin.RERUN_INPUT_PARAMETER, false);
inputs.put(AccountingDashboardHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, false);
inputs.put(AccountingDashboardHarvesterPlugin.PARTIAL_HARVESTING, false);
/*
* Calendar from = DateUtils.getStartCalendar(2016, Calendar.SEPTEMBER, 1);
* String fromDate = DateUtils.LAUNCH_DATE_FORMAT.format(from.getTime());
* logger.trace("{} is {}",
* AccountingDataHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
* inputs.put(AccountingDataHarvesterPlugin. START_DATE_INPUT_PARAMETER,
* fromDate);
*/
// 3rd of the month for MONTHLY Harvesting at 10:00
// CronExpression cronExpression = new CronExpression("0 0 10 3 1/1
// ? *");
// Every day at 10:00 for partial harvesting
CronExpression cronExpression = new CronExpression("0 0 10 3 1/1 ? *");
Scheduling scheduling = new Scheduling(cronExpression);
scheduling.setGlobal(false);
LaunchParameter launchParameter = new LaunchParameter(pluginName, inputs, scheduling);
// LaunchParameter launchParameter = new LaunchParameter(pluginName,
// inputs);
smartExecutor.launch(launchParameter);
logger.info("End.");
} catch (Exception e) {
logger.error("", e);
}
}
@Ignore
// @Test
public void launchOldData() {
try {
ContextTest.setContextByName(ROOT_PROD);
AccountingDashboardHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDashboardHarvesterPlugin();
Map<String, Object> inputs = new HashMap<>();
AggregationType aggregationType = AggregationType.MONTHLY;
inputs.put(AccountingDashboardHarvesterPlugin.MEASURE_TYPE_INPUT_PARAMETER, aggregationType.name());
inputs.put(AccountingDashboardHarvesterPlugin.GET_VRE_USERS_INPUT_PARAMETER, true);
inputs.put(AccountingDashboardHarvesterPlugin.RERUN_INPUT_PARAMETER, true);
inputs.put(AccountingDashboardHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, false);
Calendar from = DateUtils.getStartCalendar(2016, Calendar.SEPTEMBER, 1);
Calendar runbeforeDate = DateUtils.getStartCalendar(2018, Calendar.JUNE, 1);
while (from.before(runbeforeDate)) {
String fromDate = DateUtils.LAUNCH_DATE_FORMAT.format(from.getTime());
logger.trace("{} is {}", AccountingDashboardHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
inputs.put(AccountingDashboardHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
accountingDataHarvesterPlugin.launch(inputs);
from.add(aggregationType.getCalendarField(), 1);
}
logger.info("End.");
} catch (Exception e) {
logger.error("", e);
}
}
// @Ignore
@Test
public void launchOldDataVREAccessesHarvester() {
try {
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
AggregationType aggregationType = AggregationType.MONTHLY;
Calendar from = DateUtils.getStartCalendar(2023, Calendar.MAY, 1);
Calendar runbeforeDate = DateUtils.getStartCalendar(2023, Calendar.JULY, 1);
while (from.before(runbeforeDate)) {
Date start = from.getTime();
Date end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1);
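// end is one aggregation period (here one month) after start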
logger.debug("Harvesting from {} to {}", DateUtils.format(start), DateUtils.format(end));
ArrayList<AccountingRecord> accountingRecords = new ArrayList<>();
VREAccessesHarvester vreAccessesHarvester = null;
//JupyterAccessesHarvester vreAccessesHarvester = null;
//RStudioAccessesHarvester vreAccessesHarvester = null;
//CoreServicesAccessesHarvester vreAccessesHarvester = null;
for (String context : contexts) {
ContextTest.setContextByName(context);
ScopeBean scopeBean = new ScopeBean(context);
if (vreAccessesHarvester == null) {
if (scopeBean.is(Type.INFRASTRUCTURE)) {
vreAccessesHarvester = new VREAccessesHarvester(start, end);
} else {
// This code should never be reached because the scopes
// are sorted by fullname
ScopeBean parent = scopeBean.enclosingScope();
while (!parent.is(Type.INFRASTRUCTURE)) {
parent = parent.enclosingScope();
}
// Switching to the parent (infrastructure) context
ContextTest.setContextByName(parent.toString());
vreAccessesHarvester = new VREAccessesHarvester(start, end);
// Setting back the token for the original context
ContextTest.setContextByName(context);
}
}
try {
if (context.startsWith(AccountingDashboardHarvesterPlugin.SO_BIG_DATA_VO)
&& start.before(DateUtils.getStartCalendar(2018, Calendar.APRIL, 1).getTime())) {
logger.info("Not Harvesting VREs Accesses for {} from {} to {}", context,
DateUtils.format(start), DateUtils.format(end));
} else {
// Collecting Google Analytics Data for VREs
// Accesses
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
}
} catch (Exception e) {
logger.error("Error harvesting Social Interactions for {}", context, e);
}
}
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end),
accountingRecords);
ContextTest.setContextByName(ROOT_PROD);
// dao.insertRecords(accountingRecords.toArray(new
// AccountingRecord[1]));
Thread.sleep(TimeUnit.SECONDS.toMillis(10));
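// Pause between monthly runs, presumably to ease the load on the analytics backend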
from.add(aggregationType.getCalendarField(), 1);
}
ContextTest.setContextByName(ROOT_PROD);
} catch (Exception e) {
logger.error("", e);
}
logger.info("End.");
}
@Ignore
// @Test
public void testVREAccessesHarvester() throws Exception {
try {
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
List<Date> starts = new ArrayList<>();
starts.add(DateUtils.getStartCalendar(2018, Calendar.SEPTEMBER, 1).getTime());
starts.add(DateUtils.getStartCalendar(2018, Calendar.OCTOBER, 1).getTime());
starts.add(DateUtils.getStartCalendar(2018, Calendar.NOVEMBER, 1).getTime());
starts.add(DateUtils.getStartCalendar(2018, Calendar.DECEMBER, 1).getTime());
starts.add(DateUtils.getStartCalendar(2019, Calendar.JANUARY, 1).getTime());
starts.add(DateUtils.getStartCalendar(2019, Calendar.FEBRUARY, 1).getTime());
starts.add(DateUtils.getStartCalendar(2019, Calendar.MARCH, 1).getTime());
AggregationType measureType = AggregationType.MONTHLY;
String[] contextFullNames = new String[] { "/d4science.research-infrastructures.eu/FARM/GRSF",
"/d4science.research-infrastructures.eu/FARM/GRSF_Admin" };
List<AccountingRecord> accountingRecords = new ArrayList<>();
for (Date start : starts) {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
ContextTest.setContextByName(ROOT_PROD);
VREAccessesHarvester vreAccessesHarvester = new VREAccessesHarvester(start, end);
for (String contextFullname : contextFullNames) {
ContextTest.setContextByName(contextFullname);
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
logger.debug("{} - {}", contextFullname, accountingRecords);
}
}
logger.debug("{}", accountingRecords);
ContextTest.setContextByName(ROOT_PROD);
// dao.insertRecords(accountingRecords.toArray(new
// AccountingRecord[1]));
} catch (Exception e) {
logger.error("", e);
throw e;
}
}
@Ignore
// @Test
public void testVREAccessesHarvesterAll() {
try {
ContextTest.setContextByName(ROOT_PROD);
AggregationType measureType = AggregationType.MONTHLY;
// Date start = DateUtils.getStartCalendar(2015, Calendar.FEBRUARY,
// 1).getTime();
// Date end = DateUtils.getStartCalendar(2019, Calendar.FEBRUARY,
// 1).getTime();
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
AccountingDashboardHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDashboardHarvesterPlugin();
accountingDataHarvesterPlugin.getConfigParameters();
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
VREAccessesHarvester vreAccessesHarvester = null;
ArrayList<AccountingRecord> accountingRecords = new ArrayList<>();
for (String context : contexts) {
// Setting the token for the context
ContextTest.setContextByName(context);
ScopeBean scopeBean = new ScopeBean(context);
if (vreAccessesHarvester == null) {
if (scopeBean.is(Type.INFRASTRUCTURE)) {
vreAccessesHarvester = new VREAccessesHarvester(start, end);
} else {
// This code should never be reached because the scopes are
// sorted by fullname
ScopeBean parent = scopeBean.enclosingScope();
while (!parent.is(Type.INFRASTRUCTURE)) {
parent = parent.enclosingScope();
}
ContextTest.setContextByName(parent.toString());
vreAccessesHarvester = new VREAccessesHarvester(start, end);
// Setting back token for the context
ContextTest.setContextByName(context);
}
}
try {
if (context.startsWith(AccountingDashboardHarvesterPlugin.SO_BIG_DATA_VO)
&& start.before(DateUtils.getStartCalendar(2018, Calendar.APRIL, 1).getTime())) {
logger.info("Not Harvesting VREs Accesses for {} from {} to {}", context,
DateUtils.format(start), DateUtils.format(end));
} else {
// Collecting Google Analytics Data for VREs Accesses
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
}
} catch (Exception e) {
logger.error("Error harvesting Social Interactions for {}", context, e);
}
}
logger.debug("{}", accountingRecords);
} catch (Exception e) {
logger.error("", e);
}
}
@Ignore
// @Test
public void testSocialInteraction() {
try {
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
SortedSet<String> contexts = new TreeSet<>();
contexts.add("/d4science.research-infrastructures.eu/D4Research");
contexts.add("/d4science.research-infrastructures.eu/FARM/WECAFC-FIRMS");
contexts.add("/d4science.research-infrastructures.eu/gCubeApps/BlueBridgeProject");
contexts.add("/d4science.research-infrastructures.eu/gCubeApps/Parthenos");
contexts.add("/d4science.research-infrastructures.eu/gCubeApps/ScalableDataMining");
contexts.add("/d4science.research-infrastructures.eu/gCubeApps/gCube");
AggregationType aggregationType = AggregationType.MONTHLY;
Calendar from = DateUtils.getStartCalendar(2018, Calendar.JUNE, 1);
Date start = from.getTime();
Date end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1);
logger.debug("Harvesting Social Interaction from {} to {}", DateUtils.format(start), DateUtils.format(end));
ArrayList<AccountingRecord> accountingRecords = new ArrayList<>();
for (String context : contexts) {
// Setting the token for the context
ContextTest.setContextByName(context);
try {
// Collecting info on social (posts, replies and likes)
logger.info("Going to harvest Social Interactions for {}", context);
SocialInteractionsHarvester socialHarvester = new SocialInteractionsHarvester(start, end);
List<AccountingRecord> harvested = socialHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
} catch (Exception e) {
logger.error("Error harvesting Social Interactions for {}", context, e);
}
}
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end),
accountingRecords);
ContextTest.setContextByName(ROOT_PROD);
// dao.insertRecords(accountingRecords.toArray(new
// AccountingRecord[1]));
} catch (Exception e) {
logger.error("", e);
}
logger.info("End.");
}
// @Ignore
// // @Test
// public void testMethodInvocation() {
// try {
// ContextTest.setContextByName(STOCK_ASSESMENT_VRE);
//
// AggregationType measureType = AggregationType.MONTHLY;
//
// Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
// Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
//
// MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(start, end);
// List<AccountingRecord> accountingRecords = methodInvocationHarvester.getAccountingRecords();
//
// logger.debug("{}", accountingRecords);
//
// } catch (Exception e) {
// logger.error("", e);
// }
// }
// @Ignore
@Test
public void testMethodInvocationOldData() {
try {
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
Date start = DateUtils.getStartCalendar(2023, Calendar.JANUARY, 1).getTime();
Date last = DateUtils.getStartCalendar(2024, Calendar.FEBRUARY, 1).getTime();
AggregationType measureType = AggregationType.MONTHLY;
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
Map<String, ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
for (ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
}
AccountingDashboardHarvesterPlugin.scopeDescriptors.set(scopeDescriptorMap);
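// Pre-populates the plugin's ScopeDescriptor cache (keyed by context id) so the harvesting runs below can reuse it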
List<AccountingRecord> accountingRecords = new ArrayList<>();
while (start.before(last)) {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
for (String context : contexts) {
// Setting the token for the context
Secret s = contextAuthorization.getCatalogueSecretForContext(context);
ContextTest.set(s);
if (context.startsWith(AccountingDashboardHarvesterPlugin.TAGME_CONTEXT)) {
try {
// Collecting info on method invocation
logger.info("Going to harvest Method Invocations for {}", context);
TagMeMethodInvocationHarvester tagMeMethodInvocationHarvester = new TagMeMethodInvocationHarvester(
start, end);
List<AccountingRecord> harvested = tagMeMethodInvocationHarvester.getAccountingRecords();
logger.debug("{} - {}", context, harvested);
accountingRecords.addAll(harvested);
} catch (Exception e) {
logger.error("Error harvesting Method Invocations for {}", context, e);
}
} else {
try {
// Collecting info on method invocation
logger.info("Going to harvest Method Invocations for {}", context);
MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(start, end);
List<AccountingRecord> harvested = methodInvocationHarvester.getAccountingRecords();
logger.debug("{} - {}", context, harvested);
accountingRecords.addAll(harvested);
} catch (Exception e) {
logger.error("Error harvesting Method Invocations for {}", context, e);
}
}
}
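// Advance the harvesting window to the next aggregation period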
start = end;
}
logger.debug("Going to insert {}", accountingRecords);
ContextTest.setContextByName(ROOT_PROD);
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch (Exception e) {
logger.error("", e);
}
}
@Ignore
// @Test
public void testTagMeMethodInvocation() throws Exception {
try {
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
ContextTest.setContextByName(TAGME_VRE);
List<AccountingRecord> accountingRecords = new ArrayList<>();
AggregationType measureType = AggregationType.MONTHLY;
List<Date> starts = new ArrayList<>();
starts.add(DateUtils.getStartCalendar(2018, Calendar.NOVEMBER, 1).getTime());
starts.add(DateUtils.getStartCalendar(2018, Calendar.DECEMBER, 1).getTime());
starts.add(DateUtils.getStartCalendar(2019, Calendar.JANUARY, 1).getTime());
starts.add(DateUtils.getStartCalendar(2019, Calendar.FEBRUARY, 1).getTime());
starts.add(DateUtils.getStartCalendar(2019, Calendar.MARCH, 1).getTime());
for (Date start : starts) {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
TagMeMethodInvocationHarvester methodInvocationHarvester = new TagMeMethodInvocationHarvester(start,
end);
accountingRecords.addAll(methodInvocationHarvester.getAccountingRecords());
logger.debug("{}", accountingRecords);
}
ContextTest.setContextByName(ROOT_PROD);
// dao.insertRecords(accountingRecords.toArray(new
// AccountingRecord[1]));
} catch (Exception e) {
logger.error("", e);
throw e;
}
}
@Ignore
// @Test
public void testGetVREUsersForSpecificVRE() {
try {
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
ContextTest.setContextByName("/d4science.research-infrastructures.eu/SoBigData/SportsDataScience");
AggregationType measureType = AggregationType.MONTHLY;
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
VREUsersHarvester vreUsersHarvester = new VREUsersHarvester(start, end);
List<AccountingRecord> harvested = vreUsersHarvester.getAccountingRecords();
logger.info("Harvested Data from {} to {} : {}", DateUtils.format(start), DateUtils.format(end), harvested);
ContextTest.setContextByName(ROOT_PROD);
// dao.insertRecords(accountingRecords.toArray(new
// AccountingRecord[1]));
} catch (Exception e) {
logger.error("", e);
}
}
@Ignore
// @Test
public void testFilteringGenericResource() {
try {
ContextTest.setContextByName(ROOT_PROD);
// Utils.setContext(RESOURCE_CATALOGUE);
AggregationType measureType = AggregationType.MONTHLY;
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end,
contexts);
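// getValidContexts presumably retains only the contexts whose fullname starts with the given prefix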
SortedSet<String> validContexts = resourceCatalogueHarvester.getValidContexts(contexts, SO_BIG_VO + "/");
logger.info("Valid Contexts {}", validContexts);
} catch (Exception e) {
logger.error("", e);
}
}
@Ignore
// @Test
public void testResourceCatalogueHarvester() {
try {
ContextTest.setContextByName(ROOT_PROD);
AggregationType measureType = AggregationType.MONTHLY;
// Date start = DateUtils.getStartCalendar(2015, Calendar.FEBRUARY,
// 1).getTime();
// Date end = DateUtils.getStartCalendar(2019, Calendar.FEBRUARY,
// 1).getTime();
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end,
contexts);
List<AccountingRecord> data = resourceCatalogueHarvester.getAccountingRecords();
logger.debug("{}", data);
} catch (Exception e) {
logger.error("", e);
}
}
// @Ignore
@Test
public void testCoreServicesHarvester() {
try {
String context = ROOT_PROD;
ContextTest.setContextByName(context);
AccountingDao dao = getAccountingDao();
Calendar from = DateUtils.getStartCalendar(2023, Calendar.MAY, 1);
Calendar finalEnd = DateUtils.getStartCalendar(2023, Calendar.JULY, 1);
AggregationType aggregationType = AggregationType.MONTHLY;
Date start = from.getTime();
Date end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1);
ScopeBean scopeBean = new ScopeBean(context);
logger.debug("FullName {} - Name {}", scopeBean.toString(), scopeBean.name());
while (start.before(finalEnd.getTime())) {
CoreServicesAccessesHarvester coreServicesHarvester = new CoreServicesAccessesHarvester(start, end);
List<AccountingRecord> accountingRecords = coreServicesHarvester.getAccountingRecords();
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
logger.debug("{} -> {} Data Inserted", DateUtils.format(start), DateUtils.format(end));
logger.debug("---------------------------------------------------------------------------------------");
Thread.sleep(TimeUnit.SECONDS.toMillis(90));
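// 90-second pause between monthly windows, presumably to respect analytics API quotas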
start = end;
end = DateUtils.getEndDateFromStartDate(AggregationType.MONTHLY, start, 1);
}
} catch (Exception e) {
logger.error("", e);
}
}
// @Ignore
@Test
public void testCatalogueHarvester() {
try {
String context = ROOT_PROD;
ContextTest.setContextByName(context);
AccountingDao dao = getAccountingDao();
Calendar from = DateUtils.getStartCalendar(2023, Calendar.JUNE, 1);
Calendar runbeforeDate = DateUtils.getStartCalendar(2023, Calendar.JULY, 1);
AggregationType aggregationType = AggregationType.MONTHLY;
Date start = from.getTime();
Date end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1);
/*
* Date start = DateUtils.getPreviousPeriod(measureType, false).getTime(); Date
* end = DateUtils.getEndDateFromStartDate(measureType, start, 1, false);
*/
ScopeBean scopeBean = new ScopeBean(context);
logger.debug("FullName {} - Name {}", scopeBean.toString(), scopeBean.name());
CatalogueAccessesHarvester catalogueHarvester = new CatalogueAccessesHarvester(start, end);
List<AccountingRecord> accountingRecords = catalogueHarvester.getAccountingRecords();
for (AccountingRecord accountingRecord : accountingRecords) {
logger.debug("{}", accountingRecord);
}
logger.debug("{}", accountingRecords);
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch (Exception e) {
logger.error("", e);
}
}
public static final String E_LEARNING_AREA_VRE = "/d4science.research-infrastructures.eu/SoBigData/E-Learning_Area";
@Ignore
// @Test
public void addMissingVREAccesses() {
try {
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
String context = E_LEARNING_AREA_VRE;
// Setting the token for the context
ContextTest.setContextByName(context);
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
Dimension dimension = AccountingDashboardHarvesterPlugin.getDimension(HarvestedDataKey.ACCESSES.getKey());
Calendar calendar = DateUtils.getStartCalendar(2018, Calendar.JULY, 1);
calendar.set(Calendar.DAY_OF_MONTH, 15);
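// The timestamp is set to the 15th so each record falls inside the month it accounts for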
Map<Integer, Integer> monthValues = new HashMap<>();
monthValues.put(Calendar.JULY, 54);
monthValues.put(Calendar.AUGUST, 23);
monthValues.put(Calendar.SEPTEMBER, 127);
monthValues.put(Calendar.OCTOBER, 192);
for (Integer month : monthValues.keySet()) {
calendar.set(Calendar.MONTH, month);
Instant instant = calendar.toInstant();
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, dimension,
(long) monthValues.get(month));
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
accountingRecords.add(ar);
}
logger.trace("{}", accountingRecords);
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch (Exception e) {
logger.error("", e);
}
}
}


@@ -4,7 +4,9 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.util.Properties;
 import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
+import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
 import org.gcube.dataharvest.ContextTest;
+import org.junit.Ignore;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -21,11 +23,11 @@ public class ContextAuthorizationTest extends ContextTest {
 	private void getConfigParameters() throws IOException {
 		properties = new Properties();
-		InputStream input = AccountingDataHarvesterPlugin.class.getClassLoader().getResourceAsStream(PROPERTY_FILENAME);
+		InputStream input = AccountingDashboardHarvesterPlugin.class.getClassLoader().getResourceAsStream(PROPERTY_FILENAME);
 		properties.load(input);
 		AccountingDataHarvesterPlugin.getProperties().set(properties);
 	}
 	@Ignore
 	// @Test
 	public void testRetrieveContextsAndTokens() throws Exception {
 		try {
@@ -33,7 +35,7 @@ public class ContextAuthorizationTest extends ContextTest {
 		} catch (Exception e) {
 			logger.warn("Unable to load {} file containing configuration properties. AccountingDataHarvesterPlugin will use defaults", PROPERTY_FILENAME);
 		}
-		ContextAuthorization contextAuthorization = new ContextAuthorization();
+		ContextAuthorization contextAuthorization = new ContextAuthorization(properties);
 		contextAuthorization.retrieveContextsAndTokens();
 	}


@@ -1,85 +0,0 @@
/**
*
*/
package org.gcube.dataharvest.utils;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.gcube.common.authorization.client.Constants;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.common.authorization.library.AuthorizationEntry;
import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.authorization.library.provider.ClientInfo;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.authorization.library.utils.Caller;
import org.gcube.common.scope.api.ScopeProvider;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR)
*
*/
public class ContextTest {
private static final Logger logger = LoggerFactory.getLogger(ContextTest.class);
protected static Properties properties;
protected static final String PROPERTIES_FILENAME = "token.properties";
public static final String DEFAULT_TEST_SCOPE_NAME;
static {
properties = new Properties();
InputStream input = ContextTest.class.getClassLoader().getResourceAsStream(PROPERTIES_FILENAME);
try {
// load the properties file
properties.load(input);
} catch(IOException e) {
throw new RuntimeException(e);
}
//DEFAULT_TEST_SCOPE_NAME = "/pred4s/preprod/preVRE";
DEFAULT_TEST_SCOPE_NAME = "/gcube/devNext/NextNext";
}
public static String getCurrentScope(String token) throws ObjectNotFound, Exception {
AuthorizationEntry authorizationEntry = Constants.authorizationService().get(token);
String context = authorizationEntry.getContext();
logger.info("Context of token {} is {}", token, context);
return context;
}
public static void setContextByName(String fullContextName) throws ObjectNotFound, Exception {
String token = ContextTest.properties.getProperty(fullContextName);
setContext(token);
}
public static void setContext(String token) throws ObjectNotFound, Exception {
SecurityTokenProvider.instance.set(token);
AuthorizationEntry authorizationEntry = Constants.authorizationService().get(token);
ClientInfo clientInfo = authorizationEntry.getClientInfo();
logger.debug("User : {} - Type : {}", clientInfo.getId(), clientInfo.getType().name());
String qualifier = authorizationEntry.getQualifier();
Caller caller = new Caller(clientInfo, qualifier);
AuthorizationProvider.instance.set(caller);
ScopeProvider.instance.set(getCurrentScope(token));
}
@BeforeClass
public static void beforeClass() throws Exception {
setContextByName(DEFAULT_TEST_SCOPE_NAME);
}
@AfterClass
public static void afterClass() throws Exception {
SecurityTokenProvider.instance.reset();
ScopeProvider.instance.reset();
}
}


@@ -1,3 +1,6 @@
 /*.gcubekey
 /*.key
 /*.properties
+/howto.txt
+/scopedata 2.xml
+/config.ini


@@ -0,0 +1,4 @@
groupId=org.gcube.accounting
artifactId=accounting-dashboard-harvester-se-plugin
version=2.0.0-SNAPSHOT
description=Accounting Dashboard Harvester Smart Executor Plugin.

uberjar.xml Normal file

@@ -0,0 +1,24 @@
<assembly
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
<id>uberjar</id>
<formats>
<format>jar</format>
</formats>
<baseDirectory>${file.separator}</baseDirectory>
<fileSets>
<fileSet>
<directory>target${file.separator}libs</directory>
<outputDirectory>${file.separator}</outputDirectory>
<useDefaultExcludes>true</useDefaultExcludes>
<fileMode>755</fileMode>
</fileSet>
<fileSet>
<directory>target${file.separator}classes</directory>
<outputDirectory>${file.separator}</outputDirectory>
<useDefaultExcludes>true</useDefaultExcludes>
<fileMode>755</fileMode>
</fileSet>
</fileSets>
</assembly>