Compare commits


85 Commits

Author SHA1 Message Date
Luca Frosini f3c2b6197c Fixed changelog 2024-04-22 15:11:53 +02:00
Luca Frosini 00bce762f3 Removed -SNAPSHOT for release 2024-04-22 15:10:19 +02:00
Luca Frosini 9ef16294ff Fixed CHANGELOG 2024-02-27 12:23:30 +01:00
Luca Frosini fa7be6370a Upgraded maven-parent 2024-02-27 12:20:52 +01:00
Luca Frosini 34db58abe2 Fixed test 2024-02-26 18:04:42 +01:00
Luca Frosini abb35b6e7b Fixed test 2024-02-26 18:04:24 +01:00
Luca Frosini 77cfbf6a8a Improved Method Harvester 2024-02-26 18:03:07 +01:00
luca.frosini e76b1c3af3 Removed -SNAPSHOT to release the component 2023-09-21 10:01:07 +02:00
luca.frosini 009083f335 Fixed method invocation due to change in signature of the used library 2023-09-14 15:00:57 +02:00
luca.frosini dedf11256a Fixing bug 2023-09-06 12:39:55 +02:00
Massimiliano Assante 8c26deb82f ready to release 2023-07-03 17:22:51 +02:00
Massimiliano Assante deb8937a10 updated changelog 2023-07-03 17:19:23 +02:00
Massimiliano Assante 850d6674e5 fixed the test class 2023-07-03 17:12:03 +02:00
Massimiliano Assante 284431ca8d also core services refactored 2023-07-03 17:06:02 +02:00
Massimiliano Assante 4cbdef880e rewritten Jupyter, RStudio and Catalogue Harv., missing Core Serv one 2023-06-30 12:15:01 +02:00
luca.frosini 89e744d769 fixed example 2023-06-30 08:55:06 +02:00
luca.frosini 43865106df Fixed test launch 2023-06-29 17:05:05 +02:00
luca.frosini e0fd599c80 Cleaning code 2023-06-29 16:08:01 +02:00
Massimiliano Assante d3ad4f43ae push everything 2023-06-28 18:20:13 +02:00
luca.frosini 9b27e35676 Ignored MacOs File 2023-06-21 11:24:39 +02:00
Luca Frosini bb3e645932 Fixed bom import 2022-11-09 15:21:46 +01:00
Luca Frosini 934545c8cf Removed -SNAPSHOT to release the component 2022-10-25 16:20:17 +02:00
Luca Frosini 524c3a0411 Added -SNAPSHOT 2022-10-25 16:12:04 +02:00
Luca Frosini f0ce3c250c Added -SNAPSHOT to allow to deploy the artifact using Jenkins 2022-10-25 16:01:44 +02:00
Luca Frosini ec9d30d886 Removed -SNAPSHOT to release the component 2022-10-19 12:14:55 +02:00
Luca Frosini dbc270a523 Removed unneeded harvester and key to be harvested 2022-09-19 12:40:05 +02:00
Luca Frosini 1fe73af6bc Fixed code 2022-09-15 18:03:55 +02:00
Luca Frosini 120316d1b2 Fixing code 2022-09-12 16:50:35 +02:00
Luca Frosini 3ff630bbcb Fixed pom 2022-09-08 14:47:02 +02:00
Luca Frosini 459a71bc0d Switched code to social-networking-client 2022-09-05 14:49:45 +02:00
Luca Frosini 6bd87cedc4 Removed unneeded whitespace 2022-09-01 18:03:43 +02:00
Luca Frosini ee3a6208a4 Upgraded boms versions 2022-09-01 16:29:34 +02:00
Luca Frosini 523c0d8e34 Removed no more needed constants 2022-09-01 15:14:49 +02:00
Luca Frosini d574e3c797 Porting plugin to use new IAM credentials 2022-09-01 14:08:36 +02:00
Luca Frosini c7a934bd4b Switching security to the new IAM refs #21904 2022-08-31 16:48:11 +02:00
Luca Frosini cc242dee6a Removed -SNAPSHOT for release 2022-05-19 11:06:28 +02:00
Luca Frosini 9849e7f6ee Added -SNAPSHOT to allow Jenkins to build snapshot version 2022-05-19 11:05:31 +02:00
Luca Frosini f3a61edbdf Removed -SNAPSHOT for release 2022-05-19 11:02:55 +02:00
Luca Frosini cdd875bc47 fixing dependencies version 2022-05-19 10:58:03 +02:00
Luca Frosini 273171704e Enhanced range of storagehub-client-library to 2.0.0,3.0.0-SNAPSHOT 2022-02-09 15:51:47 +01:00
Luca Frosini ab8ad166d8 Fixing end date 2021-07-28 17:07:36 +02:00
Luca Frosini e3c8c42dbe Excluded postgres driver 2021-07-14 20:07:36 +02:00
Luca Frosini ac0fe1c671 Remove -SNAPSHOT for release 2021-06-16 09:59:58 +02:00
Luca Frosini 126034537e Fixed pom 2021-06-11 11:28:03 +02:00
Luca Frosini 40e12c1b85 Added empty line 2021-06-10 17:35:11 +02:00
Luca Frosini 2d312dbf0b Ported to smart-executor 3.0.0 2021-06-10 17:13:33 +02:00
Luca Frosini 759aaf59a3 fixed test 2021-06-04 08:45:38 +02:00
Luca Frosini c567df6a9e fixed filter 2021-06-04 08:45:28 +02:00
Luca Frosini fd641c937f Removed the subtraction of 1 millisecond 2021-06-04 08:45:16 +02:00
Giancarlo Panichi e82971ee29 Updated CHANGELOG.md to support new format 2021-06-03 19:04:51 +02:00
Giancarlo Panichi 4a9ec0a773 Merge pull request 'feature/21557' (#2) from feature/21557 into master
Reviewed-on: #2
2021-06-03 18:58:16 +02:00
Giancarlo Panichi 26b11e96af ref 21557: RStudio sessions to be published into the accounting
Added R Studio harvester
2021-06-03 18:52:07 +02:00
Giancarlo Panichi 87f5594109 ref 21557: RStudio sessions to be published into the accounting
Added R Studio harvester
2021-06-03 18:03:15 +02:00
Luca Frosini 713dee5082 Removed unneeded files 2021-05-31 18:17:09 +02:00
Giancarlo Panichi 619e99f08b Added FUNDING.md 2021-05-31 18:00:41 +02:00
Giancarlo Panichi 81d792162d Merge pull request 'feature/21031' (#1) from feature/21031 into master
Reviewed-on: #1
2021-05-31 17:52:33 +02:00
Giancarlo Panichi 94a558d3c1 Updated to fix Jupyter test 2021-05-31 17:50:23 +02:00
Luca Frosini 77311be1aa fixed test 2021-05-31 17:39:27 +02:00
Luca Frosini 8c7bf2c22b fixed bug on VRE users 2021-05-21 15:39:53 +02:00
Luca Frosini 695bf798f9 fixed tests 2021-05-21 15:24:13 +02:00
Luca Frosini 1b500a2f3d fixed test 2021-04-12 18:11:36 +02:00
Luca Frosini ba158f3187 Fixed test 2021-04-12 18:11:05 +02:00
Luca Frosini 72b7aeccf2 fixed test log 2021-04-12 10:39:08 +02:00
Luca Frosini fff6101491 fixed test log 2021-04-12 10:38:25 +02:00
Luca Frosini ac305c0a32 Improved test 2021-04-12 10:35:08 +02:00
Luca Frosini 42527a425a added property file required for tests 2021-04-12 10:18:19 +02:00
Luca Frosini f2b37893a1 set dry run as default in test for safety 2021-04-12 10:14:53 +02:00
Giancarlo Panichi 9b5d0874ec ref 21031: Add support to Jupyter
Updated Jupyter Accesses Harvester
2021-03-31 15:21:45 +02:00
Giancarlo Panichi 2bd73c2caa ref 21031: Add support to Jupyter
Updated Jupyter Accesses Harvester
2021-03-26 16:35:15 +01:00
Giancarlo Panichi 24f2409df7 ref 21031: Add support to Jupyter
Updated Jupyter Accesses Harvester
2021-03-26 16:04:07 +01:00
Giancarlo Panichi 38ec08e0a3 ref 21031: Add support to Jupyter
Updated Jupyter Accesses Harvester
2021-03-26 13:24:45 +01:00
Giancarlo Panichi 13481c35a5 ref 21031: Add support to Jupyter
Added Jupyter Accesses Harvester
2021-03-25 17:48:55 +01:00
Giancarlo Panichi 7a335cbefd ref 21031: Add support to Jupyter
Added Jupyter Accesses Harvester
2021-03-25 17:40:46 +01:00
Luca Frosini afe8a52e5b Fixing dependency scope 2021-03-22 12:04:51 +01:00
Luca Frosini fa381d7313 Fixed gcat-client dependency 2020-10-12 15:17:57 +02:00
Luca Frosini 1fd086e63d Switching to new smart-executor-api 2020-09-30 12:12:33 +02:00
Luca Frosini c47d0bbd25 Empty rows added 2020-08-04 15:03:54 +02:00
Luca Frosini 559926167a Fixed CHANGELOG according to the new template 2020-08-03 16:17:11 +02:00
Luca Frosini f94288aa53 Fixed CHANGELOG.md 2020-07-30 15:59:10 +02:00
Luca Frosini 553427047f Fixed CHANGELOG.md 2020-07-24 13:08:55 +02:00
Luca Frosini f7e22e3d31 Removed servicearchive creation which is deprecated 2020-07-24 13:08:16 +02:00
Luca Frosini 7b880efe30 Switching to gcube-jackson 2020-07-10 18:29:52 +02:00
Luca Frosini 9865f2a1ae Merge branch 'master' of gitea@code-repo.d4science.org:gCubeSystem/accounting-dashboard-harvester-se-plugin.git 2020-07-03 09:44:01 +02:00
Luca Frosini fc3a042c8f Removed old changelog.xml file 2020-06-11 15:55:22 +02:00
Luca Frosini 108cb1a767 Renamed CHANGELOG file 2020-06-10 15:12:44 +02:00
42 changed files with 2347 additions and 2085 deletions

.gitignore

@@ -1,4 +1,5 @@
target
.classpath
.project
.settings
.settings
/.DS_Store

CHANGELOG.md

@@ -1,62 +1,70 @@
# Changelog
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
# Changelog for "accounting-dashboard-harvester-se-plugin"
## [v1.6.0] [r4.23.0] - 2020-05-22
## [v2.4.0]
### Added
- Removed filter restriction on JobUsageRecord harvesting to get MethodInvocation
- Fixed bug on getting ScopeDescriptor for new scopes.
**Features**
## [v2.3.0]
- Ported GA harvesters to Analytics Data API (GA4)
## [v2.2.0]
- Switching security to the new IAM [#21904]
## [v2.1.0]
- Storagehub-client-library get range from gcube-bom [#22822]
## [v2.0.0]
- Ported plugin to smart-executor APIs 3.0.0 [#21616]
- Added RStudio Harvester [#21557]
- Added Jupyter Harvester [#21031]
- Switched accounting JSON management to gcube-jackson [#19115]
- Switched smart-executor JSON management to gcube-jackson [#19647]
## [v1.6.0] - 2020-05-22
- [#19047] Added core services accesses
## [v1.5.0] - 2020-03-30
### Added
**Features**
- [#18290] Google Analytics Plugin for Catalogue pageviews
- [#18848] Updated Catalogue Dashboard harvester ENUM
## [v1.4.0] - 2020-01-20
### Changed
**Fixes**
## [v1.4.0] - 2019-12-19
- [#17800] Allowed partial harvesting of the current period
## [v1.3.0] - 2019-11-11
### Added
**Features**
## [v1.3.0] - 2019-11-06
- [#17800] Allowed partial harvesting of the current period
## [v1.2.0] - 2019-10-04
### Changed
## [v1.2.0] - 2019-09-11
- [#17128] Removed Home Library dependency
- [#17128] Removed ckan-util-library dependency
## [v1.1.0] - 2019-02-26
### Changed
**Fixes**
## [v1.1.0] [r4.13.1] - 2019-02-26
- [#12985] Fixed scope of dependencies
## [v1.0.0] - 2018-10-10
## [v1.0.0] [r4.13.1] - 2018-10-10
- First Release

FUNDING.md

@@ -0,0 +1,26 @@
# Acknowledgments
The projects leading to this software have received funding from a series of European Union programmes including:
- the Sixth Framework Programme for Research and Technological Development
- [DILIGENT](https://cordis.europa.eu/project/id/004260) (grant no. 004260).
- the Seventh Framework Programme for research, technological development and demonstration
- [D4Science](https://cordis.europa.eu/project/id/212488) (grant no. 212488);
- [D4Science-II](https://cordis.europa.eu/project/id/239019) (grant no.239019);
- [ENVRI](https://cordis.europa.eu/project/id/283465) (grant no. 283465);
- [iMarine](https://cordis.europa.eu/project/id/283644) (grant no. 283644);
- [EUBrazilOpenBio](https://cordis.europa.eu/project/id/288754) (grant no. 288754).
- the H2020 research and innovation programme
- [SoBigData](https://cordis.europa.eu/project/id/654024) (grant no. 654024);
- [PARTHENOS](https://cordis.europa.eu/project/id/654119) (grant no. 654119);
- [EGI-Engage](https://cordis.europa.eu/project/id/654142) (grant no. 654142);
- [ENVRI PLUS](https://cordis.europa.eu/project/id/654182) (grant no. 654182);
- [BlueBRIDGE](https://cordis.europa.eu/project/id/675680) (grant no. 675680);
- [PerformFISH](https://cordis.europa.eu/project/id/727610) (grant no. 727610);
- [AGINFRA PLUS](https://cordis.europa.eu/project/id/731001) (grant no. 731001);
- [DESIRA](https://cordis.europa.eu/project/id/818194) (grant no. 818194);
- [ARIADNEplus](https://cordis.europa.eu/project/id/823914) (grant no. 823914);
- [RISIS 2](https://cordis.europa.eu/project/id/824091) (grant no. 824091);
- [EOSC-Pillar](https://cordis.europa.eu/project/id/857650) (grant no. 857650);
- [Blue Cloud](https://cordis.europa.eu/project/id/862409) (grant no. 862409);
- [SoBigData-PlusPlus](https://cordis.europa.eu/project/id/871042) (grant no. 871042);

README.md

@@ -50,26 +50,4 @@ open-source software toolkit used for building and operating Hybrid Data
Infrastructures enabling the dynamic deployment of Virtual Research Environments
by favouring the realisation of reuse oriented policies.
The projects leading to this software have received funding from a series of European Union programmes including:
- the Sixth Framework Programme for Research and Technological Development
- DILIGENT (grant no. 004260).
- the Seventh Framework Programme for research, technological development and demonstration
- D4Science (grant no. 212488);
- D4Science-II (grant no.239019);
- ENVRI (grant no. 283465);
- iMarine(grant no. 283644);
- EUBrazilOpenBio (grant no. 288754).
- the H2020 research and innovation programme
- SoBigData (grant no. 654024);
- PARTHENOS (grant no. 654119);
- EGIEngage (grant no. 654142);
- ENVRIplus (grant no. 654182);
- BlueBRIDGE (grant no. 675680);
- PerformFish (grant no. 727610);
- AGINFRAplus (grant no. 731001);
- DESIRA (grant no. 818194);
- ARIADNEplus (grant no. 823914);
- RISIS2 (grant no. 824091);
The projects leading to this software have received funding from a series of European Union programmes; see [FUNDING.md](FUNDING.md)

changelog.xml

@@ -1,27 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE xml>
<ReleaseNotes>
<Changeset component="org.gcube.accounting.accounting-dashboard-harvester-se-plugin.1.6.0" date="${buildDate}">
<Change>Added core services accesses #19047</Change>
</Changeset>
<Changeset component="org.gcube.accounting.accounting-dashboard-harvester-se-plugin.1.5.0" date="2020-03-30">
<Change>Google Analytics Plugin for Catalogue pageviews #18290</Change>
<Change>Updated Catalogue Dashboard harvester ENUM #18848</Change>
</Changeset>
<Changeset component="org.gcube.accounting.accounting-dashboard-harvester-se-plugin.1.4.0" date="2020-01-20">
<Change>Allowed partial harvesting of the current period #17800</Change>
</Changeset>
<Changeset component="org.gcube.accounting.accounting-dashboard-harvester-se-plugin.1.3.0" date="2019-11-11">
<Change>Allowed partial harvesting of the current period #17800</Change>
</Changeset>
<Changeset component="org.gcube.accounting.accounting-dashboard-harvester-se-plugin.1.2.0" date="2019-10-04">
<Change>Removed Home Library dependency #17128</Change>
<Change>Removed ckan-util-library dependency #17128</Change>
</Changeset>
<Changeset component="org.gcube.accounting.accounting-dashboard-harvester-se-plugin.1.1.0" date="2019-02-26">
<Change>Fixed scope of dependencies #12985</Change>
</Changeset>
<Changeset component="org.gcube.accounting.accounting-dashboard-harvester-se-plugin.1.0.0" date="2018-10-10">
<Change>First Release</Change>
</Changeset>
</ReleaseNotes>

(new Maven-filtered properties file)

@@ -0,0 +1,4 @@
groupId=${groupId}
artifactId=${artifactId}
version=${version}
description=${description}

pom.xml

@@ -5,14 +5,14 @@
<parent>
<artifactId>maven-parent</artifactId>
<groupId>org.gcube.tools</groupId>
<version>1.1.0</version>
<version>1.2.0</version>
<relativePath />
</parent>
<modelVersion>4.0.0</modelVersion>
<groupId>org.gcube.accounting</groupId>
<artifactId>accounting-dashboard-harvester-se-plugin</artifactId>
<version>1.6.0</version>
<version>2.4.0</version>
<name>Accounting Dashboard Harvester Smart Executor Plugin</name>
<description>
Accounting Dashboard Harvester Smart Executor Plugin harvest accounting
@@ -36,7 +36,7 @@
<dependency>
<groupId>org.gcube.vremanagement</groupId>
<artifactId>smart-executor-bom</artifactId>
<version>1.0.0</version>
<version>3.1.0</version>
<type>pom</type>
<scope>import</scope>
</dependency>
@@ -47,7 +47,6 @@
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>2.6</version>
</dependency>
<dependency>
<groupId>org.gcube.portlets.admin</groupId>
@@ -55,24 +54,44 @@
<version>[2.7.2,3.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>com.google.apis</groupId>
<artifactId>google-api-services-analyticsreporting</artifactId>
<version>v4-rev124-1.23.0</version>
<groupId>com.google.analytics</groupId>
<artifactId>google-analytics-data</artifactId>
<version>0.16.0</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>23.6-jre</version>
</dependency>
<dependency>
<groupId>com.google.auth</groupId>
<artifactId>google-auth-library-oauth2-http</artifactId>
<version>1.12.1</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>2.8.4</version>
</dependency>
<dependency>
<groupId>com.google.http-client</groupId>
<artifactId>google-http-client-gson</artifactId>
<version>1.21.0</version>
</dependency>
<!-- <dependency> -->
<!-- <groupId>org.gcube.common</groupId> -->
<!-- <artifactId>storagehub-client-library</artifactId> -->
<!-- <exclusions> -->
<!-- <exclusion> -->
<!-- <groupId>com.fasterxml.jackson.core</groupId> -->
<!-- <artifactId>jackson-core</artifactId> -->
<!-- </exclusion> -->
<!-- </exclusions> -->
<!-- </dependency> -->
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>storagehub-client-library</artifactId>
<version>[1.0.0, 2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.data-publishing</groupId>
<groupId>org.gcube.data-catalogue</groupId>
<artifactId>gcat-client</artifactId>
<version>[1.0.0, 2.0.0-SNAPSHOT)</version>
<version>[2.0.0, 3.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.accounting</groupId>
@@ -89,17 +108,29 @@
<dependency>
<groupId>org.gcube.accounting</groupId>
<artifactId>accounting-analytics</artifactId>
<version>[2.0.0,3.0.0-SNAPSHOT)</version>
<version>[3.0.0,4.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.accounting</groupId>
<artifactId>accounting-analytics-persistence-couchbase</artifactId>
<artifactId>accounting-analytics-persistence-postgresql</artifactId>
<version>[1.0.0,2.0.0-SNAPSHOT)</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.gcube.accounting</groupId>
<artifactId>accounting-summary-access</artifactId>
<version>[1.0.0,2.0.0-SNAPSHOT)</version>
<exclusions>
<exclusion>
<groupId>org.ancoron.postgresql</groupId>
<artifactId>org.postgresql</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.gcube.social-networking</groupId>
<artifactId>social-service-client</artifactId>
<version>[1.0.0, 2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.json</groupId>
@@ -107,8 +138,14 @@
<version>20171018</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>authorization-utils</artifactId>
<version>[2.2.0, 3.0.0-SNAPSHOT)</version>
</dependency>
<!-- Test Dependencies. Setting scope to provided to allow proper creation of uber-jar -->
<!-- Test Dependencies. Setting scope to provided to allow proper creation
of uber-jar -->
<dependency>
<groupId>org.gcube.vremanagement</groupId>
<artifactId>smart-executor-client</artifactId>
@@ -169,26 +206,27 @@
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<configuration>
<additionalparam>-Xdoclint:none</additionalparam>
<additionalJOption>-Xdoclint:none</additionalJOption>
</configuration>
<version>3.1.0</version>
<executions>
<execution>
<id>tar.gz</id>
<configuration>
<appendAssemblyId>false</appendAssemblyId>
<descriptors>
<descriptor>tar-gz.xml</descriptor>
</descriptors>
</configuration>
<phase>package</phase>
<id>generate-doc</id>
<phase>install</phase>
<goals>
<goal>single</goal>
<goal>jar</goal>
</goals>
</execution>
<execution>
<id>make-servicearchive</id>
<phase>package</phase>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
</project>
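A note on the test-dependency comment in the pom above: because the maven-assembly-plugin builds an uber-jar here, test-time dependencies are declared with provided scope so they are on the compile/test classpath but excluded from the packaged jar. A minimal sketch of the pattern (the version range below is illustrative, not taken from this pom):

<dependency>
    <groupId>org.gcube.vremanagement</groupId>
    <artifactId>smart-executor-client</artifactId>
    <!-- illustrative range: the real one is not visible in this diff -->
    <version>[4.0.0, 5.0.0-SNAPSHOT)</version>
    <!-- provided = available at compile/test time, left out of the uber-jar -->
    <scope>provided</scope>
</dependency>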

View File

@@ -16,16 +16,18 @@ import org.gcube.accounting.accounting.summary.access.AccountingDao;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.com.fasterxml.jackson.annotation.JsonIgnore;
import org.gcube.common.authorization.utils.secret.Secret;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.common.scope.impl.ScopeBean.Type;
import org.gcube.dataharvest.harvester.CatalogueAccessesHarvester;
import org.gcube.dataharvest.harvester.CoreServicesAccessesHarvester;
import org.gcube.dataharvest.harvester.JupyterAccessesHarvester;
import org.gcube.dataharvest.harvester.MethodInvocationHarvester;
import org.gcube.dataharvest.harvester.RStudioAccessesHarvester;
import org.gcube.dataharvest.harvester.SocialInteractionsHarvester;
import org.gcube.dataharvest.harvester.VREAccessesHarvester;
import org.gcube.dataharvest.harvester.VREUsersHarvester;
import org.gcube.dataharvest.harvester.sobigdata.DataMethodDownloadHarvester;
import org.gcube.dataharvest.harvester.sobigdata.ResourceCatalogueHarvester;
import org.gcube.dataharvest.harvester.sobigdata.TagMeMethodInvocationHarvester;
import org.gcube.dataharvest.utils.AggregationType;
@@ -40,375 +42,418 @@ import org.slf4j.LoggerFactory;
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
public class AccountingDashboardHarvesterPlugin extends Plugin<AccountingDashboardHarvesterPluginDeclaration> {
public class AccountingDashboardHarvesterPlugin extends Plugin {
private static Logger logger = LoggerFactory.getLogger(AccountingDashboardHarvesterPlugin.class);
private static final String PROPERTY_FILENAME = "config.properties";
public static final String START_DATE_INPUT_PARAMETER = "startDate";
public static final String MEASURE_TYPE_INPUT_PARAMETER = "measureType";
public static final String RERUN_INPUT_PARAMETER = "reRun";
public static final String GET_VRE_USERS_INPUT_PARAMETER = "getVREUsers";
public static final String DRY_RUN_INPUT_PARAMETER = "dryRun";
/**
* Allows partial harvesting of data of the current period.
* This means that in MONTHLY aggregation type the current month is harvested instead of the previous month which
* is done when the month is completed.
* This allows the portlet to display monthly data in the current month even when the data is partial (till the current day).
* Allows partial harvesting of data of the current period. This means that
* in MONTHLY aggregation type the current month is harvested instead of the
* previous month which is done when the month is completed. This allows the
* portlet to display monthly data in the current month even when the data is
* partial (till the current day).
*/
public static final String PARTIAL_HARVESTING = "partialHarvesting";
public static final String SO_BIG_DATA_VO = "/d4science.research-infrastructures.eu/SoBigData";
public static final String SO_BIG_DATA_EU_VRE = "/d4science.research-infrastructures.eu/gCubeApps/SoBigData.eu";
public static final String SO_BIG_DATA_IT_VRE = "/d4science.research-infrastructures.eu/gCubeApps/SoBigData.it";
public static final String SO_BIG_DATA_CATALOGUE_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData/ResourceCatalogue";
public static final String TAGME_CONTEXT = "/d4science.research-infrastructures.eu/SoBigData/TagMe";
public static final String TO_BE_SET = "TO BE SET";
protected Date start;
protected Date end;
public AccountingDashboardHarvesterPlugin(AccountingDashboardHarvesterPluginDeclaration pluginDeclaration) {
super(pluginDeclaration);
}
private static final InheritableThreadLocal<Properties> properties = new InheritableThreadLocal<Properties>() {
@Override
protected Properties initialValue() {
return new Properties();
}
};
public static InheritableThreadLocal<Properties> getProperties() {
return properties;
public AccountingDashboardHarvesterPlugin() {
super();
}
public static Dimension getDimension(String key) {
Dimension dimension = dimensions.get().get(key);
if(dimension == null) {
if (dimension == null) {
dimension = new Dimension(key, key, null, key);
}
return dimension;
}
protected static final InheritableThreadLocal<Map<String, Dimension>> dimensions = new InheritableThreadLocal<Map<String, Dimension>>() {
@Override
protected Map<String, Dimension> initialValue() {
return new HashMap<>();
}
};
public static ScopeDescriptor getScopeDescriptor(String context) {
return scopeDescriptors.get().get(context);
}
protected static final InheritableThreadLocal<Map<String, ScopeDescriptor>> scopeDescriptors = new InheritableThreadLocal<Map<String, ScopeDescriptor>>() {
public static final InheritableThreadLocal<Map<String, ScopeDescriptor>> scopeDescriptors = new InheritableThreadLocal<Map<String, ScopeDescriptor>>() {
@Override
protected Map<String, ScopeDescriptor> initialValue() {
return new HashMap<>();
}
};
public static ScopeDescriptor getScopeDescriptor() {
return scopeDescriptor.get();
}
public static final InheritableThreadLocal<ScopeDescriptor> scopeDescriptor = new InheritableThreadLocal<ScopeDescriptor>() {
@Override
protected ScopeDescriptor initialValue() {
return new ScopeDescriptor("","");
}
};
public Properties getConfigParameters() throws IOException {
@JsonIgnore
public static Properties getConfigParameters() throws IOException {
Properties properties = new Properties();
try {
InputStream input = AccountingDashboardHarvesterPlugin.class.getClassLoader()
.getResourceAsStream(PROPERTY_FILENAME);
properties.load(input);
return properties;
} catch(Exception e) {
} catch (Exception e) {
logger.warn(
"Unable to load {} file containing configuration properties. AccountingDataHarvesterPlugin will use defaults",
PROPERTY_FILENAME);
}
return properties;
}
/** {@inheritDoc} */
@Override
public void launch(Map<String,Object> inputs) throws Exception {
public void launch(Map<String, Object> inputs) throws Exception {
logger.debug("{} is starting", this.getClass().getSimpleName());
if(inputs == null || inputs.isEmpty()) {
if (inputs == null || inputs.isEmpty()) {
throw new IllegalArgumentException("The plugin can only be launched providing valid input parameters");
}
if(!inputs.containsKey(MEASURE_TYPE_INPUT_PARAMETER)) {
if (!inputs.containsKey(MEASURE_TYPE_INPUT_PARAMETER)) {
throw new IllegalArgumentException("Please set required parameter '" + MEASURE_TYPE_INPUT_PARAMETER + "'");
}
AggregationType aggregationType = AggregationType.valueOf((String) inputs.get(MEASURE_TYPE_INPUT_PARAMETER));
boolean reRun = true;
if(inputs.containsKey(RERUN_INPUT_PARAMETER)) {
if (inputs.containsKey(RERUN_INPUT_PARAMETER)) {
try {
reRun = (boolean) inputs.get(RERUN_INPUT_PARAMETER);
} catch(Exception e) {
} catch (Exception e) {
throw new IllegalArgumentException("'" + RERUN_INPUT_PARAMETER + "' must be a boolean");
}
}
boolean getVREUsers = true;
if(inputs.containsKey(GET_VRE_USERS_INPUT_PARAMETER)) {
if (inputs.containsKey(GET_VRE_USERS_INPUT_PARAMETER)) {
try {
reRun = (boolean) inputs.get(GET_VRE_USERS_INPUT_PARAMETER);
} catch(Exception e) {
getVREUsers = (boolean) inputs.get(GET_VRE_USERS_INPUT_PARAMETER);
} catch (Exception e) {
throw new IllegalArgumentException("'" + GET_VRE_USERS_INPUT_PARAMETER + "' must be a boolean");
}
}
boolean dryRun = true;
if(inputs.containsKey(DRY_RUN_INPUT_PARAMETER)) {
if (inputs.containsKey(DRY_RUN_INPUT_PARAMETER)) {
try {
dryRun = (boolean) inputs.get(DRY_RUN_INPUT_PARAMETER);
} catch(Exception e) {
} catch (Exception e) {
throw new IllegalArgumentException("'" + DRY_RUN_INPUT_PARAMETER + "' must be a boolean");
}
}
boolean partialHarvesting = false;
if(inputs.containsKey(PARTIAL_HARVESTING)) {
if (inputs.containsKey(PARTIAL_HARVESTING)) {
partialHarvesting = (boolean) inputs.get(PARTIAL_HARVESTING);
}
if(inputs.containsKey(START_DATE_INPUT_PARAMETER)) {
if (inputs.containsKey(START_DATE_INPUT_PARAMETER)) {
String startDateString = (String) inputs.get(START_DATE_INPUT_PARAMETER);
start = DateUtils.UTC_DATE_FORMAT.parse(startDateString + " " + DateUtils.UTC);
} else {
start = DateUtils.getPreviousPeriod(aggregationType, partialHarvesting).getTime();
}
end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1, partialHarvesting);
logger.debug("Harvesting from {} to {} (ReRun:{} - GetVREUsers:{} - DryRun:{})",
DateUtils.format(start), DateUtils.format(end), reRun, getVREUsers, dryRun);
Properties properties = getConfigParameters();
getProperties().set(properties);
end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1);
logger.debug("Harvesting from {} to {} (ReRun:{} - GetVREUsers:{} - DryRun:{})", DateUtils.format(start),
DateUtils.format(end), reRun, getVREUsers, dryRun);
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
String root = contexts.first();
Utils.setContext(contextAuthorization.getTokenForContext(root));
Utils.setContext(contextAuthorization.getSecretForContext(root));
AccountingDao dao = AccountingDao.get();
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
Map<String,ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
for(ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
Map<String, ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
for (ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
}
scopeDescriptors.set(scopeDescriptorMap);
Set<Dimension> dimensionSet = dao.getDimensions();
Map<String,Dimension> dimensionMap = new HashMap<>();
for(Dimension dimension : dimensionSet) {
Map<String, Dimension> dimensionMap = new HashMap<>();
for (Dimension dimension : dimensionSet) {
dimensionMap.put(dimension.getId(), dimension);
}
dimensions.set(dimensionMap);
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
String initialToken = SecurityTokenProvider.instance.get();
VREAccessesHarvester vreAccessesHarvester = null;
JupyterAccessesHarvester jupyterAccessesHarvester = null;
RStudioAccessesHarvester rstudioAccessesHarvester = null;
Secret rootSecret = null;
for(String context : contexts) {
for (String context : contexts) {
// Setting the token for the context
Utils.setContext(contextAuthorization.getTokenForContext(context));
Secret secret = contextAuthorization.getSecretForContext(context);
Utils.setContext(secret);
ScopeBean scopeBean = new ScopeBean(context);
ScopeDescriptor actualScopeDescriptor = scopeDescriptorMap.get(context);
if(actualScopeDescriptor==null) {
if (actualScopeDescriptor == null) {
actualScopeDescriptor = new ScopeDescriptor(scopeBean.name(), context);
scopeDescriptorMap.put(actualScopeDescriptor.getId(), actualScopeDescriptor);
}
scopeDescriptor.set(actualScopeDescriptor);
if(scopeBean.is(Type.INFRASTRUCTURE)) {
if (scopeBean.is(Type.INFRASTRUCTURE)) {
try {
rootSecret = secret;
CatalogueAccessesHarvester catalogueHarvester = new CatalogueAccessesHarvester(start, end);
CatalogueAccessesHarvester catalogueHarvester = new CatalogueAccessesHarvester(start, end);
List<AccountingRecord> harvested = catalogueHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
CoreServicesAccessesHarvester coreServicesHarvester = new CoreServicesAccessesHarvester(start, end);
CoreServicesAccessesHarvester coreServicesHarvester = new CoreServicesAccessesHarvester(start, end);
List<AccountingRecord> records = coreServicesHarvester.getAccountingRecords();
accountingRecords.addAll(records);
}catch (Exception e) {
logger.error("Error harvesting {} for {}", CatalogueAccessesHarvester.class.getSimpleName(), context, e);
} catch (Exception e) {
logger.error("Error harvesting {} for {}", CatalogueAccessesHarvester.class.getSimpleName(),
context, e);
}
}
if(vreAccessesHarvester == null) {
if(scopeBean.is(Type.INFRASTRUCTURE)) {
if (vreAccessesHarvester == null) {
if (scopeBean.is(Type.INFRASTRUCTURE)) {
vreAccessesHarvester = new VREAccessesHarvester(start, end);
} else {
// This code should never be used because the scopes are sorted by fullname
// This code should never be used because the scopes are
// sorted by fullname
ScopeBean parent = scopeBean.enclosingScope();
while(!parent.is(Type.INFRASTRUCTURE)) {
while (!parent.is(Type.INFRASTRUCTURE)) {
parent = scopeBean.enclosingScope();
}
// Setting back token for the context
Utils.setContext(contextAuthorization.getTokenForContext(parent.toString()));
Utils.setContext(contextAuthorization.getSecretForContext(parent.toString()));
vreAccessesHarvester = new VREAccessesHarvester(start, end);
// Setting back token for the context
Utils.setContext(contextAuthorization.getTokenForContext(context));
Utils.setContext(contextAuthorization.getSecretForContext(context));
}
}
if (rstudioAccessesHarvester == null) {
if (scopeBean.is(Type.INFRASTRUCTURE)) {
rstudioAccessesHarvester = new RStudioAccessesHarvester(start, end);
} else {
// This code should never be used because the scopes are
// sorted by fullname
ScopeBean parent = scopeBean.enclosingScope();
while (!parent.is(Type.INFRASTRUCTURE)) {
parent = scopeBean.enclosingScope();
}
// Setting back token for the context
Utils.setContext(contextAuthorization.getSecretForContext(parent.toString()));
rstudioAccessesHarvester = new RStudioAccessesHarvester(start, end);
// Setting back token for the context
Utils.setContext(contextAuthorization.getSecretForContext(context));
}
}
if((context.startsWith(SO_BIG_DATA_VO) || context.startsWith(SO_BIG_DATA_EU_VRE)
if (jupyterAccessesHarvester == null) {
if (scopeBean.is(Type.INFRASTRUCTURE)) {
jupyterAccessesHarvester = new JupyterAccessesHarvester(start, end);
} else {
// This code should never be used because the scopes are
// sorted by fullname
ScopeBean parent = scopeBean.enclosingScope();
while (!parent.is(Type.INFRASTRUCTURE)) {
parent = scopeBean.enclosingScope();
}
// Setting back token for the context
Utils.setContext(contextAuthorization.getSecretForContext(parent.toString()));
jupyterAccessesHarvester = new JupyterAccessesHarvester(start, end);
// Setting back token for the context
Utils.setContext(contextAuthorization.getSecretForContext(context));
}
}
if ((context.startsWith(SO_BIG_DATA_VO) || context.startsWith(SO_BIG_DATA_EU_VRE)
|| context.startsWith(SO_BIG_DATA_IT_VRE))
&& start.before(DateUtils.getStartCalendar(2018, Calendar.APRIL, 1).getTime())) {
logger.info("Not Harvesting for {} from {} to {}", context, DateUtils.format(start),
DateUtils.format(end));
} else {
try {
// Collecting Google Analytics Data for VREs Accesses
logger.info("Going to harvest VRE Accesses for {}", context);
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
List<HarvestedData> harvested = vreAccessesHarvester.getData();
data.addAll(harvested);
*/
} catch(Exception e) {
* List<HarvestedData> harvested =
* vreAccessesHarvester.getData(); data.addAll(harvested);
*/
} catch (Exception e) {
logger.error("Error harvesting VRE Accesses for {}", context, e);
}
try {
// Collecting Google Analytics Data for R Studio Accesses
logger.info("Going to harvest R Studio Accesses for {}", context);
List<AccountingRecord> harvested = rstudioAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
} catch (Exception e) {
logger.error("Error harvesting R Studio Accesses for {}", context, e);
}
try {
// Collecting Google Analytics Data for Jupyters Accesses
logger.info("Going to harvest Jupyter Accesses for {}", context);
List<AccountingRecord> harvested = jupyterAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
} catch (Exception e) {
logger.error("Error harvesting Jupyter Accesses for {}", context, e);
}
try {
// Collecting info on social (posts, replies and likes)
logger.info("Going to harvest Social Interactions for {}", context);
SocialInteractionsHarvester socialHarvester = new SocialInteractionsHarvester(start, end);
List<AccountingRecord> harvested = socialHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
List<HarvestedData> harvested = socialHarvester.getData();
data.addAll(harvested);
*/
} catch(Exception e) {
} catch (Exception e) {
logger.error("Error harvesting Social Interactions for {}", context, e);
}
try {
// Collecting info on VRE users
if(getVREUsers) {
// Harvesting Users only for VREs (not for VO and ROOT which is the sum of the children contexts)
if (getVREUsers) {
// Harvesting Users only for VREs (not for VO and ROOT
// which is the sum of the children contexts)
// The VREUsers can be only Harvested for the last month
if(scopeBean.is(Type.VRE) && start.equals(DateUtils.getPreviousPeriod(aggregationType, partialHarvesting).getTime())) {
if (scopeBean.is(Type.VRE) && start
.equals(DateUtils.getPreviousPeriod(aggregationType, partialHarvesting).getTime())) {
logger.info("Going to harvest Context Users for {}", context);
VREUsersHarvester vreUsersHarvester = new VREUsersHarvester(start, end);
List<AccountingRecord> harvested = vreUsersHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
List<HarvestedData> harvested = vreUsersHarvester.getData();
data.addAll(harvested);
*/
* List<HarvestedData> harvested =
* vreUsersHarvester.getData();
* data.addAll(harvested);
*/
}
}
} catch(Exception e) {
} catch (Exception e) {
logger.error("Error harvesting Context Users for {}", context, e);
}
if(context.startsWith(SO_BIG_DATA_CATALOGUE_CONTEXT)) {
if (context.startsWith(SO_BIG_DATA_CATALOGUE_CONTEXT)) {
try {
// Collecting info on Resource Catalogue (Dataset, Application, Deliverables, Methods)
// Collecting info on Resource Catalogue (Dataset,
// Application, Deliverables, Methods)
logger.info("Going to harvest Resource Catalogue Information for {}", context);
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end,
contexts);
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start,
end, contexts);
List<AccountingRecord> harvested = resourceCatalogueHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
List<HarvestedData> harvested = resourceCatalogueHarvester.getData();
data.addAll(harvested);
*/
} catch(Exception e) {
* List<HarvestedData> harvested =
* resourceCatalogueHarvester.getData();
* data.addAll(harvested);
*/
} catch (Exception e) {
logger.error("Error harvesting Resource Catalogue Information for {}", context, e);
}
try {
// Collecting info on Data/Method download
logger.info("Going to harvest Data Method Download for {}", context);
DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start,
end, contexts);
List<AccountingRecord> harvested = dataMethodDownloadHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
} catch(Exception e) {
logger.error("Error harvesting Data Method Download for {}", context, e);
}
// try {
// // Collecting info on Data/Method download
// logger.info("Going to harvest Data Method Download for {}", context);
// DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start,
// end, contexts);
//
// List<AccountingRecord> harvested = dataMethodDownloadHarvester.getAccountingRecords();
// accountingRecords.addAll(harvested);
//
// } catch (Exception e) {
// logger.error("Error harvesting Data Method Download for {}", context, e);
// }
}
if(context.startsWith(TAGME_CONTEXT)) {
if (context.startsWith(TAGME_CONTEXT)) {
try {
// Collecting info on method invocation
logger.info("Going to harvest Method Invocations for {}", context);
TagMeMethodInvocationHarvester tagMeMethodInvocationHarvester = new TagMeMethodInvocationHarvester(
start, end);
List<AccountingRecord> harvested = tagMeMethodInvocationHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
List<HarvestedData> harvested = tagMeMethodInvocationHarvester.getData();
data.addAll(harvested);
*/
} catch(Exception e) {
* List<HarvestedData> harvested =
* tagMeMethodInvocationHarvester.getData();
* data.addAll(harvested);
*/
} catch (Exception e) {
logger.error("Error harvesting Method Invocations for {}", context, e);
}
} else {
@@ -416,38 +461,38 @@ public class AccountingDashboardHarvesterPlugin extends Plugin<AccountingDashboa
// Collecting info on method invocation
logger.info("Going to harvest Method Invocations for {}", context);
MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(start, end);
List<AccountingRecord> harvested = methodInvocationHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
/*
List<HarvestedData> harvested = methodInvocationHarvester.getData();
data.addAll(harvested);
*/
} catch(Exception e) {
* List<HarvestedData> harvested =
* methodInvocationHarvester.getData();
* data.addAll(harvested);
*/
} catch (Exception e) {
logger.error("Error harvesting Method Invocations for {}", context, e);
}
}
}
}
Utils.setContext(initialToken);
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end), accountingRecords);
if(!dryRun) {
Utils.setContext(rootSecret);
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end),
accountingRecords);
if (!dryRun) {
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
//dbaseManager.insertMonthlyData(start, end, data, reRun);
}else {
} else {
logger.debug("Harvested measures are {}", accountingRecords);
}
}
/** {@inheritDoc} */
@Override
protected void onStop() throws Exception {
logger.debug("{} is stopping", this.getClass().getSimpleName());
}
}
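For orientation, here is a minimal sketch of how the input parameters defined above are meant to be passed to launch(...); the "MONTHLY" aggregation value and the exact date pattern expected by DateUtils.UTC_DATE_FORMAT are assumptions, not confirmed by this diff:

import java.util.HashMap;
import java.util.Map;

import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;

public class LaunchSketch {

    public static void main(String[] args) throws Exception {
        Map<String, Object> inputs = new HashMap<>();
        // Mandatory: the aggregation period to harvest ("MONTHLY" is assumed here).
        inputs.put(AccountingDashboardHarvesterPlugin.MEASURE_TYPE_INPUT_PARAMETER, "MONTHLY");
        // Optional: harvest from an explicit start date instead of the previous
        // period (the date pattern is an assumption).
        inputs.put(AccountingDashboardHarvesterPlugin.START_DATE_INPUT_PARAMETER, "2024-03-01");
        inputs.put(AccountingDashboardHarvesterPlugin.RERUN_INPUT_PARAMETER, true);
        inputs.put(AccountingDashboardHarvesterPlugin.GET_VRE_USERS_INPUT_PARAMETER, true);
        // dryRun=true collects the records but skips dao.insertRecords(...),
        // matching the safe default used by the tests.
        inputs.put(AccountingDashboardHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, true);
        // partialHarvesting=true harvests the current, still incomplete month
        // instead of the last completed one.
        inputs.put(AccountingDashboardHarvesterPlugin.PARTIAL_HARVESTING, false);

        new AccountingDashboardHarvesterPlugin().launch(inputs);
    }
}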

AccountingDashboardHarvesterPluginDeclaration.java

@@ -1,74 +0,0 @@
package org.gcube.dataharvest;
import java.util.HashMap;
import java.util.Map;
import org.gcube.vremanagement.executor.plugin.Plugin;
import org.gcube.vremanagement.executor.plugin.PluginDeclaration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
public class AccountingDashboardHarvesterPluginDeclaration implements PluginDeclaration {
private static Logger logger = LoggerFactory.getLogger(AccountingDashboardHarvesterPluginDeclaration.class);
public static final String NAME = "AccountingDashboardHarvester";
public static final String DESCRIPTION = "Data Harvester for Accounting Dashboard";
public static final String VERSION = "1.4.0";
/**{@inheritDoc}*/
@Override
public void init() {
logger.debug(String.format("%s initialized", AccountingDashboardHarvesterPlugin.class.getSimpleName()));
}
/**{@inheritDoc}*/
@Override
public String getName() {
return NAME;
}
/**{@inheritDoc}*/
@Override
public String getDescription() {
return DESCRIPTION;
}
/**{@inheritDoc}*/
@Override
public String getVersion() {
return VERSION;
}
/**{@inheritDoc}*/
@Override
public Map<String, String> getSupportedCapabilities() {
Map<String, String> discoveredCapabilities = new HashMap<String, String>();
return discoveredCapabilities;
}
/**{@inheritDoc}*/
@Override
public Class<? extends Plugin<? extends PluginDeclaration>> getPluginImplementation() {
return AccountingDashboardHarvesterPlugin.class;
}
@Override
public String toString(){
return String.format("{"
+ "name:%s,"
+ "version:%s,"
+ "description:%s,"
+ "pluginImplementation:%s,"
+ "}",
getName(),
getVersion(),
getDescription(),
getPluginImplementation().getClass().getSimpleName());
}
}

AnalyticsReportCredentials.java

@@ -59,12 +59,12 @@ public class AnalyticsReportCredentials {
/**
* Please note:
* The key is stored in the resource with blanks " " instead of "\n" as it causes issues and
* without the BEGIN and END Delimiters (e.g. -----END PRIVATE KEY-----) which myst be readded
* without the BEGIN and END Delimiters (e.g. -----END PRIVATE KEY-----) which must be readded
* @param privateKeyPem
*/
public void setPrivateKeyPem(String privateKeyPem) {
privateKeyPem = privateKeyPem.replace(" ", "\n");
this.privateKeyPem = "-----BEGIN PRIVATE KEY-----\n"+privateKeyPem+"\n-----END PRIVATE KEY-----";
this.privateKeyPem = privateKeyPem.replace(" ", "\n");
this.privateKeyPem = "-----BEGIN PRIVATE KEY-----\n"+this.privateKeyPem+"\n-----END PRIVATE KEY-----\n";
}
public String getPrivateKeyId() {

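To make the key-storage convention described in the comment above concrete, here is a small sketch of the round trip; the storing side is an assumption (only the restore side, setPrivateKeyPem, appears in this class):

// Storing (assumed): strip the PEM delimiters and encode newlines as blanks,
// which is how the key is kept in the resource.
static String toStoredForm(String pem) {
    return pem.replace("-----BEGIN PRIVATE KEY-----", "")
            .replace("-----END PRIVATE KEY-----", "")
            .trim()
            .replace("\n", " ");
}

// Restoring: what setPrivateKeyPem(...) above does, i.e. put the newlines
// back and re-add the BEGIN/END delimiters.
static String toPem(String stored) {
    return "-----BEGIN PRIVATE KEY-----\n" + stored.replace(" ", "\n") + "\n-----END PRIVATE KEY-----\n";
}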
HarvestedDataKey.java

@@ -5,7 +5,6 @@ package org.gcube.dataharvest.datamodel;
/**
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* @author M. Assante, ISTI-CNR
*/
@@ -15,6 +14,8 @@ public enum HarvestedDataKey {
MESSAGES_ACCESSES("Messages Accesses"),
NOTIFICATIONS_ACCESSES("Notifications Accesses"),
PROFILE_ACCESSES("Profile Accesses"),
JUPYTER_ACCESSES("Jupyter Accesses"),
RSTUDIO_ACCESSES("R Studio Accesses"),
CATALOGUE_ACCESSES("Catalogue Accesses"),
CATALOGUE_DATASET_LIST_ACCESSES("Item List"),
@@ -22,16 +23,11 @@
CATALOGUE_RESOURCE_ACCESSES("Item Resource"),
ACCESSES("VRE Accesses"),
USERS("VRE Users"),
DATA_METHOD_DOWNLOAD("Data/Method download"),
NEW_CATALOGUE_METHODS("New Catalogue Methods"),
NEW_CATALOGUE_DATASETS("New Catalogue Datasets"),
NEW_CATALOGUE_DELIVERABLES("New Catalogue Deliverables"),
NEW_CATALOGUE_APPLICATIONS("New Catalogue Applications"),
SOCIAL_POSTS("VRE Social Interactions Posts"),
SOCIAL_REPLIES("VRE Social Interactions Replies"),
SOCIAL_LIKES("VRE Social Interactions Likes"),
METHOD_INVOCATIONS("VRE Methods Invocation"),
VISUAL_TOOLS("VRE Visual Tools");
METHOD_INVOCATIONS("VRE Methods Invocation");
private String key;

BasicHarvester.java

@@ -37,23 +37,6 @@ public abstract class BasicHarvester {
logger.debug("Creating {} for the period {} {} ", this.getClass().getSimpleName(), DateUtils.format(start), DateUtils.format(end));
}
public static String getCurrentContext(String token) throws Exception {
AuthorizationEntry authorizationEntry = Constants.authorizationService().get(token);
String context = authorizationEntry.getContext();
logger.info("Context of token {} is {}", token, context);
return context;
}
public static void setContext(String token) throws Exception {
SecurityTokenProvider.instance.set(token);
ScopeProvider.instance.set(getCurrentContext(token));
}
public static String getCurrentContext() throws Exception {
String token = SecurityTokenProvider.instance.get();
return getCurrentContext(token);
}
public abstract List<AccountingRecord> getAccountingRecords() throws Exception;
public Dimension getDimension(HarvestedDataKey harvestedDataKey) {

CatalogueAccessesHarvester.java

@@ -4,22 +4,14 @@ import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.security.GeneralSecurityException;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@@ -49,26 +41,20 @@
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential.Builder;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.googleapis.util.Utils;
import com.google.api.client.http.HttpTransport;
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.DateRange.Builder;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.client.util.PemReader;
import com.google.api.client.util.PemReader.Section;
import com.google.api.client.util.SecurityUtils;
import com.google.api.services.analyticsreporting.v4.AnalyticsReporting;
import com.google.api.services.analyticsreporting.v4.AnalyticsReportingScopes;
import com.google.api.services.analyticsreporting.v4.model.DateRange;
import com.google.api.services.analyticsreporting.v4.model.DateRangeValues;
import com.google.api.services.analyticsreporting.v4.model.GetReportsRequest;
import com.google.api.services.analyticsreporting.v4.model.GetReportsResponse;
import com.google.api.services.analyticsreporting.v4.model.Metric;
import com.google.api.services.analyticsreporting.v4.model.Report;
import com.google.api.services.analyticsreporting.v4.model.ReportRequest;
import com.google.api.services.analyticsreporting.v4.model.ReportRow;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.ServiceAccountCredentials;
public class CatalogueAccessesHarvester extends BasicHarvester {
@@ -76,13 +62,13 @@ public class CatalogueAccessesHarvester extends BasicHarvester {
private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
private static final String MAPPING_RESOURCE_CATEGORY = "BigGAnalyticsMapping";
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
private static final String SERVICE_ENDPOINT_NAME = "BigGAnalyticsReportService";
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
private static final String AP_CATALOGUE_PAGEVIEWS_PROPERTY = "catalogue-pageviews";
private static final String AP_CLIENT_PROPERTY = "clientId";
private static final String AP_PRIVATEKEY_PROPERTY = "privateKeyId";
private static final String APPLICATION_NAME = "Analytics Reporting";
private static final String AP_CLIENT_PROPERTY = "client_id";
private static final String AP_PRIVATEKEY_PROPERTY = "private_key_id";
private static final String REGEX_CATALOGUE_ACCESSES = "^\\/$";
private static final String REGEX_CATALOGUE_DATASET_LIST_ACCESSES = "^\\/dataset(\\?([a-zA-Z0-9_.-]*.+))*";
@@ -107,7 +93,7 @@
int catalogueResourceAccesses = 0;
logger.debug("Catalogue accesses for {} ", dashboardContext);
for(CatalogueAccessesReportRow row : catalogueAccesses.get(dashboardContext)) {
// String pagePath = row.getPagePath();
// String pagePath = row.getPagePath();
switch (row.getKey()) {
case CATALOGUE_ACCESSES:
catalogueTotalAccesses += row.getVisitNumber();
@@ -126,23 +112,29 @@
}
}
ScopeDescriptor scopeDescriptor = new ScopeDescriptor();
ScopeBean scopeBean = new ScopeBean(dashboardContext);
scopeDescriptor.setId(dashboardContext);
scopeDescriptor.setName(scopeBean.name());
AccountingRecord ar1 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_ACCESSES), (long) catalogueTotalAccesses);
AccountingRecord ar2 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_DATASET_LIST_ACCESSES), (long) catalogueDatasetListAccesses);
AccountingRecord ar3 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_DATASET_ACCESSES), (long) catalogueDatasetAccesses);
AccountingRecord ar4 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_RESOURCE_ACCESSES), (long) catalogueResourceAccesses);
logger.debug("{} : {}", ar1.getDimension().getId(), ar1.getMeasure());
accountingRecords.add(ar1);
logger.debug("{} : {}", ar2.getDimension().getId(), ar2.getMeasure());
accountingRecords.add(ar2);
logger.debug("{} : {}", ar3.getDimension().getId(), ar3.getMeasure());
accountingRecords.add(ar3);
logger.debug("{} : {}", ar4.getDimension().getId(), ar4.getMeasure());
accountingRecords.add(ar4);
try {
ScopeBean scopeBean = new ScopeBean(dashboardContext);
scopeDescriptor.setId(dashboardContext);
scopeDescriptor.setName(scopeBean.name());
AccountingRecord ar1 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_ACCESSES), (long) catalogueTotalAccesses);
AccountingRecord ar2 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_DATASET_LIST_ACCESSES), (long) catalogueDatasetListAccesses);
AccountingRecord ar3 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_DATASET_ACCESSES), (long) catalogueDatasetAccesses);
AccountingRecord ar4 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_RESOURCE_ACCESSES), (long) catalogueResourceAccesses);
logger.debug("{} : {}", ar1.getDimension().getId(), ar1.getMeasure());
accountingRecords.add(ar1);
logger.debug("{} : {}", ar2.getDimension().getId(), ar2.getMeasure());
accountingRecords.add(ar2);
logger.debug("{} : {}", ar3.getDimension().getId(), ar3.getMeasure());
accountingRecords.add(ar3);
logger.debug("{} : {}", ar4.getDimension().getId(), ar4.getMeasure());
accountingRecords.add(ar4);
} catch (NullPointerException e) {
logger.warn("No correspondence found in the Generic Resource for this PropertyId; you should check this. Type: BigGAnalyticsMapping, name: AccountingDashboardMapping");
e.printStackTrace();
}
}
logger.debug("Returning {} accountingRecords ", accountingRecords.size());
return accountingRecords;
@@ -156,17 +148,24 @@
*
*/
private static HashMap<String, List<CatalogueAccessesReportRow>> getAllAccesses(Date start, Date end) throws Exception {
DateRange dateRange = getDateRangeForAnalytics(start, end);
logger.trace("Getting catalogue accesses in this time range {}", dateRange.toPrettyString());
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
logger.debug("Getting Catalogue accesses in this time range {}", dateRangeBuilder.toString());
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
AnalyticsReporting service = initializeAnalyticsReporting(credentialsFromD4S);
HashMap<String,List<GetReportsResponse>> responses = getReportResponses(service, credentialsFromD4S.getViewIds(), dateRange);
logger.debug("Getting credentials credentialsFromD4S");
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
logger.debug("initializeAnalyticsReporting service settings");
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
HashMap<String, List<CatalogueAccessesReportRow>> toReturn = new HashMap<>();
for(String view : responses.keySet()) {
String dashboardContext = getAccountingDashboardContextGivenGAViewID(view);
logger.trace("Parsing responses for this catalogue view, which corresponds to Dashboard Context: " + dashboardContext);
logger.info("\n\n**************** Parsing responses for this catalogue view, which corresponds to Dashboard Context: " + dashboardContext);
List<CatalogueAccessesReportRow> viewReport = parseResponse(view, responses.get(view), dashboardContext);
logger.trace("Got {} entries from view id={}", viewReport.size(), view);
toReturn.put(dashboardContext, viewReport);
@@ -175,175 +174,102 @@
}
/**
* Initializes an Analytics Reporting API V4 service object.
* Initializes a Google Analytics Data API service object.
*
* @return An authorized Analytics Reporting API V4 service object.
* @return An authorized Google Analytics Data API settings object
* @throws IOException
* @throws GeneralSecurityException
*/
private static AnalyticsReporting initializeAnalyticsReporting(AnalyticsReportCredentials cred)
throws GeneralSecurityException, IOException {
HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
GoogleCredential credential = fromD4SServiceEndpoint(cred).createScoped(AnalyticsReportingScopes.all());
// Construct the Analytics Reporting service object.
return new AnalyticsReporting.Builder(httpTransport, JSON_FACTORY, credential)
.setApplicationName(APPLICATION_NAME).build();
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
return BetaAnalyticsDataSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
.build();
}
/**
* Queries the Analytics Reporting API V4.
* Queries Analytics Data API service
*
* @param service An authorized Analytics Reporting API V4 service object.
* @return GetReportResponse The Analytics Reporting API V4 response.
* @param service Analytics Data API service settings.
* @return The RunReportResponse lists returned by the Analytics Data API, keyed by property id.
* @throws IOException
*/
private static HashMap<String,List<GetReportsResponse>> getReportResponses(AnalyticsReporting service,
List<String> viewIDs, DateRange dateRange) throws IOException {
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
HashMap<String,List<GetReportsResponse>> reports = new HashMap<>();
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
// Create the Metrics object.
Metric sessions = new Metric().setExpression("ga:pageviews").setAlias("pages");
com.google.api.services.analyticsreporting.v4.model.Dimension pageTitle = new com.google.api.services.analyticsreporting.v4.model.Dimension().setName("ga:pagePath");
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
for(String view : viewIDs) {
List<GetReportsResponse> gReportResponses = new ArrayList<>();
logger.info("Getting data from Google Analytics for catalogue viewid: " + view);
boolean iterateMorePages = true;
String nextPageToken = null;
while (iterateMorePages) {
// Create the ReportRequest object.
ReportRequest request = new ReportRequest().setViewId(view.trim()).setDateRanges(Arrays.asList(dateRange))
.setMetrics(Arrays.asList(sessions)).setDimensions(Arrays.asList(pageTitle));
request.setPageSize(1000);
request.setPageToken(nextPageToken);
ArrayList<ReportRequest> requests = new ArrayList<ReportRequest>();
requests.add(request);
// Create the GetReportsRequest object.
GetReportsRequest getReport = new GetReportsRequest().setReportRequests(requests);
// Call the batchGet method.
GetReportsResponse response = service.reports().batchGet(getReport).execute();
nextPageToken = response.getReports().get(0).getNextPageToken();
iterateMorePages = (nextPageToken != null);
logger.debug("got nextPageToken: "+nextPageToken);
for(String propertyId : viewIDs) {
List<RunReportResponse> gReportResponses = new ArrayList<>();
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
RunReportRequest request =
RunReportRequest.newBuilder()
.setProperty("properties/" + propertyId)
.addDimensions(Dimension.newBuilder().setName("pagePath"))
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
.addDateRanges(dateRangeBuilder)
.build();
// Make the request.
RunReportResponse response = analyticsData.runReport(request);
gReportResponses.add(response);
// Iterate through every row of the API response.
// for (Row row : response.getRowsList()) {
// System.out.printf(
// "%s, %s%n", row.getDimensionValues(0).getValue(), row.getMetricValues(0).getValue());
// }
reports.put(propertyId, gReportResponses);
}
reports.put(view, gReportResponses);
}
// Return the response.
return reports;
}
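// Editor's note (assumption about API limits): runReport returns a single page of rows,
// 10,000 by default, whereas the removed V4 code looped on nextPageToken. Should a GA4
// property exceed that, pagination could be sketched with the offset/limit fields:
//
//   long offset = 0;
//   final long pageSize = 10000;
//   while (true) {
//       RunReportResponse page = analyticsData.runReport(
//           request.toBuilder().setOffset(offset).setLimit(pageSize).build());
//       gReportResponses.add(page);
//       if (page.getRowsCount() < pageSize) break; // last (possibly partial) page
//       offset += pageSize;
//   }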
/**
* Parses and prints the Analytics Reporting API V4 response.
* @param dashboardContext
* Parses the Analytics Data API service response.
*
* @param response An Analytics Reporting API V4 response.
* @param dashboardContext
*/
private static List<CatalogueAccessesReportRow> parseResponse(String viewId, List<GetReportsResponse> responses, String dashboardContext) {
private static List<CatalogueAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses, String dashboardContext) {
logger.debug("parsing Response for " + viewId);
List<CatalogueAccessesReportRow> toReturn = new ArrayList<>();
for (GetReportsResponse response : responses) {
for (Report report: response.getReports()) {
List<ReportRow> rows = report.getData().getRows();
if (rows == null) {
logger.warn("No data found for " + viewId);
for (RunReportResponse response : responses) {
for (Row row: response.getRowsList()) {
String dimension = row.getDimensionValues(0).getValue();
String metric = row.getMetricValues(0).getValue();
CatalogueAccessesReportRow var = new CatalogueAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
logger.trace("parsing pagepath {}: value: {}", pagePath, Integer.parseInt(metric));
if (pagePath.matches(REGEX_CATALOGUE_RESOURCE_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_RESOURCE_ACCESSES);
validEntry = true;
}
else {
for (ReportRow row: rows) {
String dimension = row.getDimensions().get(0);
DateRangeValues metric = row.getMetrics().get(0);
CatalogueAccessesReportRow var = new CatalogueAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
logger.trace("parsing pagepath {}: value: {}", pagePath, Integer.parseInt(metric.getValues().get(0)));
if (pagePath.matches(REGEX_CATALOGUE_RESOURCE_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_RESOURCE_ACCESSES);
validEntry = true;
}
else if (pagePath.matches(REGEX_CATALOGUE_DATASET_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_DATASET_ACCESSES);
validEntry = true;
}
else if (pagePath.matches(REGEX_CATALOGUE_DATASET_LIST_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_DATASET_LIST_ACCESSES);
validEntry = true;
}
else if (pagePath.matches(REGEX_CATALOGUE_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_ACCESSES);
validEntry = true;
}
if (validEntry) {
var.setDashboardContext(dashboardContext);
var.setPagePath(dimension);
var.setVisitNumber(Integer.parseInt(metric.getValues().get(0)));
toReturn.add(var);
}
}
else if (pagePath.matches(REGEX_CATALOGUE_DATASET_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_DATASET_ACCESSES);
validEntry = true;
}
else if (pagePath.matches(REGEX_CATALOGUE_DATASET_LIST_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_DATASET_LIST_ACCESSES);
validEntry = true;
}
else if (pagePath.matches(REGEX_CATALOGUE_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_ACCESSES);
validEntry = true;
}
if (validEntry) {
var.setDashboardContext(dashboardContext);
var.setPagePath(dimension);
var.setVisitNumber(Integer.parseInt(metric));
toReturn.add(var);
}
}
}
return toReturn;
}
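// Editor's note: GA4 rows are positional, in the order dimensions and metrics were added
// to the RunReportRequest. With one dimension (pagePath) and one metric (screenPageViews),
// index 0 is safe on both sides; a hypothetical row such as
//   dimensionValues[0] = "/ctlg/resource/xyz", metricValues[0] = "42"
// yields 42 visits for that path. Metric values arrive as strings, hence Integer.parseInt.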
private static GoogleCredential fromD4SServiceEndpoint(AnalyticsReportCredentials cred) throws IOException {
String clientId = cred.getClientId();
String clientEmail = cred.getClientEmail();
String privateKeyPem = cred.getPrivateKeyPem();
String privateKeyId = cred.getPrivateKeyId();
String tokenUri = cred.getTokenUri();
String projectId = cred.getProjectId();
if(clientId == null || clientEmail == null || privateKeyPem == null || privateKeyId == null) {
throw new IOException("Error reading service account credential from stream, "
+ "expecting 'client_id', 'client_email', 'private_key' and 'private_key_id'.");
}
PrivateKey privateKey = privateKeyFromPkcs8(privateKeyPem);
Collection<String> emptyScopes = Collections.emptyList();
Builder credentialBuilder = new GoogleCredential.Builder().setTransport(Utils.getDefaultTransport())
.setJsonFactory(Utils.getDefaultJsonFactory()).setServiceAccountId(clientEmail)
.setServiceAccountScopes(emptyScopes).setServiceAccountPrivateKey(privateKey)
.setServiceAccountPrivateKeyId(privateKeyId);
if(tokenUri != null) {
credentialBuilder.setTokenServerEncodedUrl(tokenUri);
}
if(projectId != null) {
credentialBuilder.setServiceAccountProjectId(projectId);
}
// Don't do a refresh at this point, as it will always fail before the scopes are added.
return credentialBuilder.build();
}
private static PrivateKey privateKeyFromPkcs8(String privateKeyPem) throws IOException {
Reader reader = new StringReader(privateKeyPem);
Section section = PemReader.readFirstSectionAndClose(reader, "PRIVATE KEY");
if(section == null) {
throw new IOException("Invalid PKCS8 data.");
}
byte[] bytes = section.getBase64DecodedBytes();
PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(bytes);
Exception unexpectedException = null;
try {
KeyFactory keyFactory = SecurityUtils.getRsaKeyFactory();
PrivateKey privateKey = keyFactory.generatePrivate(keySpec);
return privateKey;
} catch(NoSuchAlgorithmException exception) {
unexpectedException = exception;
} catch(InvalidKeySpecException exception) {
unexpectedException = exception;
}
throw new IOException("Unexpected exception reading PKCS data", unexpectedException);
}
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
throws Exception {
@ -405,7 +331,6 @@ public class CatalogueAccessesHarvester extends BasicHarvester {
}
/**
* Retrieves the Google Analytics credentials registered in the IS for the current context.
* @throws Exception
*/
private static AnalyticsReportCredentials getAuthorisedApplicationInfoFromIs() throws Exception {
@ -423,13 +348,13 @@ public class CatalogueAccessesHarvester extends BasicHarvester {
} else {
for(ServiceEndpoint res : list) {
reportCredentials.setTokenUri(res.profile().runtime().hostedOn());
Group<AccessPoint> apGroup = res.profile().accessPoints();
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
AccessPoint found = accessPoints[0];
reportCredentials.setClientEmail(found.address());
reportCredentials.setProjectId(found.username());
reportCredentials.setPrivateKeyPem(StringEncrypter.getEncrypter().decrypt(found.password()));
reportCredentials.setClientEmail(found.username());
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
for(Property prop : found.properties()) {
if(prop.name().compareTo(AP_CATALOGUE_PAGEVIEWS_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
@ -458,14 +383,13 @@ public class CatalogueAccessesHarvester extends BasicHarvester {
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
}
private static DateRange getDateRangeForAnalytics(Date start, Date end) {
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
String startDate = asLocalDate(start).format(formatter);
String endDate = asLocalDate(end).format(formatter);
DateRange dateRange = new DateRange();// date format `yyyy-MM-dd`
dateRange.setStartDate(startDate);
dateRange.setEndDate(endDate);
return dateRange;
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
return dateRangeBuilder;
}
}
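// Editor's note (hedged usage sketch; the property ID is hypothetical): the helper above
// returns a DateRange.Builder that feeds straight into addDateRanges, e.g.:
//
//   Date start = java.sql.Date.valueOf("2024-01-01");
//   Date end = java.sql.Date.valueOf("2024-01-31");
//   DateRange.Builder range = getDateRangeBuilderForAnalytics(start, end);
//   RunReportRequest req = RunReportRequest.newBuilder()
//       .setProperty("properties/123456789")
//       .addDimensions(Dimension.newBuilder().setName("pagePath"))
//       .addMetrics(Metric.newBuilder().setName("screenPageViews"))
//       .addDateRanges(range)
//       .build();
//
// Protobuf builders are resolved to messages at build time, which is why returning the
// Builder rather than a built DateRange works here.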

View File

@ -4,22 +4,14 @@ import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.security.GeneralSecurityException;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@ -49,26 +41,17 @@ import org.slf4j.LoggerFactory;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential.Builder;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.googleapis.util.Utils;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.client.util.PemReader;
import com.google.api.client.util.PemReader.Section;
import com.google.api.client.util.SecurityUtils;
import com.google.api.services.analyticsreporting.v4.AnalyticsReporting;
import com.google.api.services.analyticsreporting.v4.AnalyticsReportingScopes;
import com.google.api.services.analyticsreporting.v4.model.DateRange;
import com.google.api.services.analyticsreporting.v4.model.DateRangeValues;
import com.google.api.services.analyticsreporting.v4.model.GetReportsRequest;
import com.google.api.services.analyticsreporting.v4.model.GetReportsResponse;
import com.google.api.services.analyticsreporting.v4.model.Metric;
import com.google.api.services.analyticsreporting.v4.model.Report;
import com.google.api.services.analyticsreporting.v4.model.ReportRequest;
import com.google.api.services.analyticsreporting.v4.model.ReportRow;
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.DateRange.Builder;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.ServiceAccountCredentials;
/**
* @author Massimiliano Assante (ISTI - CNR)
@ -77,16 +60,12 @@ public class CoreServicesAccessesHarvester extends BasicHarvester {
private static Logger logger = LoggerFactory.getLogger(CoreServicesAccessesHarvester.class);
private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
private static final String MAPPING_RESOURCE_CATEGORY = "BigGAnalyticsMapping";
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
private static final String SERVICE_ENDPOINT_NAME = "BigGAnalyticsReportService";
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
private static final String AP_VIEWS_PROPERTY = "views";
private static final String AP_CLIENT_PROPERTY = "clientId";
private static final String AP_PRIVATEKEY_PROPERTY = "privateKeyId";
private static final String APPLICATION_NAME = "Analytics Reporting";
private static final String AP_CLIENT_ID = "client_id";
private static final String AP_PRIVATEKEY_ID_PROPERTY = "private_key_id";
private static final String PAGE_WORKSPACE_ACCESSES = "/workspace";
private static final String PAGE_MESSAGES_ACCESSES = "/messages";
@ -160,18 +139,18 @@ public class CoreServicesAccessesHarvester extends BasicHarvester {
*
*/
private static HashMap<String, List<CoreServiceAccessesReportRow>> getAllAccesses(Date start, Date end) throws Exception {
DateRange dateRange = getDateRangeForAnalytics(start, end);
logger.trace("Getting core services accesses in this time range {}", dateRange.toPrettyString());
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
logger.trace("Getting core services accesses in this time range {}", dateRangeBuilder.toString());
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
logger.trace("gotten credentialsFromD4S id = {}", credentialsFromD4S.getClientId());
AnalyticsReporting service = initializeAnalyticsReporting(credentialsFromD4S);
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
logger.trace("gotten credentialsFromD4S viewIds= {}", credentialsFromD4S.getViewIds().toString());
HashMap<String,List<GetReportsResponse>> responses = getReportResponses(service, credentialsFromD4S.getViewIds(), dateRange);
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
HashMap<String, List<CoreServiceAccessesReportRow>> toReturn = new HashMap<>();
int i = 1;
@ -191,181 +170,106 @@ public class CoreServicesAccessesHarvester extends BasicHarvester {
}
/**
* Initializes an Analytics Reporting API V4 service object.
* Initializes a Google Analytics Data API service object.
*
* @return An authorized Analytics Reporting API V4 service object.
* @return an authorized Google Analytics Data API settings object.
* @throws IOException
* @throws GeneralSecurityException
*/
private static AnalyticsReporting initializeAnalyticsReporting(AnalyticsReportCredentials cred)
throws GeneralSecurityException, IOException {
HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
GoogleCredential credential = fromD4SServiceEndpoint(cred).createScoped(AnalyticsReportingScopes.all());
// Construct the Analytics Reporting service object.
return new AnalyticsReporting.Builder(httpTransport, JSON_FACTORY, credential)
.setApplicationName(APPLICATION_NAME).build();
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
return BetaAnalyticsDataSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
.build();
}
/**
* Queries the Analytics Reporting API V4.
* Queries the Google Analytics Data API service.
*
* @param service An authorized Analytics Reporting API V4 service object.
* @return GetReportResponse The Analytics Reporting API V4 response.
* @param betaAnalyticsDataSettings the Analytics Data API service settings.
* @return the Analytics Data API responses, keyed by property ID.
* @throws IOException
*/
private static HashMap<String,List<GetReportsResponse>> getReportResponses(AnalyticsReporting service,
List<String> viewIDs, DateRange dateRange) throws IOException {
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
HashMap<String,List<GetReportsResponse>> reports = new HashMap<>();
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
// Create the Metrics object.
Metric sessions = new Metric().setExpression("ga:pageviews").setAlias("pages");
com.google.api.services.analyticsreporting.v4.model.Dimension pageTitle = new com.google.api.services.analyticsreporting.v4.model.Dimension().setName("ga:pagePath");
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
for(String view : viewIDs) {
List<GetReportsResponse> gReportResponses = new ArrayList<>();
logger.info("Getting data from Google Analytics for gateway viewid: " + view);
boolean iterateMorePages = true;
String nextPageToken = null;
while (iterateMorePages) {
// Create the ReportRequest object.
ReportRequest request = new ReportRequest().setViewId(view.trim()).setDateRanges(Arrays.asList(dateRange))
.setMetrics(Arrays.asList(sessions)).setDimensions(Arrays.asList(pageTitle));
request.setPageSize(1000);
request.setPageToken(nextPageToken);
ArrayList<ReportRequest> requests = new ArrayList<ReportRequest>();
requests.add(request);
// Create the GetReportsRequest object.
GetReportsRequest getReport = new GetReportsRequest().setReportRequests(requests);
// Call the batchGet method.
GetReportsResponse response = service.reports().batchGet(getReport).execute();
nextPageToken = response.getReports().get(0).getNextPageToken();
iterateMorePages = (nextPageToken != null);
logger.debug("got nextPageToken: "+nextPageToken);
for(String propertyId : viewIDs) {
List<RunReportResponse> gReportResponses = new ArrayList<>();
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
RunReportRequest request =
RunReportRequest.newBuilder()
.setProperty("properties/" + propertyId)
.addDimensions(Dimension.newBuilder().setName("pagePath"))
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
.addDateRanges(dateRangeBuilder)
.build();
// Make the request.
RunReportResponse response = analyticsData.runReport(request);
gReportResponses.add(response);
reports.put(propertyId, gReportResponses);
}
reports.put(view, gReportResponses);
}
// Return the response.
return reports;
}
/**
* Parses and prints the Analytics Reporting API V4 response.
* @param dashboardContext
* Parses the Analytics Data API service response.
*
* @param response An Analytics Reporting API V4 response.
* @param response An Analytics Data API service response.
*/
private static List<CoreServiceAccessesReportRow> parseResponse(String viewId, List<GetReportsResponse> responses, String dashboardContext) {
logger.debug("parsing Response for " + viewId);
private static List<CoreServiceAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses, String dashboardContext) {
logger.debug("parsing Response for propertyID=" + viewId);
List<CoreServiceAccessesReportRow> toReturn = new ArrayList<>();
for (GetReportsResponse response : responses) {
for (Report report: response.getReports()) {
List<ReportRow> rows = report.getData().getRows();
if (rows == null) {
logger.warn("No data found for " + viewId);
}
else {
for (ReportRow row: rows) {
String dimension = row.getDimensions().get(0);
DateRangeValues metric = row.getMetrics().get(0);
CoreServiceAccessesReportRow var = new CoreServiceAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
logger.trace("parsing pagepath {}: value: {}", pagePath, Integer.parseInt(metric.getValues().get(0)));
for (RunReportResponse response : responses) {
for (Row row: response.getRowsList()) {
String dimension = row.getDimensionValues(0).getValue();
String metric = row.getMetricValues(0).getValue();
CoreServiceAccessesReportRow var = new CoreServiceAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
logger.trace("parsing pagepath {}: value: {}", pagePath, Integer.parseInt(metric));
if (!pagePath.contains("_redirect=/group")) {
if ( pagePath.contains(PAGE_WORKSPACE_ACCESSES)) {
var.setKey(HarvestedDataKey.WORKSPACE_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
else if ( pagePath.contains(PAGE_MESSAGES_ACCESSES)) {
var.setKey(HarvestedDataKey.MESSAGES_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
else if ( pagePath.contains(PAGE_PROFILE_ACCESSES)) {
var.setKey(HarvestedDataKey.PROFILE_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
else if ( pagePath.contains(PAGE_NOTIFICATION_ACCESSES)) {
var.setKey(HarvestedDataKey.NOTIFICATIONS_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
}
if (validEntry) {
var.setDashboardContext(dashboardContext);
var.setPagePath(dimension);
var.setVisitNumber(Integer.parseInt(metric.getValues().get(0)));
toReturn.add(var);
}
if (!pagePath.contains("_redirect=/group")) {
if ( pagePath.contains(PAGE_WORKSPACE_ACCESSES)) {
var.setKey(HarvestedDataKey.WORKSPACE_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
else if ( pagePath.contains(PAGE_MESSAGES_ACCESSES)) {
var.setKey(HarvestedDataKey.MESSAGES_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
else if ( pagePath.contains(PAGE_PROFILE_ACCESSES)) {
var.setKey(HarvestedDataKey.PROFILE_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
else if ( pagePath.contains(PAGE_NOTIFICATION_ACCESSES)) {
var.setKey(HarvestedDataKey.NOTIFICATIONS_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
}
if (validEntry) {
var.setDashboardContext(dashboardContext);
var.setPagePath(dimension);
var.setVisitNumber(Integer.parseInt(metric));
toReturn.add(var);
}
}
}
return toReturn;
}
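// Editor's note: unlike the catalogue harvester, which classifies page paths with regular
// expressions, this parser matches by substring, and it first excludes portal redirect
// URLs ("_redirect=/group") so that a redirect towards /workspace or /messages is not
// counted as a real access.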
private static GoogleCredential fromD4SServiceEndpoint(AnalyticsReportCredentials cred) throws IOException {
String clientId = cred.getClientId();
String clientEmail = cred.getClientEmail();
String privateKeyPem = cred.getPrivateKeyPem();
String privateKeyId = cred.getPrivateKeyId();
String tokenUri = cred.getTokenUri();
String projectId = cred.getProjectId();
if(clientId == null || clientEmail == null || privateKeyPem == null || privateKeyId == null) {
throw new IOException("Error reading service account credential from stream, "
+ "expecting 'client_id', 'client_email', 'private_key' and 'private_key_id'.");
}
PrivateKey privateKey = privateKeyFromPkcs8(privateKeyPem);
Collection<String> emptyScopes = Collections.emptyList();
Builder credentialBuilder = new GoogleCredential.Builder().setTransport(Utils.getDefaultTransport())
.setJsonFactory(Utils.getDefaultJsonFactory()).setServiceAccountId(clientEmail)
.setServiceAccountScopes(emptyScopes).setServiceAccountPrivateKey(privateKey)
.setServiceAccountPrivateKeyId(privateKeyId);
if(tokenUri != null) {
credentialBuilder.setTokenServerEncodedUrl(tokenUri);
}
if(projectId != null) {
credentialBuilder.setServiceAccountProjectId(projectId);
}
// Don't do a refresh at this point, as it will always fail before the scopes are added.
return credentialBuilder.build();
}
private static PrivateKey privateKeyFromPkcs8(String privateKeyPem) throws IOException {
Reader reader = new StringReader(privateKeyPem);
Section section = PemReader.readFirstSectionAndClose(reader, "PRIVATE KEY");
if(section == null) {
throw new IOException("Invalid PKCS8 data.");
}
byte[] bytes = section.getBase64DecodedBytes();
PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(bytes);
Exception unexpectedException = null;
try {
KeyFactory keyFactory = SecurityUtils.getRsaKeyFactory();
PrivateKey privateKey = keyFactory.generatePrivate(keySpec);
return privateKey;
} catch(NoSuchAlgorithmException exception) {
unexpectedException = exception;
} catch(InvalidKeySpecException exception) {
unexpectedException = exception;
}
throw new IOException("Unexpected exception reading PKCS data", unexpectedException);
}
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
throws Exception {
@ -445,24 +349,24 @@ public class CoreServicesAccessesHarvester extends BasicHarvester {
} else {
for(ServiceEndpoint res : list) {
reportCredentials.setTokenUri(res.profile().runtime().hostedOn());
Group<AccessPoint> apGroup = res.profile().accessPoints();
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
AccessPoint found = accessPoints[0];
reportCredentials.setClientEmail(found.address());
reportCredentials.setProjectId(found.username());
reportCredentials.setPrivateKeyPem(StringEncrypter.getEncrypter().decrypt(found.password()));
reportCredentials.setClientEmail(found.username());
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
for(Property prop : found.properties()) {
if(prop.name().compareTo(AP_VIEWS_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
String[] views = decryptedValue.split(";");
reportCredentials.setViewIds(Arrays.asList(views));
}
if(prop.name().compareTo(AP_CLIENT_PROPERTY) == 0) {
if(prop.name().compareTo(AP_CLIENT_ID) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setClientId(decryptedValue);
}
if(prop.name().compareTo(AP_PRIVATEKEY_PROPERTY) == 0) {
if(prop.name().compareTo(AP_PRIVATEKEY_ID_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setPrivateKeyId(decryptedValue);
}
@ -476,18 +380,18 @@ public class CoreServicesAccessesHarvester extends BasicHarvester {
return reportCredentials;
}
private static LocalDate asLocalDate(Date date) {
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
}
private static DateRange getDateRangeForAnalytics(Date start, Date end) {
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
String startDate = asLocalDate(start).format(formatter);
String endDate = asLocalDate(end).format(formatter);
DateRange dateRange = new DateRange();// date format `yyyy-MM-dd`
dateRange.setStartDate(startDate);
dateRange.setEndDate(endDate);
return dateRange;
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
return dateRangeBuilder;
}
}

View File

@ -0,0 +1,312 @@
package org.gcube.dataharvest.harvester;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.encryption.encrypter.StringEncrypter;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
import org.gcube.common.resources.gcore.utils.Group;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.AnalyticsReportCredentials;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.datamodel.VREAccessesReportRow;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.DateRange.Builder;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.ServiceAccountCredentials;
/**
*
* @author Giancarlo Panichi (ISTI CNR)
*
*/
public class JupyterAccessesHarvester extends BasicHarvester {
private static Logger logger = LoggerFactory.getLogger(JupyterAccessesHarvester.class);
private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
private static final String AP_VIEWS_PROPERTY = "views";
private static final String AP_CLIENT_ID = "client_id";
private static final String AP_PRIVATEKEY_ID_PROPERTY = "private_key_id";
private List<VREAccessesReportRow> vreAccesses;
public JupyterAccessesHarvester(Date start, Date end) throws Exception {
super(start, end);
logger.debug("JupyerAccessHArvester: {}, {}", start, end);
vreAccesses = getAllAccesses(start, end);
}
@Override
public List<AccountingRecord> getAccountingRecords() throws Exception {
try {
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
int measure = 0;
ScopeBean scopeBean = new ScopeBean(context);
String lowerCasedContext = scopeBean.name().toLowerCase();
logger.debug("JupyerAccessHArvester lowerCasedContext: {}", lowerCasedContext);
for (VREAccessesReportRow row : vreAccesses) {
String pagePath = row.getPagePath().toLowerCase();
if (pagePath != null && !pagePath.isEmpty()) {
if (pagePath.contains(lowerCasedContext)) {
if (!pagePath.contains("catalogue")) {
if (pagePath.contains("jupyter") || pagePath.contains("jupiter")) {
logger.trace("Matched jupyter or jupiter ({}) : {}", lowerCasedContext, pagePath);
measure += row.getVisitNumber();
}
}
}
}
}
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
if (measure > 0) {
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant,
getDimension(HarvestedDataKey.JUPYTER_ACCESSES), (long) measure);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
accountingRecords.add(ar);
}
return accountingRecords;
} catch (Exception e) {
throw e;
}
}
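// Editor's note (worked example with hypothetical paths and counts): for a context whose
// ScopeBean name is "devVRE", lowerCasedContext is "devvre", and rows such as
//   /group/devvre/jupyterhub  visits=3 -> counted (contains the context and "jupyter")
//   /group/devvre/catalogue   visits=5 -> skipped (contains "catalogue")
//   /group/othervre/jupyter   visits=2 -> skipped (different context)
// would yield a single JUPYTER_ACCESSES AccountingRecord with measure 3.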
/**
*
* @return a list of {@link VREAccessesReportRow} objects containing the
* pagePath and the visit number e.g. VREAccessesReportRow
* [pagePath=/group/agroclimaticmodelling/add-new-users,
* visitNumber=1] VREAccessesReportRow
* [pagePath=/group/agroclimaticmodelling/administration,
* visitNumber=2] VREAccessesReportRow
* [pagePath=/group/agroclimaticmodelling/agroclimaticmodelling,
* visitNumber=39]
*/
private static List<VREAccessesReportRow> getAllAccesses(Date start, Date end) throws Exception {
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
logger.debug("Getting accesses in this time range {}", dateRangeBuilder.toString());
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
logger.debug("Getting credentials credentialsFromD4S");
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
logger.debug("initializeAnalyticsReporting service settings");
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
List<VREAccessesReportRow> totalAccesses = new ArrayList<>();
for(String view : responses.keySet()) {
List<VREAccessesReportRow> viewReport = parseResponse(view, responses.get(view));
logger.debug("Got {} entries from view id={}", viewReport.size(), view);
totalAccesses.addAll(viewReport);
}
logger.debug("Merged in {} total entries from all views", totalAccesses.size());
return totalAccesses;
}
/**
* Initializes a Google Analytics Data API service object.
*
* @return an authorized Google Analytics Data API settings object.
* @throws IOException
*/
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
return BetaAnalyticsDataSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
.build();
}
/**
* Queries the Google Analytics Data API service.
*
* @param betaAnalyticsDataSettings the Analytics Data API service settings.
* @return the Analytics Data API responses, keyed by property ID.
* @throws IOException
*/
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
for(String propertyId : viewIDs) {
List<RunReportResponse> gReportResponses = new ArrayList<>();
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
RunReportRequest request =
RunReportRequest.newBuilder()
.setProperty("properties/" + propertyId)
.addDimensions(Dimension.newBuilder().setName("pagePath"))
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
.addDateRanges(dateRangeBuilder)
.build();
// Make the request.
RunReportResponse response = analyticsData.runReport(request);
gReportResponses.add(response);
reports.put(propertyId, gReportResponses);
}
}
return reports;
}
/**
* Parses the Analytics Data API service response.
*
* @param responses the Analytics Data API responses for a single property.
*/
private static List<VREAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses) {
logger.debug("parsing Response for propertyID=" + viewId);
List<VREAccessesReportRow> toReturn = new ArrayList<>();
for (RunReportResponse response : responses) {
for (Row row: response.getRowsList()) {
String dimension = row.getDimensionValues(0).getValue();
String metric = row.getMetricValues(0).getValue();
VREAccessesReportRow var = new VREAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
if (pagePath.startsWith("/group") || pagePath.startsWith("/web")) {
var.setPagePath(dimension);
validEntry = true;
}
if (validEntry) {
var.setVisitNumber(Integer.parseInt(metric));
toReturn.add(var);
}
}
}
return toReturn;
}
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
throws Exception {
String scope = infrastructureScope;
String currScope = ScopeProvider.instance.get();
ScopeProvider.instance.set(scope);
SimpleQuery query = queryFor(ServiceEndpoint.class);
query.addCondition("$resource/Profile/Category/text() eq '" + SERVICE_ENDPOINT_CATEGORY + "'");
query.addCondition("$resource/Profile/Name/text() eq '" + SERVICE_ENDPOINT_NAME + "'");
DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
List<ServiceEndpoint> toReturn = client.submit(query);
ScopeProvider.instance.set(currScope);
return toReturn;
}
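// Editor's note: the two conditions above amount to asking the IS for ServiceEndpoints
// whose Profile/Category is "OnlineService" and whose Profile/Name is
// "GA4AnalyticsDataService"; saving and restoring ScopeProvider around the query keeps
// the lookup free of side effects for callers running under a different scope.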
/**
* Retrieves the Google Analytics credentials registered in the IS for the current context.
* @throws Exception
*/
private static AnalyticsReportCredentials getAuthorisedApplicationInfoFromIs() throws Exception {
AnalyticsReportCredentials reportCredentials = new AnalyticsReportCredentials();
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
try {
List<ServiceEndpoint> list = getAnalyticsReportingConfigurationFromIS(context);
if(list.size() > 1) {
logger.error("Too many Service Endpoints having name " + SERVICE_ENDPOINT_NAME
+ " in this scope having Category " + SERVICE_ENDPOINT_CATEGORY);
} else if(list.size() == 0) {
logger.warn("There is no Service Endpoint having name " + SERVICE_ENDPOINT_NAME + " and Category "
+ SERVICE_ENDPOINT_CATEGORY + " in this context: " + context);
} else {
for(ServiceEndpoint res : list) {
Group<AccessPoint> apGroup = res.profile().accessPoints();
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
AccessPoint found = accessPoints[0];
reportCredentials.setClientEmail(found.username());
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
for(Property prop : found.properties()) {
if(prop.name().compareTo(AP_VIEWS_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
String[] views = decryptedValue.split(";");
reportCredentials.setViewIds(Arrays.asList(views));
}
if(prop.name().compareTo(AP_CLIENT_ID) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setClientId(decryptedValue);
}
if(prop.name().compareTo(AP_PRIVATEKEY_ID_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setPrivateKeyId(decryptedValue);
}
}
}
}
} catch(Exception e) {
e.printStackTrace();
return null;
}
return reportCredentials;
}
private static LocalDate asLocalDate(Date date) {
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
}
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
String startDate = asLocalDate(start).format(formatter);
String endDate = asLocalDate(end).format(formatter);
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
return dateRangeBuilder;
}
}

View File

@ -28,7 +28,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
public class MethodInvocationHarvester extends BasicHarvester {
@ -72,7 +71,7 @@ public class MethodInvocationHarvester extends BasicHarvester {
AggregatedServiceUsageRecord.class, temporalConstraint, filters, contexts, true);
}
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
Dimension dimension = getDimension(HarvestedDataKey.METHOD_INVOCATIONS);
if(result != null) {

View File

@ -0,0 +1,313 @@
package org.gcube.dataharvest.harvester;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.encryption.encrypter.StringEncrypter;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
import org.gcube.common.resources.gcore.utils.Group;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.AnalyticsReportCredentials;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.datamodel.VREAccessesReportRow;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.DateRange.Builder;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.ServiceAccountCredentials;
/**
*
* @author Giancarlo Panichi (ISTI CNR)
*
*/
public class RStudioAccessesHarvester extends BasicHarvester {
private static Logger logger = LoggerFactory.getLogger(RStudioAccessesHarvester.class);
private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
private static final String AP_VIEWS_PROPERTY = "views";
private static final String AP_CLIENT_ID = "client_id";
private static final String AP_PRIVATEKEY_ID_PROPERTY = "private_key_id";
private List<VREAccessesReportRow> vreAccesses;
public RStudioAccessesHarvester(Date start, Date end) throws Exception {
super(start, end);
logger.debug("RStudioAccessHArvester: {}, {}", start, end);
vreAccesses = getAllAccesses(start, end);
}
@Override
public List<AccountingRecord> getAccountingRecords() throws Exception {
try {
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
int measure = 0;
ScopeBean scopeBean = new ScopeBean(context);
String lowerCasedContext = scopeBean.name().toLowerCase();
logger.debug("RStudioAccessHArvester lowerCasedContext: {}", lowerCasedContext);
for (VREAccessesReportRow row : vreAccesses) {
String pagePath = row.getPagePath().toLowerCase();
if (pagePath != null && !pagePath.isEmpty()) {
if (pagePath.contains(lowerCasedContext)) {
if (!pagePath.contains("catalogue")) {
if (pagePath.contains("rstudio") || pagePath.contains("r-studio")) {
logger.trace("Matched rstudio or rstudio ({}) : {}", lowerCasedContext, pagePath);
measure += row.getVisitNumber();
}
}
}
}
}
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
if (measure > 0) {
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant,
getDimension(HarvestedDataKey.RSTUDIO_ACCESSES), (long) measure);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
accountingRecords.add(ar);
}
return accountingRecords;
} catch (Exception e) {
throw e;
}
}
/**
*
* @return a list of {@link VREAccessesReportRow} objects containing the
* pagePath and the visit number e.g. VREAccessesReportRow
* [pagePath=/group/agroclimaticmodelling/add-new-users,
* visitNumber=1] VREAccessesReportRow
* [pagePath=/group/agroclimaticmodelling/administration,
* visitNumber=2] VREAccessesReportRow
* [pagePath=/group/agroclimaticmodelling/agroclimaticmodelling,
* visitNumber=39]
*/
private static List<VREAccessesReportRow> getAllAccesses(Date start, Date end) throws Exception {
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
logger.debug("Getting accesses in this time range {}", dateRangeBuilder.toString());
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
logger.debug("Getting credentials credentialsFromD4S");
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
logger.debug("initializeAnalyticsReporting service settings");
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
List<VREAccessesReportRow> totalAccesses = new ArrayList<>();
for(String view : responses.keySet()) {
List<VREAccessesReportRow> viewReport = parseResponse(view, responses.get(view));
logger.debug("Got {} entries from view id={}", viewReport.size(), view);
totalAccesses.addAll(viewReport);
}
logger.debug("Merged in {} total entries from all views", totalAccesses.size());
return totalAccesses;
}
/**
* Initializes a Google Analytics Data API service object.
*
* @return an authorized Google Analytics Data API settings object.
* @throws IOException
*/
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
return BetaAnalyticsDataSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
.build();
}
/**
* Queries the Google Analytics Data API service.
*
* @param betaAnalyticsDataSettings the Analytics Data API service settings.
* @return the Analytics Data API responses, keyed by property ID.
* @throws IOException
*/
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
for(String propertyId : viewIDs) {
List<RunReportResponse> gReportResponses = new ArrayList<>();
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
RunReportRequest request =
RunReportRequest.newBuilder()
.setProperty("properties/" + propertyId)
.addDimensions(Dimension.newBuilder().setName("pagePath"))
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
.addDateRanges(dateRangeBuilder)
.build();
// Make the request.
RunReportResponse response = analyticsData.runReport(request);
gReportResponses.add(response);
reports.put(propertyId, gReportResponses);
}
}
return reports;
}
/**
* Parses the Analytics Data API service response.
*
* @param responses the Analytics Data API responses for a single property.
*/
private static List<VREAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses) {
logger.debug("parsing Response for propertyID=" + viewId);
List<VREAccessesReportRow> toReturn = new ArrayList<>();
for (RunReportResponse response : responses) {
for (Row row: response.getRowsList()) {
String dimension = row.getDimensionValues(0).getValue();
String metric = row.getMetricValues(0).getValue();
VREAccessesReportRow var = new VREAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
if (pagePath.startsWith("/group") || pagePath.startsWith("/web")) {
var.setPagePath(dimension);
validEntry = true;
}
if (validEntry) {
var.setVisitNumber(Integer.parseInt(metric));
toReturn.add(var);
}
}
}
return toReturn;
}
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
throws Exception {
String scope = infrastructureScope;
String currScope = ScopeProvider.instance.get();
ScopeProvider.instance.set(scope);
SimpleQuery query = queryFor(ServiceEndpoint.class);
query.addCondition("$resource/Profile/Category/text() eq '" + SERVICE_ENDPOINT_CATEGORY + "'");
query.addCondition("$resource/Profile/Name/text() eq '" + SERVICE_ENDPOINT_NAME + "'");
DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
List<ServiceEndpoint> toReturn = client.submit(query);
ScopeProvider.instance.set(currScope);
return toReturn;
}
/**
* Retrieves the Google Analytics credentials registered in the IS for the current context.
* @throws Exception
*/
private static AnalyticsReportCredentials getAuthorisedApplicationInfoFromIs() throws Exception {
AnalyticsReportCredentials reportCredentials = new AnalyticsReportCredentials();
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
try {
List<ServiceEndpoint> list = getAnalyticsReportingConfigurationFromIS(context);
if(list.size() > 1) {
logger.error("Too many Service Endpoints having name " + SERVICE_ENDPOINT_NAME
+ " in this scope having Category " + SERVICE_ENDPOINT_CATEGORY);
} else if(list.size() == 0) {
logger.warn("There is no Service Endpoint having name " + SERVICE_ENDPOINT_NAME + " and Category "
+ SERVICE_ENDPOINT_CATEGORY + " in this context: " + context);
} else {
for(ServiceEndpoint res : list) {
Group<AccessPoint> apGroup = res.profile().accessPoints();
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
AccessPoint found = accessPoints[0];
reportCredentials.setClientEmail(found.username());
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
for(Property prop : found.properties()) {
if(prop.name().compareTo(AP_VIEWS_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
String[] views = decryptedValue.split(";");
reportCredentials.setViewIds(Arrays.asList(views));
}
if(prop.name().compareTo(AP_CLIENT_ID) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setClientId(decryptedValue);
}
if(prop.name().compareTo(AP_PRIVATEKEY_ID_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setPrivateKeyId(decryptedValue);
}
}
}
}
} catch(Exception e) {
e.printStackTrace();
return null;
}
return reportCredentials;
}
private static LocalDate asLocalDate(Date date) {
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
}
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
String startDate = asLocalDate(start).format(formatter);
String endDate = asLocalDate(end).format(formatter);
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
return dateRangeBuilder;
}
}

View File

@ -1,6 +1,5 @@
package org.gcube.dataharvest.harvester;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@ -10,8 +9,8 @@ import org.gcube.accounting.accounting.summary.access.model.update.AccountingRec
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.utils.Utils;
import org.json.JSONArray;
import org.json.JSONObject;
import org.gcube.portal.databook.shared.Feed;
import org.gcube.social_networking.social_networking_client_library.PostClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -19,7 +18,7 @@ import org.slf4j.LoggerFactory;
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
public class SocialInteractionsHarvester extends BasicHarvester {
private static Logger logger = LoggerFactory.getLogger(SocialInteractionsHarvester.class);
@ -27,7 +26,7 @@ public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
private int replies;
private int posts;
public static final String PATH = "/2/posts/get-posts-vre?gcube-token=";
// public static final String PATH = "/2/posts/get-posts-vre?gcube-token=";
public SocialInteractionsHarvester(Date start, Date end) throws Exception {
super(start, end);
@ -44,7 +43,7 @@ public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
getJson();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
AccountingRecord likesAR = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.SOCIAL_LIKES), (long) likes);
logger.debug("{} : {}", likesAR.getDimension().getId(), likesAR.getMeasure());
@ -66,30 +65,20 @@ public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
}
private void getJson() throws Exception {
JSONObject jsonObject = getJSONObject(PATH);
Boolean success = (Boolean) jsonObject.get("success");
if(success == false) {
throw new IOException("Erro while getting posts");
}
JSONArray res = jsonObject.getJSONArray("result");
int len = res.length();
PostClient postClient = new PostClient();
List<Feed> vrePosts = postClient.getPostsVRE();
likes = replies = posts = 0;
for(int i = 0; i < len; i++) {
for(Feed feed : vrePosts) {
JSONObject item = res.getJSONObject(i);
long time = item.getLong("time");
long time = feed.getTime().getTime();
if(start.getTime() <= time && time <= end.getTime()) {
posts++;
replies += item.getInt("comments_no");
likes += item.getInt("likes_no");
replies += Integer.valueOf(feed.getCommentsNo());
likes += Integer.valueOf(feed.getLikesNo());
}
}
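// Editor's note (assumption about the Feed model): getCommentsNo()/getLikesNo() appear to
// expose counters as strings, hence the Integer.valueOf conversions; only posts whose
// creation time falls within [start, end] contribute to posts, replies and likes, which
// mirrors the time filter of the removed JSON-based implementation.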

View File

@ -1,58 +0,0 @@
package org.gcube.dataharvest.harvester;
import java.util.Date;
import java.util.List;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.clients.exceptions.DiscoveryException;
import org.gcube.common.resources.gcore.GCoreEndpoint;
import org.gcube.dataharvest.utils.Utils;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.gcube.resources.discovery.icclient.ICFactory;
import org.json.JSONObject;
public abstract class SocialNetworkingHarvester extends BasicHarvester {
public SocialNetworkingHarvester(Date start, Date end) throws Exception {
super(start, end);
}
public static String CLASS_FORMAT = "$resource/Profile/ServiceClass/text() eq '%1s'";
public static String NAME_FORMAT = "$resource/Profile/ServiceName/text() eq '%1s'";
public static String STATUS_FORMAT = "$resource/Profile/DeploymentData/Status/text() eq 'ready'";
public static String CONTAINS_FORMAT = "$entry/@EntryName eq '%1s'";
public static String SERVICE_CLASS = "Portal";
public static String SERVICE_NAME = "SocialNetworking";
public static String ENTRY_NAME = "jersey-servlet";
protected SimpleQuery getGCoreEndpointQuery() {
return ICFactory.queryFor(GCoreEndpoint.class)
.addCondition(String.format(CLASS_FORMAT, SERVICE_CLASS))
.addCondition(String.format(NAME_FORMAT, SERVICE_NAME))
.addCondition(String.format(STATUS_FORMAT))
.addVariable("$entry", "$resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint")
.addCondition(String.format(CONTAINS_FORMAT, ENTRY_NAME))
.setResult("$entry/text()");
}
protected String getAddress() {
SimpleQuery gCoreEndpointQuery = getGCoreEndpointQuery();
List<String> addresses = ICFactory.client().submit(gCoreEndpointQuery);
if(addresses.size()==0) {
throw new DiscoveryException("No running Social Networking Service");
}
return addresses.get(0);
}
protected JSONObject getJSONObject(String path) throws Exception {
String token = SecurityTokenProvider.instance.get();
String baseAddress = getAddress();
StringBuffer sb = new StringBuffer(baseAddress);
sb.append(path);
sb.append(token);
return new JSONObject(Utils.getJson(sb.toString()));
}
}

View File

@ -4,22 +4,13 @@ import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.security.GeneralSecurityException;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@ -42,56 +33,48 @@ import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential.Builder;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.googleapis.util.Utils;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.client.util.PemReader;
import com.google.api.client.util.PemReader.Section;
import com.google.api.client.util.SecurityUtils;
import com.google.api.services.analyticsreporting.v4.AnalyticsReporting;
import com.google.api.services.analyticsreporting.v4.AnalyticsReportingScopes;
import com.google.api.services.analyticsreporting.v4.model.DateRange;
import com.google.api.services.analyticsreporting.v4.model.DateRangeValues;
import com.google.api.services.analyticsreporting.v4.model.GetReportsRequest;
import com.google.api.services.analyticsreporting.v4.model.GetReportsResponse;
import com.google.api.services.analyticsreporting.v4.model.Metric;
import com.google.api.services.analyticsreporting.v4.model.Report;
import com.google.api.services.analyticsreporting.v4.model.ReportRequest;
import com.google.api.services.analyticsreporting.v4.model.ReportRow;
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.DateRange.Builder;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.ServiceAccountCredentials;
public class VREAccessesHarvester extends BasicHarvester {
private static Logger logger = LoggerFactory.getLogger(VREAccessesHarvester.class);
private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
// private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
private static final String SERVICE_ENDPOINT_NAME = "BigGAnalyticsReportService";
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
private static final String AP_VIEWS_PROPERTY = "views";
private static final String AP_CLIENT_PROPERTY = "clientId";
private static final String AP_PRIVATEKEY_PROPERTY = "privateKeyId";
private static final String APPLICATION_NAME = "Analytics Reporting";
private static final String AP_CLIENT_ID = "client_id";
private static final String AP_PRIVATEKEY_ID_PROPERTY = "private_key_id";
private List<VREAccessesReportRow> vreAccesses;
public VREAccessesHarvester(Date start, Date end) throws Exception {
super(start, end);
vreAccesses = getAllAccesses(start, end);
}
@Override
public List<AccountingRecord> getAccountingRecords() throws Exception {
try {
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
int measure = 0;
ScopeBean scopeBean = new ScopeBean(context);
String lowerCasedContext = scopeBean.name().toLowerCase();
String case1 = lowerCasedContext + "/";
@@ -100,28 +83,28 @@ public class VREAccessesHarvester extends BasicHarvester {
String pagePath = row.getPagePath();
if (!pagePath.contains("_redirect=/group") && !pagePath.contains("workspace")) {
if(pagePath.endsWith(lowerCasedContext)) {
logger.trace("Matched endsWith({}) : {}", lowerCasedContext, pagePath);
logger.debug("Matched endsWith({}) : {}", lowerCasedContext, pagePath);
measure += row.getVisitNumber();
} else if(pagePath.contains(case1) || pagePath.contains(case2)) {
logger.trace("Matched contains({}) || contains({}) : {}", case1, case2, pagePath);
logger.debug("Matched contains({}) || contains({}) : {}", case1, case2, pagePath);
measure += row.getVisitNumber();
}
}
}
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.ACCESSES), (long) measure);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
accountingRecords.add(ar);
return accountingRecords;
} catch(Exception e) {
throw e;
}
}
/**
*
* @return a list of {@link VREAccessesReportRow} objects containing the pagePath and the visit number e.g.
@@ -130,182 +113,113 @@ public class VREAccessesHarvester extends BasicHarvester {
* VREAccessesReportRow [pagePath=/group/agroclimaticmodelling/agroclimaticmodelling, visitNumber=39]
*/
private static List<VREAccessesReportRow> getAllAccesses(Date start, Date end) throws Exception {
DateRange dateRange = getDateRangeForAnalytics(start, end);
logger.trace("Getting accesses in this time range {}", dateRange.toPrettyString());
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
logger.debug("Getting accesses in this time range {}", dateRangeBuilder.toString());
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
AnalyticsReporting service = initializeAnalyticsReporting(credentialsFromD4S);
HashMap<String,List<GetReportsResponse>> responses = getReportResponses(service, credentialsFromD4S.getViewIds(), dateRange);
List<VREAccessesReportRow> totalAccesses = new ArrayList<>();
logger.debug("Getting credentials credentialsFromD4S");
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
logger.debug("initializeAnalyticsReporting service settings");
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
List<VREAccessesReportRow> totalAccesses = new ArrayList<>();
for(String view : responses.keySet()) {
List<VREAccessesReportRow> viewReport = parseResponse(view, responses.get(view));
logger.trace("Got {} entries from view id={}", viewReport.size(), view);
logger.debug("Got {} entries from view id={}", viewReport.size(), view);
totalAccesses.addAll(viewReport);
}
logger.trace("Merged in {} total entries from all views", totalAccesses.size());
logger.debug("Merged in {} total entries from all views", totalAccesses.size());
return totalAccesses;
}
/**
* Initializes an Analytics Reporting API V4 service object.
* Initializes a Google Analytics Data API service object.
*
* @return An authorized Analytics Reporting API V4 service object.
* @return An authorized Google Analytics Data API settings object.
* @throws IOException
* @throws GeneralSecurityException
*/
private static AnalyticsReporting initializeAnalyticsReporting(AnalyticsReportCredentials cred)
throws GeneralSecurityException, IOException {
HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
GoogleCredential credential = fromD4SServiceEndpoint(cred).createScoped(AnalyticsReportingScopes.all());
// Construct the Analytics Reporting service object.
return new AnalyticsReporting.Builder(httpTransport, JSON_FACTORY, credential)
.setApplicationName(APPLICATION_NAME).build();
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
return BetaAnalyticsDataSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
.build();
}
/**
* Queries the Analytics Reporting API V4.
* Queries the Analytics Data API service.
*
* @param service An authorized Analytics Reporting API V4 service object.
* @return GetReportResponse The Analytics Reporting API V4 response.
* @param service the Analytics Data API service settings.
* @return the Analytics Data API responses, keyed by GA4 property id.
* @throws IOException
*/
private static HashMap<String,List<GetReportsResponse>> getReportResponses(AnalyticsReporting service,
List<String> viewIDs, DateRange dateRange) throws IOException {
HashMap<String,List<GetReportsResponse>> reports = new HashMap<>();
// Create the Metrics object.
Metric sessions = new Metric().setExpression("ga:pageviews").setAlias("pages");
com.google.api.services.analyticsreporting.v4.model.Dimension pageTitle = new com.google.api.services.analyticsreporting.v4.model.Dimension().setName("ga:pagePath");
for(String view : viewIDs) {
List<GetReportsResponse> gReportResponses = new ArrayList<>();
logger.info("Getting data from Google Analytics for viewid: " + view);
boolean iterateMorePages = true;
String nextPageToken = null;
while (iterateMorePages) {
// Create the ReportRequest object.
ReportRequest request = new ReportRequest().setViewId(view.trim()).setDateRanges(Arrays.asList(dateRange))
.setMetrics(Arrays.asList(sessions)).setDimensions(Arrays.asList(pageTitle));
request.setPageSize(1000);
request.setPageToken(nextPageToken);
ArrayList<ReportRequest> requests = new ArrayList<ReportRequest>();
requests.add(request);
// Create the GetReportsRequest object.
GetReportsRequest getReport = new GetReportsRequest().setReportRequests(requests);
// Call the batchGet method.
GetReportsResponse response = service.reports().batchGet(getReport).execute();
nextPageToken = response.getReports().get(0).getNextPageToken();
iterateMorePages = (nextPageToken != null);
logger.debug("got nextPageToken: "+nextPageToken);
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
for(String propertyId : viewIDs) {
List<RunReportResponse> gReportResponses = new ArrayList<>();
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
RunReportRequest request =
RunReportRequest.newBuilder()
.setProperty("properties/" + propertyId)
.addDimensions(Dimension.newBuilder().setName("pagePath"))
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
.addDateRanges(dateRangeBuilder)
.build();
// Make the request.
RunReportResponse response = analyticsData.runReport(request);
gReportResponses.add(response);
// Iterate through every row of the API response.
// for (Row row : response.getRowsList()) {
// System.out.printf(
// "%s, %s%n", row.getDimensionValues(0).getValue(), row.getMetricValues(0).getValue());
// }
reports.put(propertyId, gReportResponses);
}
reports.put(view, gReportResponses);
}
// Return the response.
return reports;
}
 /**
- * Parses and prints the Analytics Reporting API V4 response.
+ * Parses and prints the Analytics Data API service response.
  *
- * @param response An Analytics Reporting API V4 response.
+ * @param response An Analytics Data API service response.
  */
-/**
- * Parses and prints the Analytics Reporting API V4 response.
- *
- * @param response An Analytics Reporting API V4 response.
- */
-private static List<VREAccessesReportRow> parseResponse(String viewId, List<GetReportsResponse> responses) {
-	logger.debug("parsing Response for " + viewId);
+private static List<VREAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses) {
+	logger.debug("parsing Response for propertyID=" + viewId);
 	List<VREAccessesReportRow> toReturn = new ArrayList<>();
-	for (GetReportsResponse response : responses) {
-		for (Report report: response.getReports()) {
-			List<ReportRow> rows = report.getData().getRows();
-			if (rows == null) {
-				logger.warn("No data found for " + viewId);
-			}
-			else {
-				for (ReportRow row: rows) {
-					String dimension = row.getDimensions().get(0);
-					DateRangeValues metric = row.getMetrics().get(0);
-					VREAccessesReportRow var = new VREAccessesReportRow();
-					boolean validEntry = false;
-					String pagePath = dimension;
-					if (pagePath.startsWith("/group") || pagePath.startsWith("/web")) {
-						var.setPagePath(dimension);
-						validEntry = true;
-					}
-					if (validEntry) {
-						var.setVisitNumber(Integer.parseInt(metric.getValues().get(0)));
-						toReturn.add(var);
-					}
-				}
-			}
+	for (RunReportResponse response : responses) {
+		for (Row row: response.getRowsList()) {
+			String dimension = row.getDimensionValues(0).getValue();
+			String metric = row.getMetricValues(0).getValue();
+			VREAccessesReportRow var = new VREAccessesReportRow();
+			boolean validEntry = false;
+			String pagePath = dimension;
+			if (pagePath.startsWith("/group") || pagePath.startsWith("/web")) {
+				var.setPagePath(dimension);
+				validEntry = true;
+			}
+			if (validEntry) {
+				var.setVisitNumber(Integer.parseInt(metric));
+				toReturn.add(var);
+			}
+			//System.out.printf("%s, %s%n", row.getDimensionValues(0).getValue(), row.getMetricValues(0).getValue());
 		}
 	}
return toReturn;
}
private static GoogleCredential fromD4SServiceEndpoint(AnalyticsReportCredentials cred) throws IOException {
String clientId = cred.getClientId();
String clientEmail = cred.getClientEmail();
String privateKeyPem = cred.getPrivateKeyPem();
String privateKeyId = cred.getPrivateKeyId();
String tokenUri = cred.getTokenUri();
String projectId = cred.getProjectId();
if(clientId == null || clientEmail == null || privateKeyPem == null || privateKeyId == null) {
throw new IOException("Error reading service account credential from stream, "
+ "expecting 'client_id', 'client_email', 'private_key' and 'private_key_id'.");
}
PrivateKey privateKey = privateKeyFromPkcs8(privateKeyPem);
Collection<String> emptyScopes = Collections.emptyList();
Builder credentialBuilder = new GoogleCredential.Builder().setTransport(Utils.getDefaultTransport())
.setJsonFactory(Utils.getDefaultJsonFactory()).setServiceAccountId(clientEmail)
.setServiceAccountScopes(emptyScopes).setServiceAccountPrivateKey(privateKey)
.setServiceAccountPrivateKeyId(privateKeyId);
if(tokenUri != null) {
credentialBuilder.setTokenServerEncodedUrl(tokenUri);
}
if(projectId != null) {
credentialBuilder.setServiceAccountProjectId(projectId);
}
// Don't do a refresh at this point, as it will always fail before the scopes are added.
return credentialBuilder.build();
}
private static PrivateKey privateKeyFromPkcs8(String privateKeyPem) throws IOException {
Reader reader = new StringReader(privateKeyPem);
Section section = PemReader.readFirstSectionAndClose(reader, "PRIVATE KEY");
if(section == null) {
throw new IOException("Invalid PKCS8 data.");
}
byte[] bytes = section.getBase64DecodedBytes();
PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(bytes);
Exception unexpectedException = null;
try {
KeyFactory keyFactory = SecurityUtils.getRsaKeyFactory();
PrivateKey privateKey = keyFactory.generatePrivate(keySpec);
return privateKey;
} catch(NoSuchAlgorithmException exception) {
unexpectedException = exception;
} catch(InvalidKeySpecException exception) {
unexpectedException = exception;
}
throw new IOException("Unexpected exception reading PKCS data", unexpectedException);
}
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
throws Exception {
String scope = infrastructureScope;
@@ -319,14 +233,13 @@ public class VREAccessesHarvester extends BasicHarvester {
ScopeProvider.instance.set(currScope);
return toReturn;
}
/**
* @throws Exception
*/
private static AnalyticsReportCredentials getAuthorisedApplicationInfoFromIs() throws Exception {
AnalyticsReportCredentials reportCredentials = new AnalyticsReportCredentials();
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
try {
List<ServiceEndpoint> list = getAnalyticsReportingConfigurationFromIS(context);
@@ -337,26 +250,26 @@ public class VREAccessesHarvester extends BasicHarvester {
logger.warn("There is no Service Endpoint having name " + SERVICE_ENDPOINT_NAME + " and Category "
+ SERVICE_ENDPOINT_CATEGORY + " in this context: " + context);
} else {
for(ServiceEndpoint res : list) {
reportCredentials.setTokenUri(res.profile().runtime().hostedOn());
Group<AccessPoint> apGroup = res.profile().accessPoints();
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
AccessPoint found = accessPoints[0];
reportCredentials.setClientEmail(found.address());
reportCredentials.setProjectId(found.username());
reportCredentials.setPrivateKeyPem(StringEncrypter.getEncrypter().decrypt(found.password()));
reportCredentials.setClientEmail(found.username());
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
for(Property prop : found.properties()) {
if(prop.name().compareTo(AP_VIEWS_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
String[] views = decryptedValue.split(";");
reportCredentials.setViewIds(Arrays.asList(views));
}
if(prop.name().compareTo(AP_CLIENT_PROPERTY) == 0) {
if(prop.name().compareTo(AP_CLIENT_ID) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setClientId(decryptedValue);
}
if(prop.name().compareTo(AP_PRIVATEKEY_PROPERTY) == 0) {
if(prop.name().compareTo(AP_PRIVATEKEY_ID_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setPrivateKeyId(decryptedValue);
}
@@ -369,19 +282,18 @@ public class VREAccessesHarvester extends BasicHarvester {
}
return reportCredentials;
}
private static LocalDate asLocalDate(Date date) {
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
}
private static DateRange getDateRangeForAnalytics(Date start, Date end) {
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
String startDate = asLocalDate(start).format(formatter);
String endDate = asLocalDate(end).format(formatter);
DateRange dateRange = new DateRange();// date format `yyyy-MM-dd`
dateRange.setStartDate(startDate);
dateRange.setEndDate(endDate);
return dateRange;
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
return dateRangeBuilder;
}
}
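
The hunks above migrate VREAccessesHarvester from the Universal Analytics Reporting API V4 to the GA4 Data API (BetaAnalyticsDataClient). For reference, a minimal self-contained sketch of the same call pattern; the property id, service-account fields and date range are placeholders, everything else mirrors the calls shown in the diff:

import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.ServiceAccountCredentials;

public class Ga4ReportSketch {
	public static void main(String[] args) throws Exception {
		// Placeholder service-account material; the harvester reads the real
		// values from the GA4AnalyticsDataService ServiceEndpoint.
		BetaAnalyticsDataSettings settings = BetaAnalyticsDataSettings.newBuilder()
				.setCredentialsProvider(FixedCredentialsProvider.create(
						ServiceAccountCredentials.fromPkcs8(
								"client-id", "sa@example.iam.gserviceaccount.com",
								"-----BEGIN PRIVATE KEY-----...", "private-key-id", null)))
				.build();
		try (BetaAnalyticsDataClient client = BetaAnalyticsDataClient.create(settings)) {
			RunReportRequest request = RunReportRequest.newBuilder()
					.setProperty("properties/123456789") // GA4 property id, not a UA view id
					.addDimensions(Dimension.newBuilder().setName("pagePath"))
					.addMetrics(Metric.newBuilder().setName("screenPageViews"))
					.addDateRanges(DateRange.newBuilder()
							.setStartDate("2023-01-01").setEndDate("2023-01-31"))
					.build();
			RunReportResponse response = client.runReport(request);
			for (Row row : response.getRowsList()) {
				System.out.printf("%s -> %s%n",
						row.getDimensionValues(0).getValue(),
						row.getMetricValues(0).getValue());
			}
		}
	}
}

Note that the removed V4 code paged with nextPageToken, while the new getReportResponses issues a single runReport per property; the GA4 Data API pages with limit/offset instead, which the rewritten code does not set, so each report is capped at the API's default row limit.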

View File

@@ -1,6 +1,5 @@
package org.gcube.dataharvest.harvester;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@@ -9,7 +8,7 @@ import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.json.JSONObject;
import org.gcube.social_networking.social_networking_client_library.UserClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -17,7 +16,7 @@ import org.slf4j.LoggerFactory;
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
public class VREUsersHarvester extends SocialNetworkingHarvester {
public class VREUsersHarvester extends BasicHarvester {
private static Logger logger = LoggerFactory.getLogger(VREUsersHarvester.class);
@@ -31,12 +30,13 @@ public class VREUsersHarvester extends SocialNetworkingHarvester {
@Override
public List<AccountingRecord> getAccountingRecords() throws Exception {
try {
// String context = Utils.getCurrentContext();
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
int measure = get();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.USERS), (long) measure);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
@@ -50,17 +50,8 @@ public class VREUsersHarvester extends SocialNetworkingHarvester {
}
private int get() throws Exception {
JSONObject jsonObject = getJSONObject(PATH);
int userNumber = 0;
Boolean success = (Boolean) jsonObject.get("success");
if(success == false) {
throw new IOException("Error while getting VRE Users");
}
userNumber = jsonObject.getJSONArray("result").length();
return userNumber;
UserClient userClient = new UserClient();
return userClient.getAllUsernamesContext().size();
}
}
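
VREUsersHarvester now counts users through the social-networking-client library instead of parsing the JSON returned by the Social Networking service. Reduced to a sketch (this assumes the caller has already set the target context with the secret-based utilities shown further below):

import org.gcube.social_networking.social_networking_client_library.UserClient;

public class VREUserCountSketch {
	// Returns the USERS measure for the current context: the size of the
	// username list served by the social networking service.
	public static int countUsersInCurrentContext() throws Exception {
		UserClient userClient = new UserClient();
		return userClient.getAllUsernamesContext().size();
	}
}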

View File

@@ -1,196 +0,0 @@
package org.gcube.dataharvest.harvester.sobigdata;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.SortedSet;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.storagehub.client.dsl.ContainerType;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.client.dsl.ItemContainer;
import org.gcube.common.storagehub.client.dsl.ListResolverTyped;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import org.gcube.common.storagehub.model.items.FolderItem;
import org.gcube.common.storagehub.model.items.Item;
import org.gcube.common.storagehub.model.items.nodes.Accounting;
import org.gcube.common.storagehub.model.items.nodes.accounting.AccountEntry;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.utils.DateUtils;
import org.gcube.dataharvest.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The Class DataMethodDownloadHarvester.
*
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
* @author Francesco Mangiacrapa (ISTI - CNR)
*/
public class DataMethodDownloadHarvester extends SoBigDataHarvester {
private static Logger logger = LoggerFactory.getLogger(DataMethodDownloadHarvester.class);
private int count = 0;
/**
* Instantiates a new data method download harvester.
*
* @param start the start
* @param end the end
* @param contexts the contexts
* @throws ParseException the parse exception
*/
public DataMethodDownloadHarvester(Date start, Date end, SortedSet<String> contexts) throws Exception {
super(start, end, contexts);
}
/* (non-Javadoc)
* @see org.gcube.dataharvest.harvester.BasicHarvester#getData()
*/
@Override
public List<AccountingRecord> getAccountingRecords() throws Exception {
String defaultContext = Utils.getCurrentContext();
logger.debug("The context is {}", defaultContext);
try {
/*
String vreName = getVRENameToHL(defaultContext);
logger.debug("Getting VRE Name to HL from context/scope returns {} ", vreName);
String user = vreName + "-Manager";
logger.debug("Using user '{}' to getHome from HL", user);
//Getting HL instance and home for VRE MANAGER
HomeManager manager = HomeLibrary.getHomeManagerFactory().getHomeManager();
@SuppressWarnings("deprecation")
Home home = manager.getHome(user);
JCRWorkspace ws = (JCRWorkspace) home.getWorkspace();
String path = "/Workspace/MySpecialFolders/" + vreName;
logger.debug("Getting item by Path {}", path);
JCRWorkspaceItem item = (JCRWorkspaceItem) ws.getItemByPath(path);
*/
StorageHubClient storageHubClient = new StorageHubClient();
FolderContainer vreFolderContainer = storageHubClient.openVREFolder();
FolderItem vreFolderItem = vreFolderContainer.get();
logger.debug("Analyzing {} in the period [{} to {}] starting from root {}", defaultContext,
DateUtils.format(start), DateUtils.format(end), vreFolderItem.getName());
ScopeDescriptor defaultScopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor();
AccountingRecord defaultHarvesteData = new AccountingRecord(defaultScopeDescriptor, instant, getDimension(HarvestedDataKey.DATA_METHOD_DOWNLOAD), (long) count);
logger.debug("{} : {}", defaultHarvesteData.getDimension().getId(), defaultHarvesteData.getMeasure());
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
ListResolverTyped listResolverTyped = vreFolderContainer.list();
List<ItemContainer<? extends Item>> containers = listResolverTyped.includeHidden().getContainers();
for(ItemContainer<? extends Item> itemContainer : containers) {
count = 0; //resettings the counter
//HarvestedData harvestedData;
//Getting statistics for folder
if(itemContainer.getType() == ContainerType.FOLDER) {
Item item = itemContainer.get();
logger.debug("Getting statistics for folder {}", item.getName());
getStats(itemContainer, start, end);
String normalizedName = item.getName().replaceAll("[^A-Za-z0-9]", "");
String context = mapWsFolderNameToVRE.get(normalizedName);
//Checking if it is a VRE name to right accounting...
if(context != null && !context.isEmpty()) {
logger.debug("Found context '{}' matching with normalized VRE name {} ", context, normalizedName);
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.DATA_METHOD_DOWNLOAD), (long) count);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
accountingRecords.add(ar);
} else {
logger.debug(
"No scope found matching the folder name {}, accounting its stats in the default context {}",
normalizedName, defaultContext);
//INCREASING THE DEFAULT CONTEXT COUNTER...
defaultHarvesteData.setMeasure(defaultHarvesteData.getMeasure() + count);
logger.trace("Increased default context stats {}", defaultHarvesteData);
}
}
}
//ADDING DEFAULT ACCOUNTING
accountingRecords.add(defaultHarvesteData);
logger.debug("In the period [from {} to {} ] returning workspace accounting data {}", DateUtils.format(start),
DateUtils.format(end), accountingRecords);
return accountingRecords;
} catch(Exception e) {
throw e;
}
}
/**
* Gets the stats.
*
* @param baseItem the base item
* @param start the start
* @param end the end
* @return the stats
* @throws InternalErrorException the internal error exception
*/
private void getStats(ItemContainer<? extends Item> itemContainer, Date start, Date end) throws Exception {
if(itemContainer.getType() == ContainerType.FOLDER) {
ListResolverTyped listResolverTyped = ((FolderContainer)itemContainer).list();
List<ItemContainer<? extends Item>> containers = listResolverTyped.includeHidden().getContainers();
for(ItemContainer<? extends Item> itemCont : containers) {
getStats(itemCont , start, end);
}
} else {
try {
Accounting accounting = itemContainer.get().getAccounting();
for(AccountEntry entry : accounting.getEntries()) {
switch(entry.getType()) {
case CREATE:
case UPDATE:
case READ:
Calendar calendar = entry.getDate();
if(calendar.after(DateUtils.dateToCalendar(start))
&& calendar.before(DateUtils.dateToCalendar(end))) {
count++;
}
break;
default:
break;
}
}
} catch(Exception e) {
throw e;
}
}
}
}
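
The deleted DataMethodDownloadHarvester above is, at its core, a recursive walk of the VRE folder: open it via StorageHubClient, list containers including hidden ones, recurse into folders, and inspect the accounting entries of leaf items. A condensed sketch of that traversal, limited to the storagehub-client-library calls that appear in the deleted code:

import java.util.List;

import org.gcube.common.storagehub.client.dsl.ContainerType;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.client.dsl.ItemContainer;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import org.gcube.common.storagehub.model.items.Item;

public class VreFolderWalkSketch {

	// Recurse exactly as the deleted getStats() did: folders are descended,
	// non-folder items are where the accounting entries would be inspected.
	public static void walk(FolderContainer folder) throws Exception {
		List<ItemContainer<? extends Item>> containers = folder.list().includeHidden().getContainers();
		for (ItemContainer<? extends Item> container : containers) {
			if (container.getType() == ContainerType.FOLDER) {
				walk((FolderContainer) container);
			} else {
				// container.get().getAccounting().getEntries() ...
			}
		}
	}

	public static void main(String[] args) throws Exception {
		StorageHubClient storageHubClient = new StorageHubClient();
		walk(storageHubClient.openVREFolder());
	}
}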

View File

@@ -1,5 +1,11 @@
package org.gcube.dataharvest.harvester.sobigdata;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Date;
@@ -210,10 +216,27 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
query += "q=" + URLEncoder.encode(q, UTF_8_CHARASET) + "&wt=json&indent=true&rows=" + ROWS;
query += flValue != null && !flValue.isEmpty() ? "&fl=" + URLEncoder.encode(flValue, UTF_8_CHARASET) : "";
logger.debug("\nPerforming query {}", query);
String jsonResult = Utils.getJson(query);
String jsonResult = requestJson(query);
logger.trace("Response is {}", jsonResult);
return jsonResult;
}
public String requestJson(String url) throws MalformedURLException, IOException {
URL address = new URL(url);
HttpURLConnection connection = (HttpURLConnection) address.openConnection();
BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
String json = "";
String line = "";
while(line != null) {
line = reader.readLine();
if(line != null) {
json += line.trim();
}
}
return json;
}
}
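
The requestJson helper added above never closes the reader nor disconnects, and accumulates the body with string concatenation. A functionally equivalent but more defensive variant (a sketch, not the committed code):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class JsonFetchSketch {
	public static String requestJson(String url) throws IOException {
		HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
		StringBuilder json = new StringBuilder();
		try (BufferedReader reader = new BufferedReader(
				new InputStreamReader(connection.getInputStream()))) {
			String line;
			while ((line = reader.readLine()) != null) {
				json.append(line.trim());
			}
		} finally {
			connection.disconnect(); // release the connection in all cases
		}
		return json.toString();
	}
}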

View File

@@ -77,7 +77,7 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
* @throws ObjectNotFound
*/
protected void initMappingMaps() throws ObjectNotFound, Exception {
Properties properties = AccountingDashboardHarvesterPlugin.getProperties().get();
Properties properties = AccountingDashboardHarvesterPlugin.getConfigParameters();
Set<String> keys = properties.stringPropertyNames();
mapSystemTypeToDBEntry = new HashMap<String,String>();

View File

@@ -109,7 +109,7 @@ public class TagMeMethodInvocationHarvester extends BasicHarvester {
}
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
Dimension dimension = getDimension(HarvestedDataKey.METHOD_INVOCATIONS);
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, dimension, numberOfInvocation);

View File

@@ -1,8 +1,5 @@
package org.gcube.dataharvest.utils;
import static org.gcube.common.authorization.client.Constants.authorizationService;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
@@ -10,8 +7,13 @@ import java.util.Properties;
import java.util.SortedSet;
import java.util.TreeSet;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.authorization.library.provider.UserInfo;
import javax.ws.rs.InternalServerErrorException;
import org.gcube.common.authorization.utils.manager.SecretManagerProvider;
import org.gcube.common.authorization.utils.secret.JWTSecret;
import org.gcube.common.authorization.utils.secret.Secret;
import org.gcube.common.keycloak.KeycloakClientFactory;
import org.gcube.common.keycloak.model.TokenResponse;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.resourcemanagement.support.server.managers.context.ContextManager;
@@ -25,88 +27,98 @@ public class ContextAuthorization {
private static Logger logger = LoggerFactory.getLogger(ContextAuthorization.class);
public static final String USERNAME = "USERNAME";
public static final String DEFAULT_USERNAME = "luca.frosini";
public static final String CLIENT_ID = "accounting-dashboard-harvester-se-plugin";
public static final String SERVICE_NAME = "SERVICE_NAME";
public static final String DEFAULT_SERVICE_NAME = "accounting-harvester";
protected String clientSecret;
/**
* Contains Context full name as key and Token as Value
*/
protected Map<String,String> contextToToken;
protected Map<String,Secret> contextToToken;
/**
* Contains Token as key and Context full name as Value
*/
protected Map<String,String> tokenToContext;
protected Map<Secret,String> tokenToContext;
protected Properties properties;
/**
* Contains Properties used to generate tokens
*/
public ContextAuthorization() throws Exception {
public ContextAuthorization(Properties properties) throws Exception {
this.properties = properties;
this.contextToToken = new HashMap<>();
this.tokenToContext = new HashMap<>();
retrieveContextsAndTokens();
}
public String generateTokenForContext(String context, Properties properties) throws Exception {
if(properties==null) {
properties = AccountingDashboardHarvesterPlugin.getProperties().get();
/**
* Contains Properties used to generate tokens
*/
public ContextAuthorization() throws Exception {
this.properties = AccountingDashboardHarvesterPlugin.getConfigParameters();
this.contextToToken = new HashMap<>();
this.tokenToContext = new HashMap<>();
retrieveContextsAndTokens();
}
private String getClientSecret(String context) {
try {
if(clientSecret==null) {
int index = context.indexOf('/', 1);
String root = context.substring(0, index == -1 ? context.length() : index);
clientSecret = properties.getProperty(root);
}
return clientSecret;
} catch(Exception e) {
throw new InternalServerErrorException(
"Unable to retrieve Application Token for context " + SecretManagerProvider.instance.get().getContext(), e);
}
logger.info("Going to generate Token for Context {}", context);
UserInfo userInfo = new UserInfo(properties.getProperty(USERNAME, DEFAULT_USERNAME),
new ArrayList<>());
String userToken = authorizationService().generateUserToken(userInfo, context);
SecurityTokenProvider.instance.set(userToken);
String generatedToken = authorizationService()
.generateExternalServiceToken(properties.getProperty(SERVICE_NAME, DEFAULT_SERVICE_NAME));
logger.trace("Token for Context {} is {}", context, generatedToken);
return generatedToken;
}
private TokenResponse getJWTAccessToken(String context) throws Exception {
TokenResponse tr = KeycloakClientFactory.newInstance().queryUMAToken(context, CLIENT_ID, getClientSecret(context), context, null);
return tr;
}
public Secret getCatalogueSecretForContext(String context) throws Exception {
TokenResponse tr = getJWTAccessToken(context);
Secret secret = new JWTSecret(tr.getAccessToken());
return secret;
}
protected void retrieveContextsAndTokens() throws Exception {
String initialToken = SecurityTokenProvider.instance.get();
try {
Properties properties = AccountingDashboardHarvesterPlugin.getProperties().get();
LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
for(String scope : map.keySet()) {
try {
String context = map.get(scope).toString();
String generatedToken = generateTokenForContext(context, properties);
Secret secret = getCatalogueSecretForContext(context);
contextToToken.put(context, generatedToken);
tokenToContext.put(generatedToken, context);
contextToToken.put(context, secret);
tokenToContext.put(secret, context);
} catch(Exception e) {
logger.error("Error while elaborating {}", scope, e);
throw e;
} finally {
SecurityTokenProvider.instance.reset();
}
// throw e;
}
}
} catch(Exception ex) {
throw ex;
} finally {
SecurityTokenProvider.instance.set(initialToken);
}
}
}
public String getTokenForContext(String contextFullName) {
return contextToToken.get(contextFullName);
public Secret getSecretForContext(String context) {
return contextToToken.get(context);
}
public String getContextFromToken(String token) {
return tokenToContext.get(token);
public String getContextFromSecret(Secret secret) {
return tokenToContext.get(secret);
}
public SortedSet<String> getContexts() {

View File

@@ -91,19 +91,14 @@ public class DateUtils {
aggregationStartCalendar.set(Calendar.MINUTE, 0);
aggregationStartCalendar.set(Calendar.SECOND, 0);
aggregationStartCalendar.set(Calendar.MILLISECOND, 0);
logger.debug("{}", DEFAULT_DATE_FORMAT.format(aggregationStartCalendar.getTime()));
// logger.trace("{}", DEFAULT_DATE_FORMAT.format(aggregationStartCalendar.getTime()));
return aggregationStartCalendar;
}
public static Date getEndDateFromStartDate(AggregationType aggregationType, Date startDate, int offset, boolean partialHarvesting) {
public static Date getEndDateFromStartDate(AggregationType aggregationType, Date startDate, int offset) {
Calendar aggregationEndDate = getUTCCalendarInstance();
if(!partialHarvesting) {
aggregationEndDate.setTimeInMillis(startDate.getTime());
aggregationEndDate.add(aggregationType.getCalendarField(), offset);
aggregationEndDate.add(Calendar.MILLISECOND, -1);
}
aggregationEndDate.setTimeInMillis(startDate.getTime());
aggregationEndDate.add(aggregationType.getCalendarField(), offset);
return aggregationEndDate.getTime();
}
@@ -113,8 +108,6 @@
return calendar;
}
/* OLD functions of Eric Perrone (ISTI - CNR) */
public static String format(Date date) {
return DateUtils.LAUNCH_DATE_FORMAT.format(date);
}
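
The new getEndDateFromStartDate signature drops the partialHarvesting flag and the trailing -1 ms adjustment, so the returned date is the first instant of the next period, i.e. an exclusive upper bound. A usage sketch matching how the tests below call it:

import java.util.Calendar;
import java.util.Date;

import org.gcube.dataharvest.utils.AggregationType;
import org.gcube.dataharvest.utils.DateUtils;

public class DateRangeSketch {
	public static void main(String[] args) {
		Date start = DateUtils.getStartCalendar(2021, Calendar.JANUARY, 1).getTime();
		// One MONTHLY step forward: end is 2021-02-01T00:00:00.000, exclusive.
		Date end = DateUtils.getEndDateFromStartDate(AggregationType.MONTHLY, start, 1);
		System.out.println(DateUtils.format(start) + " -> " + DateUtils.format(end));
	}
}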

View File

@@ -1,17 +1,9 @@
package org.gcube.dataharvest.utils;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import org.gcube.common.authorization.client.Constants;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.common.authorization.library.AuthorizationEntry;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.authorization.utils.manager.SecretManager;
import org.gcube.common.authorization.utils.manager.SecretManagerProvider;
import org.gcube.common.authorization.utils.secret.Secret;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -23,36 +15,16 @@ public class Utils {
private static Logger logger = LoggerFactory.getLogger(Utils.class);
public static String getJson(String url) throws MalformedURLException, IOException {
URL address = new URL(url);
HttpURLConnection connection = (HttpURLConnection) address.openConnection();
BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
String json = "";
String line = "";
while(line != null) {
line = reader.readLine();
if(line != null) {
json += line.trim();
}
}
return json;
}
public static String getCurrentContext() throws ObjectNotFound, Exception {
return getCurrentContext(SecurityTokenProvider.instance.get());
return SecretManagerProvider.instance.get().getContext();
}
public static String getCurrentContext(String token) throws ObjectNotFound, Exception {
AuthorizationEntry authorizationEntry = Constants.authorizationService().get(token);
String context = authorizationEntry.getContext();
logger.info("Context of token {} is {}", token, context);
return context;
}
public static void setContext(String token) throws ObjectNotFound, Exception {
SecurityTokenProvider.instance.set(token);
ScopeProvider.instance.set(getCurrentContext(token));
public static void setContext(Secret secret) throws Exception {
SecretManagerProvider.instance.reset();
SecretManager secretManager = new SecretManager();
SecretManagerProvider.instance.set(secretManager);
secretManager.addSecret(secret);
secretManager.set();
}
}
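
Together with ContextAuthorization, the rewritten setContext completes the per-context loop the plugin runs: resolve a JWT-backed Secret for every context, install it through SecretManagerProvider, then execute the harvesters. A sketch of that loop (harvester invocation elided):

import org.gcube.common.authorization.utils.secret.Secret;
import org.gcube.dataharvest.utils.ContextAuthorization;
import org.gcube.dataharvest.utils.Utils;

public class HarvestLoopSketch {
	public static void run() throws Exception {
		ContextAuthorization contextAuthorization = new ContextAuthorization();
		for (String context : contextAuthorization.getContexts()) {
			Secret secret = contextAuthorization.getSecretForContext(context);
			Utils.setContext(secret); // installs a fresh SecretManager for this context
			// ... instantiate and run the harvesters for this context ...
		}
	}
}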

View File

@@ -0,0 +1 @@
org.gcube.dataharvest.AccountingDashboardHarvesterPlugin

View File

@@ -1 +0,0 @@
org.gcube.dataharvest.AccountingDashboardHarvesterPluginDeclaration

View File

@@ -1,2 +1 @@
USERNAME=luca.frosini
SERVICE_NAME=accounting-harvester
/d4science.research-infrastructures.eu=XXXXXXXXXX

View File

@@ -0,0 +1 @@
/TestDateScorro.java

View File

@@ -0,0 +1,173 @@
/**
*
*/
package org.gcube.dataharvest;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.gcube.common.authorization.utils.manager.SecretManager;
import org.gcube.common.authorization.utils.manager.SecretManagerProvider;
import org.gcube.common.authorization.utils.secret.JWTSecret;
import org.gcube.common.authorization.utils.secret.Secret;
import org.gcube.common.authorization.utils.secret.SecretUtility;
import org.gcube.common.keycloak.KeycloakClientFactory;
import org.gcube.common.keycloak.KeycloakClientHelper;
import org.gcube.common.keycloak.model.TokenResponse;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR)
*/
public class ContextTest {
private static final Logger logger = LoggerFactory.getLogger(ContextTest.class);
protected static final String CONFIG_INI_FILENAME = "config.ini";
public static final String DEFAULT_TEST_SCOPE;
public static final String GCUBE;
public static final String DEVNEXT;
public static final String NEXTNEXT;
public static final String DEVSEC;
public static final String DEVVRE;
public static final String ROOT_PROD;
protected static final Properties properties;
public static final String TYPE_PROPERTY_KEY = "type";
public static final String USERNAME_PROPERTY_KEY = "username";
public static final String PASSWORD_PROPERTY_KEY = "password";
public static final String CLIENT_ID_PROPERTY_KEY = "clientId";
static {
GCUBE = "/gcube";
DEVNEXT = GCUBE + "/devNext";
NEXTNEXT = DEVNEXT + "/NextNext";
DEVSEC = GCUBE + "/devsec";
DEVVRE = DEVSEC + "/devVRE";
ROOT_PROD = "/d4science.research-infrastructures.eu";
DEFAULT_TEST_SCOPE = GCUBE;
properties = new Properties();
InputStream input = ContextTest.class.getClassLoader().getResourceAsStream(CONFIG_INI_FILENAME);
try {
// load the properties file
properties.load(input);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private enum Type{
USER, CLIENT_ID
};
public static void set(Secret secret) throws Exception {
SecretManagerProvider.instance.reset();
SecretManager secretManager = new SecretManager();
secretManager.addSecret(secret);
SecretManagerProvider.instance.set(secretManager);
SecretManagerProvider.instance.get().set();
}
public static void setContextByName(String fullContextName) throws Exception {
logger.debug("Going to set credentials for context {}", fullContextName);
Secret secret = getSecretByContextName(fullContextName);
set(secret);
}
private static TokenResponse getJWTAccessToken(String context) throws Exception {
Type type = Type.valueOf(properties.get(TYPE_PROPERTY_KEY).toString());
TokenResponse tr = null;
int index = context.indexOf('/', 1);
String root = context.substring(0, index == -1 ? context.length() : index);
switch (type) {
case CLIENT_ID:
String clientId = properties.getProperty(CLIENT_ID_PROPERTY_KEY);
String clientSecret = properties.getProperty(root);
tr = KeycloakClientFactory.newInstance().queryUMAToken(context, clientId, clientSecret, context, null);
break;
case USER:
default:
String username = properties.getProperty(USERNAME_PROPERTY_KEY);
String password = properties.getProperty(PASSWORD_PROPERTY_KEY);
switch (root) {
case "/gcube":
default:
clientId = "next.d4science.org";
break;
case "/pred4s":
clientId = "pre.d4science.org";
break;
case "/d4science.research-infrastructures.eu":
clientId = "services.d4science.org";
break;
}
clientSecret = null;
tr = KeycloakClientHelper.getTokenForUser(context, username, password);
break;
}
return tr;
}
public static Secret getSecretByContextName(String context) throws Exception {
TokenResponse tr = getJWTAccessToken(context);
Secret secret = new JWTSecret(tr.getAccessToken());
return secret;
}
public static void setContext(String token) throws Exception {
Secret secret = getSecret(token);
set(secret);
}
private static Secret getSecret(String token) throws Exception {
Secret secret = SecretUtility.getSecretByTokenString(token);
return secret;
}
public static String getUser() {
String user = "UNKNOWN";
try {
user = SecretManagerProvider.instance.get().getUser().getUsername();
} catch(Exception e) {
logger.error("Unable to retrieve user. {} will be used", user);
}
return user;
}
@BeforeClass
public static void beforeClass() throws Exception {
setContextByName(ROOT_PROD);
}
@AfterClass
public static void afterClass() throws Exception {
SecretManagerProvider.instance.reset();
}
}
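
ContextTest loads config.ini from the test classpath. Judging only from the keys consumed above, a plausible example (all values are placeholders; with type=USER the username/password pair is used, with type=CLIENT_ID the clientId plus the per-root secret line):

type=USER
username=some.user
password=XXXXXXXXXX
# used when type=CLIENT_ID
clientId=accounting-dashboard-harvester-se-plugin
/gcube=XXXXXXXXXX
/d4science.research-infrastructures.eu=XXXXXXXXXX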

View File

@@ -0,0 +1,88 @@
package org.gcube.dataharvest.harvester;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.SortedSet;
import org.gcube.accounting.accounting.summary.access.AccountingDao;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.dataharvest.ContextTest;
import org.gcube.dataharvest.plugin.AccountingDataHarvesterPluginTest;
import org.gcube.dataharvest.utils.AggregationType;
import org.gcube.dataharvest.utils.ContextAuthorization;
import org.gcube.dataharvest.utils.DateUtils;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author Giancarlo Panichi (ISTI CNR)
*
*/
public class AccountingDataHarvesterJupyterTest extends AccountingDataHarvesterPluginTest {
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterJupyterTest.class);
@Ignore
@Test
public void testJupyterAccessesHarvester() throws Exception {
try {
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
List<Date> starts = new ArrayList<>();
starts.add(DateUtils.getStartCalendar(2021, Calendar.JANUARY, 1).getTime());
starts.add(DateUtils.getStartCalendar(2021, Calendar.FEBRUARY, 1).getTime());
starts.add(DateUtils.getStartCalendar(2021, Calendar.MARCH, 1).getTime());
starts.add(DateUtils.getStartCalendar(2021, Calendar.APRIL, 1).getTime());
starts.add(DateUtils.getStartCalendar(2021, Calendar.MAY, 1).getTime());
AggregationType measureType = AggregationType.MONTHLY;
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
/*
SortedSet<String> contexts = new TreeSet<>();
contexts.add("/d4science.research-infrastructures.eu/D4OS/Blue-CloudLab");
contexts.add("/d4science.research-infrastructures.eu/D4OS/Zoo-Phytoplankton_EOV");
contexts.add("/d4science.research-infrastructures.eu/D4OS/MarineEnvironmentalIndicators");
*/
List<AccountingRecord> accountingRecords = new ArrayList<>();
for (Date start : starts) {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
ContextTest.setContextByName(ROOT_PROD);
JupyterAccessesHarvester jupyterAccessesHarvester = new JupyterAccessesHarvester(start, end);
for(String context : contexts) {
ContextTest.setContextByName(context);
List<AccountingRecord> harvested = jupyterAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
}
}
// logger.debug("{}", accountingRecords);
logger.debug("Going to insert {}", accountingRecords);
ContextTest.setContextByName(ROOT_PROD);
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch (Throwable e) {
logger.error(e.getLocalizedMessage(), e);
throw e;
}
}
}

View File

@@ -0,0 +1,87 @@
package org.gcube.dataharvest.harvester;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.SortedSet;
import java.util.stream.Stream;
import org.gcube.accounting.accounting.summary.access.AccountingDao;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.dataharvest.ContextTest;
import org.gcube.dataharvest.plugin.AccountingDataHarvesterPluginTest;
import org.gcube.dataharvest.utils.AggregationType;
import org.gcube.dataharvest.utils.ContextAuthorization;
import org.gcube.dataharvest.utils.DateUtils;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author Giancarlo Panichi (ISTI CNR)
*
*/
public class AccountingDataHarvesterRStudioTest extends AccountingDataHarvesterPluginTest {
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterRStudioTest.class);
@Ignore
@Test
public void testRStudioAccessesHarvester() throws Exception {
try {
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
List<Date> starts = new ArrayList<>();
LocalDate sdate = LocalDate.parse("2016-01-01"), edate = LocalDate.parse("2021-06-01");
Stream.iterate(sdate, date -> date.plusMonths(1)).limit(ChronoUnit.MONTHS.between(sdate, edate) + 1)
.forEach(dateToConvert -> starts.add(java.util.Date
.from(dateToConvert.atStartOfDay().atZone(ZoneId.systemDefault()).toInstant())));
AggregationType measureType = AggregationType.MONTHLY;
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
List<AccountingRecord> accountingRecords = new ArrayList<>();
for (Date start : starts) {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
ContextTest.setContextByName(ROOT_PROD);
RStudioAccessesHarvester rstudioAccessesHarvester = new RStudioAccessesHarvester(start, end);
for(String context : contexts) {
ContextTest.setContextByName(context);
List<AccountingRecord> harvested = rstudioAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
}
}
// logger.debug("{}", accountingRecords);
logger.debug("Going to insert {}", accountingRecords);
ContextTest.setContextByName(ROOT_PROD);
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch (Throwable e) {
logger.error(e.getLocalizedMessage(), e);
throw e;
}
}
}

View File

@@ -2,7 +2,8 @@ package org.gcube.dataharvest.harvester.sobigdata;
import java.util.List;
import org.gcube.dataharvest.utils.ContextTest;
import org.gcube.dataharvest.ContextTest;
import org.junit.Ignore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -10,6 +11,7 @@ public class SoBigDataHarvesterTest extends ContextTest {
private static Logger logger = LoggerFactory.getLogger(SoBigDataHarvesterTest.class);
@Ignore
// @Test
public void testGroupList() throws Exception {
// ContextTest.setContextByName("/d4science.research-infrastructures.eu/D4Research/AGINFRAplusDev");

View File

@@ -5,6 +5,8 @@ import java.io.InputStream;
import java.util.Properties;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.ContextTest;
import org.junit.Ignore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -23,9 +25,9 @@ public class ContextAuthorizationTest extends ContextTest {
properties = new Properties();
InputStream input = AccountingDashboardHarvesterPlugin.class.getClassLoader().getResourceAsStream(PROPERTY_FILENAME);
properties.load(input);
AccountingDashboardHarvesterPlugin.getProperties().set(properties);
}
@Ignore
// @Test
public void testRetrieveContextsAndTokens() throws Exception {
try {
@@ -33,7 +35,7 @@ public class ContextAuthorizationTest extends ContextTest {
}catch (Exception e) {
logger.warn("Unable to load {} file containing configuration properties. AccountingDataHarvesterPlugin will use defaults", PROPERTY_FILENAME);
}
ContextAuthorization contextAuthorization = new ContextAuthorization();
ContextAuthorization contextAuthorization = new ContextAuthorization(properties);
contextAuthorization.retrieveContextsAndTokens();
}

View File

@@ -1,86 +0,0 @@
/**
*
*/
package org.gcube.dataharvest.utils;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.gcube.common.authorization.client.Constants;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.common.authorization.library.AuthorizationEntry;
import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.authorization.library.provider.ClientInfo;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.authorization.library.utils.Caller;
import org.gcube.common.scope.api.ScopeProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR)
*
*/
public class ContextTest {
private static final Logger logger = LoggerFactory.getLogger(ContextTest.class);
protected static Properties properties;
protected static final String PROPERTIES_FILENAME = "token.properties";
public static final String DEFAULT_TEST_SCOPE_NAME;
static {
properties = new Properties();
InputStream input = ContextTest.class.getClassLoader().getResourceAsStream(PROPERTIES_FILENAME);
try {
// load the properties file
properties.load(input);
} catch(IOException e) {
throw new RuntimeException(e);
}
//DEFAULT_TEST_SCOPE_NAME = "/pred4s/preprod/preVRE";
DEFAULT_TEST_SCOPE_NAME = "/d4science.research-infrastructures.eu";
}
public static String getCurrentScope(String token) throws ObjectNotFound, Exception {
AuthorizationEntry authorizationEntry = Constants.authorizationService().get(token);
String context = authorizationEntry.getContext();
logger.info("Context of token {} is {}", token, context);
return context;
}
public static void setContextByName(String fullContextName) throws ObjectNotFound, Exception {
String token = ContextTest.properties.getProperty(fullContextName);
setContext(token);
}
public static void setContext(String token) throws ObjectNotFound, Exception {
SecurityTokenProvider.instance.set(token);
AuthorizationEntry authorizationEntry = Constants.authorizationService().get(token);
ClientInfo clientInfo = authorizationEntry.getClientInfo();
logger.debug("User : {} - Type : {}", clientInfo.getId(), clientInfo.getType().name());
String qualifier = authorizationEntry.getQualifier();
Caller caller = new Caller(clientInfo, qualifier);
AuthorizationProvider.instance.set(caller);
ScopeProvider.instance.set(getCurrentScope(token));
}
/*
@BeforeClass
public static void beforeClass() throws Exception {
setContextByName(DEFAULT_TEST_SCOPE_NAME);
}
@AfterClass
public static void afterClass() throws Exception {
SecurityTokenProvider.instance.reset();
ScopeProvider.instance.reset();
}
*/
}

View File

@@ -1,3 +1,6 @@
/*.gcubekey
/*.key
/*.properties
/howto.txt
/scopedata 2.xml
/config.ini

View File

@@ -0,0 +1,4 @@
groupId=org.gcube.accounting
artifactId=accounting-dashboard-harvester-se-plugin
version=2.0.0-SNAPSHOT
description=Accounting Dashboard Harvester Smart Executor Plugin.

View File

@@ -1,24 +0,0 @@
<assembly
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
<id>tar.gz</id>
<formats>
<format>tar.gz</format>
</formats>
<baseDirectory>${file.separator}</baseDirectory>
<fileSets>
<fileSet>
<directory>target${file.separator}libs</directory>
<outputDirectory>${file.separator}</outputDirectory>
<useDefaultExcludes>true</useDefaultExcludes>
<fileMode>755</fileMode>
</fileSet>
</fileSets>
<files>
<file>
<source>target${file.separator}${project.artifactId}-${project.version}.jar</source>
<filtered>true</filtered>
</file>
</files>
</assembly>