Compare commits

...

40 Commits

Author SHA1 Message Date
Luca Frosini f3c2b6197c Fixed changelog 2024-04-22 15:11:53 +02:00
Luca Frosini 00bce762f3 Removed -SNAPSHOT for release 2024-04-22 15:10:19 +02:00
Luca Frosini 9ef16294ff Fixed CHANGELOG 2024-02-27 12:23:30 +01:00
Luca Frosini fa7be6370a Upgraded maven-parent 2024-02-27 12:20:52 +01:00
Luca Frosini 34db58abe2 Fixed test 2024-02-26 18:04:42 +01:00
Luca Frosini abb35b6e7b Fixed test 2024-02-26 18:04:24 +01:00
Luca Frosini 77cfbf6a8a Improved Method Harvester 2024-02-26 18:03:07 +01:00
luca.frosini e76b1c3af3 Removed -SNAPSHOT to release the component 2023-09-21 10:01:07 +02:00
luca.frosini 009083f335 Fixed method invocation due to change in signature of the used library 2023-09-14 15:00:57 +02:00
luca.frosini dedf11256a Fixing bug 2023-09-06 12:39:55 +02:00
Massimiliano Assante 8c26deb82f ready to release 2023-07-03 17:22:51 +02:00
Massimiliano Assante deb8937a10 updated changelog 2023-07-03 17:19:23 +02:00
Massimiliano Assante 850d6674e5 fixed the test classe 2023-07-03 17:12:03 +02:00
Massimiliano Assante 284431ca8d also core services refactored 2023-07-03 17:06:02 +02:00
Massimiliano Assante 4cbdef880e rewritten Jupyter, RStudio and Catalogue Harv., missing Core Serv one 2023-06-30 12:15:01 +02:00
luca.frosini 89e744d769 fixed example 2023-06-30 08:55:06 +02:00
luca.frosini 43865106df Fixed test launch 2023-06-29 17:05:05 +02:00
luca.frosini e0fd599c80 Cleaning code 2023-06-29 16:08:01 +02:00
Massimiliano Assante d3ad4f43ae push everything 2023-06-28 18:20:13 +02:00
luca.frosini 9b27e35676 Ignored MacOs File 2023-06-21 11:24:39 +02:00
Luca Frosini bb3e645932 Fixed bom import 2022-11-09 15:21:46 +01:00
Luca Frosini 934545c8cf Removed -SNAPSHOT to release the component 2022-10-25 16:20:17 +02:00
Luca Frosini 524c3a0411 Added -SNAPSHOT 2022-10-25 16:12:04 +02:00
Luca Frosini f0ce3c250c Added -SNAPSHOT to allow to deploy the artifact using Jenkins 2022-10-25 16:01:44 +02:00
Luca Frosini ec9d30d886 Removed -SNAPSHOT to release the component 2022-10-19 12:14:55 +02:00
Luca Frosini dbc270a523 Removed uneeded harvester and key to be harvested 2022-09-19 12:40:05 +02:00
Luca Frosini 1fe73af6bc Fixed code 2022-09-15 18:03:55 +02:00
Luca Frosini 120316d1b2 Fixing code 2022-09-12 16:50:35 +02:00
Luca Frosini 3ff630bbcb Fixed pom 2022-09-08 14:47:02 +02:00
Luca Frosini 459a71bc0d Switched code to social-networking-client 2022-09-05 14:49:45 +02:00
Luca Frosini 6bd87cedc4 Removed uneeded whitespace 2022-09-01 18:03:43 +02:00
Luca Frosini ee3a6208a4 Upgraded boms versions 2022-09-01 16:29:34 +02:00
Luca Frosini 523c0d8e34 Removed no more needed constants 2022-09-01 15:14:49 +02:00
Luca Frosini d574e3c797 Porting plugin to use new IAM credentials 2022-09-01 14:08:36 +02:00
Luca Frosini c7a934bd4b Switching security to the new IAM refs #21904 2022-08-31 16:48:11 +02:00
Luca Frosini cc242dee6a Removed -SNAPHOT for release 2022-05-19 11:06:28 +02:00
Luca Frosini 9849e7f6ee Added -SNAPHOT to allow jenkins to build snapshot version 2022-05-19 11:05:31 +02:00
Luca Frosini f3a61edbdf Removed -SNAPHOT for release 2022-05-19 11:02:55 +02:00
Luca Frosini cdd875bc47 fixing dependecies version 2022-05-19 10:58:03 +02:00
Luca Frosini 273171704e Enhanced range of storagehub-client-library to 2.0.0,3.0.0-SNAPSHOT 2022-02-09 15:51:47 +01:00
34 changed files with 1292 additions and 2154 deletions

.gitignore

@ -1,4 +1,5 @@
target
.classpath
.project
.settings
.settings
/.DS_Store

CHANGELOG.md

@ -2,6 +2,26 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html)
# Changelog for "accounting-dashboard-harvester-se-plugin"
## [v2.4.0]
- Removed filter restriction on JobUsageRecord harvesting to get MethodInvocation
- Fixed bug on getting ScopeDescriptor for new scopes.
## [v2.3.0]
- Ported GA harvesters to Analytics Data API (GA4)
## [v2.2.0]
- Switching security to the new IAM [#21904]
## [v2.1.0]
- Storagehub-client-library get range from gcube-bom [#22822]
## [v2.0.0]

pom.xml

@ -5,14 +5,14 @@
<parent>
<artifactId>maven-parent</artifactId>
<groupId>org.gcube.tools</groupId>
<version>1.1.0</version>
<version>1.2.0</version>
<relativePath />
</parent>
<modelVersion>4.0.0</modelVersion>
<groupId>org.gcube.accounting</groupId>
<artifactId>accounting-dashboard-harvester-se-plugin</artifactId>
<version>2.0.0</version>
<version>2.4.0</version>
<name>Accounting Dashboard Harvester Smart Executor Plugin</name>
<description>
Accounting Dashboard Harvester Smart Executor Plugin harvest accounting
@ -36,7 +36,7 @@
<dependency>
<groupId>org.gcube.vremanagement</groupId>
<artifactId>smart-executor-bom</artifactId>
<version>3.0.0</version>
<version>3.1.0</version>
<type>pom</type>
<scope>import</scope>
</dependency>
@ -47,7 +47,6 @@
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>2.6</version>
</dependency>
<dependency>
<groupId>org.gcube.portlets.admin</groupId>
@ -55,20 +54,40 @@
<version>[2.7.2,3.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>com.google.apis</groupId>
<artifactId>google-api-services-analyticsreporting</artifactId>
<version>v4-rev124-1.23.0</version>
<groupId>com.google.analytics</groupId>
<artifactId>google-analytics-data</artifactId>
<version>0.16.0</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>23.6-jre</version>
</dependency>
<dependency>
<groupId>com.google.auth</groupId>
<artifactId>google-auth-library-oauth2-http</artifactId>
<version>1.12.1</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>2.8.4</version>
</dependency>
<dependency>
<groupId>com.google.http-client</groupId>
<artifactId>google-http-client-gson</artifactId>
<version>1.21.0</version>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>storagehub-client-library</artifactId>
<version>[1.0.0, 2.0.0-SNAPSHOT)</version>
</dependency>
<!-- <dependency> -->
<!-- <groupId>org.gcube.common</groupId> -->
<!-- <artifactId>storagehub-client-library</artifactId> -->
<!-- <exclusions> -->
<!-- <exclusion> -->
<!-- <groupId>com.fasterxml.jackson.core</groupId> -->
<!-- <artifactId>jackson-core</artifactId> -->
<!-- </exclusion> -->
<!-- </exclusions> -->
<!-- </dependency> -->
<dependency>
<groupId>org.gcube.data-catalogue</groupId>
<artifactId>gcat-client</artifactId>
@ -108,14 +127,25 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.gcube.social-networking</groupId>
<artifactId>social-service-client</artifactId>
<version>[1.0.0, 2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20171018</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>authorization-utils</artifactId>
<version>[2.2.0, 3.0.0-SNAPSHOT)</version>
</dependency>
<!-- Test Dependencies. Setting scope to provided to allow proper creation of uber-jar -->
<!-- Test Dependencies. Setting scope to provided to allow proper creation
of uber-jar -->
<dependency>
<groupId>org.gcube.vremanagement</groupId>
<artifactId>smart-executor-client</artifactId>
@ -178,6 +208,24 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<configuration>
<additionalparam>-Xdoclint:none</additionalparam>
<additionalJOption>-Xdoclint:none</additionalJOption>
</configuration>
<version>3.1.0</version>
<executions>
<execution>
<id>generate-doc</id>
<phase>install</phase>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
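
Note on the dependency changes above: the retired Analytics Reporting API v4 client is swapped for the GA4 Data API client (google-analytics-data) plus google-auth-library-oauth2-http. A minimal sketch of what these new artifacts are used for, mirroring the initializeAnalyticsReporting code further down (the class name and parameters are illustrative; in the real plugin the credential values come from the GA4AnalyticsDataService endpoint):

import java.io.IOException;

import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.ServiceAccountCredentials;

// Illustrative sketch: build a GA4 Data API client from service-account fields.
public class Ga4ClientFactory {

    public static BetaAnalyticsDataClient create(String clientId, String clientEmail,
            String privateKeyPem, String privateKeyId) throws IOException {
        // fromPkcs8 expects the private key as a PEM block (see AnalyticsReportCredentials below)
        BetaAnalyticsDataSettings settings = BetaAnalyticsDataSettings.newBuilder()
                .setCredentialsProvider(FixedCredentialsProvider.create(
                        ServiceAccountCredentials.fromPkcs8(clientId, clientEmail, privateKeyPem, privateKeyId, null)))
                .build();
        return BetaAnalyticsDataClient.create(settings);
    }
}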

AccountingDashboardHarvesterPlugin.java

@ -17,7 +17,7 @@ import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.com.fasterxml.jackson.annotation.JsonIgnore;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.authorization.utils.secret.Secret;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.common.scope.impl.ScopeBean.Type;
import org.gcube.dataharvest.harvester.CatalogueAccessesHarvester;
@ -28,7 +28,6 @@ import org.gcube.dataharvest.harvester.RStudioAccessesHarvester;
import org.gcube.dataharvest.harvester.SocialInteractionsHarvester;
import org.gcube.dataharvest.harvester.VREAccessesHarvester;
import org.gcube.dataharvest.harvester.VREUsersHarvester;
import org.gcube.dataharvest.harvester.sobigdata.DataMethodDownloadHarvester;
import org.gcube.dataharvest.harvester.sobigdata.ResourceCatalogueHarvester;
import org.gcube.dataharvest.harvester.sobigdata.TagMeMethodInvocationHarvester;
import org.gcube.dataharvest.utils.AggregationType;
@ -79,20 +78,7 @@ public class AccountingDashboardHarvesterPlugin extends Plugin {
public AccountingDashboardHarvesterPlugin() {
super();
}
private static final InheritableThreadLocal<Properties> properties = new InheritableThreadLocal<Properties>() {
@Override
protected Properties initialValue() {
return new Properties();
}
};
public static InheritableThreadLocal<Properties> getProperties() {
return properties;
}
public static Dimension getDimension(String key) {
Dimension dimension = dimensions.get().get(key);
if (dimension == null) {
@ -123,21 +109,8 @@ public class AccountingDashboardHarvesterPlugin extends Plugin {
};
public static ScopeDescriptor getScopeDescriptor() {
return scopeDescriptor.get();
}
public static final InheritableThreadLocal<ScopeDescriptor> scopeDescriptor = new InheritableThreadLocal<ScopeDescriptor>() {
@Override
protected ScopeDescriptor initialValue() {
return new ScopeDescriptor("", "");
}
};
@JsonIgnore
public Properties getConfigParameters() throws IOException {
public static Properties getConfigParameters() throws IOException {
Properties properties = new Properties();
try {
InputStream input = AccountingDashboardHarvesterPlugin.class.getClassLoader()
@ -211,15 +184,10 @@ public class AccountingDashboardHarvesterPlugin extends Plugin {
logger.debug("Harvesting from {} to {} (ReRun:{} - GetVREUsers:{} - DryRun:{})", DateUtils.format(start),
DateUtils.format(end), reRun, getVREUsers, dryRun);
Properties properties = getConfigParameters();
getProperties().set(properties);
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
String root = contexts.first();
Utils.setContext(contextAuthorization.getTokenForContext(root));
Utils.setContext(contextAuthorization.getSecretForContext(root));
AccountingDao dao = AccountingDao.get();
@ -240,29 +208,30 @@ public class AccountingDashboardHarvesterPlugin extends Plugin {
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
String initialToken = SecurityTokenProvider.instance.get();
VREAccessesHarvester vreAccessesHarvester = null;
JupyterAccessesHarvester jupyterAccessesHarvester = null;
RStudioAccessesHarvester rstudioAccessesHarvester = null;
Secret rootSecret = null;
for (String context : contexts) {
// Setting the token for the context
Utils.setContext(contextAuthorization.getTokenForContext(context));
Secret secret = contextAuthorization.getSecretForContext(context);
Utils.setContext(secret);
ScopeBean scopeBean = new ScopeBean(context);
ScopeDescriptor actualScopeDescriptor = scopeDescriptorMap.get(context);
if (actualScopeDescriptor == null) {
actualScopeDescriptor = new ScopeDescriptor(scopeBean.name(), context);
scopeDescriptorMap.put(actualScopeDescriptor.getId(), actualScopeDescriptor);
}
scopeDescriptor.set(actualScopeDescriptor);
if (scopeBean.is(Type.INFRASTRUCTURE)) {
try {
rootSecret = secret;
CatalogueAccessesHarvester catalogueHarvester = new CatalogueAccessesHarvester(start, end);
List<AccountingRecord> harvested = catalogueHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
@ -291,12 +260,12 @@ public class AccountingDashboardHarvesterPlugin extends Plugin {
}
// Setting back token for the context
Utils.setContext(contextAuthorization.getTokenForContext(parent.toString()));
Utils.setContext(contextAuthorization.getSecretForContext(parent.toString()));
vreAccessesHarvester = new VREAccessesHarvester(start, end);
// Setting back token for the context
Utils.setContext(contextAuthorization.getTokenForContext(context));
Utils.setContext(contextAuthorization.getSecretForContext(context));
}
}
@ -315,12 +284,12 @@ public class AccountingDashboardHarvesterPlugin extends Plugin {
}
// Setting back token for the context
Utils.setContext(contextAuthorization.getTokenForContext(parent.toString()));
Utils.setContext(contextAuthorization.getSecretForContext(parent.toString()));
rstudioAccessesHarvester = new RStudioAccessesHarvester(start, end);
// Setting back token for the context
Utils.setContext(contextAuthorization.getTokenForContext(context));
Utils.setContext(contextAuthorization.getSecretForContext(context));
}
}
@ -339,12 +308,12 @@ public class AccountingDashboardHarvesterPlugin extends Plugin {
}
// Setting back token for the context
Utils.setContext(contextAuthorization.getTokenForContext(parent.toString()));
Utils.setContext(contextAuthorization.getSecretForContext(parent.toString()));
jupyterAccessesHarvester = new JupyterAccessesHarvester(start, end);
// Setting back token for the context
Utils.setContext(contextAuthorization.getTokenForContext(context));
Utils.setContext(contextAuthorization.getSecretForContext(context));
}
}
@ -453,18 +422,18 @@ public class AccountingDashboardHarvesterPlugin extends Plugin {
logger.error("Error harvesting Resource Catalogue Information for {}", context, e);
}
try {
// Collecting info on Data/Method download
logger.info("Going to harvest Data Method Download for {}", context);
DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start,
end, contexts);
List<AccountingRecord> harvested = dataMethodDownloadHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
} catch (Exception e) {
logger.error("Error harvesting Data Method Download for {}", context, e);
}
// try {
// // Collecting info on Data/Method download
// logger.info("Going to harvest Data Method Download for {}", context);
// DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start,
// end, contexts);
//
// List<AccountingRecord> harvested = dataMethodDownloadHarvester.getAccountingRecords();
// accountingRecords.addAll(harvested);
//
// } catch (Exception e) {
// logger.error("Error harvesting Data Method Download for {}", context, e);
// }
}
@ -508,7 +477,7 @@ public class AccountingDashboardHarvesterPlugin extends Plugin {
}
}
Utils.setContext(initialToken);
Utils.setContext(rootSecret);
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end),
accountingRecords);
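
Condensed, the security change in this class replaces SecurityTokenProvider tokens with authorization-utils Secrets. A rough sketch of the new pattern, using only the plugin's own helpers as they appear in the diff (ContextAuthorization, Utils), so it is not compilable on its own:

// Sketch of the IAM-based context switching introduced by this change (refs #21904).
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();

// Authenticate in the root scope with a Secret instead of a token.
Secret rootSecret = contextAuthorization.getSecretForContext(contexts.first());
Utils.setContext(rootSecret);

for (String context : contexts) {
    // Switch to the context being harvested.
    Secret secret = contextAuthorization.getSecretForContext(context);
    Utils.setContext(secret);
    // ... run the harvesters and collect AccountingRecords for this context ...
}

// Restore the root scope once all contexts have been processed.
Utils.setContext(rootSecret);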

AnalyticsReportCredentials.java

@ -59,12 +59,12 @@ public class AnalyticsReportCredentials {
/**
* Please note:
* The key is stored in the resource with blanks " " instead of "\n" as it causes issues and
* without the BEGIN and END Delimiters (e.g. -----END PRIVATE KEY-----) which myst be readded
* without the BEGIN and END Delimiters (e.g. -----END PRIVATE KEY-----) which must be readded
* @param privateKeyPem
*/
public void setPrivateKeyPem(String privateKeyPem) {
privateKeyPem = privateKeyPem.replace(" ", "\n");
this.privateKeyPem = "-----BEGIN PRIVATE KEY-----\n"+privateKeyPem+"\n-----END PRIVATE KEY-----";
this.privateKeyPem = privateKeyPem.replace(" ", "\n");
this.privateKeyPem = "-----BEGIN PRIVATE KEY-----\n"+this.privateKeyPem+"\n-----END PRIVATE KEY-----\n";
}
public String getPrivateKeyId() {
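
As the note above says, the key is stored in the resource with blanks instead of newlines and without PEM delimiters, so setPrivateKeyPem() has to rebuild a parseable PKCS#8 block. A small worked example of the transformation (the key material is a placeholder):

// Stored form (placeholder material): chunks separated by blanks, no delimiters.
String stored = "MIIEvQIBADANBgkqh kiG9w0BAQEFAASCBK cwggSjAgEAAoIBAQ";
String pem = "-----BEGIN PRIVATE KEY-----\n"
        + stored.replace(" ", "\n")
        + "\n-----END PRIVATE KEY-----\n";
// 'pem' is the form ServiceAccountCredentials.fromPkcs8(...) can parse as the private key.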

HarvestedDataKey.java

@ -5,7 +5,6 @@ package org.gcube.dataharvest.datamodel;
/**
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* @author M. Assante, ISTI-CNR
*/
@ -24,16 +23,11 @@ public enum HarvestedDataKey {
CATALOGUE_RESOURCE_ACCESSES("Item Resource"),
ACCESSES("VRE Accesses"),
USERS("VRE Users"),
DATA_METHOD_DOWNLOAD("Data/Method download"),
NEW_CATALOGUE_METHODS("New Catalogue Methods"),
NEW_CATALOGUE_DATASETS("New Catalogue Datasets"),
NEW_CATALOGUE_DELIVERABLES("New Catalogue Deliverables"),
NEW_CATALOGUE_APPLICATIONS("New Catalogue Applications"),
SOCIAL_POSTS("VRE Social Interations Posts"),
SOCIAL_REPLIES("VRE Social Interations Replies"),
SOCIAL_LIKES("VRE Social Interations Likes"),
METHOD_INVOCATIONS("VRE Methods Invocation"),
VISUAL_TOOLS("VRE Visual Tools");
METHOD_INVOCATIONS("VRE Methods Invocation");
private String key;

BasicHarvester.java

@ -37,23 +37,6 @@ public abstract class BasicHarvester {
logger.debug("Creating {} for the period {} {} ", this.getClass().getSimpleName(), DateUtils.format(start), DateUtils.format(end));
}
public static String getCurrentContext(String token) throws Exception {
AuthorizationEntry authorizationEntry = Constants.authorizationService().get(token);
String context = authorizationEntry.getContext();
logger.info("Context of token {} is {}", token, context);
return context;
}
public static void setContext(String token) throws Exception {
SecurityTokenProvider.instance.set(token);
ScopeProvider.instance.set(getCurrentContext(token));
}
public static String getCurrentContext() throws Exception {
String token = SecurityTokenProvider.instance.get();
return getCurrentContext(token);
}
public abstract List<AccountingRecord> getAccountingRecords() throws Exception;
public Dimension getDimension(HarvestedDataKey harvestedDataKey) {

CatalogueAccessesHarvester.java

@ -4,22 +4,14 @@ import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.security.GeneralSecurityException;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@ -49,26 +41,20 @@ import org.slf4j.LoggerFactory;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential.Builder;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.googleapis.util.Utils;
import com.google.api.client.http.HttpTransport;
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.DateRange.Builder;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.client.util.PemReader;
import com.google.api.client.util.PemReader.Section;
import com.google.api.client.util.SecurityUtils;
import com.google.api.services.analyticsreporting.v4.AnalyticsReporting;
import com.google.api.services.analyticsreporting.v4.AnalyticsReportingScopes;
import com.google.api.services.analyticsreporting.v4.model.DateRange;
import com.google.api.services.analyticsreporting.v4.model.DateRangeValues;
import com.google.api.services.analyticsreporting.v4.model.GetReportsRequest;
import com.google.api.services.analyticsreporting.v4.model.GetReportsResponse;
import com.google.api.services.analyticsreporting.v4.model.Metric;
import com.google.api.services.analyticsreporting.v4.model.Report;
import com.google.api.services.analyticsreporting.v4.model.ReportRequest;
import com.google.api.services.analyticsreporting.v4.model.ReportRow;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.ServiceAccountCredentials;
public class CatalogueAccessesHarvester extends BasicHarvester {
@ -76,13 +62,13 @@ public class CatalogueAccessesHarvester extends BasicHarvester {
private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
private static final String MAPPING_RESOURCE_CATEGORY = "BigGAnalyticsMapping";
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
private static final String SERVICE_ENDPOINT_NAME = "BigGAnalyticsReportService";
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
private static final String AP_CATALOGUE_PAGEVIEWS_PROPERTY = "catalogue-pageviews";
private static final String AP_CLIENT_PROPERTY = "clientId";
private static final String AP_PRIVATEKEY_PROPERTY = "privateKeyId";
private static final String APPLICATION_NAME = "Analytics Reporting";
private static final String AP_CLIENT_PROPERTY = "client_id";
private static final String AP_PRIVATEKEY_PROPERTY = "private_key_id";
private static final String REGEX_CATALOGUE_ACCESSES = "^\\/$";
private static final String REGEX_CATALOGUE_DATASET_LIST_ACCESSES = "^\\/dataset(\\?([a-zA-Z0-9_.-]*.+))*";
@ -107,7 +93,7 @@ public class CatalogueAccessesHarvester extends BasicHarvester {
int catalogueResourceAccesses = 0;
logger.debug("Catalogue accesses for {} ", dashboardContext);
for(CatalogueAccessesReportRow row : catalogueAccesses.get(dashboardContext)) {
// String pagePath = row.getPagePath();
// String pagePath = row.getPagePath();
switch (row.getKey()) {
case CATALOGUE_ACCESSES:
catalogueTotalAccesses += row.getVisitNumber();
@ -126,23 +112,29 @@ public class CatalogueAccessesHarvester extends BasicHarvester {
}
}
ScopeDescriptor scopeDescriptor = new ScopeDescriptor();
ScopeBean scopeBean = new ScopeBean(dashboardContext);
scopeDescriptor.setId(dashboardContext);
scopeDescriptor.setName(scopeBean.name());
AccountingRecord ar1 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_ACCESSES), (long) catalogueTotalAccesses);
AccountingRecord ar2 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_DATASET_LIST_ACCESSES), (long) catalogueDatasetListAccesses);
AccountingRecord ar3 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_DATASET_ACCESSES), (long) catalogueDatasetAccesses);
AccountingRecord ar4 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_RESOURCE_ACCESSES), (long) catalogueResourceAccesses);
logger.debug("{} : {}", ar1.getDimension().getId(), ar1.getMeasure());
accountingRecords.add(ar1);
logger.debug("{} : {}", ar2.getDimension().getId(), ar2.getMeasure());
accountingRecords.add(ar2);
logger.debug("{} : {}", ar3.getDimension().getId(), ar3.getMeasure());
accountingRecords.add(ar3);
logger.debug("{} : {}", ar4.getDimension().getId(), ar4.getMeasure());
accountingRecords.add(ar4);
try {
ScopeBean scopeBean = new ScopeBean(dashboardContext);
scopeDescriptor.setId(dashboardContext);
scopeDescriptor.setName(scopeBean.name());
AccountingRecord ar1 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_ACCESSES), (long) catalogueTotalAccesses);
AccountingRecord ar2 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_DATASET_LIST_ACCESSES), (long) catalogueDatasetListAccesses);
AccountingRecord ar3 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_DATASET_ACCESSES), (long) catalogueDatasetAccesses);
AccountingRecord ar4 = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.CATALOGUE_RESOURCE_ACCESSES), (long) catalogueResourceAccesses);
logger.debug("{} : {}", ar1.getDimension().getId(), ar1.getMeasure());
accountingRecords.add(ar1);
logger.debug("{} : {}", ar2.getDimension().getId(), ar2.getMeasure());
accountingRecords.add(ar2);
logger.debug("{} : {}", ar3.getDimension().getId(), ar3.getMeasure());
accountingRecords.add(ar3);
logger.debug("{} : {}", ar4.getDimension().getId(), ar4.getMeasure());
accountingRecords.add(ar4);
} catch (NullPointerException e) {
logger.warn("No correspondence found in the Generic Resource for a PropertyId; you should check this. Type: BigGAnalyticsMapping, name: AccountingDashboardMapping");
e.printStackTrace();
}
}
logger.debug("Returning {} accountingRecords ", accountingRecords.size());
return accountingRecords;
@ -156,17 +148,24 @@ public class CatalogueAccessesHarvester extends BasicHarvester {
*
*/
private static HashMap<String, List<CatalogueAccessesReportRow>> getAllAccesses(Date start, Date end) throws Exception {
DateRange dateRange = getDateRangeForAnalytics(start, end);
logger.trace("Getting catalogue accesses in this time range {}", dateRange.toPrettyString());
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
logger.debug("Getting Catalogue accesses in this time range {}", dateRangeBuilder.toString());
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
AnalyticsReporting service = initializeAnalyticsReporting(credentialsFromD4S);
HashMap<String,List<GetReportsResponse>> responses = getReportResponses(service, credentialsFromD4S.getViewIds(), dateRange);
logger.debug("Getting credentials credentialsFromD4S");
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
logger.debug("initializeAnalyticsReporting service settings");
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
HashMap<String, List<CatalogueAccessesReportRow>> toReturn = new HashMap<>();
for(String view : responses.keySet()) {
String dashboardContext = getAccountingDashboardContextGivenGAViewID(view);
logger.trace("Parsing responses for this catalogue view, which corresponds to Dashboard Context: " + dashboardContext);
logger.info("\n\n**************** Parsing responses for this catalogue view, which corresponds to Dashboard Context: " + dashboardContext);
List<CatalogueAccessesReportRow> viewReport = parseResponse(view, responses.get(view), dashboardContext);
logger.trace("Got {} entries from view id={}", viewReport.size(), view);
toReturn.put(dashboardContext, viewReport);
@ -175,175 +174,102 @@ public class CatalogueAccessesHarvester extends BasicHarvester {
}
/**
* Initializes an Analytics Reporting API V4 service object.
* Initializes a Google Analytics Data API service object.
*
* @return An authorized Analytics Reporting API V4 service object.
* @return An authorized Google Analytics Data API
* @throws IOException
* @throws GeneralSecurityException
*/
private static AnalyticsReporting initializeAnalyticsReporting(AnalyticsReportCredentials cred)
throws GeneralSecurityException, IOException {
HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
GoogleCredential credential = fromD4SServiceEndpoint(cred).createScoped(AnalyticsReportingScopes.all());
// Construct the Analytics Reporting service object.
return new AnalyticsReporting.Builder(httpTransport, JSON_FACTORY, credential)
.setApplicationName(APPLICATION_NAME).build();
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
return BetaAnalyticsDataSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
.build();
}
/**
* Queries the Analytics Reporting API V4.
* Queries Analytics Data API service
*
* @param service An authorized Analytics Reporting API V4 service object.
* @return GetReportResponse The Analytics Reporting API V4 response.
* @param service Analytics Data API service settings.
* @return Row Analytics Data API service
* @throws IOException
*/
private static HashMap<String,List<GetReportsResponse>> getReportResponses(AnalyticsReporting service,
List<String> viewIDs, DateRange dateRange) throws IOException {
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
HashMap<String,List<GetReportsResponse>> reports = new HashMap<>();
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
// Create the Metrics object.
Metric sessions = new Metric().setExpression("ga:pageviews").setAlias("pages");
com.google.api.services.analyticsreporting.v4.model.Dimension pageTitle = new com.google.api.services.analyticsreporting.v4.model.Dimension().setName("ga:pagePath");
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
for(String view : viewIDs) {
List<GetReportsResponse> gReportResponses = new ArrayList<>();
logger.info("Getting data from Google Analytics for catalogue viewid: " + view);
boolean iterateMorePages = true;
String nextPageToken = null;
while (iterateMorePages) {
// Create the ReportRequest object.
ReportRequest request = new ReportRequest().setViewId(view.trim()).setDateRanges(Arrays.asList(dateRange))
.setMetrics(Arrays.asList(sessions)).setDimensions(Arrays.asList(pageTitle));
request.setPageSize(1000);
request.setPageToken(nextPageToken);
ArrayList<ReportRequest> requests = new ArrayList<ReportRequest>();
requests.add(request);
// Create the GetReportsRequest object.
GetReportsRequest getReport = new GetReportsRequest().setReportRequests(requests);
// Call the batchGet method.
GetReportsResponse response = service.reports().batchGet(getReport).execute();
nextPageToken = response.getReports().get(0).getNextPageToken();
iterateMorePages = (nextPageToken != null);
logger.debug("got nextPageToken: "+nextPageToken);
for(String propertyId : viewIDs) {
List<RunReportResponse> gReportResponses = new ArrayList<>();
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
RunReportRequest request =
RunReportRequest.newBuilder()
.setProperty("properties/" + propertyId)
.addDimensions(Dimension.newBuilder().setName("pagePath"))
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
.addDateRanges(dateRangeBuilder)
.build();
// Make the request.
RunReportResponse response = analyticsData.runReport(request);
gReportResponses.add(response);
// Iterate through every row of the API response.
// for (Row row : response.getRowsList()) {
// System.out.printf(
// "%s, %s%n", row.getDimensionValues(0).getValue(), row.getMetricValues(0).getValue());
// }
reports.put(propertyId, gReportResponses);
}
reports.put(view, gReportResponses);
}
// Return the response.
return reports;
}
/**
* Parses and prints the Analytics Reporting API V4 response.
* @param dashboardContext
* Parses and prints the Analytics Data API service response
*
* @param response An Analytics Reporting API V4 response.
* @param dashboardContext
*/
private static List<CatalogueAccessesReportRow> parseResponse(String viewId, List<GetReportsResponse> responses, String dashboardContext) {
private static List<CatalogueAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses, String dashboardContext) {
logger.debug("parsing Response for " + viewId);
List<CatalogueAccessesReportRow> toReturn = new ArrayList<>();
for (GetReportsResponse response : responses) {
for (Report report: response.getReports()) {
List<ReportRow> rows = report.getData().getRows();
if (rows == null) {
logger.warn("No data found for " + viewId);
for (RunReportResponse response : responses) {
for (Row row: response.getRowsList()) {
String dimension = row.getDimensionValues(0).getValue();
String metric = row.getMetricValues(0).getValue();
CatalogueAccessesReportRow var = new CatalogueAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
logger.trace("parsing pagepath {}: value: {}", pagePath, Integer.parseInt(metric));
if (pagePath.matches(REGEX_CATALOGUE_RESOURCE_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_RESOURCE_ACCESSES);
validEntry = true;
}
else {
for (ReportRow row: rows) {
String dimension = row.getDimensions().get(0);
DateRangeValues metric = row.getMetrics().get(0);
CatalogueAccessesReportRow var = new CatalogueAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
logger.trace("parsing pagepath {}: value: {}", pagePath, Integer.parseInt(metric.getValues().get(0)));
if (pagePath.matches(REGEX_CATALOGUE_RESOURCE_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_RESOURCE_ACCESSES);
validEntry = true;
}
else if (pagePath.matches(REGEX_CATALOGUE_DATASET_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_DATASET_ACCESSES);
validEntry = true;
}
else if (pagePath.matches(REGEX_CATALOGUE_DATASET_LIST_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_DATASET_LIST_ACCESSES);
validEntry = true;
}
else if (pagePath.matches(REGEX_CATALOGUE_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_ACCESSES);
validEntry = true;
}
if (validEntry) {
var.setDashboardContext(dashboardContext);
var.setPagePath(dimension);
var.setVisitNumber(Integer.parseInt(metric.getValues().get(0)));
toReturn.add(var);
}
}
else if (pagePath.matches(REGEX_CATALOGUE_DATASET_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_DATASET_ACCESSES);
validEntry = true;
}
else if (pagePath.matches(REGEX_CATALOGUE_DATASET_LIST_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_DATASET_LIST_ACCESSES);
validEntry = true;
}
else if (pagePath.matches(REGEX_CATALOGUE_ACCESSES)) {
var.setKey(HarvestedDataKey.CATALOGUE_ACCESSES);
validEntry = true;
}
if (validEntry) {
var.setDashboardContext(dashboardContext);
var.setPagePath(dimension);
var.setVisitNumber(Integer.parseInt(metric));
toReturn.add(var);
}
}
}
return toReturn;
}
private static GoogleCredential fromD4SServiceEndpoint(AnalyticsReportCredentials cred) throws IOException {
String clientId = cred.getClientId();
String clientEmail = cred.getClientEmail();
String privateKeyPem = cred.getPrivateKeyPem();
String privateKeyId = cred.getPrivateKeyId();
String tokenUri = cred.getTokenUri();
String projectId = cred.getProjectId();
if(clientId == null || clientEmail == null || privateKeyPem == null || privateKeyId == null) {
throw new IOException("Error reading service account credential from stream, "
+ "expecting 'client_id', 'client_email', 'private_key' and 'private_key_id'.");
}
PrivateKey privateKey = privateKeyFromPkcs8(privateKeyPem);
Collection<String> emptyScopes = Collections.emptyList();
Builder credentialBuilder = new GoogleCredential.Builder().setTransport(Utils.getDefaultTransport())
.setJsonFactory(Utils.getDefaultJsonFactory()).setServiceAccountId(clientEmail)
.setServiceAccountScopes(emptyScopes).setServiceAccountPrivateKey(privateKey)
.setServiceAccountPrivateKeyId(privateKeyId);
if(tokenUri != null) {
credentialBuilder.setTokenServerEncodedUrl(tokenUri);
}
if(projectId != null) {
credentialBuilder.setServiceAccountProjectId(projectId);
}
// Don't do a refresh at this point, as it will always fail before the scopes are added.
return credentialBuilder.build();
}
private static PrivateKey privateKeyFromPkcs8(String privateKeyPem) throws IOException {
Reader reader = new StringReader(privateKeyPem);
Section section = PemReader.readFirstSectionAndClose(reader, "PRIVATE KEY");
if(section == null) {
throw new IOException("Invalid PKCS8 data.");
}
byte[] bytes = section.getBase64DecodedBytes();
PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(bytes);
Exception unexpectedException = null;
try {
KeyFactory keyFactory = SecurityUtils.getRsaKeyFactory();
PrivateKey privateKey = keyFactory.generatePrivate(keySpec);
return privateKey;
} catch(NoSuchAlgorithmException exception) {
unexpectedException = exception;
} catch(InvalidKeySpecException exception) {
unexpectedException = exception;
}
throw new IOException("Unexpected exception reading PKCS data", unexpectedException);
}
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
throws Exception {
@ -405,7 +331,6 @@ public class CatalogueAccessesHarvester extends BasicHarvester {
}
/**
* l
* @throws Exception
*/
private static AnalyticsReportCredentials getAuthorisedApplicationInfoFromIs() throws Exception {
@ -423,13 +348,13 @@ public class CatalogueAccessesHarvester extends BasicHarvester {
} else {
for(ServiceEndpoint res : list) {
reportCredentials.setTokenUri(res.profile().runtime().hostedOn());
Group<AccessPoint> apGroup = res.profile().accessPoints();
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
AccessPoint found = accessPoints[0];
reportCredentials.setClientEmail(found.address());
reportCredentials.setProjectId(found.username());
reportCredentials.setPrivateKeyPem(StringEncrypter.getEncrypter().decrypt(found.password()));
reportCredentials.setClientEmail(found.username());
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
for(Property prop : found.properties()) {
if(prop.name().compareTo(AP_CATALOGUE_PAGEVIEWS_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
@ -458,14 +383,13 @@ public class CatalogueAccessesHarvester extends BasicHarvester {
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
}
private static DateRange getDateRangeForAnalytics(Date start, Date end) {
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
String startDate = asLocalDate(start).format(formatter);
String endDate = asLocalDate(end).format(formatter);
DateRange dateRange = new DateRange();// date format `yyyy-MM-dd`
dateRange.setStartDate(startDate);
dateRange.setEndDate(endDate);
return dateRange;
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
return dateRangeBuilder;
}
}
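
For reference, the GA4 query that replaces the old batchGet/pageToken loop in this harvester boils down to the sketch below (property id, dates, and any names not in the diff are illustrative; the settings object is the one built by initializeAnalyticsReporting):

import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;

// Illustrative sketch: page views per pagePath for one GA4 property in a date range.
public class Ga4PageViewsExample {

    public static void printPageViews(BetaAnalyticsDataSettings settings, String propertyId,
            String startDate, String endDate) throws Exception {
        try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(settings)) {
            RunReportRequest request = RunReportRequest.newBuilder()
                    .setProperty("properties/" + propertyId)
                    .addDimensions(Dimension.newBuilder().setName("pagePath"))
                    .addMetrics(Metric.newBuilder().setName("screenPageViews"))
                    .addDateRanges(DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate))
                    .build();
            RunReportResponse response = analyticsData.runReport(request);
            for (Row row : response.getRowsList()) {
                // pagePath -> screenPageViews
                System.out.println(row.getDimensionValues(0).getValue()
                        + " -> " + row.getMetricValues(0).getValue());
            }
        }
    }
}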

CoreServicesAccessesHarvester.java

@ -4,22 +4,14 @@ import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.security.GeneralSecurityException;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@ -49,26 +41,17 @@ import org.slf4j.LoggerFactory;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential.Builder;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.googleapis.util.Utils;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.client.util.PemReader;
import com.google.api.client.util.PemReader.Section;
import com.google.api.client.util.SecurityUtils;
import com.google.api.services.analyticsreporting.v4.AnalyticsReporting;
import com.google.api.services.analyticsreporting.v4.AnalyticsReportingScopes;
import com.google.api.services.analyticsreporting.v4.model.DateRange;
import com.google.api.services.analyticsreporting.v4.model.DateRangeValues;
import com.google.api.services.analyticsreporting.v4.model.GetReportsRequest;
import com.google.api.services.analyticsreporting.v4.model.GetReportsResponse;
import com.google.api.services.analyticsreporting.v4.model.Metric;
import com.google.api.services.analyticsreporting.v4.model.Report;
import com.google.api.services.analyticsreporting.v4.model.ReportRequest;
import com.google.api.services.analyticsreporting.v4.model.ReportRow;
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.DateRange.Builder;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.ServiceAccountCredentials;
/**
* @author Massimiliano Assante (ISTI - CNR)
@ -77,16 +60,12 @@ public class CoreServicesAccessesHarvester extends BasicHarvester {
private static Logger logger = LoggerFactory.getLogger(CoreServicesAccessesHarvester.class);
private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
private static final String MAPPING_RESOURCE_CATEGORY = "BigGAnalyticsMapping";
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
private static final String SERVICE_ENDPOINT_NAME = "BigGAnalyticsReportService";
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
private static final String AP_VIEWS_PROPERTY = "views";
private static final String AP_CLIENT_PROPERTY = "clientId";
private static final String AP_PRIVATEKEY_PROPERTY = "privateKeyId";
private static final String APPLICATION_NAME = "Analytics Reporting";
private static final String AP_CLIENT_ID = "client_id";
private static final String AP_PRIVATEKEY_ID_PROPERTY = "private_key_id";
private static final String PAGE_WORKSPACE_ACCESSES = "/workspace";
private static final String PAGE_MESSAGES_ACCESSES = "/messages";
@ -160,18 +139,18 @@ public class CoreServicesAccessesHarvester extends BasicHarvester {
*
*/
private static HashMap<String, List<CoreServiceAccessesReportRow>> getAllAccesses(Date start, Date end) throws Exception {
DateRange dateRange = getDateRangeForAnalytics(start, end);
logger.trace("Getting core services accesses in this time range {}", dateRange.toPrettyString());
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
logger.trace("Getting core services accesses in this time range {}", dateRangeBuilder.toString());
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
logger.trace("gotten credentialsFromD4S id = {}", credentialsFromD4S.getClientId());
AnalyticsReporting service = initializeAnalyticsReporting(credentialsFromD4S);
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
logger.trace("gotten credentialsFromD4S viewIds= {}", credentialsFromD4S.getViewIds().toString());
HashMap<String,List<GetReportsResponse>> responses = getReportResponses(service, credentialsFromD4S.getViewIds(), dateRange);
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
HashMap<String, List<CoreServiceAccessesReportRow>> toReturn = new HashMap<>();
int i = 1;
@ -191,181 +170,106 @@ public class CoreServicesAccessesHarvester extends BasicHarvester {
}
/**
* Initializes an Analytics Reporting API V4 service object.
* Initializes a Google Analytics Data API service object.
*
* @return An authorized Analytics Reporting API V4 service object.
* @return An authorized Google Analytics Data API
* @throws IOException
* @throws GeneralSecurityException
*/
private static AnalyticsReporting initializeAnalyticsReporting(AnalyticsReportCredentials cred)
throws GeneralSecurityException, IOException {
HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
GoogleCredential credential = fromD4SServiceEndpoint(cred).createScoped(AnalyticsReportingScopes.all());
// Construct the Analytics Reporting service object.
return new AnalyticsReporting.Builder(httpTransport, JSON_FACTORY, credential)
.setApplicationName(APPLICATION_NAME).build();
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
return BetaAnalyticsDataSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
.build();
}
/**
* Queries the Analytics Reporting API V4.
* Queries Analytics Data API service
*
* @param service An authorized Analytics Reporting API V4 service object.
* @return GetReportResponse The Analytics Reporting API V4 response.
* @param service Analytics Data API service settings.
* @return Row Analytics Data API service
* @throws IOException
*/
private static HashMap<String,List<GetReportsResponse>> getReportResponses(AnalyticsReporting service,
List<String> viewIDs, DateRange dateRange) throws IOException {
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
HashMap<String,List<GetReportsResponse>> reports = new HashMap<>();
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
// Create the Metrics object.
Metric sessions = new Metric().setExpression("ga:pageviews").setAlias("pages");
com.google.api.services.analyticsreporting.v4.model.Dimension pageTitle = new com.google.api.services.analyticsreporting.v4.model.Dimension().setName("ga:pagePath");
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
for(String view : viewIDs) {
List<GetReportsResponse> gReportResponses = new ArrayList<>();
logger.info("Getting data from Google Analytics for gateway viewid: " + view);
boolean iterateMorePages = true;
String nextPageToken = null;
while (iterateMorePages) {
// Create the ReportRequest object.
ReportRequest request = new ReportRequest().setViewId(view.trim()).setDateRanges(Arrays.asList(dateRange))
.setMetrics(Arrays.asList(sessions)).setDimensions(Arrays.asList(pageTitle));
request.setPageSize(1000);
request.setPageToken(nextPageToken);
ArrayList<ReportRequest> requests = new ArrayList<ReportRequest>();
requests.add(request);
// Create the GetReportsRequest object.
GetReportsRequest getReport = new GetReportsRequest().setReportRequests(requests);
// Call the batchGet method.
GetReportsResponse response = service.reports().batchGet(getReport).execute();
nextPageToken = response.getReports().get(0).getNextPageToken();
iterateMorePages = (nextPageToken != null);
logger.debug("got nextPageToken: "+nextPageToken);
for(String propertyId : viewIDs) {
List<RunReportResponse> gReportResponses = new ArrayList<>();
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
RunReportRequest request =
RunReportRequest.newBuilder()
.setProperty("properties/" + propertyId)
.addDimensions(Dimension.newBuilder().setName("pagePath"))
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
.addDateRanges(dateRangeBuilder)
.build();
// Make the request.
RunReportResponse response = analyticsData.runReport(request);
gReportResponses.add(response);
reports.put(propertyId, gReportResponses);
}
reports.put(view, gReportResponses);
}
// Return the response.
return reports;
}
/**
* Parses and prints the Analytics Reporting API V4 response.
* @param dashboardContext
* Parses and prints the Analytics Data API service response
*
* @param response An Analytics Reporting API V4 response.
* @param response An Analytics Data API service response.
*/
private static List<CoreServiceAccessesReportRow> parseResponse(String viewId, List<GetReportsResponse> responses, String dashboardContext) {
logger.debug("parsing Response for " + viewId);
private static List<CoreServiceAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses, String dashboardContext) {
logger.debug("parsing Response for propertyID=" + viewId);
List<CoreServiceAccessesReportRow> toReturn = new ArrayList<>();
for (GetReportsResponse response : responses) {
for (Report report: response.getReports()) {
List<ReportRow> rows = report.getData().getRows();
if (rows == null) {
logger.warn("No data found for " + viewId);
}
else {
for (ReportRow row: rows) {
String dimension = row.getDimensions().get(0);
DateRangeValues metric = row.getMetrics().get(0);
CoreServiceAccessesReportRow var = new CoreServiceAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
logger.trace("parsing pagepath {}: value: {}", pagePath, Integer.parseInt(metric.getValues().get(0)));
for (RunReportResponse response : responses) {
for (Row row: response.getRowsList()) {
String dimension = row.getDimensionValues(0).getValue();
String metric = row.getMetricValues(0).getValue();
CoreServiceAccessesReportRow var = new CoreServiceAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
logger.trace("parsing pagepath {}: value: {}", pagePath, Integer.parseInt(metric));
if (!pagePath.contains("_redirect=/group")) {
if ( pagePath.contains(PAGE_WORKSPACE_ACCESSES)) {
var.setKey(HarvestedDataKey.WORKSPACE_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
else if ( pagePath.contains(PAGE_MESSAGES_ACCESSES)) {
var.setKey(HarvestedDataKey.MESSAGES_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
else if ( pagePath.contains(PAGE_PROFILE_ACCESSES)) {
var.setKey(HarvestedDataKey.PROFILE_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
else if ( pagePath.contains(PAGE_NOTIFICATION_ACCESSES)) {
var.setKey(HarvestedDataKey.NOTIFICATIONS_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
}
if (validEntry) {
var.setDashboardContext(dashboardContext);
var.setPagePath(dimension);
var.setVisitNumber(Integer.parseInt(metric.getValues().get(0)));
toReturn.add(var);
}
if (!pagePath.contains("_redirect=/group")) {
if ( pagePath.contains(PAGE_WORKSPACE_ACCESSES)) {
var.setKey(HarvestedDataKey.WORKSPACE_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
else if ( pagePath.contains(PAGE_MESSAGES_ACCESSES)) {
var.setKey(HarvestedDataKey.MESSAGES_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
else if ( pagePath.contains(PAGE_PROFILE_ACCESSES)) {
var.setKey(HarvestedDataKey.PROFILE_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
else if ( pagePath.contains(PAGE_NOTIFICATION_ACCESSES)) {
var.setKey(HarvestedDataKey.NOTIFICATIONS_ACCESSES);
logger.trace("**matched "+pagePath);
validEntry = true;
}
}
if (validEntry) {
var.setDashboardContext(dashboardContext);
var.setPagePath(dimension);
var.setVisitNumber(Integer.parseInt(metric));
toReturn.add(var);
}
}
}
return toReturn;
}
private static GoogleCredential fromD4SServiceEndpoint(AnalyticsReportCredentials cred) throws IOException {
String clientId = cred.getClientId();
String clientEmail = cred.getClientEmail();
String privateKeyPem = cred.getPrivateKeyPem();
String privateKeyId = cred.getPrivateKeyId();
String tokenUri = cred.getTokenUri();
String projectId = cred.getProjectId();
if(clientId == null || clientEmail == null || privateKeyPem == null || privateKeyId == null) {
throw new IOException("Error reading service account credential from stream, "
+ "expecting 'client_id', 'client_email', 'private_key' and 'private_key_id'.");
}
PrivateKey privateKey = privateKeyFromPkcs8(privateKeyPem);
Collection<String> emptyScopes = Collections.emptyList();
Builder credentialBuilder = new GoogleCredential.Builder().setTransport(Utils.getDefaultTransport())
.setJsonFactory(Utils.getDefaultJsonFactory()).setServiceAccountId(clientEmail)
.setServiceAccountScopes(emptyScopes).setServiceAccountPrivateKey(privateKey)
.setServiceAccountPrivateKeyId(privateKeyId);
if(tokenUri != null) {
credentialBuilder.setTokenServerEncodedUrl(tokenUri);
}
if(projectId != null) {
credentialBuilder.setServiceAccountProjectId(projectId);
}
// Don't do a refresh at this point, as it will always fail before the scopes are added.
return credentialBuilder.build();
}
private static PrivateKey privateKeyFromPkcs8(String privateKeyPem) throws IOException {
Reader reader = new StringReader(privateKeyPem);
Section section = PemReader.readFirstSectionAndClose(reader, "PRIVATE KEY");
if(section == null) {
throw new IOException("Invalid PKCS8 data.");
}
byte[] bytes = section.getBase64DecodedBytes();
PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(bytes);
Exception unexpectedException = null;
try {
KeyFactory keyFactory = SecurityUtils.getRsaKeyFactory();
PrivateKey privateKey = keyFactory.generatePrivate(keySpec);
return privateKey;
} catch(NoSuchAlgorithmException exception) {
unexpectedException = exception;
} catch(InvalidKeySpecException exception) {
unexpectedException = exception;
}
throw new IOException("Unexpected exception reading PKCS data", unexpectedException);
}
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
throws Exception {
@ -445,24 +349,24 @@ public class CoreServicesAccessesHarvester extends BasicHarvester {
} else {
for(ServiceEndpoint res : list) {
reportCredentials.setTokenUri(res.profile().runtime().hostedOn());
Group<AccessPoint> apGroup = res.profile().accessPoints();
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
AccessPoint found = accessPoints[0];
reportCredentials.setClientEmail(found.address());
reportCredentials.setProjectId(found.username());
reportCredentials.setPrivateKeyPem(StringEncrypter.getEncrypter().decrypt(found.password()));
reportCredentials.setClientEmail(found.username());
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
for(Property prop : found.properties()) {
if(prop.name().compareTo(AP_VIEWS_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
String[] views = decryptedValue.split(";");
reportCredentials.setViewIds(Arrays.asList(views));
}
if(prop.name().compareTo(AP_CLIENT_PROPERTY) == 0) {
if(prop.name().compareTo(AP_CLIENT_ID) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setClientId(decryptedValue);
}
if(prop.name().compareTo(AP_PRIVATEKEY_PROPERTY) == 0) {
if(prop.name().compareTo(AP_PRIVATEKEY_ID_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setPrivateKeyId(decryptedValue);
}
@ -476,18 +380,18 @@ public class CoreServicesAccessesHarvester extends BasicHarvester {
return reportCredentials;
}
private static LocalDate asLocalDate(Date date) {
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
}
private static DateRange getDateRangeForAnalytics(Date start, Date end) {
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
String startDate = asLocalDate(start).format(formatter);
String endDate = asLocalDate(end).format(formatter);
DateRange dateRange = new DateRange();// date format `yyyy-MM-dd`
dateRange.setStartDate(startDate);
dateRange.setEndDate(endDate);
return dateRange;
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
return dateRangeBuilder;
}
}
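
The page-path classification done in parseResponse() above reduces to a small mapping. A sketch for readability (only the "/workspace" and "/messages" constants appear in this diff; the profile and notifications paths are assumed):

// Sketch: map a GA4 pagePath onto the core-services access counters.
static HarvestedDataKey classify(String pagePath) {
    if (pagePath.contains("_redirect=/group")) {
        return null; // group redirects are not counted
    }
    if (pagePath.contains("/workspace"))     return HarvestedDataKey.WORKSPACE_ACCESSES;
    if (pagePath.contains("/messages"))      return HarvestedDataKey.MESSAGES_ACCESSES;
    if (pagePath.contains("/profile"))       return HarvestedDataKey.PROFILE_ACCESSES;       // assumed path
    if (pagePath.contains("/notifications")) return HarvestedDataKey.NOTIFICATIONS_ACCESSES; // assumed path
    return null; // not a tracked core-service page
}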

JupyterAccessesHarvester.java

@ -4,22 +4,13 @@ import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.security.GeneralSecurityException;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@@ -42,26 +33,19 @@ import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential.Builder;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.googleapis.util.Utils;
import com.google.api.client.http.HttpTransport;
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.DateRange.Builder;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.client.util.PemReader;
import com.google.api.client.util.PemReader.Section;
import com.google.api.client.util.SecurityUtils;
import com.google.api.services.analyticsreporting.v4.AnalyticsReporting;
import com.google.api.services.analyticsreporting.v4.AnalyticsReportingScopes;
import com.google.api.services.analyticsreporting.v4.model.DateRange;
import com.google.api.services.analyticsreporting.v4.model.DateRangeValues;
import com.google.api.services.analyticsreporting.v4.model.GetReportsRequest;
import com.google.api.services.analyticsreporting.v4.model.GetReportsResponse;
import com.google.api.services.analyticsreporting.v4.model.Metric;
import com.google.api.services.analyticsreporting.v4.model.Report;
import com.google.api.services.analyticsreporting.v4.model.ReportRequest;
import com.google.api.services.analyticsreporting.v4.model.ReportRow;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.ServiceAccountCredentials;
/**
*
@@ -75,11 +59,10 @@ public class JupyterAccessesHarvester extends BasicHarvester {
private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
private static final String SERVICE_ENDPOINT_NAME = "BigGAnalyticsReportService";
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
private static final String AP_VIEWS_PROPERTY = "views";
private static final String AP_CLIENT_PROPERTY = "clientId";
private static final String AP_PRIVATEKEY_PROPERTY = "privateKeyId";
private static final String APPLICATION_NAME = "Analytics Reporting";
private static final String AP_CLIENT_ID = "client_id";
private static final String AP_PRIVATEKEY_ID_PROPERTY = "private_key_id";
private List<VREAccessesReportRow> vreAccesses;
@@ -115,7 +98,7 @@ public class JupyterAccessesHarvester extends BasicHarvester {
}
}
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
if (measure > 0) {
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant,
@@ -143,188 +126,111 @@ public class JupyterAccessesHarvester extends BasicHarvester {
* visitNumber=39]
*/
private static List<VREAccessesReportRow> getAllAccesses(Date start, Date end) throws Exception {
DateRange dateRange = getDateRangeForAnalytics(start, end);
logger.trace("Getting accesses in this time range {}", dateRange.toPrettyString());
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
logger.debug("Getting accesses in this time range {}", dateRangeBuilder.toString());
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
AnalyticsReporting service = initializeAnalyticsReporting(credentialsFromD4S);
HashMap<String, List<GetReportsResponse>> responses = getReportResponses(service,
credentialsFromD4S.getViewIds(), dateRange);
logger.debug("Getting credentials credentialsFromD4S");
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
logger.debug("initializeAnalyticsReporting service settings");
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
List<VREAccessesReportRow> totalAccesses = new ArrayList<>();
for (String view : responses.keySet()) {
for(String view : responses.keySet()) {
List<VREAccessesReportRow> viewReport = parseResponse(view, responses.get(view));
logger.trace("Got {} entries from view id={}", viewReport.size(), view);
logger.debug("Got {} entries from view id={}", viewReport.size(), view);
totalAccesses.addAll(viewReport);
}
logger.trace("Merged in {} total entries from all views", totalAccesses.size());
logger.debug("Merged in {} total entries from all views", totalAccesses.size());
return totalAccesses;
}
/**
* Initializes an Analytics Reporting API V4 service object.
* Initializes the Google Analytics Data API service settings.
*
* @return An authorized Analytics Reporting API V4 service object.
* @return the authorized Google Analytics Data API service settings.
* @throws IOException
* @throws GeneralSecurityException
*/
private static AnalyticsReporting initializeAnalyticsReporting(AnalyticsReportCredentials cred)
throws GeneralSecurityException, IOException {
HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
GoogleCredential credential = fromD4SServiceEndpoint(cred).createScoped(AnalyticsReportingScopes.all());
// Construct the Analytics Reporting service object.
return new AnalyticsReporting.Builder(httpTransport, JSON_FACTORY, credential)
.setApplicationName(APPLICATION_NAME).build();
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
return BetaAnalyticsDataSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
.build();
}
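For reference, a hedged standalone sketch of the new credential bootstrap: the PKCS#8 private key read from the Service Endpoint is handed directly to ServiceAccountCredentials.fromPkcs8, so the PemReader/KeyFactory handling removed further down in this file is no longer required (all parameter values are placeholders):

import java.io.IOException;

import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.ServiceAccountCredentials;

public class AnalyticsSettingsSketch {

    static BetaAnalyticsDataSettings settingsFor(String clientId, String clientEmail,
            String privateKeyPem, String privateKeyId) throws IOException {
        // fromPkcs8 parses the "-----BEGIN PRIVATE KEY-----" block itself.
        ServiceAccountCredentials credentials = ServiceAccountCredentials.fromPkcs8(
                clientId, clientEmail, privateKeyPem, privateKeyId, null); // null = no extra scopes
        return BetaAnalyticsDataSettings.newBuilder()
                .setCredentialsProvider(FixedCredentialsProvider.create(credentials))
                .build();
    }
}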
/**
* Queries the Analytics Reporting API V4.
* Queries the Google Analytics Data API service.
*
* @param service
* An authorized Analytics Reporting API V4 service object.
* @return GetReportResponse The Analytics Reporting API V4 response.
* @param betaAnalyticsDataSettings the Analytics Data API service settings.
* @return the Analytics Data API report responses, grouped by GA4 property ID.
* @throws IOException
*/
private static HashMap<String, List<GetReportsResponse>> getReportResponses(AnalyticsReporting service,
List<String> viewIDs, DateRange dateRange) throws IOException {
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
HashMap<String, List<GetReportsResponse>> reports = new HashMap<>();
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
// Create the Metrics object.
Metric sessions = new Metric().setExpression("ga:pageviews").setAlias("pages");
com.google.api.services.analyticsreporting.v4.model.Dimension pageTitle = new com.google.api.services.analyticsreporting.v4.model.Dimension()
.setName("ga:pagePath");
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
for (String view : viewIDs) {
List<GetReportsResponse> gReportResponses = new ArrayList<>();
logger.info("Getting data from Google Analytics for viewid: " + view);
boolean iterateMorePages = true;
String nextPageToken = null;
while (iterateMorePages) {
// Create the ReportRequest object.
ReportRequest request = new ReportRequest().setViewId(view.trim())
.setDateRanges(Arrays.asList(dateRange)).setMetrics(Arrays.asList(sessions))
.setDimensions(Arrays.asList(pageTitle));
request.setPageSize(1000);
request.setPageToken(nextPageToken);
ArrayList<ReportRequest> requests = new ArrayList<ReportRequest>();
requests.add(request);
// Create the GetReportsRequest object.
GetReportsRequest getReport = new GetReportsRequest().setReportRequests(requests);
// Call the batchGet method.
GetReportsResponse response = service.reports().batchGet(getReport).execute();
nextPageToken = response.getReports().get(0).getNextPageToken();
iterateMorePages = (nextPageToken != null);
logger.debug("got nextPageToken: " + nextPageToken);
for(String propertyId : viewIDs) {
List<RunReportResponse> gReportResponses = new ArrayList<>();
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
RunReportRequest request =
RunReportRequest.newBuilder()
.setProperty("properties/" + propertyId)
.addDimensions(Dimension.newBuilder().setName("pagePath"))
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
.addDateRanges(dateRangeBuilder)
.build();
// Make the request.
RunReportResponse response = analyticsData.runReport(request);
gReportResponses.add(response);
reports.put(propertyId, gReportResponses);
}
reports.put(view, gReportResponses);
}
// Return the response.
return reports;
}
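A standalone sketch of the per-property GA4 report request that getReportResponses now issues; the property ID is a placeholder, and unlike the old V4 code there is no page-token loop because a single runReport call is made per property:

import java.io.IOException;

import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;

public class RunReportSketch {

    static void printPageViews(BetaAnalyticsDataSettings settings, String propertyId,
            DateRange.Builder dateRange) throws IOException {
        // The client is AutoCloseable: try-with-resources releases its gRPC channel.
        try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(settings)) {
            RunReportRequest request = RunReportRequest.newBuilder()
                    .setProperty("properties/" + propertyId) // GA4 property, not a Universal Analytics view
                    .addDimensions(Dimension.newBuilder().setName("pagePath"))
                    .addMetrics(Metric.newBuilder().setName("screenPageViews"))
                    .addDateRanges(dateRange)
                    .build();
            RunReportResponse response = analyticsData.runReport(request);
            for (Row row : response.getRowsList()) {
                System.out.printf("%s -> %s%n",
                        row.getDimensionValues(0).getValue(),  // pagePath
                        row.getMetricValues(0).getValue());    // screenPageViews
            }
        }
    }
}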
/**
* Parses and prints the Analytics Reporting API V4 response.
* Parses the Analytics Data API service response.
*
* @param response
* An Analytics Reporting API V4 response.
* @param response An Analytics Data API service response.
*/
/**
* Parses and prints the Analytics Reporting API V4 response.
*
* @param response
* An Analytics Reporting API V4 response.
*/
private static List<VREAccessesReportRow> parseResponse(String viewId, List<GetReportsResponse> responses) {
logger.debug("parsing Response for " + viewId);
private static List<VREAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses) {
logger.debug("parsing Response for propertyID=" + viewId);
List<VREAccessesReportRow> toReturn = new ArrayList<>();
for (GetReportsResponse response : responses) {
for (Report report : response.getReports()) {
List<ReportRow> rows = report.getData().getRows();
if (rows == null) {
logger.warn("No data found for " + viewId);
} else {
for (ReportRow row : rows) {
String dimension = row.getDimensions().get(0);
DateRangeValues metric = row.getMetrics().get(0);
VREAccessesReportRow var = new VREAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
if (pagePath.startsWith("/group") || pagePath.startsWith("/web")) {
var.setPagePath(dimension);
validEntry = true;
}
if (validEntry) {
var.setVisitNumber(Integer.parseInt(metric.getValues().get(0)));
toReturn.add(var);
}
}
for (RunReportResponse response : responses) {
for (Row row: response.getRowsList()) {
String dimension = row.getDimensionValues(0).getValue();
String metric = row.getMetricValues(0).getValue();
VREAccessesReportRow var = new VREAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
if (pagePath.startsWith("/group") || pagePath.startsWith("/web")) {
var.setPagePath(dimension);
validEntry = true;
}
if (validEntry) {
var.setVisitNumber(Integer.parseInt(metric));
toReturn.add(var);
}
}
}
return toReturn;
}
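The validity filter in parseResponse keeps only portal paths. A toy illustration of the expected behaviour, with invented sample paths:

public class PagePathFilterSketch {

    public static void main(String[] args) {
        // Sample paths are invented; only paths under /group or /web are counted.
        String[] samples = { "/group/devvre/home", "/web/devvre", "/workspace", "/" };
        for (String pagePath : samples) {
            boolean validEntry = pagePath.startsWith("/group") || pagePath.startsWith("/web");
            System.out.println(pagePath + " -> " + (validEntry ? "counted" : "ignored"));
        }
    }
}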
private static GoogleCredential fromD4SServiceEndpoint(AnalyticsReportCredentials cred) throws IOException {
String clientId = cred.getClientId();
String clientEmail = cred.getClientEmail();
String privateKeyPem = cred.getPrivateKeyPem();
String privateKeyId = cred.getPrivateKeyId();
String tokenUri = cred.getTokenUri();
String projectId = cred.getProjectId();
if (clientId == null || clientEmail == null || privateKeyPem == null || privateKeyId == null) {
throw new IOException("Error reading service account credential from stream, "
+ "expecting 'client_id', 'client_email', 'private_key' and 'private_key_id'.");
}
PrivateKey privateKey = privateKeyFromPkcs8(privateKeyPem);
Collection<String> emptyScopes = Collections.emptyList();
Builder credentialBuilder = new GoogleCredential.Builder().setTransport(Utils.getDefaultTransport())
.setJsonFactory(Utils.getDefaultJsonFactory()).setServiceAccountId(clientEmail)
.setServiceAccountScopes(emptyScopes).setServiceAccountPrivateKey(privateKey)
.setServiceAccountPrivateKeyId(privateKeyId);
if (tokenUri != null) {
credentialBuilder.setTokenServerEncodedUrl(tokenUri);
}
if (projectId != null) {
credentialBuilder.setServiceAccountProjectId(projectId);
}
// Don't do a refresh at this point, as it will always fail before the
// scopes are added.
return credentialBuilder.build();
}
private static PrivateKey privateKeyFromPkcs8(String privateKeyPem) throws IOException {
Reader reader = new StringReader(privateKeyPem);
Section section = PemReader.readFirstSectionAndClose(reader, "PRIVATE KEY");
if (section == null) {
throw new IOException("Invalid PKCS8 data.");
}
byte[] bytes = section.getBase64DecodedBytes();
PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(bytes);
Exception unexpectedException = null;
try {
KeyFactory keyFactory = SecurityUtils.getRsaKeyFactory();
PrivateKey privateKey = keyFactory.generatePrivate(keySpec);
return privateKey;
} catch (NoSuchAlgorithmException exception) {
unexpectedException = exception;
} catch (InvalidKeySpecException exception) {
unexpectedException = exception;
}
throw new IOException("Unexpected exception reading PKCS data", unexpectedException);
}
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
throws Exception {
String scope = infrastructureScope;
@@ -341,8 +247,7 @@ public class JupyterAccessesHarvester extends BasicHarvester {
/**
* l
*
* @throws Exception
* @throws Exception
*/
private static AnalyticsReportCredentials getAuthorisedApplicationInfoFromIs() throws Exception {
AnalyticsReportCredentials reportCredentials = new AnalyticsReportCredentials();
@@ -350,60 +255,58 @@ public class JupyterAccessesHarvester extends BasicHarvester {
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
try {
List<ServiceEndpoint> list = getAnalyticsReportingConfigurationFromIS(context);
if (list.size() > 1) {
if(list.size() > 1) {
logger.error("Too many Service Endpoints having name " + SERVICE_ENDPOINT_NAME
+ " in this scope having Category " + SERVICE_ENDPOINT_CATEGORY);
} else if (list.size() == 0) {
} else if(list.size() == 0) {
logger.warn("There is no Service Endpoint having name " + SERVICE_ENDPOINT_NAME + " and Category "
+ SERVICE_ENDPOINT_CATEGORY + " in this context: " + context);
} else {
for (ServiceEndpoint res : list) {
reportCredentials.setTokenUri(res.profile().runtime().hostedOn());
for(ServiceEndpoint res : list) {
Group<AccessPoint> apGroup = res.profile().accessPoints();
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
AccessPoint found = accessPoints[0];
reportCredentials.setClientEmail(found.address());
reportCredentials.setProjectId(found.username());
reportCredentials.setPrivateKeyPem(StringEncrypter.getEncrypter().decrypt(found.password()));
for (Property prop : found.properties()) {
if (prop.name().compareTo(AP_VIEWS_PROPERTY) == 0) {
reportCredentials.setClientEmail(found.username());
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
for(Property prop : found.properties()) {
if(prop.name().compareTo(AP_VIEWS_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
String[] views = decryptedValue.split(";");
reportCredentials.setViewIds(Arrays.asList(views));
}
if (prop.name().compareTo(AP_CLIENT_PROPERTY) == 0) {
if(prop.name().compareTo(AP_CLIENT_ID) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setClientId(decryptedValue);
}
if (prop.name().compareTo(AP_PRIVATEKEY_PROPERTY) == 0) {
if(prop.name().compareTo(AP_PRIVATEKEY_ID_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setPrivateKeyId(decryptedValue);
}
}
}
}
} catch (Exception e) {
} catch(Exception e) {
e.printStackTrace();
return null;
}
return reportCredentials;
}
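The encrypted views property is still read as before, but judging from how getReportResponses uses its tokens ("properties/" + id), they are now treated as GA4 property IDs rather than Universal Analytics view IDs. A small sketch of the split, with invented IDs:

import java.util.Arrays;
import java.util.List;

public class ViewsPropertySketch {

    public static void main(String[] args) {
        // Invented example of the decrypted "views" property value.
        String decryptedValue = "123456789;987654321";
        List<String> propertyIds = Arrays.asList(decryptedValue.split(";"));
        // Each token is later used as "properties/<id>" in the runReport request.
        propertyIds.forEach(id -> System.out.println("properties/" + id));
    }
}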
private static LocalDate asLocalDate(Date date) {
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
}
private static DateRange getDateRangeForAnalytics(Date start, Date end) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); // required
// by
// Analytics
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
String startDate = asLocalDate(start).format(formatter);
String endDate = asLocalDate(end).format(formatter);
DateRange dateRange = new DateRange();// date format `yyyy-MM-dd`
dateRange.setStartDate(startDate);
dateRange.setEndDate(endDate);
return dateRange;
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
return dateRangeBuilder;
}
}

View File

@@ -28,7 +28,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
public class MethodInvocationHarvester extends BasicHarvester {
@@ -58,7 +57,6 @@ public class MethodInvocationHarvester extends BasicHarvester {
SortedMap<Filter,SortedMap<Calendar,Info>> result = null;
List<Filter> filters = new ArrayList<>();
filters.add(new Filter(ServiceUsageRecord.SERVICE_NAME, DATAMINER_SERVICE_NAME));
Date newMethodInvocationHarvesterStartDate = DateUtils.getStartCalendar(2017, Calendar.DECEMBER, 31).getTime();
@@ -68,11 +66,12 @@ public class MethodInvocationHarvester extends BasicHarvester {
AggregatedJobUsageRecord.class, temporalConstraint, filters, contexts, true);
} else {
// Before 31/12/2017 Method Invocations were accounted using ServiceUsageRecord
filters.add(new Filter(ServiceUsageRecord.SERVICE_NAME, DATAMINER_SERVICE_NAME));
result = accountingPersistenceQuery.getContextTimeSeries(
AggregatedServiceUsageRecord.class, temporalConstraint, filters, contexts, true);
}
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
Dimension dimension = getDimension(HarvestedDataKey.METHOD_INVOCATIONS);
if(result != null) {

View File

@@ -4,22 +4,13 @@ import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.security.GeneralSecurityException;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@@ -42,26 +33,20 @@ import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential.Builder;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.googleapis.util.Utils;
import com.google.api.client.http.HttpTransport;
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.DateRange.Builder;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.client.util.PemReader;
import com.google.api.client.util.PemReader.Section;
import com.google.api.client.util.SecurityUtils;
import com.google.api.services.analyticsreporting.v4.AnalyticsReporting;
import com.google.api.services.analyticsreporting.v4.AnalyticsReportingScopes;
import com.google.api.services.analyticsreporting.v4.model.DateRange;
import com.google.api.services.analyticsreporting.v4.model.DateRangeValues;
import com.google.api.services.analyticsreporting.v4.model.GetReportsRequest;
import com.google.api.services.analyticsreporting.v4.model.GetReportsResponse;
import com.google.api.services.analyticsreporting.v4.model.Metric;
import com.google.api.services.analyticsreporting.v4.model.Report;
import com.google.api.services.analyticsreporting.v4.model.ReportRequest;
import com.google.api.services.analyticsreporting.v4.model.ReportRow;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.ServiceAccountCredentials;
/**
*
@@ -75,11 +60,10 @@ public class RStudioAccessesHarvester extends BasicHarvester {
private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
private static final String SERVICE_ENDPOINT_NAME = "BigGAnalyticsReportService";
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
private static final String AP_VIEWS_PROPERTY = "views";
private static final String AP_CLIENT_PROPERTY = "clientId";
private static final String AP_PRIVATEKEY_PROPERTY = "privateKeyId";
private static final String APPLICATION_NAME = "Analytics Reporting";
private static final String AP_CLIENT_ID = "client_id";
private static final String AP_PRIVATEKEY_ID_PROPERTY = "private_key_id";
private List<VREAccessesReportRow> vreAccesses;
@@ -115,7 +99,7 @@ public class RStudioAccessesHarvester extends BasicHarvester {
}
}
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
if (measure > 0) {
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant,
@@ -143,188 +127,111 @@ public class RStudioAccessesHarvester extends BasicHarvester {
* visitNumber=39]
*/
private static List<VREAccessesReportRow> getAllAccesses(Date start, Date end) throws Exception {
DateRange dateRange = getDateRangeForAnalytics(start, end);
logger.trace("Getting accesses in this time range {}", dateRange.toPrettyString());
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
logger.debug("Getting accesses in this time range {}", dateRangeBuilder.toString());
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
AnalyticsReporting service = initializeAnalyticsReporting(credentialsFromD4S);
HashMap<String, List<GetReportsResponse>> responses = getReportResponses(service,
credentialsFromD4S.getViewIds(), dateRange);
logger.debug("Getting credentials credentialsFromD4S");
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
logger.debug("initializeAnalyticsReporting service settings");
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
List<VREAccessesReportRow> totalAccesses = new ArrayList<>();
for (String view : responses.keySet()) {
for(String view : responses.keySet()) {
List<VREAccessesReportRow> viewReport = parseResponse(view, responses.get(view));
logger.trace("Got {} entries from view id={}", viewReport.size(), view);
logger.debug("Got {} entries from view id={}", viewReport.size(), view);
totalAccesses.addAll(viewReport);
}
logger.trace("Merged in {} total entries from all views", totalAccesses.size());
logger.debug("Merged in {} total entries from all views", totalAccesses.size());
return totalAccesses;
}
/**
* Initializes an Analytics Reporting API V4 service object.
* Initializes the Google Analytics Data API service settings.
*
* @return An authorized Analytics Reporting API V4 service object.
* @return the authorized Google Analytics Data API service settings.
* @throws IOException
* @throws GeneralSecurityException
*/
private static AnalyticsReporting initializeAnalyticsReporting(AnalyticsReportCredentials cred)
throws GeneralSecurityException, IOException {
HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
GoogleCredential credential = fromD4SServiceEndpoint(cred).createScoped(AnalyticsReportingScopes.all());
// Construct the Analytics Reporting service object.
return new AnalyticsReporting.Builder(httpTransport, JSON_FACTORY, credential)
.setApplicationName(APPLICATION_NAME).build();
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
return BetaAnalyticsDataSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
.build();
}
/**
* Queries the Analytics Reporting API V4.
* Queries the Google Analytics Data API service.
*
* @param service
* An authorized Analytics Reporting API V4 service object.
* @return GetReportResponse The Analytics Reporting API V4 response.
* @param betaAnalyticsDataSettings the Analytics Data API service settings.
* @return the Analytics Data API report responses, grouped by GA4 property ID.
* @throws IOException
*/
private static HashMap<String, List<GetReportsResponse>> getReportResponses(AnalyticsReporting service,
List<String> viewIDs, DateRange dateRange) throws IOException {
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
HashMap<String, List<GetReportsResponse>> reports = new HashMap<>();
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
// Create the Metrics object.
Metric sessions = new Metric().setExpression("ga:pageviews").setAlias("pages");
com.google.api.services.analyticsreporting.v4.model.Dimension pageTitle = new com.google.api.services.analyticsreporting.v4.model.Dimension()
.setName("ga:pagePath");
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
for (String view : viewIDs) {
List<GetReportsResponse> gReportResponses = new ArrayList<>();
logger.info("Getting data from Google Analytics for viewid: " + view);
boolean iterateMorePages = true;
String nextPageToken = null;
while (iterateMorePages) {
// Create the ReportRequest object.
ReportRequest request = new ReportRequest().setViewId(view.trim())
.setDateRanges(Arrays.asList(dateRange)).setMetrics(Arrays.asList(sessions))
.setDimensions(Arrays.asList(pageTitle));
request.setPageSize(1000);
request.setPageToken(nextPageToken);
ArrayList<ReportRequest> requests = new ArrayList<ReportRequest>();
requests.add(request);
// Create the GetReportsRequest object.
GetReportsRequest getReport = new GetReportsRequest().setReportRequests(requests);
// Call the batchGet method.
GetReportsResponse response = service.reports().batchGet(getReport).execute();
nextPageToken = response.getReports().get(0).getNextPageToken();
iterateMorePages = (nextPageToken != null);
logger.debug("got nextPageToken: " + nextPageToken);
for(String propertyId : viewIDs) {
List<RunReportResponse> gReportResponses = new ArrayList<>();
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
RunReportRequest request =
RunReportRequest.newBuilder()
.setProperty("properties/" + propertyId)
.addDimensions(Dimension.newBuilder().setName("pagePath"))
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
.addDateRanges(dateRangeBuilder)
.build();
// Make the request.
RunReportResponse response = analyticsData.runReport(request);
gReportResponses.add(response);
reports.put(propertyId, gReportResponses);
}
reports.put(view, gReportResponses);
}
// Return the response.
return reports;
}
/**
* Parses and prints the Analytics Reporting API V4 response.
* Parses the Analytics Data API service response.
*
* @param response
* An Analytics Reporting API V4 response.
* @param response An Analytics Data API service response.
*/
/**
* Parses and prints the Analytics Reporting API V4 response.
*
* @param response
* An Analytics Reporting API V4 response.
*/
private static List<VREAccessesReportRow> parseResponse(String viewId, List<GetReportsResponse> responses) {
logger.debug("parsing Response for " + viewId);
private static List<VREAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses) {
logger.debug("parsing Response for propertyID=" + viewId);
List<VREAccessesReportRow> toReturn = new ArrayList<>();
for (GetReportsResponse response : responses) {
for (Report report : response.getReports()) {
List<ReportRow> rows = report.getData().getRows();
if (rows == null) {
logger.warn("No data found for " + viewId);
} else {
for (ReportRow row : rows) {
String dimension = row.getDimensions().get(0);
DateRangeValues metric = row.getMetrics().get(0);
VREAccessesReportRow var = new VREAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
if (pagePath.startsWith("/group") || pagePath.startsWith("/web")) {
var.setPagePath(dimension);
validEntry = true;
}
if (validEntry) {
var.setVisitNumber(Integer.parseInt(metric.getValues().get(0)));
toReturn.add(var);
}
}
for (RunReportResponse response : responses) {
for (Row row: response.getRowsList()) {
String dimension = row.getDimensionValues(0).getValue();
String metric = row.getMetricValues(0).getValue();
VREAccessesReportRow var = new VREAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
if (pagePath.startsWith("/group") || pagePath.startsWith("/web")) {
var.setPagePath(dimension);
validEntry = true;
}
if (validEntry) {
var.setVisitNumber(Integer.parseInt(metric));
toReturn.add(var);
}
}
}
return toReturn;
}
private static GoogleCredential fromD4SServiceEndpoint(AnalyticsReportCredentials cred) throws IOException {
String clientId = cred.getClientId();
String clientEmail = cred.getClientEmail();
String privateKeyPem = cred.getPrivateKeyPem();
String privateKeyId = cred.getPrivateKeyId();
String tokenUri = cred.getTokenUri();
String projectId = cred.getProjectId();
if (clientId == null || clientEmail == null || privateKeyPem == null || privateKeyId == null) {
throw new IOException("Error reading service account credential from stream, "
+ "expecting 'client_id', 'client_email', 'private_key' and 'private_key_id'.");
}
PrivateKey privateKey = privateKeyFromPkcs8(privateKeyPem);
Collection<String> emptyScopes = Collections.emptyList();
Builder credentialBuilder = new GoogleCredential.Builder().setTransport(Utils.getDefaultTransport())
.setJsonFactory(Utils.getDefaultJsonFactory()).setServiceAccountId(clientEmail)
.setServiceAccountScopes(emptyScopes).setServiceAccountPrivateKey(privateKey)
.setServiceAccountPrivateKeyId(privateKeyId);
if (tokenUri != null) {
credentialBuilder.setTokenServerEncodedUrl(tokenUri);
}
if (projectId != null) {
credentialBuilder.setServiceAccountProjectId(projectId);
}
// Don't do a refresh at this point, as it will always fail before the
// scopes are added.
return credentialBuilder.build();
}
private static PrivateKey privateKeyFromPkcs8(String privateKeyPem) throws IOException {
Reader reader = new StringReader(privateKeyPem);
Section section = PemReader.readFirstSectionAndClose(reader, "PRIVATE KEY");
if (section == null) {
throw new IOException("Invalid PKCS8 data.");
}
byte[] bytes = section.getBase64DecodedBytes();
PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(bytes);
Exception unexpectedException = null;
try {
KeyFactory keyFactory = SecurityUtils.getRsaKeyFactory();
PrivateKey privateKey = keyFactory.generatePrivate(keySpec);
return privateKey;
} catch (NoSuchAlgorithmException exception) {
unexpectedException = exception;
} catch (InvalidKeySpecException exception) {
unexpectedException = exception;
}
throw new IOException("Unexpected exception reading PKCS data", unexpectedException);
}
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
throws Exception {
String scope = infrastructureScope;
@@ -341,8 +248,7 @@ public class RStudioAccessesHarvester extends BasicHarvester {
/**
* l
*
* @throws Exception
* @throws Exception
*/
private static AnalyticsReportCredentials getAuthorisedApplicationInfoFromIs() throws Exception {
AnalyticsReportCredentials reportCredentials = new AnalyticsReportCredentials();
@@ -350,60 +256,58 @@ public class RStudioAccessesHarvester extends BasicHarvester {
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
try {
List<ServiceEndpoint> list = getAnalyticsReportingConfigurationFromIS(context);
if (list.size() > 1) {
if(list.size() > 1) {
logger.error("Too many Service Endpoints having name " + SERVICE_ENDPOINT_NAME
+ " in this scope having Category " + SERVICE_ENDPOINT_CATEGORY);
} else if (list.size() == 0) {
} else if(list.size() == 0) {
logger.warn("There is no Service Endpoint having name " + SERVICE_ENDPOINT_NAME + " and Category "
+ SERVICE_ENDPOINT_CATEGORY + " in this context: " + context);
} else {
for (ServiceEndpoint res : list) {
reportCredentials.setTokenUri(res.profile().runtime().hostedOn());
for(ServiceEndpoint res : list) {
Group<AccessPoint> apGroup = res.profile().accessPoints();
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
AccessPoint found = accessPoints[0];
reportCredentials.setClientEmail(found.address());
reportCredentials.setProjectId(found.username());
reportCredentials.setPrivateKeyPem(StringEncrypter.getEncrypter().decrypt(found.password()));
for (Property prop : found.properties()) {
if (prop.name().compareTo(AP_VIEWS_PROPERTY) == 0) {
reportCredentials.setClientEmail(found.username());
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
for(Property prop : found.properties()) {
if(prop.name().compareTo(AP_VIEWS_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
String[] views = decryptedValue.split(";");
reportCredentials.setViewIds(Arrays.asList(views));
}
if (prop.name().compareTo(AP_CLIENT_PROPERTY) == 0) {
if(prop.name().compareTo(AP_CLIENT_ID) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setClientId(decryptedValue);
}
if (prop.name().compareTo(AP_PRIVATEKEY_PROPERTY) == 0) {
if(prop.name().compareTo(AP_PRIVATEKEY_ID_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setPrivateKeyId(decryptedValue);
}
}
}
}
} catch (Exception e) {
} catch(Exception e) {
e.printStackTrace();
return null;
}
return reportCredentials;
}
private static LocalDate asLocalDate(Date date) {
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
}
private static DateRange getDateRangeForAnalytics(Date start, Date end) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); // required
// by
// Analytics
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
String startDate = asLocalDate(start).format(formatter);
String endDate = asLocalDate(end).format(formatter);
DateRange dateRange = new DateRange();// date format `yyyy-MM-dd`
dateRange.setStartDate(startDate);
dateRange.setEndDate(endDate);
return dateRange;
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
return dateRangeBuilder;
}
}

View File

@@ -1,6 +1,5 @@
package org.gcube.dataharvest.harvester;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@@ -10,8 +9,8 @@ import org.gcube.accounting.accounting.summary.access.model.update.AccountingRec
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.utils.Utils;
import org.json.JSONArray;
import org.json.JSONObject;
import org.gcube.portal.databook.shared.Feed;
import org.gcube.social_networking.social_networking_client_library.PostClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -19,7 +18,7 @@ import org.slf4j.LoggerFactory;
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
public class SocialInteractionsHarvester extends BasicHarvester {
private static Logger logger = LoggerFactory.getLogger(SocialInteractionsHarvester.class);
@@ -27,7 +26,7 @@ public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
private int replies;
private int posts;
public static final String PATH = "/2/posts/get-posts-vre?gcube-token=";
// public static final String PATH = "/2/posts/get-posts-vre?gcube-token=";
public SocialInteractionsHarvester(Date start, Date end) throws Exception {
super(start, end);
@@ -44,7 +43,7 @@ public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
getJson();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
AccountingRecord likesAR = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.SOCIAL_LIKES), (long) likes);
logger.debug("{} : {}", likesAR.getDimension().getId(), likesAR.getMeasure());
@@ -66,30 +65,20 @@ public class SocialInteractionsHarvester extends SocialNetworkingHarvester {
}
private void getJson() throws Exception {
JSONObject jsonObject = getJSONObject(PATH);
Boolean success = (Boolean) jsonObject.get("success");
if(success == false) {
throw new IOException("Erro while getting posts");
}
JSONArray res = jsonObject.getJSONArray("result");
int len = res.length();
PostClient postClient = new PostClient();
List<Feed> vrePosts = postClient.getPostsVRE();
likes = replies = posts = 0;
for(int i = 0; i < len; i++) {
for(Feed feed : vrePosts) {
JSONObject item = res.getJSONObject(i);
long time = item.getLong("time");
long time = feed.getTime().getTime();
if(start.getTime() <= time && time <= end.getTime()) {
posts++;
replies += item.getInt("comments_no");
likes += item.getInt("likes_no");
replies += Integer.valueOf(feed.getCommentsNo());
likes += Integer.valueOf(feed.getLikesNo());
}
}
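The REST call and manual JSON parsing are replaced by the typed social-networking client. A hedged standalone sketch of the counting logic, using only the Feed and PostClient types imported above and assuming the client resolves the service from the current gCube context:

import java.util.Date;
import java.util.List;

import org.gcube.portal.databook.shared.Feed;
import org.gcube.social_networking.social_networking_client_library.PostClient;

public class VrePostCountSketch {

    // Counts posts, replies and likes that fall inside [start, end], mirroring getJson() above.
    static int[] countInWindow(Date start, Date end) throws Exception {
        int posts = 0, replies = 0, likes = 0;
        // Assumption: the client discovers the social-networking service from the current context.
        List<Feed> vrePosts = new PostClient().getPostsVRE();
        for (Feed feed : vrePosts) {
            long time = feed.getTime().getTime();
            if (start.getTime() <= time && time <= end.getTime()) {
                posts++;
                replies += Integer.valueOf(feed.getCommentsNo()); // same conversion as in the harvester
                likes += Integer.valueOf(feed.getLikesNo());
            }
        }
        return new int[] { posts, replies, likes };
    }
}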

View File

@@ -1,58 +0,0 @@
package org.gcube.dataharvest.harvester;
import java.util.Date;
import java.util.List;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.clients.exceptions.DiscoveryException;
import org.gcube.common.resources.gcore.GCoreEndpoint;
import org.gcube.dataharvest.utils.Utils;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.gcube.resources.discovery.icclient.ICFactory;
import org.json.JSONObject;
public abstract class SocialNetworkingHarvester extends BasicHarvester {
public SocialNetworkingHarvester(Date start, Date end) throws Exception {
super(start, end);
}
public static String CLASS_FORMAT = "$resource/Profile/ServiceClass/text() eq '%1s'";
public static String NAME_FORMAT = "$resource/Profile/ServiceName/text() eq '%1s'";
public static String STATUS_FORMAT = "$resource/Profile/DeploymentData/Status/text() eq 'ready'";
public static String CONTAINS_FORMAT = "$entry/@EntryName eq '%1s'";
public static String SERVICE_CLASS = "Portal";
public static String SERVICE_NAME = "SocialNetworking";
public static String ENTRY_NAME = "jersey-servlet";
protected SimpleQuery getGCoreEndpointQuery() {
return ICFactory.queryFor(GCoreEndpoint.class)
.addCondition(String.format(CLASS_FORMAT, SERVICE_CLASS))
.addCondition(String.format(NAME_FORMAT, SERVICE_NAME))
.addCondition(String.format(STATUS_FORMAT))
.addVariable("$entry", "$resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint")
.addCondition(String.format(CONTAINS_FORMAT, ENTRY_NAME))
.setResult("$entry/text()");
}
protected String getAddress() {
SimpleQuery gCoreEndpointQuery = getGCoreEndpointQuery();
List<String> addresses = ICFactory.client().submit(gCoreEndpointQuery);
if(addresses.size()==0) {
throw new DiscoveryException("No running Social Networking Service");
}
return addresses.get(0);
}
protected JSONObject getJSONObject(String path) throws Exception {
String token = SecurityTokenProvider.instance.get();
String baseAddress = getAddress();
StringBuffer sb = new StringBuffer(baseAddress);
sb.append(path);
sb.append(token);
return new JSONObject(Utils.getJson(sb.toString()));
}
}

View File

@@ -4,22 +4,13 @@ import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.security.GeneralSecurityException;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@@ -42,56 +33,48 @@ import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential.Builder;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.googleapis.util.Utils;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.client.util.PemReader;
import com.google.api.client.util.PemReader.Section;
import com.google.api.client.util.SecurityUtils;
import com.google.api.services.analyticsreporting.v4.AnalyticsReporting;
import com.google.api.services.analyticsreporting.v4.AnalyticsReportingScopes;
import com.google.api.services.analyticsreporting.v4.model.DateRange;
import com.google.api.services.analyticsreporting.v4.model.DateRangeValues;
import com.google.api.services.analyticsreporting.v4.model.GetReportsRequest;
import com.google.api.services.analyticsreporting.v4.model.GetReportsResponse;
import com.google.api.services.analyticsreporting.v4.model.Metric;
import com.google.api.services.analyticsreporting.v4.model.Report;
import com.google.api.services.analyticsreporting.v4.model.ReportRequest;
import com.google.api.services.analyticsreporting.v4.model.ReportRow;
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.DateRange.Builder;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.Row;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.ServiceAccountCredentials;
public class VREAccessesHarvester extends BasicHarvester {
private static Logger logger = LoggerFactory.getLogger(VREAccessesHarvester.class);
private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
// private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
private static final String SERVICE_ENDPOINT_CATEGORY = "OnlineService";
private static final String SERVICE_ENDPOINT_NAME = "BigGAnalyticsReportService";
private static final String SERVICE_ENDPOINT_NAME = "GA4AnalyticsDataService";
private static final String AP_VIEWS_PROPERTY = "views";
private static final String AP_CLIENT_PROPERTY = "clientId";
private static final String AP_PRIVATEKEY_PROPERTY = "privateKeyId";
private static final String APPLICATION_NAME = "Analytics Reporting";
private static final String AP_CLIENT_ID = "client_id";
private static final String AP_PRIVATEKEY_ID_PROPERTY = "private_key_id";
private List<VREAccessesReportRow> vreAccesses;
public VREAccessesHarvester(Date start, Date end) throws Exception {
super(start, end);
vreAccesses = getAllAccesses(start, end);
}
@Override
public List<AccountingRecord> getAccountingRecords() throws Exception {
try {
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
int measure = 0;
ScopeBean scopeBean = new ScopeBean(context);
String lowerCasedContext = scopeBean.name().toLowerCase();
String case1 = lowerCasedContext + "/";
@@ -100,28 +83,28 @@ public class VREAccessesHarvester extends BasicHarvester {
String pagePath = row.getPagePath();
if (!pagePath.contains("_redirect=/group") && !pagePath.contains("workspace")) {
if(pagePath.endsWith(lowerCasedContext)) {
logger.trace("Matched endsWith({}) : {}", lowerCasedContext, pagePath);
logger.debug("Matched endsWith({}) : {}", lowerCasedContext, pagePath);
measure += row.getVisitNumber();
} else if(pagePath.contains(case1) || pagePath.contains(case2)) {
logger.trace("Matched contains({}) || contains({}) : {}", case1, case2, pagePath);
logger.debug("Matched contains({}) || contains({}) : {}", case1, case2, pagePath);
measure += row.getVisitNumber();
}
}
}
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.ACCESSES), (long) measure);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
accountingRecords.add(ar);
return accountingRecords;
} catch(Exception e) {
throw e;
}
}
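A toy illustration of the page-path matching used above for VRE accesses; the VRE name and paths are invented, and case2, whose definition falls outside this hunk, is omitted:

public class VreMatchSketch {

    public static void main(String[] args) {
        // Invented VRE name and paths; case2 (defined outside this hunk) is not reproduced.
        String lowerCasedContext = "devvre";
        String case1 = lowerCasedContext + "/";
        String[] pagePaths = { "/group/devvre", "/group/devvre/administration",
                "/group/devvre?_redirect=/group/devvre", "/group/other" };
        for (String pagePath : pagePaths) {
            boolean counted = !pagePath.contains("_redirect=/group") && !pagePath.contains("workspace")
                    && (pagePath.endsWith(lowerCasedContext) || pagePath.contains(case1));
            System.out.println(pagePath + " -> " + (counted ? "counted" : "ignored"));
        }
    }
}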
/**
*
* @return a list of {@link VREAccessesReportRow} objects containing the pagePath and the visit number e.g.
@@ -130,182 +113,113 @@ public class VREAccessesHarvester extends BasicHarvester {
* VREAccessesReportRow [pagePath=/group/agroclimaticmodelling/agroclimaticmodelling, visitNumber=39]
*/
private static List<VREAccessesReportRow> getAllAccesses(Date start, Date end) throws Exception {
DateRange dateRange = getDateRangeForAnalytics(start, end);
logger.trace("Getting accesses in this time range {}", dateRange.toPrettyString());
Builder dateRangeBuilder = getDateRangeBuilderForAnalytics(start, end);
logger.debug("Getting accesses in this time range {}", dateRangeBuilder.toString());
AnalyticsReportCredentials credentialsFromD4S = getAuthorisedApplicationInfoFromIs();
AnalyticsReporting service = initializeAnalyticsReporting(credentialsFromD4S);
HashMap<String,List<GetReportsResponse>> responses = getReportResponses(service, credentialsFromD4S.getViewIds(), dateRange);
List<VREAccessesReportRow> totalAccesses = new ArrayList<>();
logger.debug("Getting credentials credentialsFromD4S");
BetaAnalyticsDataSettings serviceSettings = initializeAnalyticsReporting(credentialsFromD4S);
logger.debug("initializeAnalyticsReporting service settings");
HashMap<String,List<RunReportResponse>> responses = getReportResponses(serviceSettings, credentialsFromD4S.getViewIds(), dateRangeBuilder);
List<VREAccessesReportRow> totalAccesses = new ArrayList<>();
for(String view : responses.keySet()) {
List<VREAccessesReportRow> viewReport = parseResponse(view, responses.get(view));
logger.trace("Got {} entries from view id={}", viewReport.size(), view);
logger.debug("Got {} entries from view id={}", viewReport.size(), view);
totalAccesses.addAll(viewReport);
}
logger.trace("Merged in {} total entries from all views", totalAccesses.size());
logger.debug("Merged in {} total entries from all views", totalAccesses.size());
return totalAccesses;
}
/**
* Initializes an Analytics Reporting API V4 service object.
* Initializes the Google Analytics Data API service settings.
*
* @return An authorized Analytics Reporting API V4 service object.
* @return the authorized Google Analytics Data API service settings.
* @throws IOException
* @throws GeneralSecurityException
*/
private static AnalyticsReporting initializeAnalyticsReporting(AnalyticsReportCredentials cred)
throws GeneralSecurityException, IOException {
HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
GoogleCredential credential = fromD4SServiceEndpoint(cred).createScoped(AnalyticsReportingScopes.all());
// Construct the Analytics Reporting service object.
return new AnalyticsReporting.Builder(httpTransport, JSON_FACTORY, credential)
.setApplicationName(APPLICATION_NAME).build();
private static BetaAnalyticsDataSettings initializeAnalyticsReporting(AnalyticsReportCredentials cred) throws IOException {
return BetaAnalyticsDataSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(
ServiceAccountCredentials.fromPkcs8(cred.getClientId(), cred.getClientEmail(), cred.getPrivateKeyPem(), cred.getPrivateKeyId(), null)))
.build();
}
/**
* Queries the Analytics Reporting API V4.
* Queries the Google Analytics Data API service.
*
* @param service An authorized Analytics Reporting API V4 service object.
* @return GetReportResponse The Analytics Reporting API V4 response.
* @param betaAnalyticsDataSettings the Analytics Data API service settings.
* @return the Analytics Data API report responses, grouped by GA4 property ID.
* @throws IOException
*/
private static HashMap<String,List<GetReportsResponse>> getReportResponses(AnalyticsReporting service,
List<String> viewIDs, DateRange dateRange) throws IOException {
HashMap<String,List<GetReportsResponse>> reports = new HashMap<>();
// Create the Metrics object.
Metric sessions = new Metric().setExpression("ga:pageviews").setAlias("pages");
com.google.api.services.analyticsreporting.v4.model.Dimension pageTitle = new com.google.api.services.analyticsreporting.v4.model.Dimension().setName("ga:pagePath");
for(String view : viewIDs) {
List<GetReportsResponse> gReportResponses = new ArrayList<>();
logger.info("Getting data from Google Analytics for viewid: " + view);
boolean iterateMorePages = true;
String nextPageToken = null;
while (iterateMorePages) {
// Create the ReportRequest object.
ReportRequest request = new ReportRequest().setViewId(view.trim()).setDateRanges(Arrays.asList(dateRange))
.setMetrics(Arrays.asList(sessions)).setDimensions(Arrays.asList(pageTitle));
request.setPageSize(1000);
request.setPageToken(nextPageToken);
ArrayList<ReportRequest> requests = new ArrayList<ReportRequest>();
requests.add(request);
// Create the GetReportsRequest object.
GetReportsRequest getReport = new GetReportsRequest().setReportRequests(requests);
// Call the batchGet method.
GetReportsResponse response = service.reports().batchGet(getReport).execute();
nextPageToken = response.getReports().get(0).getNextPageToken();
iterateMorePages = (nextPageToken != null);
logger.debug("got nextPageToken: "+nextPageToken);
private static HashMap<String,List<RunReportResponse>> getReportResponses(BetaAnalyticsDataSettings betaAnalyticsDataSettings,
List<String> viewIDs, Builder dateRangeBuilder) throws IOException {
HashMap<String,List<RunReportResponse>> reports = new HashMap<>();
try (BetaAnalyticsDataClient analyticsData = BetaAnalyticsDataClient.create(betaAnalyticsDataSettings)) {
for(String propertyId : viewIDs) {
List<RunReportResponse> gReportResponses = new ArrayList<>();
logger.debug("Getting data from Analytics Data API for propertyId: " + propertyId);
RunReportRequest request =
RunReportRequest.newBuilder()
.setProperty("properties/" + propertyId)
.addDimensions(Dimension.newBuilder().setName("pagePath"))
.addMetrics(Metric.newBuilder().setName("screenPageViews"))
.addDateRanges(dateRangeBuilder)
.build();
// Make the request.
RunReportResponse response = analyticsData.runReport(request);
gReportResponses.add(response);
// Iterate through every row of the API response.
// for (Row row : response.getRowsList()) {
// System.out.printf(
// "%s, %s%n", row.getDimensionValues(0).getValue(), row.getMetricValues(0).getValue());
// }
reports.put(propertyId, gReportResponses);
}
reports.put(view, gReportResponses);
}
// Return the response.
return reports;
}
/**
* Parses and prints the Analytics Reporting API V4 response.
* Parses the Analytics Data API service response.
*
* @param response An Analytics Reporting API V4 response.
* @param response An Analytics Data API service response.
*/
/**
* Parses and prints the Analytics Reporting API V4 response.
*
* @param response An Analytics Reporting API V4 response.
*/
private static List<VREAccessesReportRow> parseResponse(String viewId, List<GetReportsResponse> responses) {
logger.debug("parsing Response for " + viewId);
private static List<VREAccessesReportRow> parseResponse(String viewId, List<RunReportResponse> responses) {
logger.debug("parsing Response for propertyID=" + viewId);
List<VREAccessesReportRow> toReturn = new ArrayList<>();
for (GetReportsResponse response : responses) {
for (Report report: response.getReports()) {
List<ReportRow> rows = report.getData().getRows();
if (rows == null) {
logger.warn("No data found for " + viewId);
for (RunReportResponse response : responses) {
for (Row row: response.getRowsList()) {
String dimension = row.getDimensionValues(0).getValue();
String metric = row.getMetricValues(0).getValue();
VREAccessesReportRow var = new VREAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
if (pagePath.startsWith("/group") || pagePath.startsWith("/web")) {
var.setPagePath(dimension);
validEntry = true;
}
else {
for (ReportRow row: rows) {
String dimension = row.getDimensions().get(0);
DateRangeValues metric = row.getMetrics().get(0);
VREAccessesReportRow var = new VREAccessesReportRow();
boolean validEntry = false;
String pagePath = dimension;
if (pagePath.startsWith("/group") || pagePath.startsWith("/web")) {
var.setPagePath(dimension);
validEntry = true;
}
if (validEntry) {
var.setVisitNumber(Integer.parseInt(metric.getValues().get(0)));
toReturn.add(var);
}
}
if (validEntry) {
var.setVisitNumber(Integer.parseInt(metric));
toReturn.add(var);
}
//System.out.printf("%s, %s%n", row.getDimensionValues(0).getValue(), row.getMetricValues(0).getValue());
}
}
return toReturn;
}
private static GoogleCredential fromD4SServiceEndpoint(AnalyticsReportCredentials cred) throws IOException {
String clientId = cred.getClientId();
String clientEmail = cred.getClientEmail();
String privateKeyPem = cred.getPrivateKeyPem();
String privateKeyId = cred.getPrivateKeyId();
String tokenUri = cred.getTokenUri();
String projectId = cred.getProjectId();
if(clientId == null || clientEmail == null || privateKeyPem == null || privateKeyId == null) {
throw new IOException("Error reading service account credential from stream, "
+ "expecting 'client_id', 'client_email', 'private_key' and 'private_key_id'.");
}
PrivateKey privateKey = privateKeyFromPkcs8(privateKeyPem);
Collection<String> emptyScopes = Collections.emptyList();
Builder credentialBuilder = new GoogleCredential.Builder().setTransport(Utils.getDefaultTransport())
.setJsonFactory(Utils.getDefaultJsonFactory()).setServiceAccountId(clientEmail)
.setServiceAccountScopes(emptyScopes).setServiceAccountPrivateKey(privateKey)
.setServiceAccountPrivateKeyId(privateKeyId);
if(tokenUri != null) {
credentialBuilder.setTokenServerEncodedUrl(tokenUri);
}
if(projectId != null) {
credentialBuilder.setServiceAccountProjectId(projectId);
}
// Don't do a refresh at this point, as it will always fail before the scopes are added.
return credentialBuilder.build();
}
private static PrivateKey privateKeyFromPkcs8(String privateKeyPem) throws IOException {
Reader reader = new StringReader(privateKeyPem);
Section section = PemReader.readFirstSectionAndClose(reader, "PRIVATE KEY");
if(section == null) {
throw new IOException("Invalid PKCS8 data.");
}
byte[] bytes = section.getBase64DecodedBytes();
PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(bytes);
Exception unexpectedException = null;
try {
KeyFactory keyFactory = SecurityUtils.getRsaKeyFactory();
PrivateKey privateKey = keyFactory.generatePrivate(keySpec);
return privateKey;
} catch(NoSuchAlgorithmException exception) {
unexpectedException = exception;
} catch(InvalidKeySpecException exception) {
unexpectedException = exception;
}
throw new IOException("Unexpected exception reading PKCS data", unexpectedException);
}
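A brief, hedged note on what this helper expects: the decrypted PEM must be an unencrypted PKCS#8 block (the "-----BEGIN PRIVATE KEY-----" form), since PemReader is asked for a "PRIVATE KEY" section and the bytes are fed to a PKCS8EncodedKeySpec. A usage sketch inside this class:

// Hedged sketch; 'decryptedPem' stands for the IS-stored private key after decryption.
PrivateKey key = privateKeyFromPkcs8(decryptedPem);
logger.debug("Loaded a {} private key", key.getAlgorithm()); // "RSA", given SecurityUtils.getRsaKeyFactory()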
private static List<ServiceEndpoint> getAnalyticsReportingConfigurationFromIS(String infrastructureScope)
throws Exception {
String scope = infrastructureScope;
@ -319,14 +233,13 @@ public class VREAccessesHarvester extends BasicHarvester {
ScopeProvider.instance.set(currScope);
return toReturn;
}
/**
 * Reads the Google Analytics reporting credentials registered in the IS for the current context.
 * @throws Exception if the Service Endpoint cannot be read or its values cannot be decrypted
 */
private static AnalyticsReportCredentials getAuthorisedApplicationInfoFromIs() throws Exception {
AnalyticsReportCredentials reportCredentials = new AnalyticsReportCredentials();
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
try {
List<ServiceEndpoint> list = getAnalyticsReportingConfigurationFromIS(context);
@ -337,26 +250,26 @@ public class VREAccessesHarvester extends BasicHarvester {
logger.warn("There is no Service Endpoint having name " + SERVICE_ENDPOINT_NAME + " and Category "
+ SERVICE_ENDPOINT_CATEGORY + " in this context: " + context);
} else {
for(ServiceEndpoint res : list) {
reportCredentials.setTokenUri(res.profile().runtime().hostedOn());
Group<AccessPoint> apGroup = res.profile().accessPoints();
AccessPoint[] accessPoints = (AccessPoint[]) apGroup.toArray(new AccessPoint[apGroup.size()]);
AccessPoint found = accessPoints[0];
reportCredentials.setClientEmail(found.address());
reportCredentials.setProjectId(found.username());
reportCredentials.setPrivateKeyPem(StringEncrypter.getEncrypter().decrypt(found.password()));
reportCredentials.setClientEmail(found.username());
String decryptedPrivateKey = StringEncrypter.getEncrypter().decrypt(found.password());
reportCredentials.setPrivateKeyPem(decryptedPrivateKey.trim());
for(Property prop : found.properties()) {
if(prop.name().compareTo(AP_VIEWS_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
String[] views = decryptedValue.split(";");
reportCredentials.setViewIds(Arrays.asList(views));
}
if(prop.name().compareTo(AP_CLIENT_PROPERTY) == 0) {
if(prop.name().compareTo(AP_CLIENT_ID) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setClientId(decryptedValue);
}
if(prop.name().compareTo(AP_PRIVATEKEY_PROPERTY) == 0) {
if(prop.name().compareTo(AP_PRIVATEKEY_ID_PROPERTY) == 0) {
String decryptedValue = StringEncrypter.getEncrypter().decrypt(prop.value());
reportCredentials.setPrivateKeyId(decryptedValue);
}
@ -369,19 +282,18 @@ public class VREAccessesHarvester extends BasicHarvester {
}
return reportCredentials;
}
private static LocalDate asLocalDate(Date date) {
return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDate();
}
private static DateRange getDateRangeForAnalytics(Date start, Date end) {
private static Builder getDateRangeBuilderForAnalytics(Date start, Date end) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); //required by Analytics
String startDate = asLocalDate(start).format(formatter);
String endDate = asLocalDate(end).format(formatter);
DateRange dateRange = new DateRange();// date format `yyyy-MM-dd`
dateRange.setStartDate(startDate);
dateRange.setEndDate(endDate);
return dateRange;
Builder dateRangeBuilder = DateRange.newBuilder().setStartDate(startDate).setEndDate(endDate);
return dateRangeBuilder;
}
}
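A hedged example of the date handling above, as it would read inside this class, using the DateUtils helpers that appear elsewhere in this diff (the concrete dates are placeholders):

// Sketch: a monthly window formatted as required by Analytics (yyyy-MM-dd).
Date start = DateUtils.getStartCalendar(2024, Calendar.JANUARY, 1).getTime();
Date end = DateUtils.getEndDateFromStartDate(AggregationType.MONTHLY, start, 1);
DateRange.Builder range = getDateRangeBuilderForAnalytics(start, end);
// range.getStartDate() is "2024-01-01"; the end value depends on DateUtils.getEndDateFromStartDate,
// so it is not spelled out here.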

View File

@ -1,6 +1,5 @@
package org.gcube.dataharvest.harvester;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@ -9,7 +8,7 @@ import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.json.JSONObject;
import org.gcube.social_networking.social_networking_client_library.UserClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -17,7 +16,7 @@ import org.slf4j.LoggerFactory;
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
*/
public class VREUsersHarvester extends SocialNetworkingHarvester {
public class VREUsersHarvester extends BasicHarvester {
private static Logger logger = LoggerFactory.getLogger(VREUsersHarvester.class);
@ -31,12 +30,13 @@ public class VREUsersHarvester extends SocialNetworkingHarvester {
@Override
public List<AccountingRecord> getAccountingRecords() throws Exception {
try {
// String context = Utils.getCurrentContext();
String context = org.gcube.dataharvest.utils.Utils.getCurrentContext();
int measure = get();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.USERS), (long) measure);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
@ -50,17 +50,8 @@ public class VREUsersHarvester extends SocialNetworkingHarvester {
}
private int get() throws Exception {
JSONObject jsonObject = getJSONObject(PATH);
int userNumber = 0;
Boolean success = (Boolean) jsonObject.get("success");
if(success == false) {
throw new IOException("Error while getting VRE Users");
}
userNumber = jsonObject.getJSONArray("result").length();
return userNumber;
UserClient userClient = new UserClient();
return userClient.getAllUsernamesContext().size();
}
}
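A hedged sketch of the new social-networking call used by get() above, as it would be invoked once the target VRE's secret has been set (see Utils.setContext later in this diff):

// Minimal sketch: the context/secret must already be set in the SecretManagerProvider.
UserClient userClient = new UserClient();
int users = userClient.getAllUsernamesContext().size();
logger.debug("The current VRE has {} users", users);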

View File

@ -1,196 +0,0 @@
package org.gcube.dataharvest.harvester.sobigdata;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.SortedSet;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.storagehub.client.dsl.ContainerType;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.client.dsl.ItemContainer;
import org.gcube.common.storagehub.client.dsl.ListResolverTyped;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import org.gcube.common.storagehub.model.items.FolderItem;
import org.gcube.common.storagehub.model.items.Item;
import org.gcube.common.storagehub.model.items.nodes.Accounting;
import org.gcube.common.storagehub.model.items.nodes.accounting.AccountEntry;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.utils.DateUtils;
import org.gcube.dataharvest.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The Class DataMethodDownloadHarvester.
*
* @author Eric Perrone (ISTI - CNR)
* @author Luca Frosini (ISTI - CNR)
* @author Francesco Mangiacrapa (ISTI - CNR)
*/
public class DataMethodDownloadHarvester extends SoBigDataHarvester {
private static Logger logger = LoggerFactory.getLogger(DataMethodDownloadHarvester.class);
private int count = 0;
/**
* Instantiates a new data method download harvester.
*
* @param start the start
* @param end the end
* @param contexts the contexts
* @throws ParseException the parse exception
*/
public DataMethodDownloadHarvester(Date start, Date end, SortedSet<String> contexts) throws Exception {
super(start, end, contexts);
}
/* (non-Javadoc)
* @see org.gcube.dataharvest.harvester.BasicHarvester#getData()
*/
@Override
public List<AccountingRecord> getAccountingRecords() throws Exception {
String defaultContext = Utils.getCurrentContext();
logger.debug("The context is {}", defaultContext);
try {
/*
String vreName = getVRENameToHL(defaultContext);
logger.debug("Getting VRE Name to HL from context/scope returns {} ", vreName);
String user = vreName + "-Manager";
logger.debug("Using user '{}' to getHome from HL", user);
//Getting HL instance and home for VRE MANAGER
HomeManager manager = HomeLibrary.getHomeManagerFactory().getHomeManager();
@SuppressWarnings("deprecation")
Home home = manager.getHome(user);
JCRWorkspace ws = (JCRWorkspace) home.getWorkspace();
String path = "/Workspace/MySpecialFolders/" + vreName;
logger.debug("Getting item by Path {}", path);
JCRWorkspaceItem item = (JCRWorkspaceItem) ws.getItemByPath(path);
*/
StorageHubClient storageHubClient = new StorageHubClient();
FolderContainer vreFolderContainer = storageHubClient.openVREFolder();
FolderItem vreFolderItem = vreFolderContainer.get();
logger.debug("Analyzing {} in the period [{} to {}] starting from root {}", defaultContext,
DateUtils.format(start), DateUtils.format(end), vreFolderItem.getName());
ScopeDescriptor defaultScopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor();
AccountingRecord defaultHarvesteData = new AccountingRecord(defaultScopeDescriptor, instant, getDimension(HarvestedDataKey.DATA_METHOD_DOWNLOAD), (long) count);
logger.debug("{} : {}", defaultHarvesteData.getDimension().getId(), defaultHarvesteData.getMeasure());
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
ListResolverTyped listResolverTyped = vreFolderContainer.list();
List<ItemContainer<? extends Item>> containers = listResolverTyped.includeHidden().getContainers();
for(ItemContainer<? extends Item> itemContainer : containers) {
count = 0; // resetting the counter
//HarvestedData harvestedData;
//Getting statistics for folder
if(itemContainer.getType() == ContainerType.FOLDER) {
Item item = itemContainer.get();
logger.debug("Getting statistics for folder {}", item.getName());
getStats(itemContainer, start, end);
String normalizedName = item.getName().replaceAll("[^A-Za-z0-9]", "");
String context = mapWsFolderNameToVRE.get(normalizedName);
//Checking if it is a VRE name to right accounting...
if(context != null && !context.isEmpty()) {
logger.debug("Found context '{}' matching with normalized VRE name {} ", context, normalizedName);
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, getDimension(HarvestedDataKey.DATA_METHOD_DOWNLOAD), (long) count);
logger.debug("{} : {}", ar.getDimension().getId(), ar.getMeasure());
accountingRecords.add(ar);
} else {
logger.debug(
"No scope found matching the folder name {}, accounting its stats in the default context {}",
normalizedName, defaultContext);
//INCREASING THE DEFAULT CONTEXT COUNTER...
defaultHarvesteData.setMeasure(defaultHarvesteData.getMeasure() + count);
logger.trace("Increased default context stats {}", defaultHarvesteData);
}
}
}
//ADDING DEFAULT ACCOUNTING
accountingRecords.add(defaultHarvesteData);
logger.debug("In the period [from {} to {} ] returning workspace accouting data {}", DateUtils.format(start),
DateUtils.format(end), accountingRecords);
return accountingRecords;
} catch(Exception e) {
throw e;
}
}
/**
* Gets the stats.
*
* @param baseItem the base item
* @param start the start
* @param end the end
* @return the stats
* @throws InternalErrorException the internal error exception
*/
private void getStats(ItemContainer<? extends Item> itemContainer, Date start, Date end) throws Exception {
if(itemContainer.getType() == ContainerType.FOLDER) {
ListResolverTyped listResolverTyped = ((FolderContainer)itemContainer).list();
List<ItemContainer<? extends Item>> containers = listResolverTyped.includeHidden().getContainers();
for(ItemContainer<? extends Item> itemCont : containers) {
getStats(itemCont , start, end);
}
} else {
try {
Accounting accounting = itemContainer.get().getAccounting();
for(AccountEntry entry : accounting.getEntries()) {
switch(entry.getType()) {
case CREATE:
case UPDATE:
case READ:
Calendar calendar = entry.getDate();
if(calendar.after(DateUtils.dateToCalendar(start))
&& calendar.before(DateUtils.dateToCalendar(end))) {
count++;
}
break;
default:
break;
}
}
} catch(Exception e) {
throw e;
}
}
}
}

View File

@ -1,5 +1,11 @@
package org.gcube.dataharvest.harvester.sobigdata;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Date;
@ -210,10 +216,27 @@ public class ResourceCatalogueHarvester extends SoBigDataHarvester {
query += "q=" + URLEncoder.encode(q, UTF_8_CHARASET) + "&wt=json&indent=true&rows=" + ROWS;
query += flValue != null && !flValue.isEmpty() ? "&fl=" + URLEncoder.encode(flValue, UTF_8_CHARASET) : "";
logger.debug("\nPerforming query {}", query);
String jsonResult = Utils.getJson(query);
String jsonResult = requestJson(query);
logger.trace("Response is {}", jsonResult);
return jsonResult;
}
public String requestJson(String url) throws MalformedURLException, IOException {
URL address = new URL(url);
HttpURLConnection connection = (HttpURLConnection) address.openConnection();
BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
String json = "";
String line = "";
while(line != null) {
line = reader.readLine();
if(line != null) {
json += line.trim();
}
}
return json;
}
}
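The requestJson helper above works, but it never closes the reader or the connection and grows the result by string concatenation; a hedged, behaviour-equivalent sketch using try-with-resources and a StringBuilder:

public String requestJson(String url) throws MalformedURLException, IOException {
	HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
	StringBuilder json = new StringBuilder();
	try (BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
		String line;
		while ((line = reader.readLine()) != null) {
			json.append(line.trim()); // same trimming behaviour as the original
		}
	} finally {
		connection.disconnect();
	}
	return json.toString();
}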

View File

@ -77,7 +77,7 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
* @throws ObjectNotFound
*/
protected void initMappingMaps() throws ObjectNotFound, Exception {
Properties properties = AccountingDashboardHarvesterPlugin.getProperties().get();
Properties properties = AccountingDashboardHarvesterPlugin.getConfigParameters();
Set<String> keys = properties.stringPropertyNames();
mapSystemTypeToDBEntry = new HashMap<String,String>();

View File

@ -109,7 +109,7 @@ public class TagMeMethodInvocationHarvester extends BasicHarvester {
}
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor();
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
Dimension dimension = getDimension(HarvestedDataKey.METHOD_INVOCATIONS);
AccountingRecord ar = new AccountingRecord(scopeDescriptor, instant, dimension, numberOfInvocation);

View File

@ -1,8 +1,5 @@
package org.gcube.dataharvest.utils;
import static org.gcube.common.authorization.client.Constants.authorizationService;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
@ -10,8 +7,13 @@ import java.util.Properties;
import java.util.SortedSet;
import java.util.TreeSet;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.authorization.library.provider.UserInfo;
import javax.ws.rs.InternalServerErrorException;
import org.gcube.common.authorization.utils.manager.SecretManagerProvider;
import org.gcube.common.authorization.utils.secret.JWTSecret;
import org.gcube.common.authorization.utils.secret.Secret;
import org.gcube.common.keycloak.KeycloakClientFactory;
import org.gcube.common.keycloak.model.TokenResponse;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.resourcemanagement.support.server.managers.context.ContextManager;
@ -25,88 +27,98 @@ public class ContextAuthorization {
private static Logger logger = LoggerFactory.getLogger(ContextAuthorization.class);
public static final String USERNAME = "USERNAME";
public static final String DEFAULT_USERNAME = "luca.frosini";
public static final String CLIENT_ID = "accounting-dashboard-harvester-se-plugin";
public static final String SERVICE_NAME = "SERVICE_NAME";
public static final String DEFAULT_SERVICE_NAME = "accounting-harvester";
protected String clientSecret;
/**
* Contains Context full name as key and Token as Value
*/
protected Map<String,String> contextToToken;
protected Map<String,Secret> contextToToken;
/**
* Contains Token as key and Context full name as Value
*/
protected Map<String,String> tokenToContext;
protected Map<Secret,String> tokenToContext;
protected Properties properties;
/**
* Contains Properties used to generate tokens
*/
public ContextAuthorization() throws Exception {
public ContextAuthorization(Properties properties) throws Exception {
this.properties = properties;
this.contextToToken = new HashMap<>();
this.tokenToContext = new HashMap<>();
retrieveContextsAndTokens();
}
public String generateTokenForContext(String context, Properties properties) throws Exception {
if(properties==null) {
properties = AccountingDashboardHarvesterPlugin.getProperties().get();
/**
* Contains Properties used to generate tokens
*/
public ContextAuthorization() throws Exception {
this.properties = AccountingDashboardHarvesterPlugin.getConfigParameters();
this.contextToToken = new HashMap<>();
this.tokenToContext = new HashMap<>();
retrieveContextsAndTokens();
}
private String getClientSecret(String context) {
try {
if(clientSecret==null) {
int index = context.indexOf('/', 1);
String root = context.substring(0, index == -1 ? context.length() : index);
clientSecret = properties.getProperty(root);
}
return clientSecret;
} catch(Exception e) {
throw new InternalServerErrorException(
"Unable to retrieve Application Token for context " + SecretManagerProvider.instance.get().getContext(), e);
}
logger.info("Going to generate Token for Context {}", context);
UserInfo userInfo = new UserInfo(properties.getProperty(USERNAME, DEFAULT_USERNAME),
new ArrayList<>());
String userToken = authorizationService().generateUserToken(userInfo, context);
SecurityTokenProvider.instance.set(userToken);
String generatedToken = authorizationService()
.generateExternalServiceToken(properties.getProperty(SERVICE_NAME, DEFAULT_SERVICE_NAME));
logger.trace("Token for Context {} is {}", context, generatedToken);
return generatedToken;
}
private TokenResponse getJWTAccessToken(String context) throws Exception {
TokenResponse tr = KeycloakClientFactory.newInstance().queryUMAToken(context, CLIENT_ID, getClientSecret(context), context, null);
return tr;
}
public Secret getCatalogueSecretForContext(String context) throws Exception {
TokenResponse tr = getJWTAccessToken(context);
Secret secret = new JWTSecret(tr.getAccessToken());
return secret;
}
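A hedged sketch of the IAM flow that getCatalogueSecretForContext wraps; the context is one of the VRE constants used in the tests below, and the client secret is a placeholder that the real code looks up by root context in the configuration properties:

// Minimal sketch, assuming a valid client secret for the root context is available.
String context = "/d4science.research-infrastructures.eu/SoBigData/TagMe"; // example VRE, see TAGME_VRE in the tests
String clientSecret = "***"; // placeholder: resolved via getClientSecret in the real code
TokenResponse tr = KeycloakClientFactory.newInstance()
		.queryUMAToken(context, "accounting-dashboard-harvester-se-plugin", clientSecret, context, null);
Secret secret = new JWTSecret(tr.getAccessToken());
org.gcube.dataharvest.utils.Utils.setContext(secret); // installs the SecretManager for the harvesters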
protected void retrieveContextsAndTokens() throws Exception {
String initialToken = SecurityTokenProvider.instance.get();
try {
Properties properties = AccountingDashboardHarvesterPlugin.getProperties().get();
LinkedHashMap<String,ScopeBean> map = ContextManager.readContexts();
for(String scope : map.keySet()) {
try {
String context = map.get(scope).toString();
String generatedToken = generateTokenForContext(context, properties);
Secret secret = getCatalogueSecretForContext(context);
contextToToken.put(context, generatedToken);
tokenToContext.put(generatedToken, context);
contextToToken.put(context, secret);
tokenToContext.put(secret, context);
} catch(Exception e) {
logger.error("Error while elaborating {}", scope, e);
throw e;
} finally {
SecurityTokenProvider.instance.reset();
}
// throw e;
}
}
} catch(Exception ex) {
throw ex;
} finally {
SecurityTokenProvider.instance.set(initialToken);
}
}
}
public String getTokenForContext(String contextFullName) {
return contextToToken.get(contextFullName);
public Secret getSecretForContext(String context) {
return contextToToken.get(context);
}
public String getContextFromToken(String token) {
return tokenToContext.get(token);
public String getContextFromSecret(Secret secret) {
return tokenToContext.get(secret);
}
public SortedSet<String> getContexts() {

View File

@ -91,9 +91,7 @@ public class DateUtils {
aggregationStartCalendar.set(Calendar.MINUTE, 0);
aggregationStartCalendar.set(Calendar.SECOND, 0);
aggregationStartCalendar.set(Calendar.MILLISECOND, 0);
logger.debug("{}", DEFAULT_DATE_FORMAT.format(aggregationStartCalendar.getTime()));
// logger.trace("{}", DEFAULT_DATE_FORMAT.format(aggregationStartCalendar.getTime()));
return aggregationStartCalendar;
}

View File

@ -1,17 +1,9 @@
package org.gcube.dataharvest.utils;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import org.gcube.common.authorization.client.Constants;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.common.authorization.library.AuthorizationEntry;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.authorization.utils.manager.SecretManager;
import org.gcube.common.authorization.utils.manager.SecretManagerProvider;
import org.gcube.common.authorization.utils.secret.Secret;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -23,36 +15,16 @@ public class Utils {
private static Logger logger = LoggerFactory.getLogger(Utils.class);
public static String getJson(String url) throws MalformedURLException, IOException {
URL address = new URL(url);
HttpURLConnection connection = (HttpURLConnection) address.openConnection();
BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
String json = "";
String line = "";
while(line != null) {
line = reader.readLine();
if(line != null) {
json += line.trim();
}
}
return json;
}
public static String getCurrentContext() throws ObjectNotFound, Exception {
return getCurrentContext(SecurityTokenProvider.instance.get());
return SecretManagerProvider.instance.get().getContext();
}
public static String getCurrentContext(String token) throws ObjectNotFound, Exception {
AuthorizationEntry authorizationEntry = Constants.authorizationService().get(token);
String context = authorizationEntry.getContext();
logger.info("Context of token {} is {}", token, context);
return context;
}
public static void setContext(String token) throws ObjectNotFound, Exception {
SecurityTokenProvider.instance.set(token);
ScopeProvider.instance.set(getCurrentContext(token));
public static void setContext(Secret secret) throws Exception {
SecretManagerProvider.instance.reset();
SecretManager secretManager = new SecretManager();
SecretManagerProvider.instance.set(secretManager);
secretManager.addSecret(secret);
secretManager.set();
}
}

View File

@ -1,2 +1 @@
USERNAME=luca.frosini
SERVICE_NAME=accounting-harvester
/d4science.research-infrastructures.eu=XXXXXXXXXX

View File

@ -1,135 +0,0 @@
package org.gcube.dataharvest;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import org.gcube.accounting.accounting.summary.access.AccountingDao;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.dataharvest.harvester.JupyterAccessesHarvester;
import org.gcube.dataharvest.utils.AggregationType;
import org.gcube.dataharvest.utils.ContextAuthorization;
import org.gcube.dataharvest.utils.ContextTest;
import org.gcube.dataharvest.utils.DateUtils;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author Giancarlo Panichi (ISTI CNR)
*
*/
public class AccountingDataHarvesterJupyterTest extends ContextTest {
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterJupyterTest.class);
public static final String ROOT = "/d4science.research-infrastructures.eu";
// private static final String SCOPE = "/d4science.research-infrastructures.eu/D4OS/Blue-CloudLab";
protected AccountingDao getAccountingDao() throws ObjectNotFound, Exception {
AccountingDao dao = AccountingDao.get();
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
Map<String, ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
for (ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
}
AccountingDashboardHarvesterPlugin.scopeDescriptors.set(scopeDescriptorMap);
Set<Dimension> dimensionSet = dao.getDimensions();
Map<String, Dimension> dimensionMap = new HashMap<>();
for (Dimension dimension : dimensionSet) {
dimensionMap.put(dimension.getId(), dimension);
}
AccountingDashboardHarvesterPlugin.dimensions.set(dimensionMap);
return dao;
}
@Ignore
@Test
public void testJupyterAccessesHarvester() throws Exception {
try {
ContextTest.setContextByName(ROOT);
AccountingDao dao = getAccountingDao();
List<Date> starts = new ArrayList<>();
starts.add(DateUtils.getStartCalendar(2021, Calendar.JANUARY, 1).getTime());
starts.add(DateUtils.getStartCalendar(2021, Calendar.FEBRUARY, 1).getTime());
starts.add(DateUtils.getStartCalendar(2021, Calendar.MARCH, 1).getTime());
starts.add(DateUtils.getStartCalendar(2021, Calendar.APRIL, 1).getTime());
starts.add(DateUtils.getStartCalendar(2021, Calendar.MAY, 1).getTime());
AggregationType measureType = AggregationType.MONTHLY;
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
/*
SortedSet<String> contexts = new TreeSet<>();
contexts.add("/d4science.research-infrastructures.eu/D4OS/Blue-CloudLab");
contexts.add("/d4science.research-infrastructures.eu/D4OS/Zoo-Phytoplankton_EOV");
contexts.add("/d4science.research-infrastructures.eu/D4OS/MarineEnvironmentalIndicators");
*/
List<AccountingRecord> accountingRecords = new ArrayList<>();
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
Map<String, ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
for (ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
}
AccountingDashboardHarvesterPlugin.scopeDescriptors.set(scopeDescriptorMap);
for (Date start : starts) {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
ContextTest.setContextByName(ROOT);
JupyterAccessesHarvester jupyterAccessesHarvester = new JupyterAccessesHarvester(start, end);
for(String context : contexts) {
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
ScopeBean scopeBean = new ScopeBean(context);
ScopeDescriptor actualScopeDescriptor = scopeDescriptorMap.get(context);
if (actualScopeDescriptor == null) {
actualScopeDescriptor = new ScopeDescriptor(scopeBean.name(), context);
}
AccountingDashboardHarvesterPlugin.scopeDescriptor.set(actualScopeDescriptor);
List<AccountingRecord> harvested = jupyterAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
}
}
// logger.debug("{}", accountingRecords);
logger.debug("Going to insert {}", accountingRecords);
ContextTest.setContextByName(ROOT);
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch (Throwable e) {
logger.error(e.getLocalizedMessage(), e);
throw e;
}
}
}

View File

@ -1,139 +0,0 @@
package org.gcube.dataharvest;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.stream.Stream;
import org.gcube.accounting.accounting.summary.access.AccountingDao;
import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.dataharvest.harvester.RStudioAccessesHarvester;
import org.gcube.dataharvest.utils.AggregationType;
import org.gcube.dataharvest.utils.ContextAuthorization;
import org.gcube.dataharvest.utils.ContextTest;
import org.gcube.dataharvest.utils.DateUtils;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author Giancarlo Panichi (ISTI CNR)
*
*/
public class AccountingDataHarvesterRStudioTest extends ContextTest {
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterRStudioTest.class);
public static final String ROOT = "/d4science.research-infrastructures.eu";
// private static final String SCOPE = "/d4science.research-infrastructures.eu/D4OS/Blue-CloudLab";
protected AccountingDao getAccountingDao() throws ObjectNotFound, Exception {
AccountingDao dao = AccountingDao.get();
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
Map<String, ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
for (ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
}
AccountingDashboardHarvesterPlugin.scopeDescriptors.set(scopeDescriptorMap);
Set<Dimension> dimensionSet = dao.getDimensions();
Map<String, Dimension> dimensionMap = new HashMap<>();
for (Dimension dimension : dimensionSet) {
dimensionMap.put(dimension.getId(), dimension);
}
AccountingDashboardHarvesterPlugin.dimensions.set(dimensionMap);
return dao;
}
@Ignore
@Test
public void testJupyterAccessesHarvester() throws Exception {
try {
ContextTest.setContextByName(ROOT);
AccountingDao dao = getAccountingDao();
List<Date> starts = new ArrayList<>();
LocalDate sdate = LocalDate.parse("2016-01-01"), edate = LocalDate.parse("2021-06-01");
Stream.iterate(sdate, date -> date.plusMonths(1)).limit(ChronoUnit.MONTHS.between(sdate, edate) + 1)
.forEach(dateToConvert -> starts.add(java.util.Date
.from(dateToConvert.atStartOfDay().atZone(ZoneId.systemDefault()).toInstant())));
AggregationType measureType = AggregationType.MONTHLY;
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
/*
SortedSet<String> contexts = new TreeSet<>();
contexts.add("/d4science.research-infrastructures.eu/D4OS/Blue-CloudLab");
contexts.add("/d4science.research-infrastructures.eu/D4OS/Zoo-Phytoplankton_EOV");
contexts.add("/d4science.research-infrastructures.eu/D4OS/MarineEnvironmentalIndicators");
*/
List<AccountingRecord> accountingRecords = new ArrayList<>();
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
Map<String, ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
for (ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
}
AccountingDashboardHarvesterPlugin.scopeDescriptors.set(scopeDescriptorMap);
for (Date start : starts) {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
ContextTest.setContextByName(ROOT);
RStudioAccessesHarvester rstudioAccessesHarvester = new RStudioAccessesHarvester(start, end);
for(String context : contexts) {
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
ScopeBean scopeBean = new ScopeBean(context);
ScopeDescriptor actualScopeDescriptor = scopeDescriptorMap.get(context);
if (actualScopeDescriptor == null) {
actualScopeDescriptor = new ScopeDescriptor(scopeBean.name(), context);
}
AccountingDashboardHarvesterPlugin.scopeDescriptor.set(actualScopeDescriptor);
List<AccountingRecord> harvested = rstudioAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
}
}
// logger.debug("{}", accountingRecords);
logger.debug("Going to insert {}", accountingRecords);
ContextTest.setContextByName(ROOT);
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch (Throwable e) {
logger.error(e.getLocalizedMessage(), e);
throw e;
}
}
}

View File

@ -0,0 +1,173 @@
/**
*
*/
package org.gcube.dataharvest;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.gcube.common.authorization.utils.manager.SecretManager;
import org.gcube.common.authorization.utils.manager.SecretManagerProvider;
import org.gcube.common.authorization.utils.secret.JWTSecret;
import org.gcube.common.authorization.utils.secret.Secret;
import org.gcube.common.authorization.utils.secret.SecretUtility;
import org.gcube.common.keycloak.KeycloakClientFactory;
import org.gcube.common.keycloak.KeycloakClientHelper;
import org.gcube.common.keycloak.model.TokenResponse;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR)
*/
public class ContextTest {
private static final Logger logger = LoggerFactory.getLogger(ContextTest.class);
protected static final String CONFIG_INI_FILENAME = "config.ini";
public static final String DEFAULT_TEST_SCOPE;
public static final String GCUBE;
public static final String DEVNEXT;
public static final String NEXTNEXT;
public static final String DEVSEC;
public static final String DEVVRE;
public static final String ROOT_PROD;
protected static final Properties properties;
public static final String TYPE_PROPERTY_KEY = "type";
public static final String USERNAME_PROPERTY_KEY = "username";
public static final String PASSWORD_PROPERTY_KEY = "password";
public static final String CLIENT_ID_PROPERTY_KEY = "clientId";
static {
GCUBE = "/gcube";
DEVNEXT = GCUBE + "/devNext";
NEXTNEXT = DEVNEXT + "/NextNext";
DEVSEC = GCUBE + "/devsec";
DEVVRE = DEVSEC + "/devVRE";
ROOT_PROD = "/d4science.research-infrastructures.eu";
DEFAULT_TEST_SCOPE = GCUBE;
properties = new Properties();
InputStream input = ContextTest.class.getClassLoader().getResourceAsStream(CONFIG_INI_FILENAME);
try {
// load the properties file
properties.load(input);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private enum Type{
USER, CLIENT_ID
};
public static void set(Secret secret) throws Exception {
SecretManagerProvider.instance.reset();
SecretManager secretManager = new SecretManager();
secretManager.addSecret(secret);
SecretManagerProvider.instance.set(secretManager);
SecretManagerProvider.instance.get().set();
}
public static void setContextByName(String fullContextName) throws Exception {
logger.debug("Going to set credentials for context {}", fullContextName);
Secret secret = getSecretByContextName(fullContextName);
set(secret);
}
private static TokenResponse getJWTAccessToken(String context) throws Exception {
Type type = Type.valueOf(properties.get(TYPE_PROPERTY_KEY).toString());
TokenResponse tr = null;
int index = context.indexOf('/', 1);
String root = context.substring(0, index == -1 ? context.length() : index);
switch (type) {
case CLIENT_ID:
String clientId = properties.getProperty(CLIENT_ID_PROPERTY_KEY);
String clientSecret = properties.getProperty(root);
tr = KeycloakClientFactory.newInstance().queryUMAToken(context, clientId, clientSecret, context, null);
break;
case USER:
default:
String username = properties.getProperty(USERNAME_PROPERTY_KEY);
String password = properties.getProperty(PASSWORD_PROPERTY_KEY);
switch (root) {
case "/gcube":
default:
clientId = "next.d4science.org";
break;
case "/pred4s":
clientId = "pre.d4science.org";
break;
case "/d4science.research-infrastructures.eu":
clientId = "services.d4science.org";
break;
}
clientSecret = null;
tr = KeycloakClientHelper.getTokenForUser(context, username, password);
break;
}
return tr;
}
public static Secret getSecretByContextName(String context) throws Exception {
TokenResponse tr = getJWTAccessToken(context);
Secret secret = new JWTSecret(tr.getAccessToken());
return secret;
}
public static void setContext(String token) throws Exception {
Secret secret = getSecret(token);
set(secret);
}
private static Secret getSecret(String token) throws Exception {
Secret secret = SecretUtility.getSecretByTokenString(token);
return secret;
}
public static String getUser() {
String user = "UNKNOWN";
try {
user = SecretManagerProvider.instance.get().getUser().getUsername();
} catch(Exception e) {
logger.error("Unable to retrieve user. {} will be used", user);
}
return user;
}
@BeforeClass
public static void beforeClass() throws Exception {
setContextByName(ROOT_PROD);
}
@AfterClass
public static void afterClass() throws Exception {
SecretManagerProvider.instance.reset();
}
}

View File

@ -0,0 +1,88 @@
package org.gcube.dataharvest.harvester;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.SortedSet;
import org.gcube.accounting.accounting.summary.access.AccountingDao;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.dataharvest.ContextTest;
import org.gcube.dataharvest.plugin.AccountingDataHarvesterPluginTest;
import org.gcube.dataharvest.utils.AggregationType;
import org.gcube.dataharvest.utils.ContextAuthorization;
import org.gcube.dataharvest.utils.DateUtils;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author Giancarlo Panichi (ISTI CNR)
*
*/
public class AccountingDataHarvesterJupyterTest extends AccountingDataHarvesterPluginTest {
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterJupyterTest.class);
@Ignore
@Test
public void testJupyterAccessesHarvester() throws Exception {
try {
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
List<Date> starts = new ArrayList<>();
starts.add(DateUtils.getStartCalendar(2021, Calendar.JANUARY, 1).getTime());
starts.add(DateUtils.getStartCalendar(2021, Calendar.FEBRUARY, 1).getTime());
starts.add(DateUtils.getStartCalendar(2021, Calendar.MARCH, 1).getTime());
starts.add(DateUtils.getStartCalendar(2021, Calendar.APRIL, 1).getTime());
starts.add(DateUtils.getStartCalendar(2021, Calendar.MAY, 1).getTime());
AggregationType measureType = AggregationType.MONTHLY;
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
/*
SortedSet<String> contexts = new TreeSet<>();
contexts.add("/d4science.research-infrastructures.eu/D4OS/Blue-CloudLab");
contexts.add("/d4science.research-infrastructures.eu/D4OS/Zoo-Phytoplankton_EOV");
contexts.add("/d4science.research-infrastructures.eu/D4OS/MarineEnvironmentalIndicators");
*/
List<AccountingRecord> accountingRecords = new ArrayList<>();
for (Date start : starts) {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
ContextTest.setContextByName(ROOT_PROD);
JupyterAccessesHarvester jupyterAccessesHarvester = new JupyterAccessesHarvester(start, end);
for(String context : contexts) {
ContextTest.setContextByName(context);
List<AccountingRecord> harvested = jupyterAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
}
}
// logger.debug("{}", accountingRecords);
logger.debug("Going to insert {}", accountingRecords);
ContextTest.setContextByName(ROOT_PROD);
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch (Throwable e) {
logger.error(e.getLocalizedMessage(), e);
throw e;
}
}
}

View File

@ -0,0 +1,87 @@
package org.gcube.dataharvest.harvester;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.SortedSet;
import java.util.stream.Stream;
import org.gcube.accounting.accounting.summary.access.AccountingDao;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.dataharvest.ContextTest;
import org.gcube.dataharvest.plugin.AccountingDataHarvesterPluginTest;
import org.gcube.dataharvest.utils.AggregationType;
import org.gcube.dataharvest.utils.ContextAuthorization;
import org.gcube.dataharvest.utils.DateUtils;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author Giancarlo Panichi (ISTI CNR)
*
*/
public class AccountingDataHarvesterRStudioTest extends AccountingDataHarvesterPluginTest {
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterRStudioTest.class);
@Ignore
@Test
public void testJupyterAccessesHarvester() throws Exception {
try {
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
List<Date> starts = new ArrayList<>();
LocalDate sdate = LocalDate.parse("2016-01-01"), edate = LocalDate.parse("2021-06-01");
Stream.iterate(sdate, date -> date.plusMonths(1)).limit(ChronoUnit.MONTHS.between(sdate, edate) + 1)
.forEach(dateToConvert -> starts.add(java.util.Date
.from(dateToConvert.atStartOfDay().atZone(ZoneId.systemDefault()).toInstant())));
AggregationType measureType = AggregationType.MONTHLY;
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
List<AccountingRecord> accountingRecords = new ArrayList<>();
for (Date start : starts) {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
ContextTest.setContextByName(ROOT_PROD);
RStudioAccessesHarvester rstudioAccessesHarvester = new RStudioAccessesHarvester(start, end);
for(String context : contexts) {
ContextTest.setContextByName(context);
List<AccountingRecord> harvested = rstudioAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
}
}
// logger.debug("{}", accountingRecords);
logger.debug("Going to insert {}", accountingRecords);
ContextTest.setContextByName(ROOT_PROD);
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch (Throwable e) {
logger.error(e.getLocalizedMessage(), e);
throw e;
}
}
}

View File

@ -2,7 +2,7 @@ package org.gcube.dataharvest.harvester.sobigdata;
import java.util.List;
import org.gcube.dataharvest.utils.ContextTest;
import org.gcube.dataharvest.ContextTest;
import org.junit.Ignore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -1,14 +1,12 @@
package org.gcube.dataharvest;
package org.gcube.dataharvest.plugin;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
@ -19,8 +17,11 @@ import org.gcube.accounting.accounting.summary.access.model.ScopeDescriptor;
import org.gcube.accounting.accounting.summary.access.model.internal.Dimension;
import org.gcube.accounting.accounting.summary.access.model.update.AccountingRecord;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.common.authorization.utils.secret.Secret;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.common.scope.impl.ScopeBean.Type;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.ContextTest;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.harvester.CatalogueAccessesHarvester;
import org.gcube.dataharvest.harvester.CoreServicesAccessesHarvester;
@ -28,14 +29,11 @@ import org.gcube.dataharvest.harvester.MethodInvocationHarvester;
import org.gcube.dataharvest.harvester.SocialInteractionsHarvester;
import org.gcube.dataharvest.harvester.VREAccessesHarvester;
import org.gcube.dataharvest.harvester.VREUsersHarvester;
import org.gcube.dataharvest.harvester.sobigdata.DataMethodDownloadHarvester;
import org.gcube.dataharvest.harvester.sobigdata.ResourceCatalogueHarvester;
import org.gcube.dataharvest.harvester.sobigdata.TagMeMethodInvocationHarvester;
import org.gcube.dataharvest.utils.AggregationType;
import org.gcube.dataharvest.utils.ContextAuthorization;
import org.gcube.dataharvest.utils.ContextTest;
import org.gcube.dataharvest.utils.DateUtils;
import org.gcube.resourcemanagement.support.server.managers.context.ContextManager;
import org.gcube.vremanagement.executor.api.types.LaunchParameter;
import org.gcube.vremanagement.executor.api.types.Scheduling;
import org.gcube.vremanagement.executor.client.SmartExecutorClient;
@ -51,32 +49,38 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
private static Logger logger = LoggerFactory.getLogger(AccountingDataHarvesterPluginTest.class);
public static final String ROOT = "/d4science.research-infrastructures.eu";
public static final String SO_BIG_VO = "/d4science.research-infrastructures.eu/SoBigData";
public static final String TAGME_VRE = "/d4science.research-infrastructures.eu/SoBigData/TagMe";
public static final String STOCK_ASSESMENT_VRE = "/d4science.research-infrastructures.eu/gCubeApps/StockAssessment";
public static final String BLUE_CLOUD_LAB = "/d4science.research-infrastructures.eu/D4OS/Blue-CloudLab";
protected AccountingDao getAccountingDao() throws ObjectNotFound, Exception {
AccountingDao dao = AccountingDao.get();
public static SortedSet<String> getContexts() throws Exception {
SortedSet<String> contexts = new TreeSet<>();
LinkedHashMap<String, ScopeBean> map = ContextManager.readContexts();
for (String scope : map.keySet()) {
try {
String context = map.get(scope).toString();
contexts.add(context);
} catch (Exception e) {
throw e;
}
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
Map<String, ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
for (ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
}
return contexts;
}
AccountingDashboardHarvesterPlugin.scopeDescriptors.set(scopeDescriptorMap);
Set<Dimension> dimensionSet = dao.getDimensions();
Map<String, Dimension> dimensionMap = new HashMap<>();
for (Dimension dimension : dimensionSet) {
dimensionMap.put(dimension.getId(), dimension);
}
return dao;
}
@Ignore
@Test
public void getDimensions() {
try {
ContextTest.setContextByName(ROOT);
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = AccountingDao.get();
@ -92,12 +96,12 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
}
}
//@Ignore
@Test
@Ignore
// @Test
public void launch() {
try {
ContextTest.setContextByName(ROOT);
ContextTest.setContextByName(ROOT_PROD);
AccountingDashboardHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDashboardHarvesterPlugin();
@ -111,7 +115,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
inputs.put(AccountingDashboardHarvesterPlugin.DRY_RUN_INPUT_PARAMETER, true);
inputs.put(AccountingDashboardHarvesterPlugin.PARTIAL_HARVESTING, true);
Calendar from = DateUtils.getStartCalendar(2021, Calendar.JANUARY, 1);
Calendar from = DateUtils.getStartCalendar(2022, Calendar.SEPTEMBER, 1);
String fromDate = DateUtils.LAUNCH_DATE_FORMAT.format(from.getTime());
logger.trace("{} is {}", AccountingDashboardHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
inputs.put(AccountingDashboardHarvesterPlugin.START_DATE_INPUT_PARAMETER, fromDate);
@ -126,11 +130,11 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
}
@Ignore
@Test
// @Test
public void launchPluginOnSmartExecutor() {
try {
ContextTest.setContextByName(ROOT);
ContextTest.setContextByName(ROOT_PROD);
String pluginName = new AccountingDashboardHarvesterPlugin().getName();
@ -178,12 +182,12 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
}
}
//@Ignore
@Test
@Ignore
// @Test
public void launchOldData() {
try {
ContextTest.setContextByName(ROOT);
ContextTest.setContextByName(ROOT_PROD);
AccountingDashboardHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDashboardHarvesterPlugin();
@ -215,26 +219,23 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
}
}
@Ignore
// @Test
// @Ignore
@Test
public void launchOldDataVREAccessesHarvester() {
try {
ContextTest.setContextByName(ROOT);
// AccountingDao dao = AccountingDao.get();
AccountingDashboardHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDashboardHarvesterPlugin();
Properties properties = accountingDataHarvesterPlugin.getConfigParameters();
AccountingDashboardHarvesterPlugin.getProperties().set(properties);
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
AggregationType aggregationType = AggregationType.MONTHLY;
Calendar from = DateUtils.getStartCalendar(2018, Calendar.APRIL, 1);
Calendar from = DateUtils.getStartCalendar(2023, Calendar.MAY, 1);
Calendar runbeforeDate = DateUtils.getStartCalendar(2018, Calendar.JUNE, 1);
Calendar runbeforeDate = DateUtils.getStartCalendar(2023, Calendar.JULY, 1);
while (from.before(runbeforeDate)) {
Date start = from.getTime();
@ -245,10 +246,12 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
ArrayList<AccountingRecord> accountingRecords = new ArrayList<>();
VREAccessesHarvester vreAccessesHarvester = null;
//JupyterAccessesHarvester vreAccessesHarvester = null;
//RStudioAccessesHarvester vreAccessesHarvester = null;
//CoreServicesAccessesHarvester vreAccessesHarvester = null;
for (String context : contexts) {
// Setting the token for the context
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
ContextTest.setContextByName(context);
ScopeBean scopeBean = new ScopeBean(context);
@ -266,12 +269,12 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
}
// Setting back token for the context
ContextTest.setContext(contextAuthorization.getTokenForContext(parent.toString()));
ContextTest.setContextByName(parent.toString());
vreAccessesHarvester = new VREAccessesHarvester(start, end);
// Setting back token for the context
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
ContextTest.setContextByName(context);
}
}
@ -295,7 +298,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end),
accountingRecords);
ContextTest.setContextByName(ROOT);
ContextTest.setContextByName(ROOT_PROD);
// dao.insertRecords(accountingRecords.toArray(new
// AccountingRecord[1]));
@ -306,7 +309,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
}
ContextTest.setContextByName(ROOT);
ContextTest.setContextByName(ROOT_PROD);
} catch (Exception e) {
logger.error("", e);
@ -318,58 +321,11 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
@Ignore
// @Test
public void testScopeBean() throws Exception {
ContextTest.setContextByName(ROOT);
SortedSet<String> contexts = getContexts();
AggregationType aggregationType = AggregationType.MONTHLY;
Date start = DateUtils.getStartCalendar(2018, Calendar.MARCH, 1).getTime();
// start = DateUtils.getPreviousPeriod(measureType).getTime();
Date end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1);
logger.info("\n\n\n");
for (String context : contexts) {
ScopeBean scopeBean = new ScopeBean(context);
// logger.debug("FullName {} - Name {}", scopeBean.toString(),
// scopeBean.name());
try {
if (scopeBean.is(Type.VRE)
&& start.equals(DateUtils.getPreviousPeriod(aggregationType, false).getTime())) {
logger.info("Harvesting (VRE Users) for {} from {} to {}", context, DateUtils.format(start),
DateUtils.format(end));
} else {
logger.info("--- Not Harvesting (VRE Users) for {} from {} to {}", context, DateUtils.format(start),
DateUtils.format(end));
}
if ((context.startsWith(AccountingDashboardHarvesterPlugin.SO_BIG_DATA_VO)
|| context.startsWith(AccountingDashboardHarvesterPlugin.SO_BIG_DATA_EU_VRE)
|| context.startsWith(AccountingDashboardHarvesterPlugin.SO_BIG_DATA_IT_VRE))
&& start.before(DateUtils.getStartCalendar(2018, Calendar.APRIL, 1).getTime())) {
logger.info("--- Not Harvesting (SoBigData Check) for {} from {} to {}", context,
DateUtils.format(start), DateUtils.format(end));
} else {
logger.info("Harvesting (SoBigData Check) for {} from {} to {}", context, DateUtils.format(start),
DateUtils.format(end));
}
} catch (Exception e) {
logger.error("Error harvesting Social Interactions for {}", context, e);
}
}
}
@Ignore
@Test
public void testVREAccessesHarvester() throws Exception {
try {
// AccountingDao dao = getAccountingDao();
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
List<Date> starts = new ArrayList<>();
starts.add(DateUtils.getStartCalendar(2018, Calendar.SEPTEMBER, 1).getTime());
@ -390,12 +346,12 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
for (Date start : starts) {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
ContextTest.setContextByName(ROOT);
ContextTest.setContextByName(ROOT_PROD);
VREAccessesHarvester vreAccessesHarvester = new VREAccessesHarvester(start, end);
for (String contextFullname : contextFullNames) {
setContextByNameAndScopeDescriptor(contextFullname);
ContextTest.setContextByName(contextFullname);
List<AccountingRecord> harvested = vreAccessesHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
@ -407,7 +363,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
logger.debug("{}", accountingRecords);
ContextTest.setContextByName(ROOT);
ContextTest.setContextByName(ROOT_PROD);
// dao.insertRecords(accountingRecords.toArray(new
// AccountingRecord[1]));
@ -422,7 +378,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
public void testVREAccessesHarvesterAll() {
try {
ContextTest.setContextByName(ROOT);
ContextTest.setContextByName(ROOT_PROD);
AggregationType measureType = AggregationType.MONTHLY;
@ -446,7 +402,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
for (String context : contexts) {
// Setting the token for the context
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
ContextTest.setContextByName(context);
ScopeBean scopeBean = new ScopeBean(context);
@ -463,13 +419,12 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
parent = scopeBean.enclosingScope();
}
// Setting back token for the context
ContextTest.setContext(contextAuthorization.getTokenForContext(parent.toString()));
ContextTest.setContextByName(parent.toString());
vreAccessesHarvester = new VREAccessesHarvester(start, end);
// Setting back token for the context
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
ContextTest.setContextByName(context);
}
}
@ -501,14 +456,9 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
public void testSocialInteraction() {
try {
ContextTest.setContextByName(ROOT);
// AccountingDao dao = AccountingDao.get();
AccountingDashboardHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDashboardHarvesterPlugin();
Properties properties = accountingDataHarvesterPlugin.getConfigParameters();
AccountingDashboardHarvesterPlugin.getProperties().set(properties);
ContextAuthorization contextAuthorization = new ContextAuthorization();
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
SortedSet<String> contexts = new TreeSet<>();
contexts.add("/d4science.research-infrastructures.eu/D4Research");
@ -530,7 +480,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
for (String context : contexts) {
// Setting the token for the context
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
ContextTest.setContextByName(context);
try {
// Collecting info on social (posts, replies and likes)
logger.info("Going to harvest Social Interactions for {}", context);
@ -544,7 +494,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
logger.debug("Harvest Measures from {} to {} are {}", DateUtils.format(start), DateUtils.format(end),
accountingRecords);
ContextTest.setContextByName(ROOT);
ContextTest.setContextByName(ROOT_PROD);
// dao.insertRecords(accountingRecords.toArray(new
// AccountingRecord[1]));
@ -556,41 +506,37 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
}
@Ignore
// @Test
public void testMethodInvocation() {
try {
ContextTest.setContextByName(ROOT);
ContextAuthorization contextAuthorization = new ContextAuthorization();
String stockAssessmentToken = contextAuthorization.generateTokenForContext(STOCK_ASSESMENT_VRE, null);
ContextTest.setContext(stockAssessmentToken);
AggregationType measureType = AggregationType.MONTHLY;
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(start, end);
List<AccountingRecord> accountingRecords = methodInvocationHarvester.getAccountingRecords();
logger.debug("{}", accountingRecords);
} catch (Exception e) {
logger.error("", e);
}
}
// @Ignore
// // @Test
// public void testMethodInvocation() {
// try {
// ContextTest.setContextByName(STOCK_ASSESMENT_VRE);
//
// AggregationType measureType = AggregationType.MONTHLY;
//
// Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
// Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
//
// MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(start, end);
// List<AccountingRecord> accountingRecords = methodInvocationHarvester.getAccountingRecords();
//
// logger.debug("{}", accountingRecords);
//
// } catch (Exception e) {
// logger.error("", e);
// }
// }
// @Ignore
@Test
public void testMethodInvocationOldData() {
try {
ContextTest.setContextByName(ROOT);
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
List<Date> starts = new ArrayList<>();
starts.add(DateUtils.getStartCalendar(2021, Calendar.JULY, 1).getTime());
Date start = DateUtils.getStartCalendar(2023, Calendar.JANUARY, 1).getTime();
Date last = DateUtils.getStartCalendar(2024, Calendar.FEBRUARY, 1).getTime();
AggregationType measureType = AggregationType.MONTHLY;
@ -602,40 +548,60 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
for (ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
}
AccountingDashboardHarvesterPlugin.scopeDescriptors.set(scopeDescriptorMap);
List<AccountingRecord> accountingRecords = new ArrayList<>();
for (Date start : starts) {
while(start.before(last)) {
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(start, end);
for (String context : contexts) {
// Setting the token for the context
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
Secret s = contextAuthorization.getCatalogueSecretForContext(context);
ContextTest.set(s);
if (context.startsWith(AccountingDashboardHarvesterPlugin.TAGME_CONTEXT)) {
try {
// Collecting info on method invocation
logger.info("Going to harvest Method Invocations for {}", context);
TagMeMethodInvocationHarvester tagMeMethodInvocationHarvester = new TagMeMethodInvocationHarvester(
start, end);
ScopeBean scopeBean = new ScopeBean(context);
List<AccountingRecord> harvested = tagMeMethodInvocationHarvester.getAccountingRecords();
logger.debug("{} - {}", context, harvested);
accountingRecords.addAll(harvested);
ScopeDescriptor actualScopeDescriptor = scopeDescriptorMap.get(context);
if (actualScopeDescriptor == null) {
actualScopeDescriptor = new ScopeDescriptor(scopeBean.name(), context);
} catch (Exception e) {
logger.error("Error harvesting Method Invocations for {}", context, e);
}
} else {
try {
// Collecting info on method invocation
logger.info("Going to harvest Method Invocations for {}", context);
MethodInvocationHarvester methodInvocationHarvester = new MethodInvocationHarvester(start, end);
List<AccountingRecord> harvested = methodInvocationHarvester.getAccountingRecords();
logger.debug("{} - {}", context, harvested);
accountingRecords.addAll(harvested);
} catch (Exception e) {
logger.error("Error harvesting Method Invocations for {}", context, e);
}
}
AccountingDashboardHarvesterPlugin.scopeDescriptor.set(actualScopeDescriptor);
List<AccountingRecord> harvested = methodInvocationHarvester.getAccountingRecords();
accountingRecords.addAll(harvested);
logger.debug("{} - {}", context, harvested);
}
start = end;
}
logger.debug("Going to insert {}", accountingRecords);
ContextTest.setContextByName(ROOT);
ContextTest.setContextByName(ROOT_PROD);
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch (Exception e) {
@ -643,50 +609,20 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
}
}
protected AccountingDao getAccountingDao() throws ObjectNotFound, Exception {
AccountingDao dao = AccountingDao.get();
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
Map<String, ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
for (ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
}
AccountingDashboardHarvesterPlugin.scopeDescriptors.set(scopeDescriptorMap);
Set<Dimension> dimensionSet = dao.getDimensions();
Map<String, Dimension> dimensionMap = new HashMap<>();
for (Dimension dimension : dimensionSet) {
dimensionMap.put(dimension.getId(), dimension);
}
AccountingDashboardHarvesterPlugin.dimensions.set(dimensionMap);
return dao;
}
protected void setContextByNameAndScopeDescriptor(String contextFullName) throws ObjectNotFound, Exception {
ContextAuthorization contextAuthorization = new ContextAuthorization();
String tagMeToken = contextAuthorization.getTokenForContext(contextFullName);
ContextTest.setContext(tagMeToken);
ScopeBean scopeBean = new ScopeBean(contextFullName);
ScopeDescriptor actualScopeDescriptor = AccountingDashboardHarvesterPlugin.scopeDescriptors.get()
.get(contextFullName);
if (actualScopeDescriptor == null) {
actualScopeDescriptor = new ScopeDescriptor(scopeBean.name(), contextFullName);
}
AccountingDashboardHarvesterPlugin.scopeDescriptor.set(actualScopeDescriptor);
}
@Ignore
// @Test
public void testTagMeMethodInvocation() throws Exception {
try {
// AccountingDao dao = getAccountingDao();
setContextByNameAndScopeDescriptor(TAGME_VRE);
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
ContextTest.setContextByName(TAGME_VRE);
List<AccountingRecord> accountingRecords = new ArrayList<>();
AggregationType measureType = AggregationType.MONTHLY;
@ -709,7 +645,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
logger.debug("{}", accountingRecords);
}
ContextTest.setContextByName(ROOT);
ContextTest.setContextByName(ROOT_PROD);
// dao.insertRecords(accountingRecords.toArray(new
// AccountingRecord[1]));
@ -723,17 +659,10 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
// @Test
public void testGetVREUsersForSpecificVRE() {
try {
ContextTest.setContextByName(ROOT);
ContextTest.setContextByName(ROOT_PROD);
AccountingDao dao = getAccountingDao();
AccountingDashboardHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDashboardHarvesterPlugin();
Properties properties = accountingDataHarvesterPlugin.getConfigParameters();
AccountingDashboardHarvesterPlugin.getProperties().set(properties);
// AccountingDao dao = AccountingDao.get();
ContextAuthorization contextAuthorization = new ContextAuthorization();
ContextTest.setContext(contextAuthorization
.getTokenForContext("/d4science.research-infrastructures.eu/SoBigData/SportsDataScience"));
ContextTest.setContextByName("/d4science.research-infrastructures.eu/SoBigData/SportsDataScience");
AggregationType measureType = AggregationType.MONTHLY;
@ -745,7 +674,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
logger.info("Harvested Data from {} to {} : {}", DateUtils.format(start), DateUtils.format(end), harvested);
ContextTest.setContextByName(ROOT);
ContextTest.setContextByName(ROOT_PROD);
// dao.insertRecords(accountingRecords.toArray(new
// AccountingRecord[1]));
@ -759,7 +688,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
// @Test
public void testFilteringGenericResource() {
try {
ContextTest.setContextByName(ROOT);
ContextTest.setContextByName(ROOT_PROD);
// Utils.setContext(RESOURCE_CATALOGUE);
AggregationType measureType = AggregationType.MONTHLY;
@ -767,10 +696,8 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
SortedSet<String> contexts = getContexts();
AccountingDashboardHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDashboardHarvesterPlugin();
accountingDataHarvesterPlugin.getConfigParameters();
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end,
contexts);
@ -788,9 +715,8 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
public void testResourceCatalogueHarvester() {
try {
// Utils.setContext(RESOURCE_CATALOGUE);
ContextTest.setContextByName(ROOT);
ContextTest.setContextByName(ROOT_PROD);
AggregationType measureType = AggregationType.MONTHLY;
// Date start = DateUtils.getStartCalendar(2015, Calendar.FEBRUARY,
@ -801,10 +727,8 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
AccountingDashboardHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDashboardHarvesterPlugin();
accountingDataHarvesterPlugin.getConfigParameters();
SortedSet<String> contexts = getContexts();
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
ResourceCatalogueHarvester resourceCatalogueHarvester = new ResourceCatalogueHarvester(start, end,
contexts);
@ -817,28 +741,29 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
}
}
@Ignore
// @Ignore
@Test
public void testCoreServicesHarvester() {
try {
String context = ROOT; // "/gcube";
String context = ROOT_PROD;
ContextTest.setContextByName(context);
AccountingDao dao = getAccountingDao();
Calendar from = DateUtils.getStartCalendar(2023, Calendar.MAY, 1);
Calendar finalEnd = DateUtils.getStartCalendar(2023, Calendar.JULY, 1);
Date start = DateUtils.getStartCalendar(2017, Calendar.MAY, 1).getTime();
Date finalEnd = DateUtils.getStartCalendar(2020, Calendar.MAY, 1).getTime();
Date end = DateUtils.getEndDateFromStartDate(AggregationType.MONTHLY, start, 1);
AggregationType aggregationType = AggregationType.MONTHLY;
Date start = from.getTime();
Date end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1);
ScopeBean scopeBean = new ScopeBean(context);
logger.debug("FullName {} - Name {}", scopeBean.toString(), scopeBean.name());
while (end.before(finalEnd)) {
while (from.before(end)) {
CoreServicesAccessesHarvester coreServicesHarvester = new CoreServicesAccessesHarvester(start, end);
List<AccountingRecord> accountingRecords = coreServicesHarvester.getAccountingRecords();
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
logger.debug("{} -> {} Data Inserted", DateUtils.format(start), DateUtils.format(end));
logger.debug("---------------------------------------------------------------------------------------");
@ -853,18 +778,22 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
}
}
@Ignore
@Test
// @Ignore
@Test
public void testCatalogueHarvester() {
try {
String context = ROOT; // "/gcube";
String context = ROOT_PROD;
ContextTest.setContextByName(context);
AccountingDao dao = getAccountingDao();
Date start = DateUtils.getStartCalendar(2020, Calendar.JANUARY, 1).getTime();
Date end = DateUtils.getStartCalendar(2020, Calendar.FEBRUARY, 1).getTime();
Calendar from = DateUtils.getStartCalendar(2023, Calendar.JUNE, 1);
Calendar runbeforeDate = DateUtils.getStartCalendar(2023, Calendar.JULY, 1);
AggregationType aggregationType = AggregationType.MONTHLY;
Date start = from.getTime();
Date end = DateUtils.getEndDateFromStartDate(aggregationType, start, 1);
/*
* Date start = DateUtils.getPreviousPeriod(measureType, false).getTime(); Date
@ -881,55 +810,7 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
}
logger.debug("{}", accountingRecords);
dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch (Exception e) {
logger.error("", e);
}
}
@Ignore
// @Test
public void testDataMethodDownloadHarvester() {
try {
// Utils.setContext(RESOURCE_CATALOGUE);
ContextTest.setContextByName(ROOT);
AggregationType measureType = AggregationType.MONTHLY;
// Date start = DateUtils.getStartCalendar(2015, Calendar.FEBRUARY,
// 1).getTime();
// Date end = DateUtils.getStartCalendar(2019, Calendar.FEBRUARY,
// 1).getTime();
Date start = DateUtils.getPreviousPeriod(measureType, false).getTime();
Date end = DateUtils.getEndDateFromStartDate(measureType, start, 1);
AccountingDashboardHarvesterPlugin accountingDataHarvesterPlugin = new AccountingDashboardHarvesterPlugin();
accountingDataHarvesterPlugin.getConfigParameters();
ContextAuthorization contextAuthorization = new ContextAuthorization();
SortedSet<String> contexts = contextAuthorization.getContexts();
for (String context : contexts) {
ScopeBean scopeBean = new ScopeBean(context);
logger.debug("FullName {} - Name {}", scopeBean.toString(), scopeBean.name());
if (context.startsWith(AccountingDashboardHarvesterPlugin.SO_BIG_DATA_VO)) {
if (scopeBean.is(Type.VRE)) {
if (context.startsWith(TAGME_VRE)) {
continue;
}
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
DataMethodDownloadHarvester dataMethodDownloadHarvester = new DataMethodDownloadHarvester(start,
end, contexts);
List<AccountingRecord> data = dataMethodDownloadHarvester.getAccountingRecords();
logger.debug("{}", data);
}
}
}
// dao.insertRecords(accountingRecords.toArray(new AccountingRecord[1]));
} catch (Exception e) {
logger.error("", e);
@ -943,43 +824,19 @@ public class AccountingDataHarvesterPluginTest extends ContextTest {
public void addMissingVREAccesses() {
try {
ContextTest.setContextByName(ROOT);
ContextTest.setContextByName(ROOT_PROD);
AccountingDashboardHarvesterPlugin adhp = new AccountingDashboardHarvesterPlugin();
Properties properties = adhp.getConfigParameters();
AccountingDashboardHarvesterPlugin.getProperties().set(properties);
AccountingDao dao = getAccountingDao();
ContextAuthorization contextAuthorization = new ContextAuthorization();
// DatabaseManager dbaseManager = new DatabaseManager();
AccountingDao dao = AccountingDao.get();
Set<ScopeDescriptor> scopeDescriptorSet = dao.getContexts();
Map<String, ScopeDescriptor> scopeDescriptorMap = new HashMap<>();
for (ScopeDescriptor scopeDescriptor : scopeDescriptorSet) {
scopeDescriptorMap.put(scopeDescriptor.getId(), scopeDescriptor);
}
AccountingDashboardHarvesterPlugin.scopeDescriptors.set(scopeDescriptorMap);
Set<Dimension> dimensionSet = dao.getDimensions();
Map<String, Dimension> dimensionMap = new HashMap<>();
for (Dimension dimension : dimensionSet) {
dimensionMap.put(dimension.getId(), dimension);
}
AccountingDashboardHarvesterPlugin.dimensions.set(dimensionMap);
// ArrayList<HarvestedData> data = new ArrayList<HarvestedData>();
ArrayList<AccountingRecord> accountingRecords = new ArrayList<AccountingRecord>();
String context = E_LEARNING_AREA_VRE;
// Setting the token for the context
ContextTest.setContext(contextAuthorization.getTokenForContext(context));
ScopeBean scopeBean = new ScopeBean(context);
ScopeDescriptor scopeDescriptor = new ScopeDescriptor(scopeBean.name(), context);
ContextTest.setContextByName(context);
ScopeDescriptor scopeDescriptor = AccountingDashboardHarvesterPlugin.getScopeDescriptor(context);
Dimension dimension = AccountingDashboardHarvesterPlugin.getDimension(HarvestedDataKey.ACCESSES.getKey());
Calendar calendar = DateUtils.getStartCalendar(2018, Calendar.JULY, 1);

View File

@ -5,6 +5,7 @@ import java.io.InputStream;
import java.util.Properties;
import org.gcube.dataharvest.AccountingDashboardHarvesterPlugin;
import org.gcube.dataharvest.ContextTest;
import org.junit.Ignore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -24,7 +25,6 @@ public class ContextAuthorizationTest extends ContextTest {
properties = new Properties();
InputStream input = AccountingDashboardHarvesterPlugin.class.getClassLoader().getResourceAsStream(PROPERTY_FILENAME);
properties.load(input);
AccountingDashboardHarvesterPlugin.getProperties().set(properties);
}
@Ignore
@ -35,7 +35,7 @@ public class ContextAuthorizationTest extends ContextTest {
}catch (Exception e) {
logger.warn("Unable to load {} file containing configuration properties. AccountingDataHarvesterPlugin will use defaults", PROPERTY_FILENAME);
}
ContextAuthorization contextAuthorization = new ContextAuthorization();
ContextAuthorization contextAuthorization = new ContextAuthorization(properties);
contextAuthorization.retrieveContextsAndTokens();
}
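A minimal sketch of the updated call pattern in this file, assuming only what the hunks show: the properties are loaded from PROPERTY_FILENAME and handed straight to the ContextAuthorization(properties) constructor instead of being stored in the plugin's thread-local:

    // Illustrative sketch only; PROPERTY_FILENAME and logger are those of ContextAuthorizationTest.
    Properties properties = new Properties();
    try (InputStream input = AccountingDashboardHarvesterPlugin.class.getClassLoader()
            .getResourceAsStream(PROPERTY_FILENAME)) {
        properties.load(input);
    } catch (Exception e) {
        logger.warn("Unable to load {} file containing configuration properties. Defaults will be used", PROPERTY_FILENAME);
    }
    ContextAuthorization contextAuthorization = new ContextAuthorization(properties);
    contextAuthorization.retrieveContextsAndTokens();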

View File

@ -1,86 +0,0 @@
/**
*
*/
package org.gcube.dataharvest.utils;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.gcube.common.authorization.client.Constants;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
import org.gcube.common.authorization.library.AuthorizationEntry;
import org.gcube.common.authorization.library.provider.AuthorizationProvider;
import org.gcube.common.authorization.library.provider.ClientInfo;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.authorization.library.utils.Caller;
import org.gcube.common.scope.api.ScopeProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Luca Frosini (ISTI - CNR)
*
*/
public class ContextTest {
private static final Logger logger = LoggerFactory.getLogger(ContextTest.class);
protected static Properties properties;
protected static final String PROPERTIES_FILENAME = "token.properties";
public static final String DEFAULT_TEST_SCOPE_NAME;
static {
properties = new Properties();
InputStream input = ContextTest.class.getClassLoader().getResourceAsStream(PROPERTIES_FILENAME);
try {
// load the properties file
properties.load(input);
} catch(IOException e) {
throw new RuntimeException(e);
}
//DEFAULT_TEST_SCOPE_NAME = "/pred4s/preprod/preVRE";
DEFAULT_TEST_SCOPE_NAME = "/d4science.research-infrastructures.eu";
}
public static String getCurrentScope(String token) throws ObjectNotFound, Exception {
AuthorizationEntry authorizationEntry = Constants.authorizationService().get(token);
String context = authorizationEntry.getContext();
logger.info("Context of token {} is {}", token, context);
return context;
}
public static void setContextByName(String fullContextName) throws ObjectNotFound, Exception {
String token = ContextTest.properties.getProperty(fullContextName);
setContext(token);
}
public static void setContext(String token) throws ObjectNotFound, Exception {
SecurityTokenProvider.instance.set(token);
AuthorizationEntry authorizationEntry = Constants.authorizationService().get(token);
ClientInfo clientInfo = authorizationEntry.getClientInfo();
logger.debug("User : {} - Type : {}", clientInfo.getId(), clientInfo.getType().name());
String qualifier = authorizationEntry.getQualifier();
Caller caller = new Caller(clientInfo, qualifier);
AuthorizationProvider.instance.set(caller);
ScopeProvider.instance.set(getCurrentScope(token));
}
/*
@BeforeClass
public static void beforeClass() throws Exception {
setContextByName(DEFAULT_TEST_SCOPE_NAME);
}
@AfterClass
public static void afterClass() throws Exception {
SecurityTokenProvider.instance.reset();
ScopeProvider.instance.reset();
}
*/
}
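A minimal usage sketch of the helper removed above, assuming (as its static initializer requires) a token.properties file on the test classpath that maps each context full name to an authorization token:

    // Illustrative sketch only: how tests resolved and set a context with the removed helper.
    ContextTest.setContextByName("/d4science.research-infrastructures.eu");
    // or, with an explicit token, which sets SecurityTokenProvider, AuthorizationProvider and ScopeProvider:
    // ContextTest.setContext(token);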

View File

@ -2,3 +2,5 @@
/*.key
/*.properties
/howto.txt
/scopedata 2.xml
/config.ini