Added Datasets from Datacite WF
parent a4cfabdbc6
commit b081e01dad
@@ -0,0 +1,107 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

    <!-- <parent>
        <artifactId>dhp-workflows</artifactId>
        <groupId>eu.dnetlib.dhp</groupId>
        <version>1.1.7-SNAPSHOT</version>
    </parent>
    <groupId>eu.dnetlib</groupId> -->
    <!-- <parent>
        <groupId>eu.dnetlib.dhp</groupId>
        <artifactId>dhp-workflows</artifactId>
        <version>1.1.7-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>dhp-usage-stats-update</artifactId> -->

    <parent>
        <artifactId>dhp-workflows</artifactId>
        <groupId>eu.dnetlib.dhp</groupId>
        <version>1.1.7-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>dhp-indicators</artifactId>
    <build>
        <plugins>
            <plugin>
                <groupId>pl.project13.maven</groupId>
                <artifactId>git-commit-id-plugin</artifactId>
                <version>2.1.15</version>
                <executions>
                    <execution>
                        <goals>
                            <goal>revision</goal>
                        </goals>
                    </execution>
                </executions>
                <configuration>
                    <dotGitDirectory>${project.basedir}/../.git</dotGitDirectory>
                    <!-- more config here as you see fit -->
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.6.1</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <cdh.hive.version>0.13.1-cdh5.2.1</cdh.hive.version>
        <cdh.hadoop.version>2.5.0-cdh5.2.1</cdh.hadoop.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.11</artifactId>
            <version>2.2.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.11</artifactId>
            <version>2.4.5</version>
        </dependency>
        <dependency>
            <groupId>com.googlecode.json-simple</groupId>
            <artifactId>json-simple</artifactId>
            <version>1.1.1</version>
        </dependency>
        <dependency>
            <groupId>org.json</groupId>
            <artifactId>json</artifactId>
            <version>20180130</version>
            <type>jar</type>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>${cdh.hive.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${cdh.hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>eu.dnetlib.dhp</groupId>
            <artifactId>dhp-common</artifactId>
            <version>${project.version}</version>
        </dependency>
        <dependency>
            <groupId>c3p0</groupId>
            <artifactId>c3p0</artifactId>
            <version>0.9.1.2</version>
            <type>jar</type>
        </dependency>
    </dependencies>
    <name>dhp-indicators</name>
</project>
@@ -0,0 +1 @@
mvn clean package -Poozie-package,deploy,run -Dworkflow.source.dir=eu/dnetlib/dhp/oa/graph/indicators
@@ -0,0 +1,35 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */

package eu.dnetlib.oa.graph.indicators.export;

import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;

import org.apache.commons.io.IOUtils;
import org.apache.log4j.BasicConfigurator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;

/**
 * @author D. Pierrakos
 */
public class ExecuteWorkflow {

    private static final Logger logger = LoggerFactory.getLogger(ExecuteWorkflow.class);

    public static void main(String args[]) throws Exception {

        // Sending the logs to the console
        BasicConfigurator.configure();

        logger.info("Workflow Executed");
    }

}
@@ -0,0 +1,38 @@
<configuration>
    <property>
        <name>jobTracker</name>
        <value>${jobTracker}</value>
    </property>
    <property>
        <name>nameNode</name>
        <value>${nameNode}</value>
    </property>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
    <property>
        <name>oozie.action.sharelib.for.spark</name>
        <value>spark2</value>
    </property>
    <property>
        <name>hiveMetastoreUris</name>
        <value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
    </property>
    <property>
        <name>hiveJdbcUrl</name>
        <value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000/;UseNativeQuery=1</value>
    </property>
    <property>
        <name>impalaJdbcUrl</name>
        <value>jdbc:hive2://iis-cdh5-test-gw.ocean.icm.edu.pl:21050/;auth=noSasl;</value>
    </property>
    <property>
        <name>oozie.wf.workflow.notification.url</name>
        <value>{serviceUrl}/v1/oozieNotification/jobUpdate?jobId=$jobId%26status=$status</value>
    </property>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
</configuration>
@@ -0,0 +1,5 @@
#! /usr/bin/env python
import sys

print "this is a Python script"
print "Python Interpreter Version: " + sys.version
@@ -0,0 +1,2 @@
#!/bin/bash
echo "`date` hi"
@@ -0,0 +1,58 @@
<workflow-app name="Python sample" xmlns="uri:oozie:workflow:0.5">
    <parameters>
        <property>
            <name>hiveMetastoreUris</name>
            <description>Hive server metastore URIs</description>
        </property>
        <property>
            <name>hiveJdbcUrl</name>
            <description>Hive server jdbc url</description>
        </property>
        <property>
            <name>impalaJdbcUrl</name>
            <description>Impala server jdbc url</description>
        </property>
    </parameters>

    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>hive.metastore.uris</name>
                <value>${hiveMetastoreUris}</value>
            </property>
            <property>
                <name>mapreduce.job.queuename</name>
                <value>${queueName}</value>
            </property>
            <property>
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${oozieLauncherQueueName}</value>
            </property>
        </configuration>
    </global>

    <start to="python-check"/>
    <action name="python-check">
        <shell xmlns="uri:oozie:shell-action:0.2">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <configuration>
                <property>
                    <name>mapred.job.queue.name</name>
                    <value>${queueName}</value>
                </property>
            </configuration>
            <exec>testpython.py</exec>
            <file>python/testpython.py</file>
            <capture-output/>
        </shell>
        <ok to="end"/>
        <error to="fail"/>
    </action>
    <kill name="fail">
        <message>Python action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>
    <end name="end"/>
</workflow-app>
@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<project-shared-configuration>
    <!--
    This file contains additional configuration written by modules in the NetBeans IDE.
    The configuration is intended to be shared among all the users of project and
    therefore it is assumed to be part of version control checkout.
    Without this configuration present, some functionality in the IDE may be limited or fail altogether.
    -->
    <properties xmlns="http://www.netbeans.org/ns/maven-properties-data/1">
        <!--
        Properties that influence various parts of the IDE, especially code formatting and the like.
        You can copy and paste the single properties, into the pom.xml file and the IDE will pick them up.
        That way multiple projects can share the same settings (useful for formatting rules for example).
        Any value defined here will override the pom.xml file value but is only applicable to the current project.
        -->
        <netbeans.hint.jdkPlatform>JDK_1.8</netbeans.hint.jdkPlatform>
    </properties>
</project-shared-configuration>
@@ -0,0 +1,121 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

    <!-- <parent>
        <artifactId>dhp-workflows</artifactId>
        <groupId>eu.dnetlib.dhp</groupId>
        <version>1.1.7-SNAPSHOT</version>
    </parent>
    <groupId>eu.dnetlib</groupId> -->
    <!-- <parent>
        <groupId>eu.dnetlib.dhp</groupId>
        <artifactId>dhp-workflows</artifactId>
        <version>1.1.7-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>dhp-usage-stats-update</artifactId> -->

    <parent>
        <artifactId>dhp-workflows</artifactId>
        <groupId>eu.dnetlib.dhp</groupId>
        <version>1.1.7-SNAPSHOT</version>
        <relativePath>../</relativePath>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>dhp-usage-datasets-stats-update</artifactId>
    <build>
        <plugins>
            <plugin>
                <groupId>pl.project13.maven</groupId>
                <artifactId>git-commit-id-plugin</artifactId>
                <version>2.1.15</version>
                <executions>
                    <execution>
                        <goals>
                            <goal>revision</goal>
                        </goals>
                    </execution>
                </executions>
                <configuration>
                    <dotGitDirectory>${project.basedir}/../.git</dotGitDirectory>
                    <!-- more config here as you see fit -->
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.6.1</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <cdh.hive.version>0.13.1-cdh5.2.1</cdh.hive.version>
        <cdh.hadoop.version>2.5.0-cdh5.2.1</cdh.hadoop.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.11</artifactId>
            <version>2.2.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.11</artifactId>
            <version>2.4.5</version>
        </dependency>
        <dependency>
            <groupId>com.googlecode.json-simple</groupId>
            <artifactId>json-simple</artifactId>
            <version>1.1.1</version>
        </dependency>
        <dependency>
            <groupId>org.json</groupId>
            <artifactId>json</artifactId>
            <version>20180130</version>
            <type>jar</type>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>${cdh.hive.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.7.4</version>
            <type>jar</type>
        </dependency>
        <dependency>
            <groupId>eu.dnetlib.dhp</groupId>
            <artifactId>dhp-common</artifactId>
            <version>1.1.7-SNAPSHOT</version>
            <type>jar</type>
        </dependency>
        <dependency>
            <groupId>com.mchange</groupId>
            <artifactId>c3p0</artifactId>
            <version>0.9.5.2</version>
        </dependency>
        <dependency>
            <groupId>c3p0</groupId>
            <artifactId>c3p0</artifactId>
            <version>0.9.1.2</version>
            <type>jar</type>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.26</version>
            <type>jar</type>
        </dependency>
    </dependencies>
    <name>dhp-usage-datasets-stats-update</name>
</project>
@@ -0,0 +1 @@
mvn clean package -Poozie-package,deploy,run -Dworkflow.source.dir=eu/dnetlib/dhp/oa/graph/datasetsusagestats
@@ -0,0 +1,123 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */

package eu.dnetlib.oa.graph.datasetsusagestats.export;

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.log4j.Logger;

import com.mchange.v2.c3p0.ComboPooledDataSource;

/**
 * @author D. Pierrakos
 */
public abstract class ConnectDB {

    public static Connection DB_HIVE_CONNECTION;
    public static Connection DB_IMPALA_CONNECTION;

    private static String dbHiveUrl;
    private static String dbImpalaUrl;
    private static String datasetUsageStatsDBSchema;
    private static String statsDBSchema;
    private final static Logger logger = Logger.getLogger(ConnectDB.class);
    private Statement stmt = null;

    static void init() throws ClassNotFoundException {

        dbHiveUrl = ExecuteWorkflow.dbHiveUrl;
        dbImpalaUrl = ExecuteWorkflow.dbImpalaUrl;
        datasetUsageStatsDBSchema = ExecuteWorkflow.datasetUsageStatsDBSchema;
        statsDBSchema = ExecuteWorkflow.statsDBSchema;

        Class.forName("org.apache.hive.jdbc.HiveDriver");
    }

    public static Connection getHiveConnection() throws SQLException {
        if (DB_HIVE_CONNECTION != null && !DB_HIVE_CONNECTION.isClosed()) {
            return DB_HIVE_CONNECTION;
        } else {
            DB_HIVE_CONNECTION = connectHive();

            return DB_HIVE_CONNECTION;
        }
    }

    public static Connection getImpalaConnection() throws SQLException {
        if (DB_IMPALA_CONNECTION != null && !DB_IMPALA_CONNECTION.isClosed()) {
            return DB_IMPALA_CONNECTION;
        } else {
            DB_IMPALA_CONNECTION = connectImpala();

            return DB_IMPALA_CONNECTION;
        }
    }

    public static String getDataSetUsageStatsDBSchema() {
        return ConnectDB.datasetUsageStatsDBSchema;
    }

    public static String getStatsDBSchema() {
        return ConnectDB.statsDBSchema;
    }

    private static Connection connectHive() throws SQLException {

        ComboPooledDataSource cpds = new ComboPooledDataSource();
        cpds.setJdbcUrl(dbHiveUrl);
        cpds.setUser("dimitris.pierrakos");
        cpds.setAcquireIncrement(1);
        cpds.setMaxPoolSize(100);
        cpds.setMinPoolSize(1);
        cpds.setInitialPoolSize(1);
        cpds.setMaxIdleTime(300);
        cpds.setMaxConnectionAge(36000);

        cpds.setAcquireRetryAttempts(5);
        cpds.setAcquireRetryDelay(2000);
        cpds.setBreakAfterAcquireFailure(false);

        cpds.setCheckoutTimeout(0);
        cpds.setPreferredTestQuery("SELECT 1");
        cpds.setIdleConnectionTestPeriod(60);

        logger.info("Opened database successfully");

        return cpds.getConnection();
    }

    private static Connection connectImpala() throws SQLException {

        ComboPooledDataSource cpds = new ComboPooledDataSource();
        cpds.setJdbcUrl(dbImpalaUrl);
        cpds.setUser("dimitris.pierrakos");
        cpds.setAcquireIncrement(1);
        cpds.setMaxPoolSize(100);
        cpds.setMinPoolSize(1);
        cpds.setInitialPoolSize(1);
        cpds.setMaxIdleTime(300);
        cpds.setMaxConnectionAge(36000);

        cpds.setAcquireRetryAttempts(5);
        cpds.setAcquireRetryDelay(2000);
        cpds.setBreakAfterAcquireFailure(false);

        cpds.setCheckoutTimeout(0);
        cpds.setPreferredTestQuery("SELECT 1");
        cpds.setIdleConnectionTestPeriod(60);

        logger.info("Opened database successfully");
        return cpds.getConnection();
    }
}
@@ -0,0 +1,114 @@
package eu.dnetlib.oa.graph.datasetsusagestats.export;

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * @author D. Pierrakos
 */
public class DatasetsStatsDB {

    private String logPath;
    private String logRepoPath;
    private String logPortalPath;

    private Statement stmt = null;

    private static final Logger logger = LoggerFactory.getLogger(DatasetsStatsDB.class);

    public DatasetsStatsDB(String logRepoPath, String logPortalPath) throws Exception {
        this.logRepoPath = logRepoPath;
        this.logPortalPath = logPortalPath;

    }

    public void recreateDBAndTables() throws Exception {
        this.createDatabase();
        this.createTables();
    }

    private void createDatabase() throws Exception {
        try {
            stmt = ConnectDB.getHiveConnection().createStatement();

            logger.info("Dropping datasets DB: " + ConnectDB.getDataSetUsageStatsDBSchema());
            String dropDatabase = "DROP DATABASE IF EXISTS " + ConnectDB.getDataSetUsageStatsDBSchema() + " CASCADE";
            stmt.executeUpdate(dropDatabase);
        } catch (Exception e) {
            logger.error("Failed to drop database: " + e);
            throw new Exception("Failed to drop database: " + e.toString(), e);
        }

        try {
            stmt = ConnectDB.getHiveConnection().createStatement();

            logger.info("Creating usagestats DB: " + ConnectDB.getDataSetUsageStatsDBSchema());
            String createDatabase = "CREATE DATABASE IF NOT EXISTS " + ConnectDB.getDataSetUsageStatsDBSchema();
            stmt.executeUpdate(createDatabase);

        } catch (Exception e) {
            logger.error("Failed to create database: " + e);
            throw new Exception("Failed to create database: " + e.toString(), e);
        }
    }

    private void createTables() throws Exception {
        try {
            stmt = ConnectDB.getHiveConnection().createStatement();

            // Create Reports table - This table should exist
            logger.info("Creating Reports Table");
            String sqlCreateTableDataciteReports = "CREATE TABLE IF NOT EXISTS "
                + ConnectDB.getDataSetUsageStatsDBSchema()
                + ".datacitereports(reportid STRING, \n"
                + " name STRING, \n"
                + " source STRING,\n"
                + " release STRING,\n"
                + " createdby STRING,\n"
                + " report_start_date STRING,\n"
                + " report_end_date STRING)\n"
                + " CLUSTERED BY (reportid)\n"
                + " into 100 buckets stored as orc tblproperties('transactional'='true')";

            stmt.executeUpdate(sqlCreateTableDataciteReports);
            logger.info("Reports Table Created");

            // Create Datasets Performance Table
            logger.info("Creating DataSetsPerformance Table");
            String sqlCreateTableDataSetsPerformance = "CREATE TABLE IF NOT EXISTS "
                + ConnectDB.getDataSetUsageStatsDBSchema()
                + ".datasetsperformance(ds_type STRING,\n"
                + " ds_title STRING,\n"
                + " yop STRING,\n"
                + " dataset_type STRING, \n"
                + " uri STRING,\n"
                + " platform STRING,\n"
                + " publisher STRING,\n"
                + " publisher_id array<struct<type:STRING, value:STRING>>,\n"
                + " dataset_contributors array<struct<type:STRING, value:STRING>>,\n"
                + " period_end STRING,\n"
                + " period_from STRING,\n"
                + " access_method STRING,\n"
                + " metric_type STRING,\n"
                + " count INT,\n"
                + " reportid STRING)\n"
                + " CLUSTERED BY (ds_type)\n"
                + " into 100 buckets stored as orc tblproperties('transactional'='true')";
            stmt.executeUpdate(sqlCreateTableDataSetsPerformance);
            logger.info("DataSetsPerformance Table Created");

            stmt.close();
            ConnectDB.getHiveConnection().close();

        } catch (Exception e) {
            logger.error("Failed to create tables: " + e);
            throw new Exception("Failed to create tables: " + e.toString(), e);
        }
    }

}
@@ -0,0 +1,100 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */

package eu.dnetlib.oa.graph.datasetsusagestats.export;

import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.json.simple.parser.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;

/**
 * @author D.Pierrakos
 */
public class DownloadReportsListFromDatacite {

    private String dataciteBaseURL;
    private String dataciteReportPath;
    private static final Logger logger = LoggerFactory.getLogger(DownloadReportsListFromDatacite.class);

    public DownloadReportsListFromDatacite(String dataciteBaseURL, String dataciteReportPath)
        throws MalformedURLException, Exception {

        this.dataciteBaseURL = dataciteBaseURL;
        this.dataciteReportPath = dataciteReportPath;
    }

    public void downloadReportsList() throws ParseException {
        StringBuilder responseStrBuilder = new StringBuilder();

        Gson gson = new Gson();

        try {
            BufferedInputStream in = new BufferedInputStream(new URL(dataciteBaseURL).openStream());
            BufferedReader streamReader = new BufferedReader(new InputStreamReader(in, "UTF-8"));
            String inputStr;

            while ((inputStr = streamReader.readLine()) != null) {
                responseStrBuilder.append(inputStr);
            }
        } catch (IOException e) {
            logger.info(e.getMessage());
        }
        // Collect the ids of all available reports from the endpoint's "reports" array
        JsonObject jsonObject = gson.fromJson(responseStrBuilder.toString(), JsonObject.class);
        JsonArray dataArray = jsonObject.getAsJsonArray("reports");
        ArrayList<String> reportsList = new ArrayList<>();
        for (JsonElement element : dataArray) {
            reportsList.add(element.getAsJsonObject().get("id").getAsString());
        }

        // Download each report and store it in HDFS as <reportId>.json
        Iterator<String> it = reportsList.iterator();
        while (it.hasNext()) {
            String reportId = it.next();
            String url = dataciteBaseURL + reportId;

            try {
                BufferedInputStream in = new BufferedInputStream(new URL(url).openStream());
                BufferedReader streamReader = new BufferedReader(new InputStreamReader(in, "UTF-8"));
                String inputStr;
                StringBuilder responseStrBuilder2 = new StringBuilder();
                while ((inputStr = streamReader.readLine()) != null) {
                    responseStrBuilder2.append(inputStr);
                }
                FileSystem fs = FileSystem.get(new Configuration());
                FSDataOutputStream fin = fs
                    .create(
                        new Path(dataciteReportPath + "/" + reportId + ".json"),
                        true);
                byte[] jsonObjectRawBytes = responseStrBuilder2.toString().getBytes();
                fin.write(jsonObjectRawBytes);
                fin.writeChar('\n');

                fin.close();
            } catch (IOException e) {
                System.out.println(e);
            }
        }
    }
}
@@ -0,0 +1,69 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */

package eu.dnetlib.oa.graph.datasetsusagestats.export;

import org.apache.commons.io.IOUtils;
import org.apache.log4j.BasicConfigurator;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;

/**
 * @author D. Pierrakos, S. Zoupanos
 */
public class ExecuteWorkflow {

    static String dataciteBaseURL;
    static String dataciteReportPath;
    static String dbHiveUrl;
    static String dbImpalaUrl;
    static String datasetUsageStatsDBSchema;
    static String statsDBSchema;
    static boolean recreateDbAndTables;
    static boolean datasetsEmptyDirs;
    static boolean finalTablesVisibleToImpala;

    public static void main(String args[]) throws Exception {

        // Sending the logs to the console
        BasicConfigurator.configure();

        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
            IOUtils
                .toString(
                    UsageStatsExporter.class
                        .getResourceAsStream(
                            "/eu/dnetlib/dhp/oa/graph/datasetsusagestats/export/datasets_usagestats_parameters.json")));
        parser.parseArgument(args);

        // Setting up the initial parameters
        dataciteBaseURL = parser.get("dataciteBaseURL");
        dataciteReportPath = parser.get("dataciteReportPath");
        dbHiveUrl = parser.get("dbHiveUrl");
        dbImpalaUrl = parser.get("dbImpalaUrl");
        datasetUsageStatsDBSchema = parser.get("datasetUsageStatsDBSchema");
        statsDBSchema = parser.get("statsDBSchema");

        if (parser.get("recreateDbAndTables").toLowerCase().equals("true"))
            recreateDbAndTables = true;
        else
            recreateDbAndTables = false;

        if (parser.get("datasetsEmptyDirs").toLowerCase().equals("true"))
            datasetsEmptyDirs = true;
        else
            datasetsEmptyDirs = false;

        // if (parser.get("finalTablesVisibleToImpala").toLowerCase().equals("true"))
        // finalTablesVisibleToImpala = true;
        // else
        // finalTablesVisibleToImpala = false;
        //
        UsageStatsExporter usagestatsExport = new UsageStatsExporter();
        usagestatsExport.export();
    }

}
@@ -0,0 +1,325 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */

package eu.dnetlib.oa.graph.datasetsusagestats.export;

import java.io.*;
import java.net.MalformedURLException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Base64;
import java.util.zip.GZIPInputStream;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * @author D.Pierrakos
 */
public class ReadReportsListFromDatacite {

    private String dataciteReportPath;
    private static final Logger logger = LoggerFactory.getLogger(ReadReportsListFromDatacite.class);

    public ReadReportsListFromDatacite(String dataciteReportPath) throws MalformedURLException, Exception {

        this.dataciteReportPath = dataciteReportPath;
    }

    public void readReports() throws Exception {
        Statement stmt = ConnectDB.getHiveConnection().createStatement();
        ConnectDB.getHiveConnection().setAutoCommit(false);
        ArrayList<String> jsonFiles = listHdfsDir(dataciteReportPath);
        for (String jsonFile : jsonFiles) {
            logger.info("Reading report file " + jsonFile);
            // Load the raw JSON report into a temporary single-column Hive table
            this.createTmpReportsTable(jsonFile);

            String sqlSelectReportID = "SELECT get_json_object(json, '$.report.id') FROM "
                + ConnectDB.getDataSetUsageStatsDBSchema() + ".tmpjsonToTable";
            stmt.execute(sqlSelectReportID);
            ResultSet rstmpReportID = stmt.getResultSet();

            String reportID = null;
            while (rstmpReportID.next()) {
                reportID = rstmpReportID.getString(1);
            }

            logger.info("Checking report with id " + reportID);
            String sqlCheckIfReportExists = "SELECT source FROM " + ConnectDB.getDataSetUsageStatsDBSchema()
                + ".datacitereports where reportid=?";
            PreparedStatement stGetReportID = ConnectDB.getHiveConnection().prepareStatement(sqlCheckIfReportExists);
            stGetReportID.setString(1, reportID);

            ResultSet rsCheckIfReportExist = stGetReportID.executeQuery();

            if (rsCheckIfReportExist.next()) {
                logger.info("Report found with ID " + reportID);
                dropTmpReportsTable();
            } else {
                String sqlInsertReport = "INSERT INTO " + ConnectDB.getDataSetUsageStatsDBSchema()
                    + ".datacitereports "
                    + "SELECT\n"
                    + " get_json_object(json, '$.report.id') AS reportid,\n"
                    + " get_json_object(json, '$.report.report-header.report-name') AS name,\n"
                    + " get_json_object(json, '$.report.report-header.report-id') AS source,\n"
                    + " get_json_object(json, '$.report.report-header.release') AS release,\n"
                    + " get_json_object(json, '$.report.report-header.created-by') AS createdby,\n"
                    + " get_json_object(json, '$.report.report-header.reporting-period.begin-date') AS fromdate,\n"
                    + " get_json_object(json, '$.report.report-header.reporting-period.end-date') AS todate \n"
                    + "FROM " + ConnectDB.getDataSetUsageStatsDBSchema() + ".tmpjsonToTable";
                stmt.execute(sqlInsertReport);

                logger.info("Report added");

                logger.info("Adding datasets");
                String sqlSelecteDatasetsArray = "SELECT get_json_object(json, '$.report.report-datasets') FROM "
                    + ConnectDB.getDataSetUsageStatsDBSchema() + ".tmpjsonToTable";
                stmt.execute(sqlSelecteDatasetsArray);
                ResultSet rstmpReportDatasets = stmt.getResultSet();

                if (rstmpReportDatasets.next() && rstmpReportDatasets.getString(1).indexOf(',') > 0) {
                    // String[] listDatasets = rstmpReportDatasets.getString(1).split(",");
                    // String listDatasets = rstmpReportDatasets.getString(1);
                    String sqlSelectReport = "SELECT * FROM "
                        + ConnectDB.getDataSetUsageStatsDBSchema() + ".tmpjsonToTable";
                    stmt.execute(sqlSelectReport);
                    ResultSet rstmpReportAll = stmt.getResultSet();
                    if (rstmpReportAll.next()) {
                        String listDatasets = rstmpReportAll.getString(1);
                        logger.info("No compressed performance found");
                        this.readDatasetsReport(listDatasets, reportID);
                    }

                }
                logger.info("Adding gziped performance for datasets");
                String sqlSelecteReportSubsets = "SELECT get_json_object(json, '$.report.report-subsets.gzip[0]') FROM "
                    + ConnectDB.getDataSetUsageStatsDBSchema() + ".tmpjsonToTable";
                stmt.execute(sqlSelecteReportSubsets);
                ResultSet rstmpReportSubsets = stmt.getResultSet();
                if (rstmpReportSubsets.next()) {
                    String unCompressedReport = uncompressString(rstmpReportSubsets.getString(1));
                    this.readDatasetsReport(unCompressedReport, reportID);
                }
            }
        }
        this.dropTmpReportsTable();
    }

    public void readDatasetsReport(String prettyDatasetsReports, String reportId) throws Exception {
        ObjectMapper objectMapper = new ObjectMapper();
        JsonNode jsonNode = objectMapper.readValue(prettyDatasetsReports, JsonNode.class);
        String datasetsReports = jsonNode.toString();
        // COUNTER report keys use hyphens, which are not valid in Hive struct field names,
        // so map them to the underscore versions used in the table schema below
        String report = datasetsReports
            .replace("report-datasets", "report_datasets")
            .replace("dataset-title", "dataset_title")
            .replace("dataset-id", "dataset_id")
            .replace("data-type", "data_type")
            .replace("publisher-id", "publisher_id")
            .replace("dataset-contributors", "dataset_contributors")
            .replace("begin-date", "begin_date")
            .replace("end-date", "end_date")
            .replace("access-method", "access_method")
            .replace("metric-type", "metric_type")
            .replace("doi:", "");
        FileSystem fs = FileSystem.get(new Configuration());
        String tmpPath = dataciteReportPath + "/tmpjson";
        FSDataOutputStream fin = fs
            .create(new Path(dataciteReportPath + "/tmpjson/" + reportId + "_Compressed.json"), true);
        byte[] jsonObjectRawBytes = report.getBytes();

        fin.write(jsonObjectRawBytes);

        fin.writeChar('\n');
        fin.close();

        logger.info("Write Compress Report To File");
        logger.info("Reading Compress Report From File...");

        String sqlCreateTempTableForDatasets = "CREATE TEMPORARY TABLE " + ConnectDB.getDataSetUsageStatsDBSchema()
            + ".tmpjsoncompressesed (report_datasets array<struct<dataset_id:array<struct<value:string>>,dataset_title:string, data_type:string, "
            + "uri:string, publisher:string, publisher_id:array<struct<type:string, value:string>>,platform:string, yop:string, "
            + "dataset_contributors:array<struct<type:string, value:string>>,"
            + "performance:array<struct<period:struct<begin_date:string,end_date:string>, "
            + "instance:array<struct<count:int,access_method:string,metric_type:string>>>>>>) "
            + "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'\n"
            + "LOCATION '" + tmpPath + "'";

        Statement stmt = ConnectDB.getHiveConnection().createStatement();

        ConnectDB.getHiveConnection().setAutoCommit(false);

        logger.info("Adding JSON Serde jar");
        stmt.executeUpdate("add jar /usr/share/cmf/common_jars/hive-hcatalog-core-1.1.0-cdh5.14.0.jar");
        logger.info("Added JSON Serde jar");

        logger.info("Inserting Datasets Performance");
        stmt.execute(sqlCreateTempTableForDatasets);

        String sqlInsertToDatasetsPerformance = "INSERT INTO " + ConnectDB.getDataSetUsageStatsDBSchema()
            + ".datasetsperformance SELECT dataset.dataset_id[0].value ds_type, "
            + " dataset.dataset_title ds_title, "
            + " dataset.yop yop, "
            + " dataset.data_type dataset_type, "
            + " dataset.uri uri, "
            + " dataset.platform platform, "
            + " dataset.publisher publisher, "
            + " dataset.publisher_id publisher_id, "
            + " dataset.dataset_contributors dataset_contributors, "
            + " period.end_date period_end, "
            + " period.begin_date period_from, "
            + " performance.access_method access_method, "
            + " performance.metric_type metric_type, "
            + " performance.count count, "
            + "'" + reportId + "' report_id "
            + " FROM " + ConnectDB.getDataSetUsageStatsDBSchema() + ".tmpjsoncompressesed "
            + " LATERAL VIEW explode(report_datasets) exploded_table as dataset LATERAL VIEW explode(dataset.performance[0].instance) exploded_table2 as performance "
            + " LATERAL VIEW explode (array(dataset.performance[0].period)) exploded_table3 as period";

        stmt.executeUpdate(sqlInsertToDatasetsPerformance);

        logger.info("Datasets Performance Inserted ");

        stmt.execute("Drop table " + ConnectDB.getDataSetUsageStatsDBSchema() + ".tmpjsoncompressesed");

        logger.info("Datasets Report Added");

    }

    private ArrayList<String> listHdfsDir(String dir) throws Exception {

        FileSystem hdfs = FileSystem.get(new Configuration());
        RemoteIterator<LocatedFileStatus> files;
        ArrayList<String> fileNames = new ArrayList<>();

        try {
            Path exportPath = new Path(hdfs.getUri() + dir);
            files = hdfs.listFiles(exportPath, false);
            while (files.hasNext()) {
                String fileName = files.next().getPath().toString();
                fileNames.add(fileName);
            }

            hdfs.close();
        } catch (Exception e) {
            logger.error("HDFS file path with exported data does not exist : " + new Path(hdfs.getUri() + dir));
            throw new Exception("HDFS file path with exported data does not exist : " + dir, e);
        }

        return fileNames;
    }

    private String readHDFSFile(String filename) throws Exception {
        String result;
        try {

            FileSystem fs = FileSystem.get(new Configuration());
            // log.info("reading file : " + filename);

            BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(new Path(filename))));

            StringBuilder sb = new StringBuilder();
            String line = br.readLine();

            while (line != null) {
                sb.append(line);
                line = br.readLine();
            }
            result = sb.toString().trim();
            // fs.close();
        } catch (Exception e) {
            throw new Exception(e);
        }

        return result;
    }

    public static String uncompressString(String zippedBase64Str)
        throws IOException {
        String uncompressedReport = null;

        byte[] bytes = Base64.getDecoder().decode(zippedBase64Str);
        GZIPInputStream zi = null;
        try {
            zi = new GZIPInputStream(new ByteArrayInputStream(bytes));
            uncompressedReport = IOUtils.toString(zi);
        } finally {
            IOUtils.closeQuietly(zi);
        }
        logger.info("Report Succesfully Uncompressed...");
        return uncompressedReport;
    }

    private void createTmpReportsTable(String jsonFile) throws SQLException {
        Statement stmt = ConnectDB.getHiveConnection().createStatement();
        dropTmpReportsTable();
        String createTmpTable = "CREATE TEMPORARY TABLE " + ConnectDB.getDataSetUsageStatsDBSchema()
            + ".tmpjsonToTable (json STRING)";
        stmt.executeUpdate(createTmpTable);
        logger.info("Temporary Table for Json Report Created");

        String insertJsonReport = "LOAD DATA INPATH '" + jsonFile + "' INTO TABLE "
            + ConnectDB.getDataSetUsageStatsDBSchema() + ".tmpjsonToTable";
        stmt.execute(insertJsonReport);
        logger.info("JSON Report File inserted to tmpjsonToTable Table");
    }

    private void dropTmpReportsTable() throws SQLException {
        logger.info("Dropping tmpjson Table");
        String dropTmpTable = "DROP TABLE IF EXISTS " + ConnectDB.getDataSetUsageStatsDBSchema() + ".tmpjsonToTable";
        Statement stmt = ConnectDB.getHiveConnection().createStatement();
        stmt.executeUpdate(dropTmpTable);
        logger.info("Dropped Table for Json Report Table");

    }

    public void createUsageStatisticsTable() throws SQLException {
        logger.info("Dropping Downloads Stats table");
        Statement stmt = ConnectDB.getHiveConnection().createStatement();
        String dropDownloadsTable = "DROP TABLE IF EXISTS " + ConnectDB.getDataSetUsageStatsDBSchema()
            + ".datacite_downloads";
        stmt.executeUpdate(dropDownloadsTable);

        logger.info("Creating Downloads Stats table");
        String createDownloadsTable = "CREATE TABLE " + ConnectDB.getDataSetUsageStatsDBSchema()
            + ".datacite_downloads as "
            + "SELECT 'Datacite' source, d.id repository_id, od.id result_id, regexp_replace(substring(string(period_end),0,7),'-','/') date, count, '0' openaire "
            + "FROM " + ConnectDB.getDataSetUsageStatsDBSchema() + ".datasetsperformance "
            + "JOIN " + ConnectDB.getStatsDBSchema() + ".datasource d on name=platform "
            + "JOIN " + ConnectDB.getStatsDBSchema() + ".result_oids od on string(ds_type)=od.oid "
            + "where metric_type='total-dataset-requests'";
        stmt.executeUpdate(createDownloadsTable);
        logger.info("Downloads Stats table created");

        logger.info("Creating Views Stats table");
        String createViewsTable = "CREATE TABLE " + ConnectDB.getDataSetUsageStatsDBSchema() + ".datacite_views as "
            + "SELECT 'Datacite' source, d.id repository_id, od.id result_id, regexp_replace(substring(string(period_end),0,7),'-','/') date, count, '0' openaire "
            + "FROM " + ConnectDB.getDataSetUsageStatsDBSchema() + ".datasetsperformance "
            + "JOIN " + ConnectDB.getStatsDBSchema() + ".datasource d on name=platform "
            + "JOIN " + ConnectDB.getStatsDBSchema() + ".result_oids od on string(ds_type)=od.oid "
            + "where metric_type='total-dataset-investigations'";
        stmt.executeUpdate(createViewsTable);
        logger.info("Views Stats table created");
    }

}
@@ -0,0 +1,71 @@
package eu.dnetlib.oa.graph.datasetsusagestats.export;

import java.io.IOException;
import java.sql.Statement;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Main class for downloading and processing Usage statistics
 *
 * @author D. Pierrakos, S. Zoupanos
 */
public class UsageStatsExporter {

    private Statement stmt = null;

    public UsageStatsExporter() {

    }

    private static final Logger logger = LoggerFactory.getLogger(UsageStatsExporter.class);

    private void reCreateLogDirs() throws IllegalArgumentException, IOException {
        FileSystem dfs = FileSystem.get(new Configuration());

        logger.info("Deleting Log directory: " + ExecuteWorkflow.dataciteReportPath);
        dfs.delete(new Path(ExecuteWorkflow.dataciteReportPath), true);

        logger.info("Creating Log directory: " + ExecuteWorkflow.dataciteReportPath);
        dfs.mkdirs(new Path(ExecuteWorkflow.dataciteReportPath));

        logger.info("Creating tmp directory: " + ExecuteWorkflow.dataciteReportPath + " " + "/tmpjson/");
        dfs.mkdirs(new Path(ExecuteWorkflow.dataciteReportPath + "/tmpjson/"));

    }

    public void export() throws Exception {

        logger.info("Initialising DB properties");
        ConnectDB.init();
        ConnectDB.getHiveConnection();

        if (ExecuteWorkflow.recreateDbAndTables) {
            DatasetsStatsDB datasetsDB = new DatasetsStatsDB("", "");
            datasetsDB.recreateDBAndTables();
        }
        logger.info("Initializing the download logs module");
        DownloadReportsListFromDatacite downloadReportsListFromDatacite = new DownloadReportsListFromDatacite(
            ExecuteWorkflow.dataciteBaseURL,
            ExecuteWorkflow.dataciteReportPath);

        if (ExecuteWorkflow.datasetsEmptyDirs) {
            logger.info("Downloading Reports List From Datacite");
            this.reCreateLogDirs();
            downloadReportsListFromDatacite.downloadReportsList();
            logger.info("Reports List has been downloaded");
        }

        ReadReportsListFromDatacite readReportsListFromDatacite = new ReadReportsListFromDatacite(
            ExecuteWorkflow.dataciteReportPath);
        logger.info("Store Reports To DB");
        readReportsListFromDatacite.readReports();
        logger.info("Reports Stored To DB");
        readReportsListFromDatacite.createUsageStatisticsTable();
    }
}
@@ -0,0 +1,56 @@
[
    {
        "paramName": "dbu",
        "paramLongName": "dataciteBaseURL",
        "paramDescription": "URL of Datacite Reports Endpoint",
        "paramRequired": true
    },
    {
        "paramName": "drp",
        "paramLongName": "dataciteReportPath",
        "paramDescription": "Path for Datacite Reports",
        "paramRequired": true
    },
    {
        "paramName": "dbhu",
        "paramLongName": "dbHiveUrl",
        "paramDescription": "Hive server JDBC URL",
        "paramRequired": true
    },
    {
        "paramName": "dbiu",
        "paramLongName": "dbImpalaUrl",
        "paramDescription": "Impala server JDBC URL",
        "paramRequired": true
    },
    {
        "paramName": "dusdbs",
        "paramLongName": "datasetUsageStatsDBSchema",
        "paramDescription": "Schema for the dataset usage stats DB",
        "paramRequired": true
    },
    {
        "paramName": "sdbs",
        "paramLongName": "statsDBSchema",
        "paramDescription": "Schema of the stats DB",
        "paramRequired": true
    },
    {
        "paramName": "rdbt",
        "paramLongName": "recreateDbAndTables",
        "paramDescription": "Re-create database and initial tables?",
        "paramRequired": true
    },
    {
        "paramName": "pwed",
        "paramLongName": "datasetsEmptyDirs",
        "paramDescription": "Empty the Datacite report directories?",
        "paramRequired": true
    },
    {
        "paramName": "ftvi",
        "paramLongName": "finalTablesVisibleToImpala",
        "paramDescription": "Make the dataset_usage_stats tables visible to Impala",
        "paramRequired": true
    }
]
@@ -0,0 +1,38 @@
<configuration>
    <property>
        <name>jobTracker</name>
        <value>${jobTracker}</value>
    </property>
    <property>
        <name>nameNode</name>
        <value>${nameNode}</value>
    </property>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
    <property>
        <name>oozie.action.sharelib.for.spark</name>
        <value>spark2</value>
    </property>
    <property>
        <name>hiveMetastoreUris</name>
        <value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
    </property>
    <property>
        <name>hiveJdbcUrl</name>
        <value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000/;UseNativeQuery=1</value>
    </property>
    <property>
        <name>impalaJdbcUrl</name>
        <value>jdbc:hive2://iis-cdh5-test-gw.ocean.icm.edu.pl:21050/;auth=noSasl;</value>
    </property>
    <property>
        <name>oozie.wf.workflow.notification.url</name>
        <value>{serviceUrl}/v1/oozieNotification/jobUpdate?jobId=$jobId%26status=$status</value>
    </property>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
</configuration>
@@ -0,0 +1,70 @@
<workflow-app name="Usage Datasets Stats" xmlns="uri:oozie:workflow:0.5">
    <parameters>
        <property>
            <name>hiveMetastoreUris</name>
            <description>Hive server metastore URIs</description>
        </property>
        <property>
            <name>hiveJdbcUrl</name>
            <description>Hive server jdbc url</description>
        </property>
        <property>
            <name>impalaJdbcUrl</name>
            <description>Impala server jdbc url</description>
        </property>
    </parameters>

    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>hive.metastore.uris</name>
                <value>${hiveMetastoreUris}</value>
            </property>
            <property>
                <name>mapreduce.job.queuename</name>
                <value>${queueName}</value>
            </property>
            <property>
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${oozieLauncherQueueName}</value>
            </property>
        </configuration>
    </global>

    <start to="Step1"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <action name='Step1'>
        <java>
            <main-class>eu.dnetlib.oa.graph.datasetsusagestats.export.ExecuteWorkflow</main-class>
            <arg>--dataciteBaseURL</arg>
            <arg>${dataciteBaseURL}</arg>
            <arg>--dataciteReportPath</arg>
            <arg>${dataciteReportPath}</arg>
            <arg>--dbHiveUrl</arg>
            <arg>${hiveJdbcUrl}</arg>
            <arg>--dbImpalaUrl</arg>
            <arg>${impalaJdbcUrl}</arg>
            <arg>--datasetUsageStatsDBSchema</arg>
            <arg>${datasetUsageStatsDBSchema}</arg>
            <arg>--statsDBSchema</arg>
            <arg>${statsDBSchema}</arg>
            <arg>--recreateDbAndTables</arg>
            <arg>${recreateDbAndTables}</arg>
            <arg>--datasetsEmptyDirs</arg>
            <arg>${datasetsEmptyDirs}</arg>
            <arg>--finalTablesVisibleToImpala</arg>
            <arg>${finalTablesVisibleToImpala}</arg>
            <capture-output/>
        </java>
        <ok to="End" />
        <error to="Kill" />
    </action>

    <end name="End"/>
</workflow-app>
@@ -132,7 +132,7 @@ public class PiwikStatsDB {
 		+ "max(views) AS count, max(openaire_referrer) AS openaire "
 		+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".openaire_result_views_monthly_tmp p, "
 		+ ConnectDB.getStatsDBSchema() + ".datasource d, " + ConnectDB.getStatsDBSchema() + ".result_oids ro "
-		+ "WHERE p.source=d.piwik_id AND p.id=ro.oid AND ro.oid!='200' "
+		+ "WHERE p.source=d.piwik_id AND p.id=ro.oid AND ro.oid!='200' AND d.id!='re3data_____::7b0ad08687b2c960d5aeef06f811d5e6' "
 		+ "GROUP BY d.id, ro.id, month "
 		+ "ORDER BY d.id, ro.id, month ";
 	stmt.executeUpdate(create_views_stats);
@@ -145,7 +145,7 @@ public class PiwikStatsDB {
 		+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".openaire_result_views_monthly_tmp p, "
 		+ ConnectDB.getStatsDBSchema() + ".datasource d, " + ConnectDB.getStatsDBSchema() + ".result_oids ro "
 		+ "WHERE p.source=" + ExecuteWorkflow.portalMatomoID
-		+ " AND p.source=d.piwik_id and p.id=ro.id AND ro.oid!='200' "
+		+ " AND p.source=d.piwik_id and p.id=ro.id AND ro.oid!='200' AND d.id!='re3data_____::7b0ad08687b2c960d5aeef06f811d5e6' "
 		+ "GROUP BY d.id, ro.id, month "
 		+ "ORDER BY d.id, ro.id, month ";
 	stmt.executeUpdate(create_pageviews_stats);
@@ -194,7 +194,7 @@ public class PiwikStatsDB {
 		+ "max(downloads) AS count, max(openaire_referrer) AS openaire "
 		+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".openaire_result_downloads_monthly_tmp p, "
 		+ ConnectDB.getStatsDBSchema() + ".datasource d, " + ConnectDB.getStatsDBSchema() + ".result_oids ro "
-		+ "WHERE p.source=d.piwik_id and p.id=ro.oid AND ro.oid!='200' "
+		+ "WHERE p.source=d.piwik_id and p.id=ro.oid AND ro.oid!='200' AND d.id!='re3data_____::7b0ad08687b2c960d5aeef06f811d5e6' "
 		+ "GROUP BY d.id, ro.id, month "
 		+ "ORDER BY d.id, ro.id, month ";
 	stmt.executeUpdate(sql);
@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8"?>
<actions>
    <action>
        <actionName>test</actionName>
        <packagings>
            <packaging>*</packaging>
        </packagings>
        <goals>
            <goal>test</goal>
        </goals>
        <properties>
            <skipTests>true</skipTests>
        </properties>
    </action>
</actions>