Added usage-stats-promote

dimitrispie 2023-01-05 10:36:32 +02:00
parent fc40a4ddf7
commit 0654343479
7 changed files with 158 additions and 20 deletions

View File

@@ -152,25 +152,25 @@ public class PiwikStatsDB {
ReadCounterRobotsList counterRobots = new ReadCounterRobotsList(this.getCounterRobotsURL());
this.robotsList = counterRobots.getRobotsPatterns();
// logger.info("Processing repository logs");
// processRepositoryLog();
// logger.info("Repository logs process done");
//
// logger.info("Removing double clicks");
// removeDoubleClicks();
// logger.info("Removing double clicks done");
//
// logger.info("Cleaning oai");
// cleanOAI();
// logger.info("Cleaning oai done");
//
// logger.info("Processing portal logs");
// processPortalLog();
// logger.info("Portal logs process done");
//
// logger.info("Processing portal usagestats");
// portalLogs();
// logger.info("Portal usagestats process done");
logger.info("Processing repository logs");
processRepositoryLog();
logger.info("Repository logs process done");
logger.info("Removing double clicks");
removeDoubleClicks();
logger.info("Removing double clicks done");
logger.info("Cleaning oai");
cleanOAI();
logger.info("Cleaning oai done");
logger.info("Processing portal logs");
processPortalLog();
logger.info("Portal logs process done");
logger.info("Processing portal usagestats");
portalLogs();
logger.info("Portal usagestats process done");
logger.info("Process Episciences");
processEpisciencesLog();

View File

@@ -108,7 +108,7 @@ public class UsageStatsExporter {
logger.info("LaReferencia logs done");
}
IrusStats irusstats = new IrusStats(ExecuteWorkflow.irusUKBaseURL);
IrusStats irusstats = new IrusStats(ExecuteWorkflow.irusUKBaseURL_R5);
if (ExecuteWorkflow.irusCreateTablesEmptyDirs) {
logger.info("Creating Irus Stats tables");
irusstats.createTables();

View File

@@ -0,0 +1,32 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>dhp-workflows</artifactId>
<groupId>eu.dnetlib.dhp</groupId>
<version>1.2.4-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dhp-usage-stats-promote</artifactId>
<dependencies>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.11</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>pl.project13.maven</groupId>
<artifactId>git-commit-id-plugin</artifactId>
<version>2.1.11</version>
<configuration>
<failOnNoGitDirectory>false</failOnNoGitDirectory>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1 @@
mvn clean package -Poozie-package,deploy,run -Dworkflow.source.dir=eu/dnetlib/dhp/oa/graph/usagestatspromote
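The profile chain -Poozie-package,deploy,run packages the workflow app, pushes it to HDFS, and submits it in one step, following the usual dhp-workflows build conventions. A minimal sketch of a package-only variant, assuming the deploy and run profiles can simply be dropped:

# Hypothetical package-only build; no cluster interaction assumed.
mvn clean package -Poozie-package -Dworkflow.source.dir=eu/dnetlib/dhp/oa/graph/usagestatspromote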

View File

@@ -0,0 +1,30 @@
<configuration>
<property>
<name>jobTracker</name>
<value>${jobTracker}</value>
</property>
<property>
<name>nameNode</name>
<value>${nameNode}</value>
</property>
<property>
<name>oozie.use.system.libpath</name>
<value>true</value>
</property>
<property>
<name>oozie.action.sharelib.for.spark</name>
<value>spark2</value>
</property>
<property>
<name>hive_metastore_uris</name>
<value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
</property>
<property>
<name>hive_jdbc_url</name>
<value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000/;UseNativeQuery=1;?spark.executor.memory=19166291558;spark.yarn.executor.memoryOverhead=3225;spark.driver.memory=11596411699;spark.yarn.driver.memoryOverhead=1228</value>
</property>
<property>
<name>oozie.wf.workflow.notification.url</name>
<value>{serviceUrl}/v1/oozieNotification/jobUpdate?jobId=$jobId%26status=$status</value>
</property>
</configuration>
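For reference, Oozie substitutes the $jobId and $status tokens in oozie.wf.workflow.notification.url on every workflow state change, and %26 is a URL-encoded ampersand separating the two query parameters. A hypothetical resolved callback (the service host and job id below are invented placeholders, not values from this commit):

# Sketch of the callback Oozie would issue on a state change; host and id are placeholders.
curl "https://usagestats-service.example.org/v1/oozieNotification/jobUpdate?jobId=0000123-230105120000000-oozie-oozi-W&status=SUCCEEDED"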

View File

@@ -0,0 +1,16 @@
export PYTHON_EGG_CACHE=/home/$(whoami)/.python-eggs
export link_folder=/tmp/impala-shell-python-egg-cache-$(whoami)
# If the per-user egg-cache path is not already a symlink, replace whatever is
# there with a link into the user's Python egg cache (avoids impala-shell
# egg-cache clashes on shared hosts).
if ! [ -L "$link_folder" ]
then
    rm -Rf "$link_folder"
    ln -sfn "${PYTHON_EGG_CACHE}${link_folder}" "${link_folder}"
fi
export SOURCE=$1
export PRODUCTION=$2
echo "Updating ${PRODUCTION} database"
impala-shell -q "create database if not exists ${PRODUCTION}"
# Drop every existing view in the production database...
impala-shell -d ${PRODUCTION} -q "show tables" --delimited | sed "s/^/drop view if exists ${PRODUCTION}./" | sed "s/$/;/" | impala-shell -c -f -
# ...then recreate each one as a view over the corresponding source table.
impala-shell -d ${SOURCE} -q "show tables" --delimited | sed "s/\(.*\)/create view ${PRODUCTION}.\1 as select * from ${SOURCE}.\1;/" | impala-shell -c -f -
echo "Production db ready!"

View File

@@ -0,0 +1,59 @@
<workflow-app name="Usage Stats" xmlns="uri:oozie:workflow:0.5">
<parameters>
<property>
<name>usage_stats_db_name</name>
<description>the target usage stats database name</description>
</property>
<property>
<name>usage_stats_db_production_name</name>
<description>the name of the public production usage stats database</description>
</property>
<property>
<name>hive_metastore_uris</name>
<description>hive server metastore URIs</description>
</property>
<property>
<name>hive_jdbc_url</name>
<description>hive server jdbc url</description>
</property>
<property>
<name>hive_timeout</name>
<description>the time period, in seconds, after which Hive fails a transaction if a Hive client has not sent a heartbeat. The default value is 300 seconds.</description>
</property>
</parameters>
<global>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>hive.metastore.uris</name>
<value>${hive_metastore_uris}</value>
</property>
<property>
<name>hive.txn.timeout</name>
<value>${hive_timeout}</value>
</property>
</configuration>
</global>
<start to="updateProductionViews"/>
<kill name="Kill">
<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<action name="updateProductionViews">
<shell xmlns="uri:oozie:shell-action:0.1">
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<exec>updateProductionViews.sh</exec>
<argument>${usage_stats_db_name}</argument>
<argument>${usage_stats_db_production_name}</argument>
<file>updateProductionViews.sh</file>
</shell>
<ok to="End"/>
<error to="Kill"/>
</action>
<end name="End"/>
</workflow-app>
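Once the package is deployed, the workflow can be submitted with the standard Oozie CLI; a minimal sketch, where the Oozie endpoint is a placeholder and job.properties is assumed to supply the parameters declared above:

# Hypothetical submission; the endpoint URL is a placeholder.
oozie job -oozie http://oozie-host.example.org:11000/oozie -config job.properties -run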