package eu.dnetlib.oa.graph.usagestats.export;

import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Main class for downloading and processing usage statistics.
 *
 * @author D. Pierrakos, S. Zoupanos
 */
public class UsageStatsExporter {
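
	// Typical invocation (a sketch, assuming the usual dnet-hadoop wiring): ExecuteWorkflow parses the
	// workflow arguments into its static fields and then calls:
	//
	//     UsageStatsExporter exporter = new UsageStatsExporter();
	//     exporter.export();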

	private static final Logger logger = LoggerFactory.getLogger(UsageStatsExporter.class);

	public UsageStatsExporter() {
	}

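	/**
	 * Ad-hoc helper that runs a single query against Impala. The hard-coded table names below
	 * (usagestats_20200913.spyros_tmp6 and the commented-out spyros_tmp5 variant) look like one-off
	 * debugging tables rather than part of the regular workflow; export() keeps its call commented out.
	 */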
	public void runImpalaQuery() throws Exception {
		Statement stmt = ConnectDB.getImpalaConnection().createStatement();
		ConnectDB.getImpalaConnection().setAutoCommit(false);

		logger.info("Executing Impala query");

		ResultSet rs = stmt
			.executeQuery(
//				"CREATE TABLE usagestats_20200913.spyros_tmp5 AS\n" +
//				"SELECT s.source, d.id AS repository_id, ro.id as result_id, s.count, '0' \n" +
//				"FROM usagestats_20200913.sarc_sushilogtmp2 s, \n" +
//				"openaire_prod_stats_shadow_20200821.datasource_oids d, \n" +
//				"openaire_prod_stats_shadow_20200821.datasource_results dr, \n" +
//				"openaire_prod_stats_shadow_20200821.result_pids ro \n" +
//				"WHERE d.oid LIKE CONCAT('%', s.repository, '%') AND dr.id=d.id AND dr.result=ro.id \n" +
//				"AND s.rid=ro.pid AND ro.type='doi' AND metric_type='ft_total' AND s.source='SARC-OJS' ");
				"CREATE TABLE usagestats_20200913.spyros_tmp6 AS\n" +
					"SELECT * \n" +
					"FROM usagestats_20200913.sarc_sushilogtmp2");

		stmt.close();
	}

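	/**
	 * Deletes and re-creates the HDFS directories that the repository, portal and LaReferencia log
	 * downloads write into, so each run starts from an empty log area.
	 */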
	private void reCreateLogDirs() throws IllegalArgumentException, IOException {
		FileSystem dfs = FileSystem.get(new Configuration());

		logger.info("Deleting repoLog directory: " + ExecuteWorkflow.repoLogPath);
		dfs.delete(new Path(ExecuteWorkflow.repoLogPath), true);

		logger.info("Deleting portalLog directory: " + ExecuteWorkflow.portalLogPath);
		dfs.delete(new Path(ExecuteWorkflow.portalLogPath), true);

		logger.info("Deleting lareferenciaLog directory: " + ExecuteWorkflow.lareferenciaLogPath);
		dfs.delete(new Path(ExecuteWorkflow.lareferenciaLogPath), true);

		logger.info("Creating repoLog directory: " + ExecuteWorkflow.repoLogPath);
		dfs.mkdirs(new Path(ExecuteWorkflow.repoLogPath));

		logger.info("Creating portalLog directory: " + ExecuteWorkflow.portalLogPath);
		dfs.mkdirs(new Path(ExecuteWorkflow.portalLogPath));

		logger.info("Creating lareferenciaLog directory: " + ExecuteWorkflow.lareferenciaLogPath);
		dfs.mkdirs(new Path(ExecuteWorkflow.lareferenciaLogPath));
	}

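	/**
	 * Entry point of the exporter: initializes the DB connections, then runs each stage (piwik/Matomo,
	 * LaReferencia, IRUS-UK, SARC) behind its ExecuteWorkflow flag, and finally finalizes the stats
	 * tables and refreshes Impala's view of them.
	 */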
	public void export() throws Exception {

		logger.info("Initializing DB properties");
		ConnectDB.init();

		// runImpalaQuery();

		PiwikStatsDB piwikstatsdb = new PiwikStatsDB(ExecuteWorkflow.repoLogPath, ExecuteWorkflow.portalLogPath);

		logger.info("Re-creating database and tables");
		if (ExecuteWorkflow.recreateDbAndTables) {
			piwikstatsdb.recreateDBAndTables();
		}

logger.info("Initializing the download logs module");
|
|
PiwikDownloadLogs piwd = new PiwikDownloadLogs(ExecuteWorkflow.matomoBaseURL, ExecuteWorkflow.matomoAuthToken);
|
|
|
|
// Downloading piwik logs (also managing directory creation)
|
|
if (ExecuteWorkflow.downloadPiwikLogs) {
|
|
logger.info("Recreating log directories");
|
|
reCreateLogDirs();
|
|
|
|
logger.info("Downloading piwik logs");
|
|
piwd
|
|
.GetOpenAIRELogs(
|
|
ExecuteWorkflow.repoLogPath,
|
|
ExecuteWorkflow.portalLogPath, ExecuteWorkflow.portalMatomoID);
|
|
}
|
|
logger.info("Downloaded piwik logs");

		// Create DB tables, insert/update statistics
		String cRobotsUrl = "https://raw.githubusercontent.com/atmire/COUNTER-Robots/master/COUNTER_Robots_list.json";
		piwikstatsdb.setCounterRobotsURL(cRobotsUrl);

		if (ExecuteWorkflow.processPiwikLogs) {
			logger.info("Processing piwik logs");
			piwikstatsdb.processLogs();
		}

		logger.info("Creating LaReferencia tables");
		LaReferenciaDownloadLogs lrf = new LaReferenciaDownloadLogs(ExecuteWorkflow.lareferenciaBaseURL,
			ExecuteWorkflow.lareferenciaAuthToken);

		if (ExecuteWorkflow.downloadLaReferenciaLogs) {
			logger.info("Downloading LaReferencia logs");
			lrf.GetLaReferenciaRepos(ExecuteWorkflow.lareferenciaLogPath);
			logger.info("Downloaded LaReferencia logs");
		}

		LaReferenciaStats lastats = new LaReferenciaStats(ExecuteWorkflow.lareferenciaLogPath);

		if (ExecuteWorkflow.processLaReferenciaLogs) {
			logger.info("Processing LaReferencia logs");
			lastats.processLogs();
			logger.info("LaReferencia logs done");
		}

		IrusStats irusstats = new IrusStats(ExecuteWorkflow.irusUKBaseURL);
		if (ExecuteWorkflow.irusCreateTablesEmptyDirs) {
			logger.info("Creating Irus Stats tables");
			irusstats.createTables();
			logger.info("Created Irus Stats tables");

			logger.info("Re-creating log dirs");
			irusstats.reCreateLogDirs();
			logger.info("Re-created log dirs");
		}

		if (ExecuteWorkflow.irusDownloadReports) {
			irusstats.getIrusRRReport(ExecuteWorkflow.irusUKReportPath);
		}
		if (ExecuteWorkflow.irusProcessStats) {
			irusstats.processIrusStats();
			logger.info("Irus done");
		}

		SarcStats sarcStats = new SarcStats();
		if (ExecuteWorkflow.sarcCreateTablesEmptyDirs) {
			sarcStats.reCreateLogDirs();
		}
		if (ExecuteWorkflow.sarcDownloadReports) {
			sarcStats.getAndProcessSarc(ExecuteWorkflow.sarcsReportPathArray, ExecuteWorkflow.sarcsReportPathNonArray);
		}
		if (ExecuteWorkflow.sarcProcessStats) {
			sarcStats.processSarc(ExecuteWorkflow.sarcsReportPathArray, ExecuteWorkflow.sarcsReportPathNonArray);
			sarcStats.finalizeSarcStats();
		}
		logger.info("Sarc done");

		// finalize usagestats
		piwikstatsdb.finalizeStats();
		logger.info("Finalized stats");

		// Make the tables available to Impala
		logger.info("Making tables visible to Impala");
		invalidateMetadata();

		logger.info("End");
	}

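	/**
	 * Runs INVALIDATE METADATA on the usage-stats tables so that Impala picks up tables created or
	 * updated through Hive during this run.
	 */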
	private void invalidateMetadata() throws SQLException {
		Statement stmt = ConnectDB.getImpalaConnection().createStatement();

		String sql = "INVALIDATE METADATA " + ConnectDB.getUsageStatsDBSchema() + ".piwiklog";
		stmt.executeUpdate(sql);

		sql = "INVALIDATE METADATA " + ConnectDB.getUsageStatsDBSchema() + ".sushilog";
		stmt.executeUpdate(sql);

		sql = "INVALIDATE METADATA " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats";
		stmt.executeUpdate(sql);

		sql = "INVALIDATE METADATA " + ConnectDB.getUsageStatsDBSchema() + ".views_stats";
		stmt.executeUpdate(sql);

		sql = "INVALIDATE METADATA " + ConnectDB.getUsageStatsDBSchema() + ".lareferencialog";
		stmt.executeUpdate(sql);

		stmt.close();
		ConnectDB.getImpalaConnection().close();
	}

}