dnet-hadoop/dhp-workflows/dhp-usage-stats-update/src/main/java/eu/dnetlib/oa/graph/usagestats/export/UsageStatsExporter.java

package eu.dnetlib.oa.graph.usagestats.export;

import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Properties;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
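
/**
 * Entry point of the usage-stats export workflow. It wires together the
 * modules that download and process usage logs (Piwik/Matomo, LaReferencia,
 * IRUS-UK, SARC) into the usage-statistics database. The {@code properties}
 * field is expected to be populated before {@link #export()} is called, since
 * it is handed to {@code ConnectDB.init}.
 */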
public class UsageStatsExporter {

	private static final Logger logger = LoggerFactory.getLogger(UsageStatsExporter.class);

	private Properties properties;
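
	/**
	 * Runs a single hard-coded test query against Impala. The live statement
	 * copies sarc_sushilogtmp2 into a scratch table; the commented-out variant
	 * joins the SARC sushi log with the stats shadow schema instead.
	 */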
	public void runImpalaQuery() throws Exception {
		Statement stmt = ConnectDB.getImpalaConnection().createStatement();
		ConnectDB.getImpalaConnection().setAutoCommit(false);

		logger.info("====> Executing Impala query");
		ResultSet rs = stmt
			.executeQuery(
//				"CREATE TABLE usagestats_20200913.spyros_tmp5 AS\n" +
//				"SELECT s.source, d.id AS repository_id, ro.id as result_id, s.count, '0' \n" +
//				"FROM usagestats_20200913.sarc_sushilogtmp2 s, \n" +
//				"openaire_prod_stats_shadow_20200821.datasource_oids d, \n" +
//				"openaire_prod_stats_shadow_20200821.datasource_results dr, \n" +
//				"openaire_prod_stats_shadow_20200821.result_pids ro \n" +
//				"WHERE d.oid LIKE CONCAT('%', s.repository, '%') AND dr.id=d.id AND dr.result=ro.id \n" +
//				"AND s.rid=ro.pid AND ro.type='doi' AND metric_type='ft_total' AND s.source='SARC-OJS' ");
				"CREATE TABLE usagestats_20200913.spyros_tmp6 AS\n" +
					"SELECT * \n" +
					"FROM usagestats_20200913.sarc_sushilogtmp2");

		rs.close();
		stmt.close();
	}
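
	/**
	 * Main workflow method. In this snapshot it only initialises the DB
	 * connection, runs the Impala test query and exits; the rest of the
	 * pipeline below is left in place but never runs because of the early
	 * {@code System.exit(0)}.
	 */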
	public void export() throws Exception {

		logger.info("=====> Test of the logger (info)");
		logger.debug("=====> Test of the logger (debug)");
		logger.error("=====> Test of the logger (error)");

		// Connect to the DB
		logger.info("====> Initialising DB properties");
		ConnectDB.init(properties);

		runImpalaQuery();

		// NOTE: everything below this point is unreachable while this early exit is in place
		System.exit(0);

		// Create the DB tables; they are also needed for downloading the statistics
		logger.info("====> Creating database and tables");
		PiwikStatsDB piwikstatsdb = new PiwikStatsDB(ExecuteWorkflow.repoLogPath, ExecuteWorkflow.portalLogPath);

		// Download the statistics. The download call below is not needed once the
		// logs have been fetched, so it is commented out for the moment.
		logger.info("====> Initializing the download logs module");
		PiwikDownloadLogs piwd = new PiwikDownloadLogs(ExecuteWorkflow.matomoBaseURL, ExecuteWorkflow.matomoAuthToken);
		logger.info("====> Downloading piwik logs");
//		piwd.GetOpenAIRELogs(repoLogPath, portalLogPath, portalMatomoID);
		logger.info("====> Downloaded piwik logs");

		// Create DB tables, insert/update statistics
//		String cRobotsUrl = properties.getProperty("COUNTER_robots_Url");
		String cRobotsUrl = "https://raw.githubusercontent.com/atmire/COUNTER-Robots/master/COUNTER_Robots_list.json";
		piwikstatsdb.setCounterRobotsURL(cRobotsUrl);
		logger.info("====> Processing logs");
		piwikstatsdb.processLogs();
//		log.info("process logs done");

		logger.info("====> Creating LaReferencia tables");
		LaReferenciaDownloadLogs lrf = new LaReferenciaDownloadLogs(ExecuteWorkflow.lareferenciaBaseURL,
			ExecuteWorkflow.lareferenciaAuthToken);
		logger.info("====> Downloading LaReferencia logs");
//		lrf.GetLaReferenciaRepos(lareferenciaLogPath);
		logger.info("====> Downloaded LaReferencia logs");

		LaReferenciaStats lastats = new LaReferenciaStats(ExecuteWorkflow.lareferenciaLogPath);
		logger.info("====> Processing LaReferencia logs");
//		lastats.processLogs();
//		log.info("LaReferencia logs done");

//		IrusStats irusstats = new IrusStats(irusUKBaseURL);
//		irusstats.getIrusRRReport(irusUKReportPath);
//		irusstats.processIrusStats();
//		log.info("irus done");

//		SarcStats sarcStats = new SarcStats();
//		sarcStats.getAndProcessSarc(sarcsReportPathArray, sarcsReportPathNonArray);
//		sarcStats.finalizeSarcStats();
//		log.info("sarc done");

//		// finalize usagestats
//		piwikstatsdb.finalizeStats();
//		log.info("finalized stats");
	}
}
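
// A minimal invocation sketch (hypothetical wiring; in this module the class is
// driven by ExecuteWorkflow, which also supplies the static configuration fields):
//
//     UsageStatsExporter exporter = new UsageStatsExporter();
//     exporter.export();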