Changes to download only updates

This commit is contained in:
Dimitris 2020-11-02 09:08:25 +02:00
parent b8a3392b59
commit 32bf943979
7 changed files with 1072 additions and 1065 deletions

View File

@ -78,20 +78,20 @@ public abstract class ConnectDB {
*/ */
ComboPooledDataSource cpds = new ComboPooledDataSource(); ComboPooledDataSource cpds = new ComboPooledDataSource();
cpds.setJdbcUrl(dbHiveUrl); cpds.setJdbcUrl(dbHiveUrl);
cpds.setAcquireIncrement(1); cpds.setAcquireIncrement(1);
cpds.setMaxPoolSize(100); cpds.setMaxPoolSize(100);
cpds.setMinPoolSize(1); cpds.setMinPoolSize(1);
cpds.setInitialPoolSize(1); cpds.setInitialPoolSize(1);
cpds.setMaxIdleTime(300); cpds.setMaxIdleTime(300);
cpds.setMaxConnectionAge(36000); cpds.setMaxConnectionAge(36000);
cpds.setAcquireRetryAttempts(5); cpds.setAcquireRetryAttempts(5);
cpds.setAcquireRetryDelay(2000); cpds.setAcquireRetryDelay(2000);
cpds.setBreakAfterAcquireFailure(false); cpds.setBreakAfterAcquireFailure(false);
cpds.setCheckoutTimeout(30000); cpds.setCheckoutTimeout(0);
cpds.setPreferredTestQuery("SELECT 1"); cpds.setPreferredTestQuery("SELECT 1");
cpds.setIdleConnectionTestPeriod(60); cpds.setIdleConnectionTestPeriod(60);
return cpds.getConnection(); return cpds.getConnection();
} }
@ -103,23 +103,23 @@ public abstract class ConnectDB {
*/ */
ComboPooledDataSource cpds = new ComboPooledDataSource(); ComboPooledDataSource cpds = new ComboPooledDataSource();
cpds.setJdbcUrl(dbImpalaUrl); cpds.setJdbcUrl(dbImpalaUrl);
cpds.setAcquireIncrement(1); cpds.setAcquireIncrement(1);
cpds.setMaxPoolSize(100); cpds.setMaxPoolSize(100);
cpds.setMinPoolSize(1); cpds.setMinPoolSize(1);
cpds.setInitialPoolSize(1); cpds.setInitialPoolSize(1);
cpds.setMaxIdleTime(300); cpds.setMaxIdleTime(300);
cpds.setMaxConnectionAge(36000); cpds.setMaxConnectionAge(36000);
cpds.setAcquireRetryAttempts(5); cpds.setAcquireRetryAttempts(5);
cpds.setAcquireRetryDelay(2000); cpds.setAcquireRetryDelay(2000);
cpds.setBreakAfterAcquireFailure(false); cpds.setBreakAfterAcquireFailure(false);
cpds.setCheckoutTimeout(30000); cpds.setCheckoutTimeout(0);
cpds.setPreferredTestQuery("SELECT 1"); cpds.setPreferredTestQuery("SELECT 1");
cpds.setIdleConnectionTestPeriod(60); cpds.setIdleConnectionTestPeriod(60);
return cpds.getConnection(); return cpds.getConnection();
} }
} }

View File

@ -1,4 +1,3 @@
package eu.dnetlib.oa.graph.usagestats.export; package eu.dnetlib.oa.graph.usagestats.export;
import java.io.*; import java.io.*;
@ -28,38 +27,38 @@ import org.slf4j.LoggerFactory;
*/ */
public class IrusStats { public class IrusStats {
private String irusUKURL; private String irusUKURL;
private static final Logger logger = LoggerFactory.getLogger(IrusStats.class); private static final Logger logger = LoggerFactory.getLogger(IrusStats.class);
public IrusStats(String irusUKURL) throws Exception { public IrusStats(String irusUKURL) throws Exception {
this.irusUKURL = irusUKURL; this.irusUKURL = irusUKURL;
// The following may not be needed - It will be created when JSON tables are created // The following may not be needed - It will be created when JSON tables are created
// createTmpTables(); // createTmpTables();
} }
public void reCreateLogDirs() throws Exception { public void reCreateLogDirs() throws Exception {
FileSystem dfs = FileSystem.get(new Configuration()); FileSystem dfs = FileSystem.get(new Configuration());
logger.info("Deleting irusUKReport directory: " + ExecuteWorkflow.irusUKReportPath); logger.info("Deleting irusUKReport directory: " + ExecuteWorkflow.irusUKReportPath);
dfs.delete(new Path(ExecuteWorkflow.irusUKReportPath), true); dfs.delete(new Path(ExecuteWorkflow.irusUKReportPath), true);
logger.info("Creating irusUKReport directory: " + ExecuteWorkflow.irusUKReportPath); logger.info("Creating irusUKReport directory: " + ExecuteWorkflow.irusUKReportPath);
dfs.mkdirs(new Path(ExecuteWorkflow.irusUKReportPath)); dfs.mkdirs(new Path(ExecuteWorkflow.irusUKReportPath));
} }
public void createTables() throws Exception { public void createTables() throws Exception {
try { try {
logger.info("Creating sushilog"); logger.info("Creating sushilog");
Statement stmt = ConnectDB.getHiveConnection().createStatement(); Statement stmt = ConnectDB.getHiveConnection().createStatement();
String sqlCreateTableSushiLog = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema() String sqlCreateTableSushiLog = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
+ ".sushilog(source STRING, " + + ".sushilog(source STRING, "
"repository STRING, rid STRING, date STRING, metric_type STRING, count INT) clustered by (source, " + + "repository STRING, rid STRING, date STRING, metric_type STRING, count INT) clustered by (source, "
"repository, rid, date, metric_type) into 100 buckets stored as orc tblproperties('transactional'='true')"; + "repository, rid, date, metric_type) into 100 buckets stored as orc tblproperties('transactional'='true')";
stmt.executeUpdate(sqlCreateTableSushiLog); stmt.executeUpdate(sqlCreateTableSushiLog);
logger.info("Created sushilog"); logger.info("Created sushilog");
// To see how to apply to the ignore duplicate rules and indexes // To see how to apply to the ignore duplicate rules and indexes
// stmt.executeUpdate(sqlCreateTableSushiLog); // stmt.executeUpdate(sqlCreateTableSushiLog);
// String sqlcreateRuleSushiLog = "CREATE OR REPLACE RULE ignore_duplicate_inserts AS " // String sqlcreateRuleSushiLog = "CREATE OR REPLACE RULE ignore_duplicate_inserts AS "
// + " ON INSERT TO sushilog " // + " ON INSERT TO sushilog "
@ -70,15 +69,14 @@ public class IrusStats {
// stmt.executeUpdate(sqlcreateRuleSushiLog); // stmt.executeUpdate(sqlcreateRuleSushiLog);
// String createSushiIndex = "create index if not exists sushilog_duplicates on sushilog(source, repository, rid, date, metric_type);"; // String createSushiIndex = "create index if not exists sushilog_duplicates on sushilog(source, repository, rid, date, metric_type);";
// stmt.executeUpdate(createSushiIndex); // stmt.executeUpdate(createSushiIndex);
stmt.close();
stmt.close(); ConnectDB.getHiveConnection().close();
ConnectDB.getHiveConnection().close(); logger.info("Sushi Tables Created");
logger.info("Sushi Tables Created"); } catch (Exception e) {
} catch (Exception e) { logger.error("Failed to create tables: " + e);
logger.error("Failed to create tables: " + e); throw new Exception("Failed to create tables: " + e.toString(), e);
throw new Exception("Failed to create tables: " + e.toString(), e); }
} }
}
// // The following may not be needed - It will be created when JSON tables are created // // The following may not be needed - It will be created when JSON tables are created
// private void createTmpTables() throws Exception { // private void createTmpTables() throws Exception {
@ -107,311 +105,315 @@ public class IrusStats {
// throw new Exception("Failed to create tables: " + e.toString(), e); // throw new Exception("Failed to create tables: " + e.toString(), e);
// } // }
// } // }
public void processIrusStats() throws Exception {
Statement stmt = ConnectDB.getHiveConnection().createStatement();
ConnectDB.getHiveConnection().setAutoCommit(false);
public void processIrusStats() throws Exception { logger.info("Adding JSON Serde jar");
Statement stmt = ConnectDB.getHiveConnection().createStatement(); stmt.executeUpdate("add jar /usr/share/cmf/common_jars/hive-hcatalog-core-1.1.0-cdh5.14.0.jar");
ConnectDB.getHiveConnection().setAutoCommit(false); logger.info("Added JSON Serde jar");
logger.info("Adding JSON Serde jar"); logger.info("Dropping sushilogtmp_json table");
stmt.executeUpdate("add jar /usr/share/cmf/common_jars/hive-hcatalog-core-1.1.0-cdh5.14.0.jar"); String dropSushilogtmpJson = "DROP TABLE IF EXISTS "
logger.info("Added JSON Serde jar"); + ConnectDB.getUsageStatsDBSchema()
+ ".sushilogtmp_json";
stmt.executeUpdate(dropSushilogtmpJson);
logger.info("Dropped sushilogtmp_json table");
logger.info("Dropping sushilogtmp_json table"); logger.info("Creating irus_sushilogtmp_json table");
String dropSushilogtmpJson = "DROP TABLE IF EXISTS " + String createSushilogtmpJson = "CREATE EXTERNAL TABLE IF NOT EXISTS "
ConnectDB.getUsageStatsDBSchema() + + ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp_json(\n"
".sushilogtmp_json"; + " `ItemIdentifier` ARRAY<\n"
stmt.executeUpdate(dropSushilogtmpJson); + " struct<\n"
logger.info("Dropped sushilogtmp_json table"); + " Type: STRING,\n"
+ " Value: STRING\n"
+ " >\n"
+ " >,\n"
+ " `ItemPerformance` ARRAY<\n"
+ " struct<\n"
+ " `Period`: struct<\n"
+ " `Begin`: STRING,\n"
+ " `End`: STRING\n"
+ " >,\n"
+ " `Instance`: struct<\n"
+ " `Count`: STRING,\n"
+ " `MetricType`: STRING\n"
+ " >\n"
+ " >\n"
+ " >\n"
+ ")\n"
+ "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'\n"
+ "LOCATION '" + ExecuteWorkflow.irusUKReportPath + "'\n"
+ "TBLPROPERTIES (\"transactional\"=\"false\")";
stmt.executeUpdate(createSushilogtmpJson);
logger.info("Created irus_sushilogtmp_json table");
logger.info("Creating irus_sushilogtmp_json table"); logger.info("Dropping irus_sushilogtmp table");
String createSushilogtmpJson = "CREATE EXTERNAL TABLE IF NOT EXISTS " + String dropSushilogtmp = "DROP TABLE IF EXISTS "
ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp_json(\n" + + ConnectDB.getUsageStatsDBSchema()
" `ItemIdentifier` ARRAY<\n" + + ".irus_sushilogtmp";
" struct<\n" + stmt.executeUpdate(dropSushilogtmp);
" Type: STRING,\n" + logger.info("Dropped irus_sushilogtmp table");
" Value: STRING\n" +
" >\n" +
" >,\n" +
" `ItemPerformance` ARRAY<\n" +
" struct<\n" +
" `Period`: struct<\n" +
" `Begin`: STRING,\n" +
" `End`: STRING\n" +
" >,\n" +
" `Instance`: struct<\n" +
" `Count`: STRING,\n" +
" `MetricType`: STRING\n" +
" >\n" +
" >\n" +
" >\n" +
")\n" +
"ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'\n" +
"LOCATION '" + ExecuteWorkflow.irusUKReportPath + "'\n" +
"TBLPROPERTIES (\"transactional\"=\"false\")";
stmt.executeUpdate(createSushilogtmpJson);
logger.info("Created irus_sushilogtmp_json table");
logger.info("Dropping irus_sushilogtmp table"); logger.info("Creating irus_sushilogtmp table");
String dropSushilogtmp = "DROP TABLE IF EXISTS " + String createSushilogtmp = "CREATE TABLE " + ConnectDB.getUsageStatsDBSchema()
ConnectDB.getUsageStatsDBSchema() + + ".irus_sushilogtmp(source STRING, repository STRING, "
".irus_sushilogtmp"; + "rid STRING, date STRING, metric_type STRING, count INT) clustered by (source) into 100 buckets stored as orc "
stmt.executeUpdate(dropSushilogtmp); + "tblproperties('transactional'='true')";
logger.info("Dropped irus_sushilogtmp table"); stmt.executeUpdate(createSushilogtmp);
logger.info("Created irus_sushilogtmp table");
logger.info("Creating irus_sushilogtmp table"); logger.info("Inserting to irus_sushilogtmp table");
String createSushilogtmp = "CREATE TABLE " + ConnectDB.getUsageStatsDBSchema() String insertSushilogtmp = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp "
+ ".irus_sushilogtmp(source STRING, repository STRING, " + + "SELECT 'IRUS-UK', CONCAT('opendoar____::', split(split(INPUT__FILE__NAME,'IrusIRReport_')[1],'_')[0]), "
"rid STRING, date STRING, metric_type STRING, count INT) clustered by (source) into 100 buckets stored as orc " + "`ItemIdent`.`Value`, `ItemPerf`.`Period`.`Begin`, "
+ + "`ItemPerf`.`Instance`.`MetricType`, `ItemPerf`.`Instance`.`Count` "
"tblproperties('transactional'='true')"; + "FROM " + ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp_json "
stmt.executeUpdate(createSushilogtmp); + "LATERAL VIEW posexplode(ItemIdentifier) ItemIdentifierTable AS seqi, ItemIdent "
logger.info("Created irus_sushilogtmp table"); + "LATERAL VIEW posexplode(ItemPerformance) ItemPerformanceTable AS seqp, ItemPerf "
+ "WHERE `ItemIdent`.`Type`= 'OAI'";
stmt.executeUpdate(insertSushilogtmp);
logger.info("Inserted to irus_sushilogtmp table");
logger.info("Inserting to irus_sushilogtmp table"); logger.info("Creating downloads_stats table");
String insertSushilogtmp = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp " + String createDownloadsStats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
"SELECT 'IRUS-UK', CONCAT('opendoar____::', split(split(INPUT__FILE__NAME,'IrusIRReport_')[1],'_')[0]), " + + ".downloads_stats "
"`ItemIdent`.`Value`, `ItemPerf`.`Period`.`Begin`, " + + "(`source` string, "
"`ItemPerf`.`Instance`.`MetricType`, `ItemPerf`.`Instance`.`Count` " + + "`repository_id` string, "
"FROM " + ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp_json " + + "`result_id` string, "
"LATERAL VIEW posexplode(ItemIdentifier) ItemIdentifierTable AS seqi, ItemIdent " + + "`date` string, "
"LATERAL VIEW posexplode(ItemPerformance) ItemPerformanceTable AS seqp, ItemPerf " + + "`count` bigint, "
"WHERE `ItemIdent`.`Type`= 'OAI'"; + "`openaire` bigint)";
stmt.executeUpdate(insertSushilogtmp); stmt.executeUpdate(createDownloadsStats);
logger.info("Inserted to irus_sushilogtmp table"); logger.info("Created downloads_stats table");
logger.info("Creating downloads_stats table"); logger.info("Inserting into downloads_stats");
String createDownloadsStats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema() String insertDStats = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats "
+ ".downloads_stats " + + "SELECT s.source, d.id AS repository_id, "
"(`source` string, " + + "ro.id as result_id, CONCAT(YEAR(date), '/', LPAD(MONTH(date), 2, '0')) as date, s.count, '0' "
"`repository_id` string, " + + "FROM " + ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp s, "
"`result_id` string, " + + ConnectDB.getStatsDBSchema() + ".datasource_oids d, "
"`date` string, " + + ConnectDB.getStatsDBSchema() + ".result_oids ro "
"`count` bigint, " + + "WHERE s.repository=d.oid AND s.rid=ro.oid AND metric_type='ft_total' AND s.source='IRUS-UK'";
"`openaire` bigint)"; stmt.executeUpdate(insertDStats);
stmt.executeUpdate(createDownloadsStats); logger.info("Inserted into downloads_stats");
logger.info("Created downloads_stats table");
logger.info("Inserting into downloads_stats"); logger.info("Creating sushilog table");
String insertDStats = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats " + String createSushilog = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
"SELECT s.source, d.id AS repository_id, " + + ".sushilog "
"ro.id as result_id, CONCAT(YEAR(date), '/', LPAD(MONTH(date), 2, '0')) as date, s.count, '0' " + + "(`source` string, "
"FROM " + ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp s, " + + "`repository_id` string, "
ConnectDB.getStatsDBSchema() + ".datasource_oids d, " + + "`rid` string, "
ConnectDB.getStatsDBSchema() + ".result_oids ro " + + "`date` string, "
"WHERE s.repository=d.oid AND s.rid=ro.oid AND metric_type='ft_total' AND s.source='IRUS-UK'"; + "`metric_type` string, "
stmt.executeUpdate(insertDStats); + "`count` int)";
logger.info("Inserted into downloads_stats"); stmt.executeUpdate(createSushilog);
logger.info("Created sushilog table");
logger.info("Creating sushilog table"); logger.info("Inserting to sushilog table");
String createSushilog = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema() String insertToShushilog = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".sushilog SELECT * FROM "
+ ".sushilog " + + ConnectDB.getUsageStatsDBSchema()
"(`source` string, " + + ".irus_sushilogtmp";
"`repository_id` string, " + stmt.executeUpdate(insertToShushilog);
"`rid` string, " + logger.info("Inserted to sushilog table");
"`date` string, " +
"`metric_type` string, " +
"`count` int)";
stmt.executeUpdate(createSushilog);
logger.info("Created sushilog table");
logger.info("Inserting to sushilog table"); ConnectDB.getHiveConnection().close();
String insertToShushilog = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".sushilog SELECT * FROM " + }
ConnectDB.getUsageStatsDBSchema()
+ ".irus_sushilogtmp";
stmt.executeUpdate(insertToShushilog);
logger.info("Inserted to sushilog table");
ConnectDB.getHiveConnection().close(); public void getIrusRRReport(String irusUKReportPath) throws Exception {
} SimpleDateFormat sdf = new SimpleDateFormat("YYYY-MM");
// Setting the starting period
Calendar start = (Calendar) ExecuteWorkflow.startingLogPeriod.clone();
logger.info("(getIrusRRReport) Starting period for log download: " + sdf.format(start.getTime()));
public void getIrusRRReport(String irusUKReportPath) throws Exception { // Setting the ending period (last day of the month)
SimpleDateFormat sdf = new SimpleDateFormat("YYYY-MM"); Calendar end = (Calendar) ExecuteWorkflow.endingLogPeriod.clone();
// Setting the starting period end.add(Calendar.MONTH, +1);
Calendar start = (Calendar) ExecuteWorkflow.startingLogPeriod.clone(); end.add(Calendar.DAY_OF_MONTH, -1);
logger.info("(getIrusRRReport) Starting period for log download: " + sdf.format(start.getTime())); logger.info("(getIrusRRReport) Ending period for log download: " + sdf.format(end.getTime()));
// Setting the ending period (last day of the month) String reportUrl = irusUKURL + "GetReport/?Report=RR1&Release=4&RequestorID=OpenAIRE&BeginDate="
Calendar end = (Calendar) ExecuteWorkflow.endingLogPeriod.clone(); + sdf.format(start.getTime()) + "&EndDate=" + sdf.format(end.getTime())
end.add(Calendar.MONTH, +1); + "&RepositoryIdentifier=&ItemDataType=&NewJiscBand=&Granularity=Monthly&Callback=";
end.add(Calendar.DAY_OF_MONTH, -1);
logger.info("(getIrusRRReport) Ending period for log download: " + sdf.format(end.getTime()));
String reportUrl = irusUKURL + "GetReport/?Report=RR1&Release=4&RequestorID=OpenAIRE&BeginDate=" + logger.info("(getIrusRRReport) Getting report: " + reportUrl);
sdf.format(start.getTime()) + "&EndDate=" + sdf.format(end.getTime()) +
"&RepositoryIdentifier=&ItemDataType=&NewJiscBand=&Granularity=Monthly&Callback=";
logger.info("(getIrusRRReport) Getting report: " + reportUrl); String text = getJson(reportUrl, "", "");
String text = getJson(reportUrl, "", ""); List<String> opendoarsToVisit = new ArrayList<String>();
JSONParser parser = new JSONParser();
JSONObject jsonObject = (JSONObject) parser.parse(text);
jsonObject = (JSONObject) jsonObject.get("ReportResponse");
jsonObject = (JSONObject) jsonObject.get("Report");
jsonObject = (JSONObject) jsonObject.get("Report");
jsonObject = (JSONObject) jsonObject.get("Customer");
JSONArray jsonArray = (JSONArray) jsonObject.get("ReportItems");
int i = 0;
for (Object aJsonArray : jsonArray) {
JSONObject jsonObjectRow = (JSONObject) aJsonArray;
JSONArray itemIdentifier = (JSONArray) jsonObjectRow.get("ItemIdentifier");
for (Object identifier : itemIdentifier) {
JSONObject opendoar = (JSONObject) identifier;
if (opendoar.get("Type").toString().equals("OpenDOAR")) {
i++;
opendoarsToVisit.add(opendoar.get("Value").toString());
break;
}
}
// break;
}
List<String> opendoarsToVisit = new ArrayList<String>(); logger.info("(getIrusRRReport) Found the following opendoars for download: " + opendoarsToVisit);
JSONParser parser = new JSONParser();
JSONObject jsonObject = (JSONObject) parser.parse(text);
jsonObject = (JSONObject) jsonObject.get("ReportResponse");
jsonObject = (JSONObject) jsonObject.get("Report");
jsonObject = (JSONObject) jsonObject.get("Report");
jsonObject = (JSONObject) jsonObject.get("Customer");
JSONArray jsonArray = (JSONArray) jsonObject.get("ReportItems");
int i = 0;
for (Object aJsonArray : jsonArray) {
JSONObject jsonObjectRow = (JSONObject) aJsonArray;
JSONArray itemIdentifier = (JSONArray) jsonObjectRow.get("ItemIdentifier");
for (Object identifier : itemIdentifier) {
JSONObject opendoar = (JSONObject) identifier;
if (opendoar.get("Type").toString().equals("OpenDOAR")) {
i++;
opendoarsToVisit.add(opendoar.get("Value").toString());
break;
}
}
// break;
}
logger.info("(getIrusRRReport) Found the following opendoars for download: " + opendoarsToVisit); if (ExecuteWorkflow.irusNumberOfOpendoarsToDownload > 0
&& ExecuteWorkflow.irusNumberOfOpendoarsToDownload <= opendoarsToVisit.size()) {
logger.info("Trimming siteIds list to the size of: " + ExecuteWorkflow.irusNumberOfOpendoarsToDownload);
opendoarsToVisit = opendoarsToVisit.subList(0, ExecuteWorkflow.irusNumberOfOpendoarsToDownload);
}
if (ExecuteWorkflow.irusNumberOfOpendoarsToDownload > 0 && logger.info("(getIrusRRReport) Downloading the following opendoars: " + opendoarsToVisit);
ExecuteWorkflow.irusNumberOfOpendoarsToDownload <= opendoarsToVisit.size()) {
logger.info("Trimming siteIds list to the size of: " + ExecuteWorkflow.irusNumberOfOpendoarsToDownload);
opendoarsToVisit = opendoarsToVisit.subList(0, ExecuteWorkflow.irusNumberOfOpendoarsToDownload);
}
logger.info("(getIrusRRReport) Downloading the following opendoars: " + opendoarsToVisit); for (String opendoar : opendoarsToVisit) {
logger.info("Now working on openDoar: " + opendoar);
this.getIrusIRReport(opendoar, irusUKReportPath);
}
for (String opendoar : opendoarsToVisit) { logger.info("(getIrusRRReport) Finished with report: " + reportUrl);
logger.info("Now working on openDoar: " + opendoar); }
this.getIrusIRReport(opendoar, irusUKReportPath);
}
logger.info("(getIrusRRReport) Finished with report: " + reportUrl); private void getIrusIRReport(String opendoar, String irusUKReportPath) throws Exception {
}
private void getIrusIRReport(String opendoar, String irusUKReportPath) throws Exception { logger.info("(getIrusIRReport) Getting report(s) with opendoar: " + opendoar);
logger.info("(getIrusIRReport) Getting report(s) with opendoar: " + opendoar); ConnectDB.getHiveConnection().setAutoCommit(false);
ConnectDB.getHiveConnection().setAutoCommit(false); SimpleDateFormat simpleDateFormat = new SimpleDateFormat("YYYY-MM");
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("YYYY-MM"); // Setting the starting period
Calendar start = (Calendar) ExecuteWorkflow.startingLogPeriod.clone();
logger.info("(getIrusIRReport) Starting period for log download: " + simpleDateFormat.format(start.getTime()));
// Setting the starting period // Setting the ending period (last day of the month)
Calendar start = (Calendar) ExecuteWorkflow.startingLogPeriod.clone(); Calendar end = (Calendar) ExecuteWorkflow.endingLogPeriod.clone();
logger.info("(getIrusIRReport) Starting period for log download: " + simpleDateFormat.format(start.getTime())); end.add(Calendar.MONTH, +1);
end.add(Calendar.DAY_OF_MONTH, -1);
logger.info("(getIrusIRReport) Ending period for log download: " + simpleDateFormat.format(end.getTime()));
// Setting the ending period (last day of the month) SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
Calendar end = (Calendar) ExecuteWorkflow.endingLogPeriod.clone(); PreparedStatement st = ConnectDB
end.add(Calendar.MONTH, +1); .getHiveConnection()
end.add(Calendar.DAY_OF_MONTH, -1); .prepareStatement(
logger.info("(getIrusIRReport) Ending period for log download: " + simpleDateFormat.format(end.getTime())); "SELECT max(date) FROM " + ConnectDB.getUsageStatsDBSchema() + ".sushilog WHERE repository=?");
st.setString(1, "opendoar____::" + opendoar);
ResultSet rs_date = st.executeQuery();
Date dateMax = null;
while (rs_date.next()) {
if (rs_date.getString(1) != null && !rs_date.getString(1).equals("null")
&& !rs_date.getString(1).equals("")) {
start.setTime(sdf.parse(rs_date.getString(1)));
dateMax = sdf.parse(rs_date.getString(1));
}
}
rs_date.close();
int batch_size = 0;
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); if (dateMax != null && start.getTime().compareTo(dateMax) <= 0) {
PreparedStatement st = ConnectDB logger.info("Date found in logs " + dateMax + " and not downloading logs for " + opendoar);
.getHiveConnection() } else {
.prepareStatement( while (start.before(end)) {
"SELECT max(date) FROM " + ConnectDB.getUsageStatsDBSchema() + ".sushilog WHERE repository=?"); logger.info("date: " + simpleDateFormat.format(start.getTime()));
st.setString(1, "opendoar____::" + opendoar); String reportUrl = this.irusUKURL + "GetReport/?Report=IR1&Release=4&RequestorID=OpenAIRE&BeginDate="
ResultSet rs_date = st.executeQuery(); + simpleDateFormat.format(start.getTime()) + "&EndDate=" + simpleDateFormat.format(start.getTime())
while (rs_date.next()) { + "&RepositoryIdentifier=opendoar%3A" + opendoar
if (rs_date.getString(1) != null && !rs_date.getString(1).equals("null") + "&ItemIdentifier=&ItemDataType=&hasDOI=&Granularity=Monthly&Callback=";
&& !rs_date.getString(1).equals("")) { start.add(Calendar.MONTH, 1);
start.setTime(sdf.parse(rs_date.getString(1)));
}
}
rs_date.close();
int batch_size = 0;
while (start.before(end)) { logger.info("Downloading file: " + reportUrl);
// log.info("date: " + simpleDateFormat.format(start.getTime())); String text = getJson(reportUrl, "", "");
String reportUrl = this.irusUKURL + "GetReport/?Report=IR1&Release=4&RequestorID=OpenAIRE&BeginDate=" if (text == null) {
+ simpleDateFormat.format(start.getTime()) + "&EndDate=" + simpleDateFormat.format(start.getTime()) continue;
+ "&RepositoryIdentifier=opendoar%3A" + opendoar }
+ "&ItemIdentifier=&ItemDataType=&hasDOI=&Granularity=Monthly&Callback=";
start.add(Calendar.MONTH, 1);
logger.info("Downloading file: " + reportUrl); FileSystem fs = FileSystem.get(new Configuration());
String text = getJson(reportUrl, "", ""); String filePath = irusUKReportPath + "/" + "IrusIRReport_"
if (text == null) { + opendoar + "_" + simpleDateFormat.format(start.getTime()) + ".json";
continue; logger.info("Storing to file: " + filePath);
} FSDataOutputStream fin = fs.create(new Path(filePath), true);
FileSystem fs = FileSystem.get(new Configuration()); JSONParser parser = new JSONParser();
String filePath = irusUKReportPath + "/" + "IrusIRReport_" + JSONObject jsonObject = (JSONObject) parser.parse(text);
opendoar + "_" + simpleDateFormat.format(start.getTime()) + ".json"; jsonObject = (JSONObject) jsonObject.get("ReportResponse");
logger.info("Storing to file: " + filePath); jsonObject = (JSONObject) jsonObject.get("Report");
FSDataOutputStream fin = fs.create(new Path(filePath), true); jsonObject = (JSONObject) jsonObject.get("Report");
jsonObject = (JSONObject) jsonObject.get("Customer");
JSONArray jsonArray = (JSONArray) jsonObject.get("ReportItems");
if (jsonArray == null) {
continue;
}
String oai = "";
for (Object aJsonArray : jsonArray) {
JSONObject jsonObjectRow = (JSONObject) aJsonArray;
fin.write(jsonObjectRow.toJSONString().getBytes());
fin.writeChar('\n');
}
JSONParser parser = new JSONParser(); fin.close();
JSONObject jsonObject = (JSONObject) parser.parse(text); }
jsonObject = (JSONObject) jsonObject.get("ReportResponse");
jsonObject = (JSONObject) jsonObject.get("Report");
jsonObject = (JSONObject) jsonObject.get("Report");
jsonObject = (JSONObject) jsonObject.get("Customer");
JSONArray jsonArray = (JSONArray) jsonObject.get("ReportItems");
if (jsonArray == null) {
continue;
}
String oai = "";
for (Object aJsonArray : jsonArray) {
JSONObject jsonObjectRow = (JSONObject) aJsonArray;
fin.write(jsonObjectRow.toJSONString().getBytes());
fin.writeChar('\n');
}
fin.close(); }
} //ConnectDB.getHiveConnection().close();
ConnectDB.getHiveConnection().close(); logger.info("(getIrusIRReport) Finished downloading report(s) with opendoar: " + opendoar);
}
logger.info("(getIrusIRReport) Finished downloading report(s) with opendoar: " + opendoar); private String getJson(String url) throws Exception {
} try {
System.out.println("===> Connecting to: " + url);
URL website = new URL(url);
System.out.println("Connection url -----> " + url);
URLConnection connection = website.openConnection();
private String getJson(String url) throws Exception { // connection.setRequestProperty ("Authorization", "Basic "+encoded);
try { StringBuilder response;
System.out.println("===> Connecting to: " + url); try (BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
URL website = new URL(url); response = new StringBuilder();
System.out.println("Connection url -----> " + url); String inputLine;
URLConnection connection = website.openConnection(); while ((inputLine = in.readLine()) != null) {
response.append(inputLine);
// connection.setRequestProperty ("Authorization", "Basic "+encoded);
StringBuilder response;
try (BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
response = new StringBuilder();
String inputLine;
while ((inputLine = in.readLine()) != null) {
response.append(inputLine);
// response.append("\n"); // response.append("\n");
} }
} }
System.out.println("response ====> " + response.toString()); System.out.println("response ====> " + response.toString());
return response.toString(); return response.toString();
} catch (Exception e) { } catch (Exception e) {
logger.error("Failed to get URL: " + e); logger.error("Failed to get URL: " + e);
System.out.println("Failed to get URL: " + e); System.out.println("Failed to get URL: " + e);
throw new Exception("Failed to get URL: " + e.toString(), e); throw new Exception("Failed to get URL: " + e.toString(), e);
} }
} }
private String getJson(String url, String username, String password) throws Exception { private String getJson(String url, String username, String password) throws Exception {
// String cred=username+":"+password; // String cred=username+":"+password;
// String encoded = new sun.misc.BASE64Encoder().encode (cred.getBytes()); // String encoded = new sun.misc.BASE64Encoder().encode (cred.getBytes());
try { try {
URL website = new URL(url); URL website = new URL(url);
URLConnection connection = website.openConnection(); URLConnection connection = website.openConnection();
// connection.setRequestProperty ("Authorization", "Basic "+encoded); // connection.setRequestProperty ("Authorization", "Basic "+encoded);
StringBuilder response; StringBuilder response;
try (BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()))) { try (BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
response = new StringBuilder(); response = new StringBuilder();
String inputLine; String inputLine;
while ((inputLine = in.readLine()) != null) { while ((inputLine = in.readLine()) != null) {
response.append(inputLine); response.append(inputLine);
response.append("\n"); response.append("\n");
} }
} }
return response.toString(); return response.toString();
} catch (Exception e) { } catch (Exception e) {
logger.error("Failed to get URL", e); logger.error("Failed to get URL", e);
return null; return null;
} }
} }
} }

View File

@ -1,4 +1,3 @@
package eu.dnetlib.oa.graph.usagestats.export; package eu.dnetlib.oa.graph.usagestats.export;
import java.io.*; import java.io.*;
@ -28,49 +27,49 @@ import org.slf4j.LoggerFactory;
*/ */
public class LaReferenciaDownloadLogs { public class LaReferenciaDownloadLogs {
private final String piwikUrl; private final String piwikUrl;
private Date startDate; private Date startDate;
private final String tokenAuth; private final String tokenAuth;
/* /*
* The Piwik's API method * The Piwik's API method
*/ */
private final String APImethod = "?module=API&method=Live.getLastVisitsDetails"; private final String APImethod = "?module=API&method=Live.getLastVisitsDetails";
private final String format = "&format=json"; private final String format = "&format=json";
private final String ApimethodGetAllSites = "?module=API&method=SitesManager.getSitesWithViewAccess"; private final String ApimethodGetAllSites = "?module=API&method=SitesManager.getSitesWithViewAccess";
private static final Logger logger = LoggerFactory.getLogger(LaReferenciaDownloadLogs.class); private static final Logger logger = LoggerFactory.getLogger(LaReferenciaDownloadLogs.class);
public LaReferenciaDownloadLogs(String piwikUrl, String tokenAuth) throws Exception { public LaReferenciaDownloadLogs(String piwikUrl, String tokenAuth) throws Exception {
this.piwikUrl = piwikUrl; this.piwikUrl = piwikUrl;
this.tokenAuth = tokenAuth; this.tokenAuth = tokenAuth;
this.createTables(); this.createTables();
// this.createTmpTables(); // this.createTmpTables();
} }
public void reCreateLogDirs() throws IllegalArgumentException, IOException { public void reCreateLogDirs() throws IllegalArgumentException, IOException {
FileSystem dfs = FileSystem.get(new Configuration()); FileSystem dfs = FileSystem.get(new Configuration());
logger.info("Deleting lareferenciaLog directory: " + ExecuteWorkflow.lareferenciaLogPath); logger.info("Deleting lareferenciaLog directory: " + ExecuteWorkflow.lareferenciaLogPath);
dfs.delete(new Path(ExecuteWorkflow.lareferenciaLogPath), true); dfs.delete(new Path(ExecuteWorkflow.lareferenciaLogPath), true);
logger.info("Creating lareferenciaLog directory: " + ExecuteWorkflow.lareferenciaLogPath); logger.info("Creating lareferenciaLog directory: " + ExecuteWorkflow.lareferenciaLogPath);
dfs.mkdirs(new Path(ExecuteWorkflow.lareferenciaLogPath)); dfs.mkdirs(new Path(ExecuteWorkflow.lareferenciaLogPath));
} }
private void createTables() throws Exception { private void createTables() throws Exception {
try { try {
Statement stmt = ConnectDB.getHiveConnection().createStatement(); Statement stmt = ConnectDB.getHiveConnection().createStatement();
logger.info("Creating LaReferencia tables"); logger.info("Creating LaReferencia tables");
String sqlCreateTableLareferenciaLog = "CREATE TABLE IF NOT EXISTS " + String sqlCreateTableLareferenciaLog = "CREATE TABLE IF NOT EXISTS "
ConnectDB.getUsageStatsDBSchema() + ".lareferencialog(matomoid INT, " + + ConnectDB.getUsageStatsDBSchema() + ".lareferencialog(matomoid INT, "
"source STRING, id_visit STRING, country STRING, action STRING, url STRING, entity_id STRING, " + + "source STRING, id_visit STRING, country STRING, action STRING, url STRING, entity_id STRING, "
"source_item_type STRING, timestamp STRING, referrer_name STRING, agent STRING) " + + "source_item_type STRING, timestamp STRING, referrer_name STRING, agent STRING) "
"clustered by (source, id_visit, action, timestamp, entity_id) into 100 buckets " + + "clustered by (source, id_visit, action, timestamp, entity_id) into 100 buckets "
"stored as orc tblproperties('transactional'='true')"; + "stored as orc tblproperties('transactional'='true')";
stmt.executeUpdate(sqlCreateTableLareferenciaLog); stmt.executeUpdate(sqlCreateTableLareferenciaLog);
logger.info("Created LaReferencia tables"); logger.info("Created LaReferencia tables");
// String sqlcreateRuleLaReferenciaLog = "CREATE OR REPLACE RULE ignore_duplicate_inserts AS " // String sqlcreateRuleLaReferenciaLog = "CREATE OR REPLACE RULE ignore_duplicate_inserts AS "
// + " ON INSERT TO lareferencialog " // + " ON INSERT TO lareferencialog "
// + " WHERE (EXISTS ( SELECT lareferencialog.matomoid, lareferencialog.source, lareferencialog.id_visit," // + " WHERE (EXISTS ( SELECT lareferencialog.matomoid, lareferencialog.source, lareferencialog.id_visit,"
@ -81,16 +80,16 @@ public class LaReferenciaDownloadLogs {
// stmt.executeUpdate(sqlcreateRuleLaReferenciaLog); // stmt.executeUpdate(sqlcreateRuleLaReferenciaLog);
// stmt.executeUpdate(sqlCreateRuleIndexLaReferenciaLog); // stmt.executeUpdate(sqlCreateRuleIndexLaReferenciaLog);
stmt.close(); stmt.close();
ConnectDB.getHiveConnection().close(); ConnectDB.getHiveConnection().close();
logger.info("Lareferencia Tables Created"); logger.info("Lareferencia Tables Created");
} catch (Exception e) { } catch (Exception e) {
logger.error("Failed to create tables: " + e); logger.error("Failed to create tables: " + e);
throw new Exception("Failed to create tables: " + e.toString(), e); throw new Exception("Failed to create tables: " + e.toString(), e);
// System.exit(0); // System.exit(0);
} }
} }
// private void createTmpTables() throws Exception { // private void createTmpTables() throws Exception {
// //
@ -115,147 +114,152 @@ public class LaReferenciaDownloadLogs {
// // System.exit(0); // // System.exit(0);
// } // }
// } // }
private String getPiwikLogUrl() {
return piwikUrl + "/";
}
private String getPiwikLogUrl() { private String getJson(String url) throws Exception {
return piwikUrl + "/"; try {
} URL website = new URL(url);
URLConnection connection = website.openConnection();
private String getJson(String url) throws Exception { StringBuilder response;
try { try (BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
URL website = new URL(url); response = new StringBuilder();
URLConnection connection = website.openConnection(); String inputLine;
while ((inputLine = in.readLine()) != null) {
StringBuilder response; response.append(inputLine);
try (BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
response = new StringBuilder();
String inputLine;
while ((inputLine = in.readLine()) != null) {
response.append(inputLine);
// response.append("\n"); // response.append("\n");
} }
} }
return response.toString(); return response.toString();
} catch (Exception e) { } catch (Exception e) {
logger.error("Failed to get URL: " + e); logger.error("Failed to get URL: " + e);
throw new Exception("Failed to get URL: " + e.toString(), e); throw new Exception("Failed to get URL: " + e.toString(), e);
} }
} }
public void GetLaReferenciaRepos(String repoLogsPath) throws Exception { public void GetLaReferenciaRepos(String repoLogsPath) throws Exception {
String baseApiUrl = getPiwikLogUrl() + ApimethodGetAllSites + format + "&token_auth=" + this.tokenAuth; String baseApiUrl = getPiwikLogUrl() + ApimethodGetAllSites + format + "&token_auth=" + this.tokenAuth;
String content = ""; String content = "";
List<Integer> siteIdsToVisit = new ArrayList<Integer>(); List<Integer> siteIdsToVisit = new ArrayList<Integer>();
// Getting all the siteIds in a list for logging reasons & limiting the list // Getting all the siteIds in a list for logging reasons & limiting the list
// to the max number of siteIds // to the max number of siteIds
content = getJson(baseApiUrl); content = getJson(baseApiUrl);
JSONParser parser = new JSONParser(); JSONParser parser = new JSONParser();
JSONArray jsonArray = (JSONArray) parser.parse(content); JSONArray jsonArray = (JSONArray) parser.parse(content);
for (Object aJsonArray : jsonArray) { for (Object aJsonArray : jsonArray) {
JSONObject jsonObjectRow = (JSONObject) aJsonArray; JSONObject jsonObjectRow = (JSONObject) aJsonArray;
siteIdsToVisit.add(Integer.parseInt(jsonObjectRow.get("idsite").toString())); siteIdsToVisit.add(Integer.parseInt(jsonObjectRow.get("idsite").toString()));
} }
logger.info("Found the following siteIds for download: " + siteIdsToVisit); logger.info("Found the following siteIds for download: " + siteIdsToVisit);
if (ExecuteWorkflow.numberOfPiwikIdsToDownload > 0 && if (ExecuteWorkflow.numberOfPiwikIdsToDownload > 0
ExecuteWorkflow.numberOfPiwikIdsToDownload <= siteIdsToVisit.size()) { && ExecuteWorkflow.numberOfPiwikIdsToDownload <= siteIdsToVisit.size()) {
logger.info("Trimming siteIds list to the size of: " + ExecuteWorkflow.numberOfPiwikIdsToDownload); logger.info("Trimming siteIds list to the size of: " + ExecuteWorkflow.numberOfPiwikIdsToDownload);
siteIdsToVisit = siteIdsToVisit.subList(0, ExecuteWorkflow.numberOfPiwikIdsToDownload); siteIdsToVisit = siteIdsToVisit.subList(0, ExecuteWorkflow.numberOfPiwikIdsToDownload);
} }
logger.info("Downloading from repos with the followins siteIds: " + siteIdsToVisit); logger.info("Downloading from repos with the followins siteIds: " + siteIdsToVisit);
for (int siteId : siteIdsToVisit) { for (int siteId : siteIdsToVisit) {
logger.info("Now working on piwikId: " + siteId); logger.info("Now working on LaReferencia MatomoId: " + siteId);
this.GetLaReFerenciaLogs(repoLogsPath, siteId); this.GetLaReFerenciaLogs(repoLogsPath, siteId);
} }
} }
public void GetLaReFerenciaLogs(String repoLogsPath, public void GetLaReFerenciaLogs(String repoLogsPath,
int laReferencialMatomoID) throws Exception { int laReferencialMatomoID) throws Exception {
logger.info("Downloading logs for LaReferencia repoid " + laReferencialMatomoID); logger.info("Downloading logs for LaReferencia repoid " + laReferencialMatomoID);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
// Setting the starting period // Setting the starting period
Calendar start = (Calendar) ExecuteWorkflow.startingLogPeriod.clone(); Calendar start = (Calendar) ExecuteWorkflow.startingLogPeriod.clone();
logger.info("Starting period for log download: " + sdf.format(start.getTime())); logger.info("Starting period for log download: " + sdf.format(start.getTime()));
// Setting the ending period (last day of the month) // Setting the ending period (last day of the month)
Calendar end = (Calendar) ExecuteWorkflow.endingLogPeriod.clone(); Calendar end = (Calendar) ExecuteWorkflow.endingLogPeriod.clone();
end.add(Calendar.MONTH, +1); end.add(Calendar.MONTH, +1);
end.add(Calendar.DAY_OF_MONTH, -1); end.add(Calendar.DAY_OF_MONTH, -1);
logger.info("Ending period for log download: " + sdf.format(end.getTime())); logger.info("Ending period for log download: " + sdf.format(end.getTime()));
PreparedStatement st = ConnectDB PreparedStatement st = ConnectDB
.getHiveConnection() .getHiveConnection()
.prepareStatement( .prepareStatement(
"SELECT max(timestamp) FROM " + ConnectDB.getUsageStatsDBSchema() + "SELECT max(timestamp) FROM " + ConnectDB.getUsageStatsDBSchema()
".lareferencialog WHERE matomoid=? GROUP BY timestamp HAVING max(timestamp) is not null"); + ".lareferencialog WHERE matomoid=?");
st.setInt(1, laReferencialMatomoID); st.setInt(1, laReferencialMatomoID);
Date dateMax = null;
ResultSet rs_date = st.executeQuery(); ResultSet rs_date = st.executeQuery();
while (rs_date.next()) { while (rs_date.next()) {
if (rs_date.getString(1) != null && !rs_date.getString(1).equals("null") if (rs_date.getString(1) != null && !rs_date.getString(1).equals("null")
&& !rs_date.getString(1).equals("")) { && !rs_date.getString(1).equals("")) {
start.setTime(sdf.parse(rs_date.getString(1))); start.setTime(sdf.parse(rs_date.getString(1)));
} dateMax = sdf.parse(rs_date.getString(1));
} }
rs_date.close(); }
rs_date.close();
for (Calendar currDay = (Calendar) start.clone(); currDay.before(end); currDay.add(Calendar.DATE, 1)) { for (Calendar currDay = (Calendar) start.clone(); currDay.before(end); currDay.add(Calendar.DATE, 1)) {
Date date = currDay.getTime(); Date date = currDay.getTime();
logger if (dateMax != null && currDay.getTime().compareTo(dateMax) <= 0) {
.info( logger.info("Date found in logs " + dateMax + " and not downloanding Matomo logs for " + laReferencialMatomoID);
"Downloading logs for LaReferencia repoid " + laReferencialMatomoID + " and for " } else {
+ sdf.format(date)); logger
.info(
"Downloading logs for LaReferencia repoid " + laReferencialMatomoID + " and for "
+ sdf.format(date));
String period = "&period=day&date=" + sdf.format(date); String period = "&period=day&date=" + sdf.format(date);
String outFolder = ""; String outFolder = "";
outFolder = repoLogsPath; outFolder = repoLogsPath;
FileSystem fs = FileSystem.get(new Configuration()); FileSystem fs = FileSystem.get(new Configuration());
FSDataOutputStream fin = fs FSDataOutputStream fin = fs
.create( .create(
new Path(outFolder + "/" + laReferencialMatomoID + "_LaRefPiwiklog" + sdf.format((date)) + ".json"), new Path(outFolder + "/" + laReferencialMatomoID + "_LaRefPiwiklog" + sdf.format((date)) + ".json"),
true); true);
String baseApiUrl = getPiwikLogUrl() + APImethod + "&idSite=" + laReferencialMatomoID + period + format String baseApiUrl = getPiwikLogUrl() + APImethod + "&idSite=" + laReferencialMatomoID + period + format
+ "&expanded=5&filter_limit=1000&token_auth=" + tokenAuth; + "&expanded=5&filter_limit=1000&token_auth=" + tokenAuth;
String content = ""; String content = "";
int i = 0; int i = 0;
JSONParser parser = new JSONParser(); JSONParser parser = new JSONParser();
do { do {
String apiUrl = baseApiUrl; String apiUrl = baseApiUrl;
if (i > 0) { if (i > 0) {
apiUrl += "&filter_offset=" + (i * 1000); apiUrl += "&filter_offset=" + (i * 1000);
} }
content = getJson(apiUrl); content = getJson(apiUrl);
if (content.length() == 0 || content.equals("[]")) if (content.length() == 0 || content.equals("[]")) {
break; break;
}
JSONArray jsonArray = (JSONArray) parser.parse(content); JSONArray jsonArray = (JSONArray) parser.parse(content);
for (Object aJsonArray : jsonArray) { for (Object aJsonArray : jsonArray) {
JSONObject jsonObjectRaw = (JSONObject) aJsonArray; JSONObject jsonObjectRaw = (JSONObject) aJsonArray;
fin.write(jsonObjectRaw.toJSONString().getBytes()); fin.write(jsonObjectRaw.toJSONString().getBytes());
fin.writeChar('\n'); fin.writeChar('\n');
} }
logger logger
.info( .info(
"Downloaded part " + i + " of logs for LaReferencia repoid " + laReferencialMatomoID "Downloaded part " + i + " of logs for LaReferencia repoid " + laReferencialMatomoID
+ " and for " + " and for "
+ sdf.format(date)); + sdf.format(date));
i++; i++;
} while (true); } while (true);
fin.close(); fin.close();
}
} }
} }
} }

View File

@ -204,6 +204,9 @@ public class PiwikDownloadLogs {
logger.info("Downloading from repos with the followins piwikIds: " + piwikIdToVisit); logger.info("Downloading from repos with the followins piwikIds: " + piwikIdToVisit);
// ExecutorService executor = Executors.newFixedThreadPool(ExecuteWorkflow.numberOfDownloadThreads);
for (int siteId : piwikIdToVisit) {
// Setting the starting period // Setting the starting period
Calendar start = (Calendar) ExecuteWorkflow.startingLogPeriod.clone(); Calendar start = (Calendar) ExecuteWorkflow.startingLogPeriod.clone();
logger.info("Starting period for log download: " + sdf.format(start.getTime())); logger.info("Starting period for log download: " + sdf.format(start.getTime()));
@ -214,9 +217,6 @@ public class PiwikDownloadLogs {
end.add(Calendar.DAY_OF_MONTH, -1); end.add(Calendar.DAY_OF_MONTH, -1);
logger.info("Ending period for log download: " + sdf.format(end.getTime())); logger.info("Ending period for log download: " + sdf.format(end.getTime()));
//ExecutorService executor = Executors.newFixedThreadPool(ExecuteWorkflow.numberOfDownloadThreads);
for (int siteId : piwikIdToVisit) {
logger.info("Now working on piwikId: " + siteId); logger.info("Now working on piwikId: " + siteId);
PreparedStatement st = ConnectDB.DB_HIVE_CONNECTION PreparedStatement st = ConnectDB.DB_HIVE_CONNECTION
@ -224,7 +224,7 @@ public class PiwikDownloadLogs {
"SELECT max(timestamp) FROM " + ConnectDB.getUsageStatsDBSchema() "SELECT max(timestamp) FROM " + ConnectDB.getUsageStatsDBSchema()
+ ".piwiklog WHERE source=?"); + ".piwiklog WHERE source=?");
st.setInt(1, siteId); st.setInt(1, siteId);
Date dateMax=null;
ResultSet rs_date = st.executeQuery(); ResultSet rs_date = st.executeQuery();
while (rs_date.next()) { while (rs_date.next()) {
logger.info("Found max date: " + rs_date.getString(1) + " for repository " + siteId); logger.info("Found max date: " + rs_date.getString(1) + " for repository " + siteId);
@ -232,85 +232,92 @@ public class PiwikDownloadLogs {
if (rs_date.getString(1) != null && !rs_date.getString(1).equals("null") if (rs_date.getString(1) != null && !rs_date.getString(1).equals("null")
&& !rs_date.getString(1).equals("")) { && !rs_date.getString(1).equals("")) {
start.setTime(sdf.parse(rs_date.getString(1))); start.setTime(sdf.parse(rs_date.getString(1)));
dateMax = sdf.parse(rs_date.getString(1));
} }
} }
rs_date.close(); rs_date.close();
for (Calendar currDay = (Calendar) start.clone(); currDay.before(end); currDay.add(Calendar.DATE, 1)) { for (Calendar currDay = (Calendar) start.clone(); currDay.before(end); currDay.add(Calendar.DATE, 1)) {
//logger.info("Date used " + currDay.toString()); // logger.info("Date used " + currDay.toString());
//Runnable worker = new WorkerThread(currDay, siteId, repoLogsPath, portalLogPath, portalMatomoID); // Runnable worker = new WorkerThread(currDay, siteId, repoLogsPath, portalLogPath, portalMatomoID);
//executor.execute(worker);// calling execute method of ExecutorService // executor.execute(worker);// calling execute method of ExecutorService
GetOpenAIRELogsForDate(currDay, siteId, repoLogsPath, portalLogPath, portalMatomoID); logger.info("Date used " + currDay.getTime().toString());
if(dateMax!=null && currDay.getTime().compareTo(dateMax)<=0)
logger.info("Date found in logs "+dateMax+ " and not downloanding Matomo logs for "+siteId);
else
GetOpenAIRELogsForDate(currDay, siteId, repoLogsPath, portalLogPath, portalMatomoID);
} }
} }
//executor.shutdown(); // executor.shutdown();
//while (!executor.isTerminated()) { // while (!executor.isTerminated()) {
//} // }
//System.out.println("Finished all threads"); // System.out.println("Finished all threads");
} }
public void GetOpenAIRELogsForDate(Calendar currDay, int siteId, String repoLogsPath, String portalLogPath,
String portalMatomoID) throws Exception {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
Date date = currDay.getTime(); public void GetOpenAIRELogsForDate(Calendar currDay, int siteId, String repoLogsPath, String portalLogPath,
logger.info("Downloading logs for repoid " + siteId + " and for " + sdf.format(date)); String portalMatomoID) throws Exception {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
String period = "&period=day&date=" + sdf.format(date); Date date = currDay.getTime();
String outFolder = ""; logger.info("Downloading logs for repoid " + siteId + " and for " + sdf.format(date));
if (siteId == Integer.parseInt(portalMatomoID)) {
outFolder = portalLogPath; String period = "&period=day&date=" + sdf.format(date);
} else { String outFolder = "";
outFolder = repoLogsPath; if (siteId == Integer.parseInt(portalMatomoID)) {
outFolder = portalLogPath;
} else {
outFolder = repoLogsPath;
}
String baseApiUrl = getPiwikLogUrl() + APImethod + "&idSite=" + siteId + period + format
+ "&expanded=5&filter_limit=1000&token_auth=" + tokenAuth;
String content = "";
int i = 0;
JSONParser parser = new JSONParser();
StringBuffer totalContent = new StringBuffer();
FileSystem fs = FileSystem.get(new Configuration());
do {
int writtenBytes = 0;
String apiUrl = baseApiUrl;
if (i > 0) {
apiUrl += "&filter_offset=" + (i * 1000);
} }
String baseApiUrl = getPiwikLogUrl() + APImethod + "&idSite=" + siteId + period + format content = getJson(apiUrl);
+ "&expanded=5&filter_limit=1000&token_auth=" + tokenAuth; if (content.length() == 0 || content.equals("[]"))
String content = ""; break;
int i = 0; FSDataOutputStream fin = fs
.create(
new Path(outFolder + "/" + siteId + "_Piwiklog" + sdf.format((date)) + "_offset_" + i
+ ".json"),
true);
JSONArray jsonArray = (JSONArray) parser.parse(content);
for (Object aJsonArray : jsonArray) {
JSONObject jsonObjectRaw = (JSONObject) aJsonArray;
byte[] jsonObjectRawBytes = jsonObjectRaw.toJSONString().getBytes();
fin.write(jsonObjectRawBytes);
fin.writeChar('\n');
JSONParser parser = new JSONParser(); writtenBytes += jsonObjectRawBytes.length + 1;
StringBuffer totalContent = new StringBuffer(); }
FileSystem fs = FileSystem.get(new Configuration());
do { fin.close();
int writtenBytes = 0; System.out
String apiUrl = baseApiUrl; .println(
Thread.currentThread().getName() + " (Finished writing) Wrote " + writtenBytes
+ " bytes. Filename: " + siteId + "_Piwiklog" + sdf.format((date)) + "_offset_" + i
+ ".json");
if (i > 0) { i++;
apiUrl += "&filter_offset=" + (i * 1000); } while (true);
}
content = getJson(apiUrl); fs.close();
if (content.length() == 0 || content.equals("[]")) }
break;
FSDataOutputStream fin = fs
.create(
new Path(outFolder + "/" + siteId + "_Piwiklog" + sdf.format((date)) + "_offset_" + i
+ ".json"),
true);
JSONArray jsonArray = (JSONArray) parser.parse(content);
for (Object aJsonArray : jsonArray) {
JSONObject jsonObjectRaw = (JSONObject) aJsonArray;
byte[] jsonObjectRawBytes = jsonObjectRaw.toJSONString().getBytes();
fin.write(jsonObjectRawBytes);
fin.writeChar('\n');
writtenBytes += jsonObjectRawBytes.length + 1;
}
fin.close();
System.out
.println(
Thread.currentThread().getName() + " (Finished writing) Wrote " + writtenBytes
+ " bytes. Filename: " + siteId + "_Piwiklog" + sdf.format((date)) + "_offset_" + i
+ ".json");
i++;
} while (true);
fs.close();
}
} }

View File

@ -199,14 +199,14 @@ public class PiwikStatsDB {
cleanOAI(); cleanOAI();
logger.info("Cleaning oai done"); logger.info("Cleaning oai done");
logger.info("Processing portal logs"); logger.info("Processing portal logs");
processPortalLog(); processPortalLog();
logger.info("Portal logs process done"); logger.info("Portal logs process done");
logger.info("Processing portal usagestats"); logger.info("Processing portal usagestats");
portalStats(); portalStats();
logger.info("Portal usagestats process done"); logger.info("Portal usagestats process done");
logger.info("ViewsStats processing starts"); logger.info("ViewsStats processing starts");
viewsStats(); viewsStats();
logger.info("ViewsStats processing ends"); logger.info("ViewsStats processing ends");
@ -215,8 +215,6 @@ public class PiwikStatsDB {
downloadsStats(); downloadsStats();
logger.info("DownloadsStats processing starts"); logger.info("DownloadsStats processing starts");
logger.info("Updating Production Tables"); logger.info("Updating Production Tables");
updateProdTables(); updateProdTables();
logger.info("Updated Production Tables"); logger.info("Updated Production Tables");
@ -313,7 +311,7 @@ public class PiwikStatsDB {
"SELECT DISTINCT p1.source, p1.id_visit, p1.action, p1.entity_id, p1.timestamp \n" + "SELECT DISTINCT p1.source, p1.id_visit, p1.action, p1.entity_id, p1.timestamp \n" +
"FROM " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp p1, " + "FROM " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp p1, " +
ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp p2\n" + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp p2\n" +
"WHERE p1.source!='5' AND p1.source=p2.source AND p1.id_visit=p2.id_visit AND p1.entity_id=p2.entity_id \n" "WHERE p1.source=p2.source AND p1.id_visit=p2.id_visit AND p1.entity_id=p2.entity_id \n"
+ +
"AND p1.action=p2.action AND p1.action='download' AND p1.timestamp!=p2.timestamp \n" + "AND p1.action=p2.action AND p1.action='download' AND p1.timestamp!=p2.timestamp \n" +
"AND p1.timestamp<p2.timestamp AND ((unix_timestamp(p2.timestamp)-unix_timestamp(p1.timestamp))/60)<30 \n" + "AND p1.timestamp<p2.timestamp AND ((unix_timestamp(p2.timestamp)-unix_timestamp(p1.timestamp))/60)<30 \n" +
@ -329,7 +327,7 @@ public class PiwikStatsDB {
"SELECT DISTINCT p1.source, p1.id_visit, p1.action, p1.entity_id, p1.timestamp \n" + "SELECT DISTINCT p1.source, p1.id_visit, p1.action, p1.entity_id, p1.timestamp \n" +
"FROM " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp p1, " + "FROM " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp p1, " +
ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp p2\n" + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp p2\n" +
"WHERE p1.source!='5' AND p1.source=p2.source AND p1.id_visit=p2.id_visit AND p1.entity_id=p2.entity_id \n" "WHERE p1.source=p2.source AND p1.id_visit=p2.id_visit AND p1.entity_id=p2.entity_id \n"
+ +
"AND p1.action=p2.action AND p1.action='action' AND p1.timestamp!=p2.timestamp \n" + "AND p1.action=p2.action AND p1.action='action' AND p1.timestamp!=p2.timestamp \n" +
"AND p1.timestamp<p2.timestamp AND (unix_timestamp(p2.timestamp)-unix_timestamp(p1.timestamp))<10 \n" + "AND p1.timestamp<p2.timestamp AND (unix_timestamp(p2.timestamp)-unix_timestamp(p1.timestamp))<10 \n" +
@ -380,22 +378,22 @@ public class PiwikStatsDB {
"max(views) AS count, max(openaire_referrer) AS openaire " + "max(views) AS count, max(openaire_referrer) AS openaire " +
"FROM " + ConnectDB.getUsageStatsDBSchema() + ".result_views_monthly_tmp p, " + "FROM " + ConnectDB.getUsageStatsDBSchema() + ".result_views_monthly_tmp p, " +
ConnectDB.getStatsDBSchema() + ".datasource d, " + ConnectDB.getStatsDBSchema() + ".result_oids ro " + ConnectDB.getStatsDBSchema() + ".datasource d, " + ConnectDB.getStatsDBSchema() + ".result_oids ro " +
"WHERE p.source!='5' AND p.source=d.piwik_id AND p.id=ro.oid " + "WHERE p.source=d.piwik_id AND p.id=ro.oid " +
"GROUP BY d.id, ro.id, month " + "GROUP BY d.id, ro.id, month " +
"ORDER BY d.id, ro.id, month"; "ORDER BY d.id, ro.id, month";
stmt.executeUpdate(create_views_stats_tmp); stmt.executeUpdate(create_views_stats_tmp);
logger.info("Created views_stats_tmp table"); logger.info("Created views_stats_tmp table");
/*
logger.info("Dropping views_stats table"); logger.info("Dropping views_stats table");
String drop_views_stats = "DROP TABLE IF EXISTS " + String drop_views_stats = "DROP TABLE IF EXISTS " +
ConnectDB.getUsageStatsDBSchema() + ConnectDB.getUsageStatsDBSchema() +
".views_stats"; ".views_stats";
stmt.executeUpdate(drop_views_stats); stmt.executeUpdate(drop_views_stats);
logger.info("Dropped views_stats table"); logger.info("Dropped views_stats table");
*/
logger.info("Creating views_stats table"); logger.info("Creating views_stats table");
String create_view_stats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".views_stats " + String create_view_stats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".views_stats " +
"STORED AS PARQUET AS SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".views_stats_tmp"; "LIKE " + ConnectDB.getUsageStatsDBSchema() + ".views_stats_tmp STORED AS PARQUET";
stmt.executeUpdate(create_view_stats); stmt.executeUpdate(create_view_stats);
logger.info("Created views_stats table"); logger.info("Created views_stats table");
@ -412,23 +410,23 @@ public class PiwikStatsDB {
"'OpenAIRE' as source, d.id as repository_id, ro.id as result_id, month as date, max(views) AS count " + "'OpenAIRE' as source, d.id as repository_id, ro.id as result_id, month as date, max(views) AS count " +
"FROM " + ConnectDB.getUsageStatsDBSchema() + ".result_views_monthly_tmp p, " + "FROM " + ConnectDB.getUsageStatsDBSchema() + ".result_views_monthly_tmp p, " +
ConnectDB.getStatsDBSchema() + ".datasource d, " + ConnectDB.getStatsDBSchema() + ".result_oids ro " + ConnectDB.getStatsDBSchema() + ".datasource d, " + ConnectDB.getStatsDBSchema() + ".result_oids ro " +
"WHERE p.source="+ExecuteWorkflow.portalMatomoID +" AND p.source=d.piwik_id and p.id=ro.id \n" + "WHERE p.source=" + ExecuteWorkflow.portalMatomoID + " AND p.source=d.piwik_id and p.id=ro.id \n" +
"GROUP BY d.id, ro.id, month " + "GROUP BY d.id, ro.id, month " +
"ORDER BY d.id, ro.id, month"; "ORDER BY d.id, ro.id, month";
stmt.executeUpdate(create_pageviews_stats_tmp); stmt.executeUpdate(create_pageviews_stats_tmp);
logger.info("Created pageviews_stats_tmp table"); logger.info("Created pageviews_stats_tmp table");
logger.info("Droping pageviews_stats table"); /* logger.info("Droping pageviews_stats table");
String drop_pageviews_stats = "DROP TABLE IF EXISTS " + String drop_pageviews_stats = "DROP TABLE IF EXISTS " +
ConnectDB.getUsageStatsDBSchema() + ConnectDB.getUsageStatsDBSchema() +
".pageviews_stats"; ".pageviews_stats";
stmt.executeUpdate(drop_pageviews_stats); stmt.executeUpdate(drop_pageviews_stats);
logger.info("Dropped pageviews_stats table"); logger.info("Dropped pageviews_stats table");
*/
logger.info("Creating pageviews_stats table"); logger.info("Creating pageviews_stats table");
String create_pageviews_stats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema() String create_pageviews_stats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
+ ".pageviews_stats " + + ".pageviews_stats " +
"STORED AS PARQUET AS SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".pageviews_stats_tmp"; "LIKE " + ConnectDB.getUsageStatsDBSchema() + ".pageviews_stats_tmp STORED AS PARQUET";
stmt.executeUpdate(create_pageviews_stats); stmt.executeUpdate(create_pageviews_stats);
logger.info("Created pageviews_stats table"); logger.info("Created pageviews_stats table");
@ -477,19 +475,19 @@ public class PiwikStatsDB {
"ORDER BY d.id, ro.id, month"; "ORDER BY d.id, ro.id, month";
stmt.executeUpdate(sql); stmt.executeUpdate(sql);
logger.info("Created downloads_stats_tmp table"); logger.info("Created downloads_stats_tmp table");
/*
logger.info("Dropping downloads_stats table"); logger.info("Dropping downloads_stats table");
String drop_downloads_stats = "DROP TABLE IF EXISTS " + String drop_downloads_stats = "DROP TABLE IF EXISTS " +
ConnectDB.getUsageStatsDBSchema() + ConnectDB.getUsageStatsDBSchema() +
".downloads_stats"; ".downloads_stats";
stmt.executeUpdate(drop_downloads_stats); stmt.executeUpdate(drop_downloads_stats);
logger.info("Dropped downloads_stats table"); logger.info("Dropped downloads_stats table");
*/
logger.info("Creating downloads_stats table"); logger.info("Creating downloads_stats table");
String create_pageviews_stats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema() String create_downloads_stats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
+ ".downloads_stats " + + ".downloads_stats " +
"STORED AS PARQUET AS SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats_tmp"; "LIKE " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats_tmp STORED AS PARQUET ";
stmt.executeUpdate(create_pageviews_stats); stmt.executeUpdate(create_downloads_stats);
logger.info("Created downloads_stats table"); logger.info("Created downloads_stats table");
logger.info("Dropping result_downloads_monthly_tmp view"); logger.info("Dropping result_downloads_monthly_tmp view");
@ -843,18 +841,15 @@ public class PiwikStatsDB {
stmt.executeUpdate(sql); stmt.executeUpdate(sql);
stmt.close(); stmt.close();
/* logger.info("PortalStats - Step 3"); /*
stmt = con.createStatement(); * logger.info("PortalStats - Step 3"); stmt = con.createStatement(); sql = "INSERT INTO " +
sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp " + * ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp " +
"SELECT DISTINCT source, id_visit, country, action, url, entity_id, 'organization', `timestamp`, referrer_name, agent " * "SELECT DISTINCT source, id_visit, country, action, url, entity_id, 'organization', `timestamp`, referrer_name, agent "
+ * + "FROM " + ConnectDB.getUsageStatsDBSchema() + ".process_portal_log_tmp " +
"FROM " + ConnectDB.getUsageStatsDBSchema() + ".process_portal_log_tmp " + * "WHERE process_portal_log_tmp.entity_id IS NOT NULL AND process_portal_log_tmp.entity_id " +
"WHERE process_portal_log_tmp.entity_id IS NOT NULL AND process_portal_log_tmp.entity_id " + * "IN (SELECT roid.id FROM " + ConnectDB.getStatsDBSchema() +
"IN (SELECT roid.id FROM " + ConnectDB.getStatsDBSchema() * ".organization_oids roid WHERE roid.id IS NOT NULL)"; // stmt.executeUpdate(sql); stmt.close();
+ ".organization_oids roid WHERE roid.id IS NOT NULL)"; */
// stmt.executeUpdate(sql);
stmt.close();
*/
logger.info("PortalStats - Step 3"); logger.info("PortalStats - Step 3");
stmt = con.createStatement(); stmt = con.createStatement();
sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp " + sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp " +
@ -866,7 +861,7 @@ public class PiwikStatsDB {
+ ".project_oids roid WHERE roid.id IS NOT NULL)"; + ".project_oids roid WHERE roid.id IS NOT NULL)";
stmt.executeUpdate(sql); stmt.executeUpdate(sql);
stmt.close(); stmt.close();
con.close(); con.close();
} }
@ -1172,22 +1167,16 @@ public class PiwikStatsDB {
"SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".pageviews_stats_tmp"; "SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".pageviews_stats_tmp";
stmt.executeUpdate(sql); stmt.executeUpdate(sql);
/* logger.info("Dropping table views_stats_tmp"); /*
sql = "DROP TABLE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".views_stats_tmp"; * logger.info("Dropping table views_stats_tmp"); sql = "DROP TABLE IF EXISTS " +
stmt.executeUpdate(sql); * ConnectDB.getUsageStatsDBSchema() + ".views_stats_tmp"; stmt.executeUpdate(sql);
* logger.info("Dropping table downloads_stats_tmp"); sql = "DROP TABLE IF EXISTS " +
logger.info("Dropping table downloads_stats_tmp"); * ConnectDB.getUsageStatsDBSchema() + ".downloads_stats_tmp"; stmt.executeUpdate(sql);
sql = "DROP TABLE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats_tmp"; * logger.info("Dropping table pageviews_stats_tmp"); sql = "DROP TABLE IF EXISTS " +
stmt.executeUpdate(sql); * ConnectDB.getUsageStatsDBSchema() + ".pageviews_stats_tmp"; stmt.executeUpdate(sql);
* logger.info("Dropping table process_portal_log_tmp"); sql = "DROP TABLE IF EXISTS " +
logger.info("Dropping table pageviews_stats_tmp"); * ConnectDB.getUsageStatsDBSchema() + ".process_portal_log_tmp"; stmt.executeUpdate(sql);
sql = "DROP TABLE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".pageviews_stats_tmp"; */
stmt.executeUpdate(sql);
logger.info("Dropping table process_portal_log_tmp");
sql = "DROP TABLE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".process_portal_log_tmp";
stmt.executeUpdate(sql);
*/
stmt.close(); stmt.close();
ConnectDB.getHiveConnection().close(); ConnectDB.getHiveConnection().close();

View File

@ -173,7 +173,7 @@ public class UsageStatsExporter {
sql = "INVALIDATE METADATA " + ConnectDB.getUsageStatsDBSchema() + ".pageviews_stats"; sql = "INVALIDATE METADATA " + ConnectDB.getUsageStatsDBSchema() + ".pageviews_stats";
stmt.executeUpdate(sql); stmt.executeUpdate(sql);
stmt.close(); stmt.close();
ConnectDB.getHiveConnection().close(); ConnectDB.getHiveConnection().close();
} }
} }