Changes to download only updates

Dimitris 2020-11-02 09:08:25 +02:00
parent b8a3392b59
commit 32bf943979
7 changed files with 1072 additions and 1065 deletions

ConnectDB.java

@@ -89,7 +89,7 @@ public abstract class ConnectDB {
 		cpds.setAcquireRetryDelay(2000);
 		cpds.setBreakAfterAcquireFailure(false);
-		cpds.setCheckoutTimeout(30000);
+		cpds.setCheckoutTimeout(0);
 		cpds.setPreferredTestQuery("SELECT 1");
 		cpds.setIdleConnectionTestPeriod(60);
 		return cpds.getConnection();
@@ -114,7 +114,7 @@ public abstract class ConnectDB {
 		cpds.setAcquireRetryDelay(2000);
 		cpds.setBreakAfterAcquireFailure(false);
-		cpds.setCheckoutTimeout(30000);
+		cpds.setCheckoutTimeout(0);
 		cpds.setPreferredTestQuery("SELECT 1");
 		cpds.setIdleConnectionTestPeriod(60);
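
The only functional change in ConnectDB is setCheckoutTimeout(30000) becoming setCheckoutTimeout(0). In c3p0, checkoutTimeout is how many milliseconds a client waits for a free connection when the pool is exhausted; 0 means wait indefinitely instead of failing after 30 seconds, which suits long-running batch exports. A minimal sketch of the setting (the JDBC URL is a placeholder, not the project's configuration):

    import java.sql.Connection;

    import com.mchange.v2.c3p0.ComboPooledDataSource;

    // Minimal c3p0 sketch; the JDBC URL is a placeholder.
    public class PoolTimeoutSketch {
        public static Connection open() throws Exception {
            ComboPooledDataSource cpds = new ComboPooledDataSource();
            cpds.setJdbcUrl("jdbc:hive2://example-host:10000/default"); // placeholder
            // 30000 -> getConnection() throws after 30 s when no connection is free;
            // 0 -> getConnection() blocks until one becomes available.
            cpds.setCheckoutTimeout(0);
            return cpds.getConnection();
        }
    }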

IrusStats.java

@@ -1,4 +1,3 @@
 package eu.dnetlib.oa.graph.usagestats.export;
-
 import java.io.*;
@@ -53,9 +52,9 @@ public class IrusStats {
 		logger.info("Creating sushilog");
 		Statement stmt = ConnectDB.getHiveConnection().createStatement();
 		String sqlCreateTableSushiLog = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
-			+ ".sushilog(source STRING, " +
-			"repository STRING, rid STRING, date STRING, metric_type STRING, count INT) clustered by (source, " +
-			"repository, rid, date, metric_type) into 100 buckets stored as orc tblproperties('transactional'='true')";
+			+ ".sushilog(source STRING, "
+			+ "repository STRING, rid STRING, date STRING, metric_type STRING, count INT) clustered by (source, "
+			+ "repository, rid, date, metric_type) into 100 buckets stored as orc tblproperties('transactional'='true')";
 		stmt.executeUpdate(sqlCreateTableSushiLog);
 		logger.info("Created sushilog");
@@ -70,7 +69,6 @@ public class IrusStats {
 //		stmt.executeUpdate(sqlcreateRuleSushiLog);
 //		String createSushiIndex = "create index if not exists sushilog_duplicates on sushilog(source, repository, rid, date, metric_type);";
 //		stmt.executeUpdate(createSushiIndex);
-
 		stmt.close();
 		ConnectDB.getHiveConnection().close();
 		logger.info("Sushi Tables Created");
@@ -107,7 +105,6 @@ public class IrusStats {
 //			throw new Exception("Failed to create tables: " + e.toString(), e);
 //		}
 //	}
-
 	public void processIrusStats() throws Exception {
 		Statement stmt = ConnectDB.getHiveConnection().createStatement();
 		ConnectDB.getHiveConnection().setAutoCommit(false);
@@ -117,106 +114,105 @@ public class IrusStats {
 		logger.info("Added JSON Serde jar");
 		logger.info("Dropping sushilogtmp_json table");
-		String dropSushilogtmpJson = "DROP TABLE IF EXISTS " +
-			ConnectDB.getUsageStatsDBSchema() +
-			".sushilogtmp_json";
+		String dropSushilogtmpJson = "DROP TABLE IF EXISTS "
+			+ ConnectDB.getUsageStatsDBSchema()
+			+ ".sushilogtmp_json";
 		stmt.executeUpdate(dropSushilogtmpJson);
 		logger.info("Dropped sushilogtmp_json table");
 		logger.info("Creating irus_sushilogtmp_json table");
-		String createSushilogtmpJson = "CREATE EXTERNAL TABLE IF NOT EXISTS " +
-			ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp_json(\n" +
-			"  `ItemIdentifier` ARRAY<\n" +
-			"    struct<\n" +
-			"      Type: STRING,\n" +
-			"      Value: STRING\n" +
-			"    >\n" +
-			"  >,\n" +
-			"  `ItemPerformance` ARRAY<\n" +
-			"    struct<\n" +
-			"      `Period`: struct<\n" +
-			"        `Begin`: STRING,\n" +
-			"        `End`: STRING\n" +
-			"      >,\n" +
-			"      `Instance`: struct<\n" +
-			"        `Count`: STRING,\n" +
-			"        `MetricType`: STRING\n" +
-			"      >\n" +
-			"    >\n" +
-			"  >\n" +
-			")\n" +
-			"ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'\n" +
-			"LOCATION '" + ExecuteWorkflow.irusUKReportPath + "'\n" +
-			"TBLPROPERTIES (\"transactional\"=\"false\")";
+		String createSushilogtmpJson = "CREATE EXTERNAL TABLE IF NOT EXISTS "
+			+ ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp_json(\n"
+			+ "  `ItemIdentifier` ARRAY<\n"
+			+ "    struct<\n"
+			+ "      Type: STRING,\n"
+			+ "      Value: STRING\n"
+			+ "    >\n"
+			+ "  >,\n"
+			+ "  `ItemPerformance` ARRAY<\n"
+			+ "    struct<\n"
+			+ "      `Period`: struct<\n"
+			+ "        `Begin`: STRING,\n"
+			+ "        `End`: STRING\n"
+			+ "      >,\n"
+			+ "      `Instance`: struct<\n"
+			+ "        `Count`: STRING,\n"
+			+ "        `MetricType`: STRING\n"
+			+ "      >\n"
+			+ "    >\n"
+			+ "  >\n"
+			+ ")\n"
+			+ "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'\n"
+			+ "LOCATION '" + ExecuteWorkflow.irusUKReportPath + "'\n"
+			+ "TBLPROPERTIES (\"transactional\"=\"false\")";
 		stmt.executeUpdate(createSushilogtmpJson);
 		logger.info("Created irus_sushilogtmp_json table");
 		logger.info("Dropping irus_sushilogtmp table");
-		String dropSushilogtmp = "DROP TABLE IF EXISTS " +
-			ConnectDB.getUsageStatsDBSchema() +
-			".irus_sushilogtmp";
+		String dropSushilogtmp = "DROP TABLE IF EXISTS "
+			+ ConnectDB.getUsageStatsDBSchema()
+			+ ".irus_sushilogtmp";
 		stmt.executeUpdate(dropSushilogtmp);
 		logger.info("Dropped irus_sushilogtmp table");
 		logger.info("Creating irus_sushilogtmp table");
 		String createSushilogtmp = "CREATE TABLE " + ConnectDB.getUsageStatsDBSchema()
-			+ ".irus_sushilogtmp(source STRING, repository STRING, " +
-			"rid STRING, date STRING, metric_type STRING, count INT) clustered by (source) into 100 buckets stored as orc "
-			+
-			"tblproperties('transactional'='true')";
+			+ ".irus_sushilogtmp(source STRING, repository STRING, "
+			+ "rid STRING, date STRING, metric_type STRING, count INT) clustered by (source) into 100 buckets stored as orc "
+			+ "tblproperties('transactional'='true')";
 		stmt.executeUpdate(createSushilogtmp);
 		logger.info("Created irus_sushilogtmp table");
 		logger.info("Inserting to irus_sushilogtmp table");
-		String insertSushilogtmp = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp " +
-			"SELECT 'IRUS-UK', CONCAT('opendoar____::', split(split(INPUT__FILE__NAME,'IrusIRReport_')[1],'_')[0]), " +
-			"`ItemIdent`.`Value`, `ItemPerf`.`Period`.`Begin`, " +
-			"`ItemPerf`.`Instance`.`MetricType`, `ItemPerf`.`Instance`.`Count` " +
-			"FROM " + ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp_json " +
-			"LATERAL VIEW posexplode(ItemIdentifier) ItemIdentifierTable AS seqi, ItemIdent " +
-			"LATERAL VIEW posexplode(ItemPerformance) ItemPerformanceTable AS seqp, ItemPerf " +
-			"WHERE `ItemIdent`.`Type`= 'OAI'";
+		String insertSushilogtmp = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp "
+			+ "SELECT 'IRUS-UK', CONCAT('opendoar____::', split(split(INPUT__FILE__NAME,'IrusIRReport_')[1],'_')[0]), "
+			+ "`ItemIdent`.`Value`, `ItemPerf`.`Period`.`Begin`, "
+			+ "`ItemPerf`.`Instance`.`MetricType`, `ItemPerf`.`Instance`.`Count` "
+			+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp_json "
+			+ "LATERAL VIEW posexplode(ItemIdentifier) ItemIdentifierTable AS seqi, ItemIdent "
+			+ "LATERAL VIEW posexplode(ItemPerformance) ItemPerformanceTable AS seqp, ItemPerf "
+			+ "WHERE `ItemIdent`.`Type`= 'OAI'";
 		stmt.executeUpdate(insertSushilogtmp);
 		logger.info("Inserted to irus_sushilogtmp table");
 		logger.info("Creating downloads_stats table");
 		String createDownloadsStats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
-			+ ".downloads_stats " +
-			"(`source` string, " +
-			"`repository_id` string, " +
-			"`result_id` string, " +
-			"`date` string, " +
-			"`count` bigint, " +
-			"`openaire` bigint)";
+			+ ".downloads_stats "
+			+ "(`source` string, "
+			+ "`repository_id` string, "
+			+ "`result_id` string, "
+			+ "`date` string, "
+			+ "`count` bigint, "
+			+ "`openaire` bigint)";
 		stmt.executeUpdate(createDownloadsStats);
 		logger.info("Created downloads_stats table");
 		logger.info("Inserting into downloads_stats");
-		String insertDStats = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats " +
-			"SELECT s.source, d.id AS repository_id, " +
-			"ro.id as result_id, CONCAT(YEAR(date), '/', LPAD(MONTH(date), 2, '0')) as date, s.count, '0' " +
-			"FROM " + ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp s, " +
-			ConnectDB.getStatsDBSchema() + ".datasource_oids d, " +
-			ConnectDB.getStatsDBSchema() + ".result_oids ro " +
-			"WHERE s.repository=d.oid AND s.rid=ro.oid AND metric_type='ft_total' AND s.source='IRUS-UK'";
+		String insertDStats = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats "
+			+ "SELECT s.source, d.id AS repository_id, "
+			+ "ro.id as result_id, CONCAT(YEAR(date), '/', LPAD(MONTH(date), 2, '0')) as date, s.count, '0' "
+			+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp s, "
+			+ ConnectDB.getStatsDBSchema() + ".datasource_oids d, "
+			+ ConnectDB.getStatsDBSchema() + ".result_oids ro "
+			+ "WHERE s.repository=d.oid AND s.rid=ro.oid AND metric_type='ft_total' AND s.source='IRUS-UK'";
 		stmt.executeUpdate(insertDStats);
 		logger.info("Inserted into downloads_stats");
 		logger.info("Creating sushilog table");
 		String createSushilog = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
-			+ ".sushilog " +
-			"(`source` string, " +
-			"`repository_id` string, " +
-			"`rid` string, " +
-			"`date` string, " +
-			"`metric_type` string, " +
-			"`count` int)";
+			+ ".sushilog "
+			+ "(`source` string, "
+			+ "`repository_id` string, "
+			+ "`rid` string, "
+			+ "`date` string, "
+			+ "`metric_type` string, "
+			+ "`count` int)";
 		stmt.executeUpdate(createSushilog);
 		logger.info("Created sushilog table");
 		logger.info("Inserting to sushilog table");
-		String insertToShushilog = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".sushilog SELECT * FROM " +
-			ConnectDB.getUsageStatsDBSchema()
+		String insertToShushilog = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".sushilog SELECT * FROM "
+			+ ConnectDB.getUsageStatsDBSchema()
 			+ ".irus_sushilogtmp";
 		stmt.executeUpdate(insertToShushilog);
 		logger.info("Inserted to sushilog table");
@@ -236,9 +232,9 @@ public class IrusStats {
 		end.add(Calendar.DAY_OF_MONTH, -1);
 		logger.info("(getIrusRRReport) Ending period for log download: " + sdf.format(end.getTime()));
-		String reportUrl = irusUKURL + "GetReport/?Report=RR1&Release=4&RequestorID=OpenAIRE&BeginDate=" +
-			sdf.format(start.getTime()) + "&EndDate=" + sdf.format(end.getTime()) +
-			"&RepositoryIdentifier=&ItemDataType=&NewJiscBand=&Granularity=Monthly&Callback=";
+		String reportUrl = irusUKURL + "GetReport/?Report=RR1&Release=4&RequestorID=OpenAIRE&BeginDate="
+			+ sdf.format(start.getTime()) + "&EndDate=" + sdf.format(end.getTime())
+			+ "&RepositoryIdentifier=&ItemDataType=&NewJiscBand=&Granularity=Monthly&Callback=";
 		logger.info("(getIrusRRReport) Getting report: " + reportUrl);
@@ -269,8 +265,8 @@ public class IrusStats {
 		logger.info("(getIrusRRReport) Found the following opendoars for download: " + opendoarsToVisit);
-		if (ExecuteWorkflow.irusNumberOfOpendoarsToDownload > 0 &&
-			ExecuteWorkflow.irusNumberOfOpendoarsToDownload <= opendoarsToVisit.size()) {
+		if (ExecuteWorkflow.irusNumberOfOpendoarsToDownload > 0
+			&& ExecuteWorkflow.irusNumberOfOpendoarsToDownload <= opendoarsToVisit.size()) {
 			logger.info("Trimming siteIds list to the size of: " + ExecuteWorkflow.irusNumberOfOpendoarsToDownload);
 			opendoarsToVisit = opendoarsToVisit.subList(0, ExecuteWorkflow.irusNumberOfOpendoarsToDownload);
 		}
@@ -310,17 +306,22 @@ public class IrusStats {
 				"SELECT max(date) FROM " + ConnectDB.getUsageStatsDBSchema() + ".sushilog WHERE repository=?");
 		st.setString(1, "opendoar____::" + opendoar);
 		ResultSet rs_date = st.executeQuery();
+		Date dateMax = null;
 		while (rs_date.next()) {
 			if (rs_date.getString(1) != null && !rs_date.getString(1).equals("null")
 				&& !rs_date.getString(1).equals("")) {
 				start.setTime(sdf.parse(rs_date.getString(1)));
+				dateMax = sdf.parse(rs_date.getString(1));
 			}
 		}
 		rs_date.close();
 		int batch_size = 0;
+		if (dateMax != null && start.getTime().compareTo(dateMax) <= 0) {
+			logger.info("Date found in logs " + dateMax + " and not downloading logs for " + opendoar);
+		} else {
 			while (start.before(end)) {
-				// log.info("date: " + simpleDateFormat.format(start.getTime()));
+				logger.info("date: " + simpleDateFormat.format(start.getTime()));
 				String reportUrl = this.irusUKURL + "GetReport/?Report=IR1&Release=4&RequestorID=OpenAIRE&BeginDate="
 					+ simpleDateFormat.format(start.getTime()) + "&EndDate=" + simpleDateFormat.format(start.getTime())
 					+ "&RepositoryIdentifier=opendoar%3A" + opendoar
@@ -334,8 +335,8 @@ public class IrusStats {
 				}
 				FileSystem fs = FileSystem.get(new Configuration());
-				String filePath = irusUKReportPath + "/" + "IrusIRReport_" +
-					opendoar + "_" + simpleDateFormat.format(start.getTime()) + ".json";
+				String filePath = irusUKReportPath + "/" + "IrusIRReport_"
+					+ opendoar + "_" + simpleDateFormat.format(start.getTime()) + ".json";
 				logger.info("Storing to file: " + filePath);
 				FSDataOutputStream fin = fs.create(new Path(filePath), true);
@@ -359,7 +360,8 @@ public class IrusStats {
 				fin.close();
 			}
-		ConnectDB.getHiveConnection().close();
+		}
+		//ConnectDB.getHiveConnection().close();
 		logger.info("(getIrusIRReport) Finished downloading report(s) with opendoar: " + opendoar);
 	}
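
This hunk is the commit's core pattern: before downloading IRUS-UK reports for a repository, read max(date) from sushilog, keep it as dateMax, and skip the download entirely when the requested window is already covered. Condensed into a standalone sketch (the date format and the actual report download are placeholders; the query and guard mirror the code above):

    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.text.SimpleDateFormat;
    import java.util.Calendar;
    import java.util.Date;

    // Standalone sketch of the "download only updates" guard.
    public class IncrementalGuardSketch {
        static void maybeDownload(PreparedStatement st, Calendar start, Calendar end,
            String opendoar) throws Exception {
            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM"); // assumed format
            Date dateMax = null;
            st.setString(1, "opendoar____::" + opendoar);
            try (ResultSet rs = st.executeQuery()) {
                while (rs.next()) {
                    String max = rs.getString(1);
                    if (max != null && !max.equals("null") && !max.isEmpty()) {
                        start.setTime(sdf.parse(max)); // resume from the last logged month
                        dateMax = sdf.parse(max);
                    }
                }
            }
            if (dateMax != null && start.getTime().compareTo(dateMax) <= 0) {
                return; // everything up to dateMax is already in sushilog: skip
            }
            // else: walk month by month from start to end and fetch each report
        }
    }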

LaReferenciaDownloadLogs.java

@@ -1,4 +1,3 @@
 package eu.dnetlib.oa.graph.usagestats.export;
-
 import java.io.*;
@@ -63,12 +62,12 @@ public class LaReferenciaDownloadLogs {
 		Statement stmt = ConnectDB.getHiveConnection().createStatement();
 		logger.info("Creating LaReferencia tables");
-		String sqlCreateTableLareferenciaLog = "CREATE TABLE IF NOT EXISTS " +
-			ConnectDB.getUsageStatsDBSchema() + ".lareferencialog(matomoid INT, " +
-			"source STRING, id_visit STRING, country STRING, action STRING, url STRING, entity_id STRING, " +
-			"source_item_type STRING, timestamp STRING, referrer_name STRING, agent STRING) " +
-			"clustered by (source, id_visit, action, timestamp, entity_id) into 100 buckets " +
-			"stored as orc tblproperties('transactional'='true')";
+		String sqlCreateTableLareferenciaLog = "CREATE TABLE IF NOT EXISTS "
+			+ ConnectDB.getUsageStatsDBSchema() + ".lareferencialog(matomoid INT, "
+			+ "source STRING, id_visit STRING, country STRING, action STRING, url STRING, entity_id STRING, "
+			+ "source_item_type STRING, timestamp STRING, referrer_name STRING, agent STRING) "
+			+ "clustered by (source, id_visit, action, timestamp, entity_id) into 100 buckets "
+			+ "stored as orc tblproperties('transactional'='true')";
 		stmt.executeUpdate(sqlCreateTableLareferenciaLog);
 		logger.info("Created LaReferencia tables");
 		// String sqlcreateRuleLaReferenciaLog = "CREATE OR REPLACE RULE ignore_duplicate_inserts AS "
@@ -115,7 +114,6 @@ public class LaReferenciaDownloadLogs {
 //			// System.exit(0);
 //		}
 //	}
-
 	private String getPiwikLogUrl() {
 		return piwikUrl + "/";
 	}
@@ -160,8 +158,8 @@ public class LaReferenciaDownloadLogs {
 		}
 		logger.info("Found the following siteIds for download: " + siteIdsToVisit);
-		if (ExecuteWorkflow.numberOfPiwikIdsToDownload > 0 &&
-			ExecuteWorkflow.numberOfPiwikIdsToDownload <= siteIdsToVisit.size()) {
+		if (ExecuteWorkflow.numberOfPiwikIdsToDownload > 0
+			&& ExecuteWorkflow.numberOfPiwikIdsToDownload <= siteIdsToVisit.size()) {
 			logger.info("Trimming siteIds list to the size of: " + ExecuteWorkflow.numberOfPiwikIdsToDownload);
 			siteIdsToVisit = siteIdsToVisit.subList(0, ExecuteWorkflow.numberOfPiwikIdsToDownload);
 		}
@@ -169,7 +167,7 @@ public class LaReferenciaDownloadLogs {
 		logger.info("Downloading from repos with the following siteIds: " + siteIdsToVisit);
 		for (int siteId : siteIdsToVisit) {
-			logger.info("Now working on piwikId: " + siteId);
+			logger.info("Now working on LaReferencia MatomoId: " + siteId);
 			this.GetLaReFerenciaLogs(repoLogsPath, siteId);
 		}
 	}
@@ -193,21 +191,26 @@ public class LaReferenciaDownloadLogs {
 		PreparedStatement st = ConnectDB
 			.getHiveConnection()
 			.prepareStatement(
-				"SELECT max(timestamp) FROM " + ConnectDB.getUsageStatsDBSchema() +
-					".lareferencialog WHERE matomoid=? GROUP BY timestamp HAVING max(timestamp) is not null");
+				"SELECT max(timestamp) FROM " + ConnectDB.getUsageStatsDBSchema()
+					+ ".lareferencialog WHERE matomoid=?");
 		st.setInt(1, laReferencialMatomoID);
+		Date dateMax = null;
 		ResultSet rs_date = st.executeQuery();
 		while (rs_date.next()) {
 			if (rs_date.getString(1) != null && !rs_date.getString(1).equals("null")
 				&& !rs_date.getString(1).equals("")) {
 				start.setTime(sdf.parse(rs_date.getString(1)));
+				dateMax = sdf.parse(rs_date.getString(1));
 			}
 		}
 		rs_date.close();
 		for (Calendar currDay = (Calendar) start.clone(); currDay.before(end); currDay.add(Calendar.DATE, 1)) {
 			Date date = currDay.getTime();
+			if (dateMax != null && currDay.getTime().compareTo(dateMax) <= 0) {
+				logger.info("Date found in logs " + dateMax + " and not downloading Matomo logs for " + laReferencialMatomoID);
+			} else {
 				logger
 					.info(
 						"Downloading logs for LaReferencia repoid " + laReferencialMatomoID + " and for "
@@ -237,8 +240,9 @@ public class LaReferenciaDownloadLogs {
 				}
 				content = getJson(apiUrl);
-				if (content.length() == 0 || content.equals("[]"))
+				if (content.length() == 0 || content.equals("[]")) {
 					break;
+				}
 				JSONArray jsonArray = (JSONArray) parser.parse(content);
 				for (Object aJsonArray : jsonArray) {
@@ -255,7 +259,7 @@ public class LaReferenciaDownloadLogs {
 					i++;
 				} while (true);
 				fin.close();
+			}
 		}
 	}
 }
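
Where the IRUS guard skips a repository outright, this loop decides day by day: dates at or before dateMax are skipped, later ones are fetched. The same shape, stripped down (downloadDay stands in for the real Matomo API call):

    import java.util.Calendar;
    import java.util.Date;

    // Per-day variant of the incremental guard.
    public class PerDayGuardSketch {
        static void walk(Calendar start, Calendar end, Date dateMax) {
            for (Calendar day = (Calendar) start.clone(); day.before(end); day.add(Calendar.DATE, 1)) {
                if (dateMax != null && day.getTime().compareTo(dateMax) <= 0) {
                    continue; // this day is already in lareferencialog
                }
                downloadDay(day.getTime());
            }
        }

        static void downloadDay(Date date) {
            // placeholder: fetch and store the logs for this date
        }
    }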

PiwikDownloadLogs.java

@@ -204,6 +204,9 @@ public class PiwikDownloadLogs {
 		logger.info("Downloading from repos with the following piwikIds: " + piwikIdToVisit);
+		// ExecutorService executor = Executors.newFixedThreadPool(ExecuteWorkflow.numberOfDownloadThreads);
+		for (int siteId : piwikIdToVisit) {
 			// Setting the starting period
 			Calendar start = (Calendar) ExecuteWorkflow.startingLogPeriod.clone();
 			logger.info("Starting period for log download: " + sdf.format(start.getTime()));
@@ -214,9 +217,6 @@ public class PiwikDownloadLogs {
 			end.add(Calendar.DAY_OF_MONTH, -1);
 			logger.info("Ending period for log download: " + sdf.format(end.getTime()));
-		//ExecutorService executor = Executors.newFixedThreadPool(ExecuteWorkflow.numberOfDownloadThreads);
-		for (int siteId : piwikIdToVisit) {
 			logger.info("Now working on piwikId: " + siteId);
 			PreparedStatement st = ConnectDB.DB_HIVE_CONNECTION
@@ -224,7 +224,7 @@ public class PiwikDownloadLogs {
 					"SELECT max(timestamp) FROM " + ConnectDB.getUsageStatsDBSchema()
 						+ ".piwiklog WHERE source=?");
 			st.setInt(1, siteId);
+			Date dateMax = null;
 			ResultSet rs_date = st.executeQuery();
 			while (rs_date.next()) {
 				logger.info("Found max date: " + rs_date.getString(1) + " for repository " + siteId);
@@ -232,6 +232,7 @@ public class PiwikDownloadLogs {
 				if (rs_date.getString(1) != null && !rs_date.getString(1).equals("null")
 					&& !rs_date.getString(1).equals("")) {
 					start.setTime(sdf.parse(rs_date.getString(1)));
+					dateMax = sdf.parse(rs_date.getString(1));
 				}
 			}
 			rs_date.close();
@@ -240,7 +241,13 @@ public class PiwikDownloadLogs {
 				// logger.info("Date used " + currDay.toString());
 				// Runnable worker = new WorkerThread(currDay, siteId, repoLogsPath, portalLogPath, portalMatomoID);
 				// executor.execute(worker);// calling execute method of ExecutorService
+				logger.info("Date used " + currDay.getTime().toString());
+				if (dateMax != null && currDay.getTime().compareTo(dateMax) <= 0)
+					logger.info("Date found in logs " + dateMax + " and not downloading Matomo logs for " + siteId);
+				else
 					GetOpenAIRELogsForDate(currDay, siteId, repoLogsPath, portalLogPath, portalMatomoID);
 			}
 		}
 		// executor.shutdown();
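
Note the structural move in this file: the for (int siteId : piwikIdToVisit) loop now opens before the period setup, so start and end are rebuilt for every site. That matters because the max-date lookup mutates start per repository; with one shared calendar, the second site would inherit the first site's resume point. The corrected shape, sketched with placeholder parameters standing in for ExecuteWorkflow's fields:

    import java.util.Calendar;
    import java.util.List;

    // Sketch of the corrected loop shape.
    public class PerSiteWindowSketch {
        static void run(List<Integer> piwikIdToVisit, Calendar startingLogPeriod, Calendar endingLogPeriod) {
            for (int siteId : piwikIdToVisit) {
                // cloned inside the loop: one site's resumed start date cannot
                // leak into the next site's download window
                Calendar start = (Calendar) startingLogPeriod.clone();
                Calendar end = (Calendar) endingLogPeriod.clone();
                end.add(Calendar.DAY_OF_MONTH, -1);
                // then: look up max(timestamp) for this siteId, derive dateMax,
                // and walk the days, skipping those already covered
            }
        }
    }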

PiwikStatsDB.java

@@ -215,8 +215,6 @@ public class PiwikStatsDB {
 			downloadsStats();
-			logger.info("DownloadsStats processing starts");
-
 			logger.info("Updating Production Tables");
 			updateProdTables();
 			logger.info("Updated Production Tables");
@@ -313,7 +311,7 @@ public class PiwikStatsDB {
 			"SELECT DISTINCT p1.source, p1.id_visit, p1.action, p1.entity_id, p1.timestamp \n" +
 			"FROM " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp p1, " +
 			ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp p2\n" +
-			"WHERE p1.source!='5' AND p1.source=p2.source AND p1.id_visit=p2.id_visit AND p1.entity_id=p2.entity_id \n"
+			"WHERE p1.source=p2.source AND p1.id_visit=p2.id_visit AND p1.entity_id=p2.entity_id \n"
 			+
 			"AND p1.action=p2.action AND p1.action='download' AND p1.timestamp!=p2.timestamp \n" +
 			"AND p1.timestamp<p2.timestamp AND ((unix_timestamp(p2.timestamp)-unix_timestamp(p1.timestamp))/60)<30 \n" +
@@ -329,7 +327,7 @@ public class PiwikStatsDB {
 			"SELECT DISTINCT p1.source, p1.id_visit, p1.action, p1.entity_id, p1.timestamp \n" +
 			"FROM " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp p1, " +
 			ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp p2\n" +
-			"WHERE p1.source!='5' AND p1.source=p2.source AND p1.id_visit=p2.id_visit AND p1.entity_id=p2.entity_id \n"
+			"WHERE p1.source=p2.source AND p1.id_visit=p2.id_visit AND p1.entity_id=p2.entity_id \n"
 			+
 			"AND p1.action=p2.action AND p1.action='action' AND p1.timestamp!=p2.timestamp \n" +
 			"AND p1.timestamp<p2.timestamp AND (unix_timestamp(p2.timestamp)-unix_timestamp(p1.timestamp))<10 \n" +
@@ -380,22 +378,22 @@ public class PiwikStatsDB {
 			"max(views) AS count, max(openaire_referrer) AS openaire " +
 			"FROM " + ConnectDB.getUsageStatsDBSchema() + ".result_views_monthly_tmp p, " +
 			ConnectDB.getStatsDBSchema() + ".datasource d, " + ConnectDB.getStatsDBSchema() + ".result_oids ro " +
-			"WHERE p.source!='5' AND p.source=d.piwik_id AND p.id=ro.oid " +
+			"WHERE p.source=d.piwik_id AND p.id=ro.oid " +
 			"GROUP BY d.id, ro.id, month " +
 			"ORDER BY d.id, ro.id, month";
 		stmt.executeUpdate(create_views_stats_tmp);
 		logger.info("Created views_stats_tmp table");
+		/*
 		logger.info("Dropping views_stats table");
 		String drop_views_stats = "DROP TABLE IF EXISTS " +
 			ConnectDB.getUsageStatsDBSchema() +
 			".views_stats";
 		stmt.executeUpdate(drop_views_stats);
 		logger.info("Dropped views_stats table");
+		*/
 		logger.info("Creating views_stats table");
 		String create_view_stats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".views_stats " +
-			"STORED AS PARQUET AS SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".views_stats_tmp";
+			"LIKE " + ConnectDB.getUsageStatsDBSchema() + ".views_stats_tmp STORED AS PARQUET";
 		stmt.executeUpdate(create_view_stats);
 		logger.info("Created views_stats table");
@@ -418,17 +416,17 @@ public class PiwikStatsDB {
 		stmt.executeUpdate(create_pageviews_stats_tmp);
 		logger.info("Created pageviews_stats_tmp table");
-		logger.info("Dropping pageviews_stats table");
+		/* logger.info("Dropping pageviews_stats table");
 		String drop_pageviews_stats = "DROP TABLE IF EXISTS " +
 			ConnectDB.getUsageStatsDBSchema() +
 			".pageviews_stats";
 		stmt.executeUpdate(drop_pageviews_stats);
 		logger.info("Dropped pageviews_stats table");
+		*/
 		logger.info("Creating pageviews_stats table");
 		String create_pageviews_stats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
 			+ ".pageviews_stats " +
-			"STORED AS PARQUET AS SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".pageviews_stats_tmp";
+			"LIKE " + ConnectDB.getUsageStatsDBSchema() + ".pageviews_stats_tmp STORED AS PARQUET";
 		stmt.executeUpdate(create_pageviews_stats);
 		logger.info("Created pageviews_stats table");
@@ -477,19 +475,19 @@ public class PiwikStatsDB {
 			"ORDER BY d.id, ro.id, month";
 		stmt.executeUpdate(sql);
 		logger.info("Created downloads_stats_tmp table");
+		/*
 		logger.info("Dropping downloads_stats table");
 		String drop_downloads_stats = "DROP TABLE IF EXISTS " +
 			ConnectDB.getUsageStatsDBSchema() +
 			".downloads_stats";
 		stmt.executeUpdate(drop_downloads_stats);
 		logger.info("Dropped downloads_stats table");
+		*/
 		logger.info("Creating downloads_stats table");
-		String create_pageviews_stats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
+		String create_downloads_stats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
 			+ ".downloads_stats " +
-			"STORED AS PARQUET AS SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats_tmp";
-		stmt.executeUpdate(create_pageviews_stats);
+			"LIKE " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats_tmp STORED AS PARQUET ";
+		stmt.executeUpdate(create_downloads_stats);
 		logger.info("Created downloads_stats table");
 		logger.info("Dropping result_downloads_monthly_tmp view");
@@ -843,17 +841,14 @@ public class PiwikStatsDB {
 		stmt.executeUpdate(sql);
 		stmt.close();
-		/* logger.info("PortalStats - Step 3");
-		stmt = con.createStatement();
-		sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp " +
-			"SELECT DISTINCT source, id_visit, country, action, url, entity_id, 'organization', `timestamp`, referrer_name, agent "
-			+
-			"FROM " + ConnectDB.getUsageStatsDBSchema() + ".process_portal_log_tmp " +
-			"WHERE process_portal_log_tmp.entity_id IS NOT NULL AND process_portal_log_tmp.entity_id " +
-			"IN (SELECT roid.id FROM " + ConnectDB.getStatsDBSchema()
-			+ ".organization_oids roid WHERE roid.id IS NOT NULL)";
-		// stmt.executeUpdate(sql);
-		stmt.close();
+		/*
+		 * logger.info("PortalStats - Step 3"); stmt = con.createStatement(); sql = "INSERT INTO " +
+		 * ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp " +
+		 * "SELECT DISTINCT source, id_visit, country, action, url, entity_id, 'organization', `timestamp`, referrer_name, agent "
+		 * + "FROM " + ConnectDB.getUsageStatsDBSchema() + ".process_portal_log_tmp " +
+		 * "WHERE process_portal_log_tmp.entity_id IS NOT NULL AND process_portal_log_tmp.entity_id " +
+		 * "IN (SELECT roid.id FROM " + ConnectDB.getStatsDBSchema() +
+		 * ".organization_oids roid WHERE roid.id IS NOT NULL)"; // stmt.executeUpdate(sql); stmt.close();
 		 */
 		logger.info("PortalStats - Step 3");
 		stmt = con.createStatement();
@@ -1172,21 +1167,15 @@ public class PiwikStatsDB {
 			"SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".pageviews_stats_tmp";
 		stmt.executeUpdate(sql);
-		/* logger.info("Dropping table views_stats_tmp");
-		sql = "DROP TABLE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".views_stats_tmp";
-		stmt.executeUpdate(sql);
-
-		logger.info("Dropping table downloads_stats_tmp");
-		sql = "DROP TABLE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats_tmp";
-		stmt.executeUpdate(sql);
-
-		logger.info("Dropping table pageviews_stats_tmp");
-		sql = "DROP TABLE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".pageviews_stats_tmp";
-		stmt.executeUpdate(sql);
-
-		logger.info("Dropping table process_portal_log_tmp");
-		sql = "DROP TABLE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".process_portal_log_tmp";
-		stmt.executeUpdate(sql);
+		/*
+		 * logger.info("Dropping table views_stats_tmp"); sql = "DROP TABLE IF EXISTS " +
+		 * ConnectDB.getUsageStatsDBSchema() + ".views_stats_tmp"; stmt.executeUpdate(sql);
+		 * logger.info("Dropping table downloads_stats_tmp"); sql = "DROP TABLE IF EXISTS " +
+		 * ConnectDB.getUsageStatsDBSchema() + ".downloads_stats_tmp"; stmt.executeUpdate(sql);
+		 * logger.info("Dropping table pageviews_stats_tmp"); sql = "DROP TABLE IF EXISTS " +
+		 * ConnectDB.getUsageStatsDBSchema() + ".pageviews_stats_tmp"; stmt.executeUpdate(sql);
+		 * logger.info("Dropping table process_portal_log_tmp"); sql = "DROP TABLE IF EXISTS " +
+		 * ConnectDB.getUsageStatsDBSchema() + ".process_portal_log_tmp"; stmt.executeUpdate(sql);
 		 */
 		stmt.close();
 		ConnectDB.getHiveConnection().close();

SarcStats.java

@@ -1,4 +1,3 @@
 package eu.dnetlib.oa.graph.usagestats.export;
-
 import java.io.*;
@@ -13,6 +12,7 @@ import java.sql.Statement;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Calendar;
+import java.util.Date;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -95,94 +95,93 @@ public class SarcStats {
 		logger.info("Added JSON Serde jar");
 		logger.info("Dropping sarc_sushilogtmp_json_array table");
-		String drop_sarc_sushilogtmp_json_array = "DROP TABLE IF EXISTS " +
-			ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_json_array";
+		String drop_sarc_sushilogtmp_json_array = "DROP TABLE IF EXISTS "
+			+ ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_json_array";
 		stmt.executeUpdate(drop_sarc_sushilogtmp_json_array);
 		logger.info("Dropped sarc_sushilogtmp_json_array table");
 		logger.info("Creating sarc_sushilogtmp_json_array table");
-		String create_sarc_sushilogtmp_json_array = "CREATE EXTERNAL TABLE IF NOT EXISTS " +
-			ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_json_array(\n" +
-			"  `ItemIdentifier` ARRAY<\n" +
-			"    struct<\n" +
-			"      `Type`: STRING,\n" +
-			"      `Value`: STRING\n" +
-			"    >\n" +
-			"  >,\n" +
-			"  `ItemPerformance` struct<\n" +
-			"    `Period`: struct<\n" +
-			"      `Begin`: STRING,\n" +
-			"      `End`: STRING\n" +
-			"    >,\n" +
-			"    `Instance`: struct<\n" +
-			"      `Count`: STRING,\n" +
-			"      `MetricType`: STRING\n" +
-			"    >\n" +
-			"  >\n" +
-			")" +
-			"ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'\n" +
-			"LOCATION '" + sarcsReportPathArray + "/'\n" +
-			"TBLPROPERTIES (\"transactional\"=\"false\")";
+		String create_sarc_sushilogtmp_json_array = "CREATE EXTERNAL TABLE IF NOT EXISTS "
+			+ ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_json_array(\n"
+			+ "  `ItemIdentifier` ARRAY<\n"
+			+ "    struct<\n"
+			+ "      `Type`: STRING,\n"
+			+ "      `Value`: STRING\n"
+			+ "    >\n"
+			+ "  >,\n"
+			+ "  `ItemPerformance` struct<\n"
+			+ "    `Period`: struct<\n"
+			+ "      `Begin`: STRING,\n"
+			+ "      `End`: STRING\n"
+			+ "    >,\n"
+			+ "    `Instance`: struct<\n"
+			+ "      `Count`: STRING,\n"
+			+ "      `MetricType`: STRING\n"
+			+ "    >\n"
+			+ "  >\n"
+			+ ")"
+			+ "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'\n"
+			+ "LOCATION '" + sarcsReportPathArray + "/'\n"
+			+ "TBLPROPERTIES (\"transactional\"=\"false\")";
 		stmt.executeUpdate(create_sarc_sushilogtmp_json_array);
 		logger.info("Created sarc_sushilogtmp_json_array table");
 		logger.info("Dropping sarc_sushilogtmp_json_non_array table");
-		String drop_sarc_sushilogtmp_json_non_array = "DROP TABLE IF EXISTS " +
-			ConnectDB.getUsageStatsDBSchema() +
-			".sarc_sushilogtmp_json_non_array";
+		String drop_sarc_sushilogtmp_json_non_array = "DROP TABLE IF EXISTS "
+			+ ConnectDB.getUsageStatsDBSchema()
+			+ ".sarc_sushilogtmp_json_non_array";
 		stmt.executeUpdate(drop_sarc_sushilogtmp_json_non_array);
 		logger.info("Dropped sarc_sushilogtmp_json_non_array table");
 		logger.info("Creating sarc_sushilogtmp_json_non_array table");
-		String create_sarc_sushilogtmp_json_non_array = "CREATE EXTERNAL TABLE IF NOT EXISTS " +
-			ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_json_non_array (\n" +
-			"  `ItemIdentifier` struct<\n" +
-			"    `Type`: STRING,\n" +
-			"    `Value`: STRING\n" +
-			"  >,\n" +
-			"  `ItemPerformance` struct<\n" +
-			"    `Period`: struct<\n" +
-			"      `Begin`: STRING,\n" +
-			"      `End`: STRING\n" +
-			"    >,\n" +
-			"    `Instance`: struct<\n" +
-			"      `Count`: STRING,\n" +
-			"      `MetricType`: STRING\n" +
-			"    >\n" +
-			"  >" +
-			")" +
-			"ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'\n" +
-			"LOCATION '" + sarcsReportPathNonArray + "/'\n" +
-			"TBLPROPERTIES (\"transactional\"=\"false\")";
+		String create_sarc_sushilogtmp_json_non_array = "CREATE EXTERNAL TABLE IF NOT EXISTS "
+			+ ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_json_non_array (\n"
+			+ "  `ItemIdentifier` struct<\n"
+			+ "    `Type`: STRING,\n"
+			+ "    `Value`: STRING\n"
+			+ "  >,\n"
+			+ "  `ItemPerformance` struct<\n"
+			+ "    `Period`: struct<\n"
+			+ "      `Begin`: STRING,\n"
+			+ "      `End`: STRING\n"
+			+ "    >,\n"
+			+ "    `Instance`: struct<\n"
+			+ "      `Count`: STRING,\n"
+			+ "      `MetricType`: STRING\n"
+			+ "    >\n"
+			+ "  >"
+			+ ")"
+			+ "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'\n"
+			+ "LOCATION '" + sarcsReportPathNonArray + "/'\n"
+			+ "TBLPROPERTIES (\"transactional\"=\"false\")";
 		stmt.executeUpdate(create_sarc_sushilogtmp_json_non_array);
 		logger.info("Created sarc_sushilogtmp_json_non_array table");
 		logger.info("Creating sarc_sushilogtmp table");
 		String create_sarc_sushilogtmp = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
-			+ ".sarc_sushilogtmp(source STRING, repository STRING, " +
-			"rid STRING, date STRING, metric_type STRING, count INT) clustered by (source) into 100 buckets stored as orc "
-			+
-			"tblproperties('transactional'='true')";
+			+ ".sarc_sushilogtmp(source STRING, repository STRING, "
+			+ "rid STRING, date STRING, metric_type STRING, count INT) clustered by (source) into 100 buckets stored as orc "
+			+ "tblproperties('transactional'='true')";
 		stmt.executeUpdate(create_sarc_sushilogtmp);
 		logger.info("Created sarc_sushilogtmp table");
 		logger.info("Inserting to sarc_sushilogtmp table (sarc_sushilogtmp_json_array)");
-		String insert_sarc_sushilogtmp = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp " +
-			"SELECT 'SARC-OJS', split(split(INPUT__FILE__NAME,'SarcsARReport_')[1],'_')[0], " +
-			" `ItemIdent`.`Value`, `ItemPerformance`.`Period`.`Begin`, " +
-			"`ItemPerformance`.`Instance`.`MetricType`, `ItemPerformance`.`Instance`.`Count` " +
-			"FROM " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_json_array " +
-			"LATERAL VIEW posexplode(ItemIdentifier) ItemIdentifierTable AS seqi, ItemIdent " +
-			"WHERE `ItemIdent`.`Type`='DOI'";
+		String insert_sarc_sushilogtmp = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp "
+			+ "SELECT 'SARC-OJS', split(split(INPUT__FILE__NAME,'SarcsARReport_')[1],'_')[0], "
+			+ " `ItemIdent`.`Value`, `ItemPerformance`.`Period`.`Begin`, "
+			+ "`ItemPerformance`.`Instance`.`MetricType`, `ItemPerformance`.`Instance`.`Count` "
+			+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_json_array "
+			+ "LATERAL VIEW posexplode(ItemIdentifier) ItemIdentifierTable AS seqi, ItemIdent "
+			+ "WHERE `ItemIdent`.`Type`='DOI'";
 		stmt.executeUpdate(insert_sarc_sushilogtmp);
 		logger.info("Inserted to sarc_sushilogtmp table (sarc_sushilogtmp_json_array)");
 		logger.info("Inserting to sarc_sushilogtmp table (sarc_sushilogtmp_json_non_array)");
-		insert_sarc_sushilogtmp = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp " +
-			"SELECT 'SARC-OJS', split(split(INPUT__FILE__NAME,'SarcsARReport_')[1],'_')[0], " +
-			"`ItemIdentifier`.`Value`, `ItemPerformance`.`Period`.`Begin`, " +
-			"`ItemPerformance`.`Instance`.`MetricType`, `ItemPerformance`.`Instance`.`Count` " +
-			"FROM " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_json_non_array";
+		insert_sarc_sushilogtmp = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp "
+			+ "SELECT 'SARC-OJS', split(split(INPUT__FILE__NAME,'SarcsARReport_')[1],'_')[0], "
+			+ "`ItemIdentifier`.`Value`, `ItemPerformance`.`Period`.`Begin`, "
+			+ "`ItemPerformance`.`Instance`.`MetricType`, `ItemPerformance`.`Instance`.`Count` "
+			+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_json_non_array";
 		stmt.executeUpdate(insert_sarc_sushilogtmp);
 		logger.info("Inserted to sarc_sushilogtmp table (sarc_sushilogtmp_json_non_array)");
@@ -196,20 +195,20 @@ public class SarcStats {
 		logger.info("Creating sushilog table");
 		String createSushilog = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
-			+ ".sushilog " +
-			"(`source` string, " +
-			"`repository` string, " +
-			"`rid` string, " +
-			"`date` string, " +
-			"`metric_type` string, " +
-			"`count` int)";
+			+ ".sushilog "
+			+ "(`source` string, "
+			+ "`repository` string, "
+			+ "`rid` string, "
+			+ "`date` string, "
+			+ "`metric_type` string, "
+			+ "`count` int)";
 		stmt.executeUpdate(createSushilog);
 		logger.info("Created sushilog table");
 		logger.info("Dropping sarc_sushilogtmp table");
-		String drop_sarc_sushilogtmp = "DROP TABLE IF EXISTS " +
-			ConnectDB.getUsageStatsDBSchema() +
-			".sarc_sushilogtmp";
+		String drop_sarc_sushilogtmp = "DROP TABLE IF EXISTS "
+			+ ConnectDB.getUsageStatsDBSchema()
+			+ ".sarc_sushilogtmp";
 		stmt.executeUpdate(drop_sarc_sushilogtmp);
 		logger.info("Dropped sarc_sushilogtmp table");
 		ConnectDB.getHiveConnection().close();
@@ -267,8 +266,8 @@ public class SarcStats {
 				"https://revistas.rcaap.pt/millenium/sushiLite/v1_7/", "0873-3015"
 			});
-		if (ExecuteWorkflow.sarcNumberOfIssnToDownload > 0 &&
-			ExecuteWorkflow.sarcNumberOfIssnToDownload <= issnAndUrls.size()) {
+		if (ExecuteWorkflow.sarcNumberOfIssnToDownload > 0
+			&& ExecuteWorkflow.sarcNumberOfIssnToDownload <= issnAndUrls.size()) {
 			logger.info("Trimming siteIds list to the size of: " + ExecuteWorkflow.sarcNumberOfIssnToDownload);
 			issnAndUrls = issnAndUrls.subList(0, ExecuteWorkflow.sarcNumberOfIssnToDownload);
 		}
@@ -289,27 +288,27 @@ public class SarcStats {
 		logger.info("Creating downloads_stats table");
 		String createDownloadsStats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
-			+ ".downloads_stats " +
-			"(`source` string, " +
-			"`repository_id` string, " +
-			"`result_id` string, " +
-			"`date` string, " +
-			"`count` bigint, " +
-			"`openaire` bigint)";
+			+ ".downloads_stats "
+			+ "(`source` string, "
+			+ "`repository_id` string, "
+			+ "`result_id` string, "
+			+ "`date` string, "
+			+ "`count` bigint, "
+			+ "`openaire` bigint)";
 		stmtHive.executeUpdate(createDownloadsStats);
 		logger.info("Created downloads_stats table");
 		logger.info("Dropping sarc_sushilogtmp_impala table");
-		String drop_sarc_sushilogtmp_impala = "DROP TABLE IF EXISTS " +
-			ConnectDB.getUsageStatsDBSchema() +
-			".sarc_sushilogtmp_impala";
+		String drop_sarc_sushilogtmp_impala = "DROP TABLE IF EXISTS "
+			+ ConnectDB.getUsageStatsDBSchema()
+			+ ".sarc_sushilogtmp_impala";
 		stmtHive.executeUpdate(drop_sarc_sushilogtmp_impala);
 		logger.info("Dropped sarc_sushilogtmp_impala table");
 		logger.info("Creating sarc_sushilogtmp_impala, a table readable by impala");
 		String createSarcSushilogtmpImpala = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
-			+ ".sarc_sushilogtmp_impala " +
-			"STORED AS PARQUET AS SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp";
+			+ ".sarc_sushilogtmp_impala "
+			+ "STORED AS PARQUET AS SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp";
 		stmtHive.executeUpdate(createSarcSushilogtmpImpala);
 		logger.info("Created sarc_sushilogtmp_impala");
@@ -319,9 +318,9 @@ public class SarcStats {
 		stmtImpala.executeUpdate(invalidateMetadata);
 		logger.info("Dropping downloads_stats_impala table");
-		String drop_downloads_stats_impala = "DROP TABLE IF EXISTS " +
-			ConnectDB.getUsageStatsDBSchema() +
-			".downloads_stats_impala";
+		String drop_downloads_stats_impala = "DROP TABLE IF EXISTS "
+			+ ConnectDB.getUsageStatsDBSchema()
+			+ ".downloads_stats_impala";
 		stmtHive.executeUpdate(drop_downloads_stats_impala);
 		logger.info("Dropped downloads_stats_impala table");
@@ -336,36 +335,36 @@ public class SarcStats {
 		// We run the following query in Impala because it is faster
 		logger.info("Creating downloads_stats_impala");
 		String createDownloadsStatsImpala = "CREATE TABLE " + ConnectDB.getUsageStatsDBSchema()
-			+ ".downloads_stats_impala AS " +
-			"SELECT s.source, d.id AS repository_id, " +
-			"ro.id as result_id, CONCAT(CAST(YEAR(`date`) AS STRING), '/', " +
-			"LPAD(CAST(MONTH(`date`) AS STRING), 2, '0')) AS `date`, s.count, '0' " +
-			"FROM " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_impala s, " +
-			ConnectDB.getStatsDBSchema() + ".datasource_oids d, " +
-			ConnectDB.getStatsDBSchema() + ".datasource_results dr, " +
-			ConnectDB.getStatsDBSchema() + ".result_pids ro " +
-			"WHERE d.oid LIKE CONCAT('%', s.repository, '%') AND dr.id=d.id AND dr.result=ro.id AND " +
-			"s.rid=ro.pid AND ro.type='Digital Object Identifier' AND metric_type='ft_total' AND s.source='SARC-OJS'";
+			+ ".downloads_stats_impala AS "
+			+ "SELECT s.source, d.id AS repository_id, "
+			+ "ro.id as result_id, CONCAT(CAST(YEAR(`date`) AS STRING), '/', "
+			+ "LPAD(CAST(MONTH(`date`) AS STRING), 2, '0')) AS `date`, s.count, '0' "
+			+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_impala s, "
+			+ ConnectDB.getStatsDBSchema() + ".datasource_oids d, "
+			+ ConnectDB.getStatsDBSchema() + ".datasource_results dr, "
+			+ ConnectDB.getStatsDBSchema() + ".result_pids ro "
+			+ "WHERE d.oid LIKE CONCAT('%', s.repository, '%') AND dr.id=d.id AND dr.result=ro.id AND "
+			+ "s.rid=ro.pid AND ro.type='Digital Object Identifier' AND metric_type='ft_total' AND s.source='SARC-OJS'";
 		stmtImpala.executeUpdate(createDownloadsStatsImpala);
 		logger.info("Creating downloads_stats_impala");
 		// Insert into downloads_stats
 		logger.info("Inserting data from downloads_stats_impala into downloads_stats");
 		String insertDStats = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema()
-			+ ".downloads_stats SELECT * " +
-			"FROM " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats_impala";
+			+ ".downloads_stats SELECT * "
+			+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats_impala";
 		stmtHive.executeUpdate(insertDStats);
 		logger.info("Inserted into downloads_stats");
 		logger.info("Creating sushilog table");
 		String createSushilog = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
-			+ ".sushilog " +
-			"(`source` string, " +
-			"`repository_id` string, " +
-			"`rid` string, " +
-			"`date` string, " +
-			"`metric_type` string, " +
-			"`count` int)";
+			+ ".sushilog "
+			+ "(`source` string, "
+			+ "`repository_id` string, "
+			+ "`rid` string, "
+			+ "`date` string, "
+			+ "`metric_type` string, "
+			+ "`count` int)";
 		stmtHive.executeUpdate(createSushilog);
 		logger.info("Created sushilog table");
@@ -403,10 +402,12 @@ public class SarcStats {
 				"SELECT max(date) FROM " + ConnectDB.getUsageStatsDBSchema() + ".sushilog WHERE repository=?");
 		st.setString(1, issn);
 		ResultSet rs_date = st.executeQuery();
+		Date dateMax = null;
 		while (rs_date.next()) {
 			if (rs_date.getString(1) != null && !rs_date.getString(1).equals("null")
 				&& !rs_date.getString(1).equals("")) {
 				start.setTime(sdf.parse(rs_date.getString(1)));
+				dateMax = sdf.parse(rs_date.getString(1));
 			}
 		}
 		rs_date.close();
@@ -425,6 +426,10 @@ public class SarcStats {
 				org.apache.hadoop.fs.LocalFileSystem.class.getName());
 		FileSystem dfs = FileSystem.get(config);
+		if (dateMax != null && start.getTime().compareTo(dateMax) <= 0) {
+			logger.info("Date found in logs " + dateMax + " and not downloading logs for " + issn);
+		} else {
 			while (start.before(end)) {
 				String reportUrl = url + "GetReport/?Report=AR1&Format=json&BeginDate="
 					+ simpleDateFormat.format(start.getTime()) + "&EndDate=" + simpleDateFormat.format(start.getTime());
@@ -440,8 +445,7 @@ public class SarcStats {
 			JSONObject jsonObject = null;
 			try {
 				jsonObject = (JSONObject) parser.parse(text);
-			}
-			// if there is a parsing error continue with the next url
+			} // if there is a parsing error continue with the next url
 			catch (ParseException pe) {
 				continue;
 			}
@@ -466,14 +470,14 @@ public class SarcStats {
 			}
 			// Creating the file in the filesystem for the ItemIdentifier as array object
-			String filePathArray = sarcsReportPathArray + "/SarcsARReport_" + issn + "_" +
-				simpleDateFormat.format(start.getTime()) + ".json";
+			String filePathArray = sarcsReportPathArray + "/SarcsARReport_" + issn + "_"
+				+ simpleDateFormat.format(start.getTime()) + ".json";
 			logger.info("Storing to file: " + filePathArray);
 			FSDataOutputStream finArray = dfs.create(new Path(filePathArray), true);
 			// Creating the file in the filesystem for the ItemIdentifier as array object
-			String filePathNonArray = sarcsReportPathNonArray + "/SarcsARReport_" + issn + "_" +
-				simpleDateFormat.format(start.getTime()) + ".json";
+			String filePathNonArray = sarcsReportPathNonArray + "/SarcsARReport_" + issn + "_"
+				+ simpleDateFormat.format(start.getTime()) + ".json";
 			logger.info("Storing to file: " + filePathNonArray);
 			FSDataOutputStream finNonArray = dfs.create(new Path(filePathNonArray), true);
@@ -505,17 +509,17 @@ public class SarcStats {
 		}
 		dfs.close();
-		ConnectDB.getHiveConnection().close();
+		}
+		//ConnectDB.getHiveConnection().close();
 	}

 	private void renameKeysRecursively(String delimiter, JSONArray givenJsonObj) throws Exception {
 		for (Object jjval : givenJsonObj) {
-			if (jjval instanceof JSONArray)
+			if (jjval instanceof JSONArray) {
 				renameKeysRecursively(delimiter, (JSONArray) jjval);
-			else if (jjval instanceof JSONObject)
+			} else if (jjval instanceof JSONObject) {
 				renameKeysRecursively(delimiter, (JSONObject) jjval);
-			// All other types of vals
+			} // All other types of vals
 			else
 				;
 		}
@@ -532,8 +536,9 @@ public class SarcStats {
 		givenJsonObj.remove(jkey);
 		givenJsonObj.put(newJkey, jval);
-		if (jval instanceof JSONObject)
+		if (jval instanceof JSONObject) {
 			renameKeysRecursively(delimiter, (JSONObject) jval);
+		}
 		if (jval instanceof JSONArray) {
 			renameKeysRecursively(delimiter, (JSONArray) jval);