From e04f137a4c9151bdd04d31031535ec0a32441e79 Mon Sep 17 00:00:00 2001 From: Dimitris Date: Tue, 16 Mar 2021 07:56:18 +0200 Subject: [PATCH] Beta 15032021 --- runme.sh | 2 +- .../config/WebMvcConfiguration.java | 3 + .../controllers/SushiLiteController.java | 125 +- .../repositories/UsageStatsRepository.java | 1027 ++++++----------- .../usagestats/services/SushiLiteService.java | 8 +- .../services/SushiLiteServiceImpl.java | 22 +- src/main/resources/static/index.html | 1 + .../resources/static/sushilite/DSR/index.html | 201 ++++ .../resources/static/sushilite/index.html | 2 +- usageStatsAPI.properties | 8 +- 10 files changed, 681 insertions(+), 718 deletions(-) create mode 100644 src/main/resources/static/sushilite/DSR/index.html diff --git a/runme.sh b/runme.sh index fa80ebf..fee2454 100755 --- a/runme.sh +++ b/runme.sh @@ -1 +1 @@ -java -jar target/usagestats_r5.jar --spring.config.location=file:./usageStatsAPI.properties & \ No newline at end of file +java -jar target/usagestats_r5.jar --spring.config.location=file:./usageStatsAPI.properties diff --git a/src/main/java/eu/dnetlib/usagestats/config/WebMvcConfiguration.java b/src/main/java/eu/dnetlib/usagestats/config/WebMvcConfiguration.java index ec7c4d8..cf0b890 100644 --- a/src/main/java/eu/dnetlib/usagestats/config/WebMvcConfiguration.java +++ b/src/main/java/eu/dnetlib/usagestats/config/WebMvcConfiguration.java @@ -44,6 +44,9 @@ public class WebMvcConfiguration extends WebMvcConfigurerAdapter { registry.addViewController("/sushilite/IR").setViewName("redirect:/sushilite/IR/"); registry.addViewController("/sushilite/IR/").setViewName("forward:/sushilite/IR/index.html"); + registry.addViewController("/sushilite/DSR").setViewName("redirect:/sushilite/DSR/"); + registry.addViewController("/sushilite/DSR/").setViewName("forward:/sushilite/DSR/index.html"); + super.addViewControllers(registry); } } diff --git a/src/main/java/eu/dnetlib/usagestats/controllers/SushiLiteController.java 
b/src/main/java/eu/dnetlib/usagestats/controllers/SushiLiteController.java index 3d746cc..a1dd237 100755 --- a/src/main/java/eu/dnetlib/usagestats/controllers/SushiLiteController.java +++ b/src/main/java/eu/dnetlib/usagestats/controllers/SushiLiteController.java @@ -8,6 +8,13 @@ import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.List; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import javax.servlet.http.HttpServletResponse; import org.apache.log4j.Logger; @@ -38,25 +45,6 @@ class SushiLiteController { this.sushiLiteService = sushiLiteService; } -// @RequestMapping(value = "/sushilite/r5/GetReport/", method = RequestMethod.GET) -// public String getReport( -// @RequestParam(value = "Report", defaultValue = "") String reportP, -// @RequestParam(value = "Release", defaultValue = "4") String release, -// @RequestParam(value = "RequestorID", defaultValue = "anonymous") String requestorId, -// @RequestParam(value = "BeginDate", defaultValue = "") String beginDate, -// @RequestParam(value = "EndDate", defaultValue = "") String endDate, -// @RequestParam(value = "RepositoryIdentifier", defaultValue = "") String repositoryIdentifier, -// @RequestParam(value = "ItemIdentifier", defaultValue = "") String itemIdentifier, -// @RequestParam(value = "ItemDataType", defaultValue = "") String itemDataType, -// @RequestParam(value = "hasDOI", defaultValue = "") String hasDoi, -// @RequestParam(value = "Granularity", defaultValue = "Monthly") String granularity, -// @RequestParam(value = "Callback", defaultValue = "") String callback, -// @RequestParam(value = "Pretty", defaultValue = "") String pretty) { -// log.info("Sushi Report request: " + reportP + " from " + requestorId); -// 
log.info("repository identifier: " + repositoryIdentifier + " - item identifier: " + itemIdentifier); -// -// return sushiLiteService.displayReport(reportP, release, requestorId, beginDate, endDate, repositoryIdentifier, itemIdentifier, itemDataType, hasDoi, granularity, callback, pretty); -// } @RequestMapping(value = "/sushilite/r5/status", method = RequestMethod.GET) public String getReportStatus() { log.info("COUNTER Report status request "); @@ -96,29 +84,104 @@ class SushiLiteController { @RequestParam(value = "DataType", defaultValue = "") String dataType, @RequestParam(value = "Granularity", defaultValue = "Monthly") String granularity) { log.info("COUNTER ΙR Report request for repository " + repositoryIdentifier + " and for item " + itemIdentifier); - String report = sushiLiteService.displayReportIR(requestorId, repositoryIdentifier, itemIdentifier, beginDate, endDate, metricType, dataType, granularity); + class BuildReportWithTimeout implements Callable { - if (report.indexOf(".zip") < 0) { - return new ResponseEntity<>(report, HttpStatus.OK); - } else { + @Override + public ResponseEntity call() throws Exception { + String report = sushiLiteService.displayReportIR(requestorId, repositoryIdentifier, itemIdentifier, beginDate, endDate, metricType, dataType, granularity); + if (report.indexOf(".zip") < 0) { + return new ResponseEntity<>(report, HttpStatus.OK); + } else { - String compressedOutput = "
 {\"Report\":\"IR\", \"Description\":\"Compressed Report Due to large number of records\", \"URL To Download Report from: \":\"" + report + "\"} 
"; - return new ResponseEntity<>(compressedOutput, HttpStatus.OK); + String compressedOutput = "
 {\"Report\":\"IR\", \"Description\":\"Compressed Report Due to large number of records\", \"URL To Download Report from: \":\"" + report + "\"} 
"; + return new ResponseEntity<>(compressedOutput, HttpStatus.OK); + } + } } + final ExecutorService executor = Executors.newSingleThreadExecutor(); + final Future future = executor.submit(new BuildReportWithTimeout()); + + try { + return future.get(2, TimeUnit.MINUTES); + } catch (InterruptedException ie) { + String compressedOutput = "
 {\"Report\":\"IR\", \"Description\":\"Large Report Timeout. Please contact usagecounts@openaire.eu for requests" + "\"} 
"; + return new ResponseEntity<>(compressedOutput, HttpStatus.OK); + } catch (ExecutionException ee) { + /* Handle the error. Or ignore it. */ + } catch (TimeoutException te) { + String compressedOutput = "
 {\"Report\":\"IR\", \"Description\":\"Large Report Timeout. Please contact usagecounts@openaire.eu for requests" + "\"} 
"; + return new ResponseEntity<>(compressedOutput, HttpStatus.OK); + + } + if (!executor.isTerminated()) { + executor.shutdownNow(); + } + + return null; +// String report = sushiLiteService.displayReportIR(requestorId, repositoryIdentifier, itemIdentifier, beginDate, endDate, metricType, dataType, granularity); +// +// if (report.indexOf(".zip") < 0) { +// return new ResponseEntity<>(report, HttpStatus.OK); +// } else { +// +// String compressedOutput = "
 {\"Report\":\"IR\", \"Description\":\"Compressed Report Due to large number of records\", \"URL To Download Report from: \":\"" + report + "\"} 
"; +// return new ResponseEntity<>(compressedOutput, HttpStatus.OK); +// } } @RequestMapping(value = "/sushilite/r5/reports/dsr", method = RequestMethod.GET) - public String getReportDSR( + public ResponseEntity getReportDSR( @RequestParam(value = "RepositoryIdentifier", defaultValue = "") String repositoryIdentifier, - @RequestParam(value = "ItemIdentifier", defaultValue = "") String itemIdentifier, + @RequestParam(value = "DatasetIdentifier", defaultValue = "") String itemIdentifier, @RequestParam(value = "RequestorID", defaultValue = "anonymous") String requestorId, @RequestParam(value = "BeginDate", defaultValue = "") String beginDate, @RequestParam(value = "EndDate", defaultValue = "") String endDate, @RequestParam(value = "MetricType", defaultValue = "") List metricType, - @RequestParam(value = "DataType", defaultValue = "") String dataType, - @RequestParam(value = "Granularity", defaultValue = "Monthly") String granularity) { - log.info("COUNTER ΙR Report request for repository " + repositoryIdentifier + " and for item " + itemIdentifier); - return sushiLiteService.displayReportDSR(requestorId, repositoryIdentifier, itemIdentifier, beginDate, endDate, metricType, dataType, granularity); + @RequestParam(value = "Granularity", defaultValue = "Monthly") String granularity) throws Exception { + log.info("COUNTER DSR Report request for repository " + repositoryIdentifier + " and for item " + itemIdentifier); + + class BuildReportWithTimeout implements Callable { + + @Override + public ResponseEntity call() throws Exception { + String report = sushiLiteService.displayReportDSR(requestorId, repositoryIdentifier, itemIdentifier, beginDate, endDate, metricType, granularity); + if (report.indexOf(".zip") < 0) { + return new ResponseEntity<>(report, HttpStatus.OK); + } else { + + String compressedOutput = "
 {\"Report\":\"DSR\", \"Description\":\"Compressed Report Due to large number of records\", \"URL To Download Report from: \":\"" + report + "\"} 
"; + return new ResponseEntity<>(compressedOutput, HttpStatus.OK); + } + } + } + final ExecutorService executor = Executors.newSingleThreadExecutor(); + final Future future = executor.submit(new BuildReportWithTimeout()); + + try { + return future.get(2, TimeUnit.MINUTES); + } catch (InterruptedException ie) { + String compressedOutput = "
 {\"Report\":\"DSR\", \"Description\":\"Large Report Timeout. Please contact usagecounts@openaire.eu for requests" + "\"} 
"; + return new ResponseEntity<>(compressedOutput, HttpStatus.OK); + } catch (ExecutionException ee) { + /* Handle the error. Or ignore it. */ + } catch (TimeoutException te) { + String compressedOutput = "
 {\"Report\":\"DSR\", \"Description\":\"Large Report Timeout. Please contact usagecounts@openaire.eu for requests" + "\"} 
"; + return new ResponseEntity<>(compressedOutput, HttpStatus.OK); + + } + if (!executor.isTerminated()) { + executor.shutdownNow(); + } + + return null; +// String report = sushiLiteService.displayReportDSR(requestorId, repositoryIdentifier, itemIdentifier, beginDate, endDate, metricType, granularity); +// if (report.indexOf(".zip") < 0) { +// return new ResponseEntity<>(report, HttpStatus.OK); +// } else { +// +// String compressedOutput = "
 {\"Report\":\"DSR\", \"Description\":\"Compressed Report Due to large number of records\", \"URL To Download Report from: \":\"" + report + "\"} 
"; +// return new ResponseEntity<>(compressedOutput, HttpStatus.OK); +// } } @RequestMapping(value = "/download/{file_name}", method = RequestMethod.GET) diff --git a/src/main/java/eu/dnetlib/usagestats/repositories/UsageStatsRepository.java b/src/main/java/eu/dnetlib/usagestats/repositories/UsageStatsRepository.java index 0c02f88..9008f98 100755 --- a/src/main/java/eu/dnetlib/usagestats/repositories/UsageStatsRepository.java +++ b/src/main/java/eu/dnetlib/usagestats/repositories/UsageStatsRepository.java @@ -685,15 +685,15 @@ public class UsageStatsRepository { } public void executeItemDSR(List reportItems, String repositoryIdentifier, String itemIdentifier, Date beginDate, - Date endDate, List metricType, String itemDataType, String granularity) { + Date endDate, List metricType, String granularity) { String[] split = itemIdentifier.split(":"); switch (split[0].toLowerCase()) { case "oid": - executeDSROid(reportItems, repositoryIdentifier, itemIdentifier.replaceFirst(split[0] + ":", ""), beginDate, endDate, metricType, itemDataType, granularity); + executeDSROid(reportItems, repositoryIdentifier, itemIdentifier.replaceFirst(split[0] + ":", ""), beginDate, endDate, metricType, granularity); break; case "doi": - executeDSRDoi(reportItems, repositoryIdentifier, itemIdentifier.replaceFirst(split[0] + ":", ""), beginDate, endDate, metricType, itemDataType, granularity); + executeDSRDoi(reportItems, repositoryIdentifier, itemIdentifier.replaceFirst(split[0] + ":", ""), beginDate, endDate, metricType, granularity); break; // case "openaire": // executeOpenaire(reportItems, itemIdentifier.replaceFirst(split[0] + ":", ""), repositoryIdentifier, itemDataType, beginDate, endDate, granularity); @@ -765,7 +765,7 @@ public class UsageStatsRepository { } private void executeDSROid(List reportItems, String repositoryIdentifier, - String oid, Date beginDate, Date endDate, List metricType, String itemDataType, String granularity) { + String oid, Date beginDate, Date endDate, List 
metricType, String granularity) { Connection connection = null; PreparedStatement st = null; ResultSet rs = null; @@ -782,9 +782,9 @@ public class UsageStatsRepository { while (rs.next()) { if (repositoryIdentifier != null) { - executeBatchItemsDSR(reportItems, repositoryIdentifier, rs.getString(1), beginDate, endDate, metricType, itemDataType, granularity); + executeBatchItemsDSR(reportItems, repositoryIdentifier, rs.getString(1), beginDate, endDate, metricType, granularity); } else { - executeItemsAllRepoDSR(reportItems, rs.getString(1), beginDate, endDate, metricType, itemDataType, granularity); + executeItemsAllRepoDSR(reportItems, rs.getString(1), beginDate, endDate, metricType, granularity); } } } catch (Exception e) { @@ -797,7 +797,7 @@ public class UsageStatsRepository { } private void executeDSRDoi(List reportItems, String repositoryIdentifier, - String doi, Date beginDate, Date endDate, List metricType, String itemDataType, String granularity) { + String doi, Date beginDate, Date endDate, List metricType, String granularity) { Connection connection = null; PreparedStatement st = null; ResultSet rs = null; @@ -815,9 +815,9 @@ public class UsageStatsRepository { while (rs.next()) { if (repositoryIdentifier != null) { - executeBatchItemsDSR(reportItems, repositoryIdentifier, rs.getString(1), beginDate, endDate, metricType, itemDataType, granularity); + executeBatchItemsDSR(reportItems, repositoryIdentifier, rs.getString(1), beginDate, endDate, metricType,granularity); } else { - executeItemsAllRepoDSR(reportItems, rs.getString(1), beginDate, endDate, metricType, itemDataType, granularity); + executeItemsAllRepoDSR(reportItems, rs.getString(1), beginDate, endDate, metricType, granularity); } } } catch (Exception e) { @@ -1611,7 +1611,7 @@ public class UsageStatsRepository { ResultSet rs = null; String platform = getInstitutionName(repositoryIdentifier); log.info("Platform " + platform); - log.info("Data Type "+dataType); + log.info("Data Type " + dataType); 
try { connection = usageStatsDB.getConnection(); @@ -2695,7 +2695,7 @@ public class UsageStatsRepository { public void executeBatchItemsDSR(List reportItems, String repositoryIdentifier, String itemIdentifier, Date beginDate, - Date endDate, List metricType, String dataType, String granularity) throws Exception { + Date endDate, List metricType, String granularity) throws Exception { SimpleDateFormat report_dateFormat = new SimpleDateFormat("yyyy-MM-dd"); SimpleDateFormat postgresFormat = new SimpleDateFormat("yyyy/MM"); String beginDateStr = postgresFormat.format(beginDate); @@ -2711,243 +2711,171 @@ public class UsageStatsRepository { connection = usageStatsDB.getConnection(); if (granularity.equalsIgnoreCase("totals")) { - if (dataType.equals("")) { - st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " - + "SELECT rc.type, sum(us.downloads) as downloads, sum(us.views) as views, " - + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, " - + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews " - + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " - + statsDB + ".result_oids ro, tpd " - + "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? 
AND rc.id=us.result_id AND us.result_id=rs.id AND ro.id=us.result_id AND tpd.id=us.result_id " - + "GROUP BY rc.type, rs.title, us.result_id, rs.title, rs.year order by rc.type ASC;"); +// if (dataType.equals("")) { +// st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " +// + "SELECT rc.type, sum(us.downloads) as downloads, sum(us.views) as views, " +// + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, " +// + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews, " +// + "dp.access_method as access_method " +// + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " +// + statsDB + ".result_oids ro, tpd, datasetsusagestats_20210312a.datasetsperformance dp " +// + "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? 
AND rc.id=us.result_id AND us.result_id=rs.id AND ro.id=us.result_id AND tpd.id=us.result_id " +// + "AND dp.ds_type=ro.oid " +// + "GROUP BY rc.type, rs.title, us.result_id, rs.title, rs.year, dp.access_method order by rc.type ASC;"); +// +// st.setString(1, beginDateStr); +// st.setString(2, endDateStr); +// st.setString(3, repositoryIdentifier); +// +// rs = st.executeQuery(); +// COUNTER_Dataset_Usage reportItem = null; +// +// boolean[] metricTypeValue = {false, false, false, false}; +// if (metricType.contains("Total_Dataset_Requests")) { +// metricTypeValue[0] = true; +// } +// if (metricType.contains("Total_Dataset_Investigations")) { +// metricTypeValue[1] = true; +// } +// if (metricType.contains("Unique_Dataset_Requests")) { +// metricTypeValue[2] = true; +// } +// if (metricType.contains("Unique_Dataset_Investigations")) { +// metricTypeValue[3] = true; +// } +// +// while (rs.next()) { +// reportItem = new COUNTER_Dataset_Usage(rs.getString(5), "OpenAIRE", platform, rs.getString(1), Integer.toString(rs.getInt(6)), rs.getString(10)); +// String[] identifiersAll = rs.getString(7).split("#!#"); +// for (int i = 0; i < identifiersAll.length; i++) { +// String[] typeIdentifierArray = identifiersAll[i].split("#-#"); +// reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); +// } +// +// reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(beginDate), report_dateFormat.format(endDate), +// metricTypeValue[0] == true ? rs.getString(2) : null, metricTypeValue[1] == true ? rs.getString(3) : null, metricTypeValue[2] == true ? rs.getString(8) : null, metricTypeValue[3] == true ? 
rs.getString(9) : null)); +// reportItems.add(reportItem); +// } +// +// } //else { + st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " + + "SELECT rc.type, sum(us.downloads) as downloads, sum(us.views) as views, " + + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, " + + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews, " + + "dp.access_method as access_method " + + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " + + statsDB + ".result_oids ro, tpd, datasetsusagestats_20210312a.datasetsperformance dp " + + "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? AND rc.id=us.result_id " + + "AND us.result_id=rs.id AND ro.id=us.result_id AND tpd.id=us.result_id AND rc.type='Dataset' " + + "GROUP BY ro.id, rc.type, rs.title, us.result_id, rs.year, dp.access_method ORDER by rc.type ASC;"); - st.setString(1, beginDateStr); - st.setString(2, endDateStr); - st.setString(3, repositoryIdentifier); + st.setString(1, beginDateStr); + st.setString(2, endDateStr); + st.setString(3, repositoryIdentifier); - rs = st.executeQuery(); - COUNTER_Dataset_Usage reportItem = null; + rs = st.executeQuery(); + COUNTER_Dataset_Usage reportItem = null; + boolean[] metricTypeValue = {false, false, false, false}; - boolean[] metricTypeValue = {false, false, false, false}; - if (metricType.contains("Total_Dataset_Requests")) { - metricTypeValue[0] = true; - } - if (metricType.contains("Total_Dataset_Investigations")) { - metricTypeValue[1] = true; - } - if (metricType.contains("Unique_Dataset_Requests")) { - metricTypeValue[2] = true; - } - if (metricType.contains("Unique_Dataset_Investigations")) { - metricTypeValue[3] = true; - } - - while (rs.next()) { - reportItem = new 
COUNTER_Dataset_Usage(rs.getString(5), "OpenAIRE", platform, rs.getString(1), Integer.toString(rs.getInt(6)), "Regular"); - String[] identifiersAll = rs.getString(7).split("#!#"); - for (int i = 0; i < identifiersAll.length; i++) { - String[] typeIdentifierArray = identifiersAll[i].split("#-#"); - reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); - } - - reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(beginDate), report_dateFormat.format(endDate), - metricTypeValue[0] == true ? rs.getString(2) : null, metricTypeValue[1] == true ? rs.getString(3) : null, metricTypeValue[2] == true ? rs.getString(8) : null, metricTypeValue[3] == true ? rs.getString(9) : null)); - reportItems.add(reportItem); - } - - } else { - st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " - + "SELECT rc.type, sum(us.downloads) as downloads, sum(us.views) as views, " - + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, " - + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews " - + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " - + statsDB + ".result_oids ro, tpd " - + "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? AND rc.type=? 
AND rc.id=us.result_id " - + "AND us.result_id=rs.id AND ro.id=us.result_id AND tpd.id=us.result_id " - + "GROUP BY ro.id, rc.type, rs.title, us.result_id, rs.year order by rc.type ASC;"); - - st.setString(1, beginDateStr); - st.setString(2, endDateStr); - st.setString(3, repositoryIdentifier); - st.setString(4, dataType); - - rs = st.executeQuery(); - COUNTER_Dataset_Usage reportItem = null; - boolean[] metricTypeValue = {false, false, false, false}; - - if (metricType.contains("Total_Dataset_Requests")) { - metricTypeValue[0] = true; - } - if (metricType.contains("Total_Dataset_Investigations")) { - metricTypeValue[1] = true; - } - if (metricType.contains("Unique_Dataset_Requests")) { - metricTypeValue[2] = true; - } - if (metricType.contains("Unique_Dataset_Investigations")) { - metricTypeValue[3] = true; - } - while (rs.next()) { - reportItem = new COUNTER_Dataset_Usage(rs.getString(5), "OpenAIRE", platform, rs.getString(1), Integer.toString(rs.getInt(6)), "Regular"); - String[] identifiersAll = rs.getString(7).split("#!#"); - for (int i = 0; i < identifiersAll.length; i++) { - String[] typeIdentifierArray = identifiersAll[i].split("#-#"); - reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); - } - - reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(beginDate), report_dateFormat.format(endDate), - metricTypeValue[0] == true ? rs.getString(2) : null, metricTypeValue[1] == true ? rs.getString(3) : null, metricTypeValue[2] == true ? rs.getString(8) : null, metricTypeValue[3] == true ? 
rs.getString(9) : null)); - reportItems.add(reportItem); - } + if (metricType.contains("Total_Dataset_Requests")) { + metricTypeValue[0] = true; } + if (metricType.contains("Total_Dataset_Investigations")) { + metricTypeValue[1] = true; + } + if (metricType.contains("Unique_Dataset_Requests")) { + metricTypeValue[2] = true; + } + if (metricType.contains("Unique_Dataset_Investigations")) { + metricTypeValue[3] = true; + } + while (rs.next()) { + reportItem = new COUNTER_Dataset_Usage(rs.getString(5), "OpenAIRE", platform, rs.getString(1), Integer.toString(rs.getInt(6)), rs.getString(10)); + String[] identifiersAll = rs.getString(7).split("#!#"); + for (int i = 0; i < identifiersAll.length; i++) { + String[] typeIdentifierArray = identifiersAll[i].split("#-#"); + reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); + } + + reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(beginDate), report_dateFormat.format(endDate), + metricTypeValue[0] == true ? rs.getString(2) : null, metricTypeValue[1] == true ? rs.getString(3) : null, metricTypeValue[2] == true ? rs.getString(8) : null, metricTypeValue[3] == true ? 
rs.getString(9) : null)); + reportItems.add(reportItem); + } + //} } else if (granularity.equalsIgnoreCase("monthly")) { + st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " + + "SELECT rc.type, us.`date`, sum(us.downloads) as downloads, sum(us.views) as views, " + + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, " + + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews, " + + "dp.access_method as access_method " + + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " + + statsDB + ".result_oids ro, tpd, datasetsusagestats_20210312a.datasetsperformance dp " + + "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? AND rc.id=us.result_id " + + "AND us.result_id=rs.id AND ro.id=us.result_id AND tpd.id=us.result_id AND rc.type='Dataset' " + + "GROUP BY ro.id, rc.type, rs.title, us.result_id, us.`date`, rs.year, dp.access_method ORDER by rc.type, us.`date` ASC;"); + st.setString(1, beginDateStr); + st.setString(2, endDateStr); + st.setString(3, repositoryIdentifier); + rs = st.executeQuery(); - if (dataType.equals("")) { - st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " - + "SELECT rc.type, us.`date`, sum(us.downloads) as downloads, sum(us.views) as views, " - + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, " - + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews " - + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " - + statsDB + ".result_oids ro, tpd " - + "WHERE us.`date`>=? 
AND us.`date`<=? AND us.repository_id=? AND rc.id=us.result_id AND us.result_id=rs.id AND ro.id=us.result_id " - + "AND tpd.id=us.result_id " - + "GROUP BY ro.id, rc.type, rs.title, us.result_id, us.`date`, rs.year order by rc.type, us.`date` ASC;"); - st.setString(1, beginDateStr); - st.setString(2, endDateStr); - st.setString(3, repositoryIdentifier); + String result = ""; + String lastDate = ""; + COUNTER_Dataset_Usage reportItem = null; + boolean[] metricTypeValue = {false, false, false, false}; - rs = st.executeQuery(); - String result = ""; - String lastDate = ""; - COUNTER_Dataset_Usage reportItem = null; - boolean[] metricTypeValue = {false, false, false, false}; + if (metricType.contains("Total_Dataset_Requests")) { + metricTypeValue[0] = true; + } + if (metricType.contains("Total_Dataset_Investigations")) { + metricTypeValue[1] = true; + } + if (metricType.contains("Unique_Dataset_Requests")) { + metricTypeValue[2] = true; + } + if (metricType.contains("Unique_Dataset_Investigations")) { + metricTypeValue[3] = true; + } - if (metricType.contains("Total_Dataset_Requests")) { - metricTypeValue[0] = true; - } - if (metricType.contains("Total_Dataset_Investigations")) { - metricTypeValue[1] = true; - } - if (metricType.contains("Unique_Dataset_Requests")) { - metricTypeValue[2] = true; - } - if (metricType.contains("Unique_Dataset_Investigations")) { - metricTypeValue[3] = true; - } + int ft_total = 0; + int abstr = 0; - int ft_total = 0; - int abstr = 0; + Calendar endCal = Calendar.getInstance(); + endCal.setTime(postgresFormat.parse(endDateStr)); + endCal.add(Calendar.MONTH, 1); + Date endDateForZeros = endCal.getTime(); - Calendar endCal = Calendar.getInstance(); - endCal.setTime(postgresFormat.parse(endDateStr)); - endCal.add(Calendar.MONTH, 1); - Date endDateForZeros = endCal.getTime(); + lastDate = beginDateStr; + String datatype = ""; + List identifiers = null; - lastDate = beginDateStr; - String datatype = ""; - while (rs.next()) { - Calendar endC = 
Calendar.getInstance(); - endC.setTime(postgresFormat.parse(rs.getString(2))); - endC.set(Calendar.DATE, endC.getActualMaximum(Calendar.DATE)); + while (rs.next()) { + Calendar endC = Calendar.getInstance(); + endC.setTime(postgresFormat.parse(rs.getString(2))); + endC.set(Calendar.DATE, endC.getActualMaximum(Calendar.DATE)); // //iterate on data types - if (!datatype.equals(rs.getString(1))) { - datatype = rs.getString(1); - } - - reportItem = new COUNTER_Dataset_Usage(rs.getString(6), "OpenAIRE", platform, rs.getString(1), Integer.toString(rs.getInt(7)), "Regular"); - String[] identifiersAll = rs.getString(8).split("#!#"); - for (int i = 0; i < identifiersAll.length; i++) { - String[] typeIdentifierArray = identifiersAll[i].split("#-#"); - reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); - } - reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(postgresFormat.parse(rs.getString(2))), report_dateFormat.format(endC.getTime()), - metricTypeValue[0] == true ? rs.getString(3) : null, metricTypeValue[1] == true ? rs.getString(4) : null, metricTypeValue[2] == true ? rs.getString(9) : null, metricTypeValue[3] == true ? 
rs.getString(10) : null)); - reportItems.add(reportItem); - - endC.setTime(postgresFormat.parse(rs.getString(2))); - endC.add(Calendar.MONTH, 1); - lastDate = postgresFormat.format(endC.getTime()); - //if (reportItem != null) { - //fillWithZeros(postgresFormat.parse(lastDate), endDateForZeros, reportItem); - } - } else { - st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " - + "SELECT rc.type, us.`date`, sum(us.downloads) as downloads, sum(us.views) as views, " - + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, " - + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews " - + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " - + statsDB + ".result_oids ro, tpd " - + "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? AND rc.type=? 
AND rc.id=us.result_id " - + "AND us.result_id=rs.id AND ro.id=us.result_id AND tpd.id=us.result_id " - + "GROUP BY ro.id, rc.type, rs.title, us.result_id, us.`date`, rs.year order by rc.type, us.`date` ASC;"); - st.setString(1, beginDateStr); - st.setString(2, endDateStr); - st.setString(3, repositoryIdentifier); - st.setString(4, dataType); - rs = st.executeQuery(); - - String result = ""; - String lastDate = ""; - COUNTER_Dataset_Usage reportItem = null; - boolean[] metricTypeValue = {false, false, false, false}; - - if (metricType.contains("Total_Dataset_Requests")) { - metricTypeValue[0] = true; - } - if (metricType.contains("Total_Dataset_Investigations")) { - metricTypeValue[1] = true; - } - if (metricType.contains("Unique_Dataset_Requests")) { - metricTypeValue[2] = true; - } - if (metricType.contains("Unique_Dataset_Investigations")) { - metricTypeValue[3] = true; - } - - int ft_total = 0; - int abstr = 0; - - Calendar endCal = Calendar.getInstance(); - endCal.setTime(postgresFormat.parse(endDateStr)); - endCal.add(Calendar.MONTH, 1); - Date endDateForZeros = endCal.getTime(); - - lastDate = beginDateStr; - String datatype = ""; - List identifiers = null; - - while (rs.next()) { - Calendar endC = Calendar.getInstance(); - endC.setTime(postgresFormat.parse(rs.getString(2))); - endC.set(Calendar.DATE, endC.getActualMaximum(Calendar.DATE)); -// //iterate on data types - if (!datatype.equals(rs.getString(1))) { + if (!datatype.equals(rs.getString(1))) { // if (reportItem != null) { // reportItems.add(reportItem); // } // reportItem = new COUNTER_Platform_Usage("", "OpenAIRE", rs.getString(1), "Regular", ""); - datatype = rs.getString(1); - } - - reportItem = new COUNTER_Dataset_Usage(rs.getString(6), "OpenAIRE", platform, rs.getString(1), Integer.toString(rs.getInt(7)), "Regular"); - String[] identifiersAll = rs.getString(8).split("#!#"); - for (int i = 0; i < identifiersAll.length; i++) { - String[] typeIdentifierArray = identifiersAll[i].split("#-#"); - 
reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); - } - reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(postgresFormat.parse(rs.getString(2))), report_dateFormat.format(endC.getTime()), - metricTypeValue[0] == true ? rs.getString(3) : null, metricTypeValue[1] == true ? rs.getString(4) : null, metricTypeValue[2] == true ? rs.getString(9) : null, metricTypeValue[3] == true ? rs.getString(10) : null)); - reportItems.add(reportItem); - - // } -// -// //} - endC.setTime(postgresFormat.parse(rs.getString(2))); - endC.add(Calendar.MONTH, 1); - lastDate = postgresFormat.format(endC.getTime()); - //if (reportItem != null) { - //fillWithZeros(postgresFormat.parse(lastDate), endDateForZeros, reportItem); + datatype = rs.getString(1); } + + reportItem = new COUNTER_Dataset_Usage(rs.getString(6), "OpenAIRE", platform, rs.getString(1), Integer.toString(rs.getInt(7)), rs.getString(11)); + String[] identifiersAll = rs.getString(8).split("#!#"); + for (int i = 0; i < identifiersAll.length; i++) { + String[] typeIdentifierArray = identifiersAll[i].split("#-#"); + reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); + } + reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(postgresFormat.parse(rs.getString(2))), report_dateFormat.format(endC.getTime()), + metricTypeValue[0] == true ? rs.getString(3) : null, metricTypeValue[1] == true ? rs.getString(4) : null, metricTypeValue[2] == true ? rs.getString(9) : null, metricTypeValue[3] == true ? 
rs.getString(10) : null)); + reportItems.add(reportItem); + + endC.setTime(postgresFormat.parse(rs.getString(2))); + endC.add(Calendar.MONTH, 1); + lastDate = postgresFormat.format(endC.getTime()); + //if (reportItem != null) { + //fillWithZeros(postgresFormat.parse(lastDate), endDateForZeros, reportItem); } } /* @@ -2968,237 +2896,121 @@ public class UsageStatsRepository { connection = usageStatsDB.getConnection(); if (granularity.equalsIgnoreCase("totals")) { - if (dataType.equals("")) { - st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " - + "SELECT rc.type, sum(us.downloads) as downloads, sum(us.views) as views, " - + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, " - + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews " - + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " - + statsDB + ".result_oids ro, tpd " - + "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? AND us.result_id=? 
AND rc.id=us.result_id AND us.result_id=rs.id AND ro.id=us.result_id AND tpd.id=us.result_id " - + "GROUP BY rc.type, rs.title, us.result_id, rs.title, rs.year order by rc.type ASC;"); + st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " + + "SELECT rc.type, sum(us.downloads) as downloads, sum(us.views) as views, " + + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, " + + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews," + + "dp.access_method as access_method " + + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " + + statsDB + ".result_oids ro, tpd, datasetsusagestats_20210312a.datasetsperformance dp " + + "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? AND us.result_id=? AND rc.id=us.result_id " + + "AND us.result_id=rs.id AND ro.id=us.result_id AND tpd.id=us.result_id AND rc.type='Dataset' " + + "GROUP BY ro.id, rc.type, rs.title, us.result_id, rs.year, dp.access_method ORDER by rc.type ASC;"); - st.setString(1, beginDateStr); - st.setString(2, endDateStr); - st.setString(3, repositoryIdentifier); - st.setString(4, itemIdentifier); + st.setString(1, beginDateStr); + st.setString(2, endDateStr); + st.setString(3, repositoryIdentifier); + st.setString(4, itemIdentifier); - rs = st.executeQuery(); - COUNTER_Dataset_Usage reportItem = null; - boolean[] metricTypeValue = {false, false, false, false}; - if (metricType.contains("Total_Dataset_Requests")) { - metricTypeValue[0] = true; - } - if (metricType.contains("Total_Dataset_Investigations")) { - metricTypeValue[1] = true; - } - if (metricType.contains("Unique_Dataset_Requests")) { - metricTypeValue[2] = true; - } - if (metricType.contains("Unique_Dataset_Investigations")) { - metricTypeValue[3] = true; - } + 
rs = st.executeQuery(); + COUNTER_Dataset_Usage reportItem = null; + boolean[] metricTypeValue = {false, false, false, false}; + if (metricType.contains("Total_Dataset_Requests")) { + metricTypeValue[0] = true; + } + if (metricType.contains("Total_Dataset_Investigations")) { + metricTypeValue[1] = true; + } + if (metricType.contains("Unique_Dataset_Requests")) { + metricTypeValue[2] = true; + } + if (metricType.contains("Unique_Dataset_Investigations")) { + metricTypeValue[3] = true; + } - while (rs.next()) { - reportItem = new COUNTER_Dataset_Usage(rs.getString(5), "OpenAIRE", platform, rs.getString(1), Integer.toString(rs.getInt(6)), "Regular"); - String[] identifiersAll = rs.getString(7).split("#!#"); - for (int i = 0; i < identifiersAll.length; i++) { - String[] typeIdentifierArray = identifiersAll[i].split("#-#"); - reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); - } - - reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(beginDate), report_dateFormat.format(endDate), - metricTypeValue[0] == true ? rs.getString(2) : null, metricTypeValue[1] == true ? rs.getString(3) : null, metricTypeValue[2] == true ? rs.getString(8) : null, metricTypeValue[3] == true ? rs.getString(9) : null)); - reportItems.add(reportItem); - } - } else { - st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " - + "SELECT rc.type, sum(us.downloads) as downloads, sum(us.views) as views, " - + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, " - + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews " - + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " - + statsDB + ".result_oids ro, tpd " - + "WHERE us.`date`>=? 
AND us.`date`<=? AND us.repository_id=? AND us.result_id=? AND rc.type=? AND rc.id=us.result_id " - + "AND us.result_id=rs.id AND ro.id=us.result_id AND tpd.id=us.result_id " - + "GROUP BY ro.id, rc.type, rs.title, us.result_id, rs.year order by rc.type ASC;"); - - st.setString(1, beginDateStr); - st.setString(2, endDateStr); - st.setString(3, repositoryIdentifier); - st.setString(4, itemIdentifier); - st.setString(5, dataType); - - rs = st.executeQuery(); - COUNTER_Dataset_Usage reportItem = null; - boolean[] metricTypeValue = {false, false, false, false}; - if (metricType.contains("Total_Dataset_Requests")) { - metricTypeValue[0] = true; - } - if (metricType.contains("Total_Dataset_Investigations")) { - metricTypeValue[1] = true; - } - if (metricType.contains("Unique_Dataset_Requests")) { - metricTypeValue[2] = true; - } - if (metricType.contains("Unique_Dataset_Investigations")) { - metricTypeValue[3] = true; - } - - while (rs.next()) { - reportItem = new COUNTER_Dataset_Usage(rs.getString(5), "OpenAIRE", platform, rs.getString(1), Integer.toString(rs.getInt(6)), "Regular"); - String[] identifiersAll = rs.getString(7).split("#!#"); - for (int i = 0; i < identifiersAll.length; i++) { - String[] typeIdentifierArray = identifiersAll[i].split("#-#"); - reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); - } - reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(beginDate), report_dateFormat.format(endDate), - metricTypeValue[0] == true ? rs.getString(2) : null, metricTypeValue[1] == true ? rs.getString(3) : null, metricTypeValue[2] == true ? rs.getString(8) : null, metricTypeValue[3] == true ? 
rs.getString(9) : null)); - reportItems.add(reportItem); + while (rs.next()) { + reportItem = new COUNTER_Dataset_Usage(rs.getString(5), "OpenAIRE", platform, rs.getString(1), Integer.toString(rs.getInt(6)), rs.getString(10)); + String[] identifiersAll = rs.getString(7).split("#!#"); + for (int i = 0; i < identifiersAll.length; i++) { + String[] typeIdentifierArray = identifiersAll[i].split("#-#"); + reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); } + reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(beginDate), report_dateFormat.format(endDate), + metricTypeValue[0] == true ? rs.getString(2) : null, metricTypeValue[1] == true ? rs.getString(3) : null, metricTypeValue[2] == true ? rs.getString(8) : null, metricTypeValue[3] == true ? rs.getString(9) : null)); + reportItems.add(reportItem); } } else if (granularity.equalsIgnoreCase("monthly")) { - if (dataType.equals("")) { + st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " + + "SELECT rc.type, us.`date`, sum(us.downloads) as downloads, sum(us.views) as views, " + + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, " + + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews, " + + "dp.access_method as access_method " + + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " + + statsDB + ".result_oids ro, tpd, datasetsusagestats_20210312a.datasetsperformance dp " + + "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? AND us.result_id=? 
AND rc.id=us.result_id " + + "AND us.result_id=rs.id AND ro.id=us.result_id AND tpd.id=us.result_id AND rc.type='Dataset' " + + "GROUP BY ro.id, rc.type, rs.title, us.result_id, us.`date`, rs.year, dp.access_method ORDER by rc.type, us.`date` ASC;"); + st.setString(1, beginDateStr); + st.setString(2, endDateStr); + st.setString(3, repositoryIdentifier); + st.setString(4, itemIdentifier); - st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " - + "SELECT rc.type, us.`date`, sum(us.downloads) as downloads, sum(us.views) as views, " - + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, " - + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews " - + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " - + statsDB + ".result_oids ro, tpd " - + "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? AND us.result_id=? 
AND rc.id=us.result_id AND us.result_id=rs.id AND ro.id=us.result_id " - + "AND tpd.id=us.result_id " - + "GROUP BY ro.id, rc.type, rs.title, us.result_id, us.`date`, rs.year order by rc.type, us.`date` ASC;"); - st.setString(1, beginDateStr); - st.setString(2, endDateStr); - st.setString(3, repositoryIdentifier); - st.setString(4, itemIdentifier); + rs = st.executeQuery(); + String result = ""; + String lastDate = ""; + COUNTER_Dataset_Usage reportItem = null; - rs = st.executeQuery(); - String result = ""; - String lastDate = ""; - COUNTER_Dataset_Usage reportItem = null; - boolean[] metricTypeValue = {false, false, false, false}; + boolean[] metricTypeValue = {false, false, false, false}; - if (metricType.contains("Total_Dataset_Requests")) { - metricTypeValue[0] = true; - } - if (metricType.contains("Total_Dataset_Investigations")) { - metricTypeValue[1] = true; - } - if (metricType.contains("Unique_Dataset_Requests")) { - metricTypeValue[2] = true; - } - if (metricType.contains("Unique_Dataset_Investigations")) { - metricTypeValue[3] = true; - } + if (metricType.contains("Total_Dataset_Requests")) { + metricTypeValue[0] = true; + } + if (metricType.contains("Total_Dataset_Investigations")) { + metricTypeValue[1] = true; + } + if (metricType.contains("Unique_Dataset_Requests")) { + metricTypeValue[2] = true; + } + if (metricType.contains("Unique_Dataset_Investigations")) { + metricTypeValue[3] = true; + } - int ft_total = 0; - int abstr = 0; + int ft_total = 0; + int abstr = 0; - Calendar endCal = Calendar.getInstance(); - endCal.setTime(postgresFormat.parse(endDateStr)); - endCal.add(Calendar.MONTH, 1); - Date endDateForZeros = endCal.getTime(); + Calendar endCal = Calendar.getInstance(); + endCal.setTime(postgresFormat.parse(endDateStr)); + endCal.add(Calendar.MONTH, 1); + Date endDateForZeros = endCal.getTime(); - lastDate = beginDateStr; - String datatype = ""; - while (rs.next()) { - Calendar endC = Calendar.getInstance(); - 
endC.setTime(postgresFormat.parse(rs.getString(2))); - endC.set(Calendar.DATE, endC.getActualMaximum(Calendar.DATE)); + lastDate = beginDateStr; + String datatype = ""; + + while (rs.next()) { + Calendar endC = Calendar.getInstance(); + endC.setTime(postgresFormat.parse(rs.getString(2))); + endC.set(Calendar.DATE, endC.getActualMaximum(Calendar.DATE)); // //iterate on data types - if (!datatype.equals(rs.getString(1))) { - datatype = rs.getString(1); - } - reportItem = new COUNTER_Dataset_Usage(rs.getString(6), "OpenAIRE", platform, rs.getString(1), Integer.toString(rs.getInt(7)), "Regular"); - String[] identifiersAll = rs.getString(8).split("#!#"); - for (int i = 0; i < identifiersAll.length; i++) { - String[] typeIdentifierArray = identifiersAll[i].split("#-#"); - reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); - } - reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(postgresFormat.parse(rs.getString(2))), report_dateFormat.format(endC.getTime()), - metricTypeValue[0] == true ? rs.getString(3) : null, metricTypeValue[1] == true ? rs.getString(4) : null, metricTypeValue[2] == true ? rs.getString(9) : null, metricTypeValue[3] == true ? 
rs.getString(10) : null)); - reportItems.add(reportItem); - - endC.setTime(postgresFormat.parse(rs.getString(2))); - endC.add(Calendar.MONTH, 1); - lastDate = postgresFormat.format(endC.getTime()); - //if (reportItem != null) { - //fillWithZeros(postgresFormat.parse(lastDate), endDateForZeros, reportItem); - } - } else { - st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " - + "SELECT rc.type, us.`date`, sum(us.downloads) as downloads, sum(us.views) as views, " - + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, " - + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews " - + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " - + statsDB + ".result_oids ro, tpd " - + "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? AND us.result_id=? AND rc.type=? 
AND rc.id=us.result_id " - + "AND us.result_id=rs.id AND ro.id=us.result_id AND tpd.id=us.result_id " - + "GROUP BY ro.id, rc.type, rs.title, us.result_id, us.`date`, rs.year order by rc.type, us.`date` ASC;"); - st.setString(1, beginDateStr); - st.setString(2, endDateStr); - st.setString(3, repositoryIdentifier); - st.setString(4, itemIdentifier); - st.setString(5, dataType); - - rs = st.executeQuery(); - String result = ""; - String lastDate = ""; - COUNTER_Dataset_Usage reportItem = null; - - boolean[] metricTypeValue = {false, false, false, false}; - - if (metricType.contains("Total_Dataset_Requests")) { - metricTypeValue[0] = true; - } - if (metricType.contains("Total_Dataset_Investigations")) { - metricTypeValue[1] = true; - } - if (metricType.contains("Unique_Dataset_Requests")) { - metricTypeValue[2] = true; - } - if (metricType.contains("Unique_Dataset_Investigations")) { - metricTypeValue[3] = true; + if (!datatype.equals(rs.getString(1))) { + datatype = rs.getString(1); } - int ft_total = 0; - int abstr = 0; - - Calendar endCal = Calendar.getInstance(); - endCal.setTime(postgresFormat.parse(endDateStr)); - endCal.add(Calendar.MONTH, 1); - Date endDateForZeros = endCal.getTime(); - - lastDate = beginDateStr; - String datatype = ""; - - while (rs.next()) { - Calendar endC = Calendar.getInstance(); - endC.setTime(postgresFormat.parse(rs.getString(2))); - endC.set(Calendar.DATE, endC.getActualMaximum(Calendar.DATE)); -// //iterate on data types - if (!datatype.equals(rs.getString(1))) { - datatype = rs.getString(1); - } - - reportItem = new COUNTER_Dataset_Usage(rs.getString(6), "OpenAIRE", platform, rs.getString(1), Integer.toString(rs.getInt(7)), "Regular"); - String[] identifiersAll = rs.getString(8).split("#!#"); - for (int i = 0; i < identifiersAll.length; i++) { - String[] typeIdentifierArray = identifiersAll[i].split("#-#"); - reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); - } - 
reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(postgresFormat.parse(rs.getString(2))), report_dateFormat.format(endC.getTime()), - metricTypeValue[0] == true ? rs.getString(3) : null, metricTypeValue[1] == true ? rs.getString(4) : null, metricTypeValue[2] == true ? rs.getString(9) : null, metricTypeValue[3] == true ? rs.getString(10) : null)); - reportItems.add(reportItem); - - endC.setTime(postgresFormat.parse(rs.getString(2))); - endC.add(Calendar.MONTH, 1); - lastDate = postgresFormat.format(endC.getTime()); - //if (reportItem != null) { - //fillWithZeros(postgresFormat.parse(lastDate), endDateForZeros, reportItem); + reportItem = new COUNTER_Dataset_Usage(rs.getString(6), "OpenAIRE", platform, rs.getString(1), Integer.toString(rs.getInt(7)), rs.getString(11)); + String[] identifiersAll = rs.getString(8).split("#!#"); + for (int i = 0; i < identifiersAll.length; i++) { + String[] typeIdentifierArray = identifiersAll[i].split("#-#"); + reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); } + reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(postgresFormat.parse(rs.getString(2))), report_dateFormat.format(endC.getTime()), + metricTypeValue[0] == true ? rs.getString(3) : null, metricTypeValue[1] == true ? rs.getString(4) : null, metricTypeValue[2] == true ? rs.getString(9) : null, metricTypeValue[3] == true ? 
rs.getString(10) : null)); + reportItems.add(reportItem); + + endC.setTime(postgresFormat.parse(rs.getString(2))); + endC.add(Calendar.MONTH, 1); + lastDate = postgresFormat.format(endC.getTime()); + //if (reportItem != null) { + //fillWithZeros(postgresFormat.parse(lastDate), endDateForZeros, reportItem); } } /* @@ -3219,7 +3031,7 @@ public class UsageStatsRepository { public void executeItemsAllRepoDSR(List reportItems, String itemIdentifier, Date beginDate, - Date endDate, List metricType, String dataType, String granularity) throws Exception { + Date endDate, List metricType, String granularity) throws Exception { SimpleDateFormat report_dateFormat = new SimpleDateFormat("yyyy-MM-dd"); SimpleDateFormat postgresFormat = new SimpleDateFormat("yyyy/MM"); String beginDateStr = postgresFormat.format(beginDate); @@ -3233,240 +3045,128 @@ public class UsageStatsRepository { connection = usageStatsDB.getConnection(); if (granularity.equalsIgnoreCase("totals")) { - if (dataType.equals("")) { - st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " - + "SELECT rc.type, sum(us.downloads) as downloads, sum(us.views) as views, " - + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, ds.name, " - + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews " - + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " - + statsDB + ".result_oids ro, tpd, " + statsDB + ".datasource ds " - + "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=ds.id AND us.result_id=? 
AND rc.id=us.result_id AND us.result_id=rs.id AND ro.id=us.result_id AND tpd.id=us.result_id " - + "GROUP BY rc.type, rs.title, us.result_id, rs.title, rs.year, ds.name order by rc.type ASC;"); - st.setString(1, beginDateStr); - st.setString(2, endDateStr); - st.setString(3, itemIdentifier); + st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " + + "SELECT rc.type, sum(us.downloads) as downloads, sum(us.views) as views, " + + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, ds.name, " + + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews, " + + "dp.access_method " + + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " + + statsDB + ".result_oids ro, tpd, " + statsDB + ".datasource ds, datasetsusagestats_20210312a.datasetsperformance dp " + + "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=ds.id AND us.result_id=? 
AND rc.id=us.result_id " + + "AND us.result_id=rs.id AND ro.id=us.result_id AND tpd.id=us.result_id AND rc.type='Dataset' " + + "GROUP BY ro.id, rc.type, rs.title, us.result_id, rs.year, ds.name,dp.access_method ORDER by rc.type ASC;"); - rs = st.executeQuery(); - COUNTER_Dataset_Usage reportItem = null; - boolean[] metricTypeValue = {false, false, false, false}; - if (metricType.contains("Total_Dataset_Requests")) { - metricTypeValue[0] = true; - } - if (metricType.contains("Total_Dataset_Investigations")) { - metricTypeValue[1] = true; - } - if (metricType.contains("Unique_Dataset_Requests")) { - metricTypeValue[2] = true; - } - if (metricType.contains("Unique_Dataset_Investigations")) { - metricTypeValue[3] = true; - } + st.setString(1, beginDateStr); + st.setString(2, endDateStr); + st.setString(3, itemIdentifier); - while (rs.next()) { - reportItem = new COUNTER_Dataset_Usage(rs.getString(5), "OpenAIRE", rs.getString(8), rs.getString(1), Integer.toString(rs.getInt(6)), "Regular"); - String[] identifiersAll = rs.getString(7).split("#!#"); - for (int i = 0; i < identifiersAll.length; i++) { - String[] typeIdentifierArray = identifiersAll[i].split("#-#"); - reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); - } - - reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(beginDate), report_dateFormat.format(endDate), - metricTypeValue[0] == true ? rs.getString(2) : null, metricTypeValue[1] == true ? rs.getString(3) : null, metricTypeValue[2] == true ? rs.getString(9) : null, metricTypeValue[3] == true ? 
rs.getString(10) : null)); - reportItems.add(reportItem); - } - - } else { - st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " - + "SELECT rc.type, sum(us.downloads) as downloads, sum(us.views) as views, " - + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, ds.name, " - + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews " - + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " - + statsDB + ".result_oids ro, tpd, " + statsDB + ".datasource ds " - + "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=ds.id AND us.result_id=? AND rc.type=? AND rc.id=us.result_id " - + "AND us.result_id=rs.id AND ro.id=us.result_id AND tpd.id=us.result_id " - + "GROUP BY ro.id, rc.type, rs.title, us.result_id, rs.year, ds.name order by rc.type ASC;"); - - st.setString(1, beginDateStr); - st.setString(2, endDateStr); - st.setString(3, itemIdentifier); - st.setString(4, dataType); - - rs = st.executeQuery(); - COUNTER_Dataset_Usage reportItem = null; - boolean[] metricTypeValue = {false, false, false, false}; - - if (metricType.contains("Total_Dataset_Requests")) { - metricTypeValue[0] = true; - } - if (metricType.contains("Total_Dataset_Investigations")) { - metricTypeValue[1] = true; - } - if (metricType.contains("Unique_Dataset_Requests")) { - metricTypeValue[2] = true; - } - if (metricType.contains("Unique_Dataset_Investigations")) { - metricTypeValue[3] = true; - } - - while (rs.next()) { - String platform = getInstitutionName(rs.getString(8)); - reportItem = new COUNTER_Dataset_Usage(rs.getString(5), "OpenAIRE", rs.getString(8), rs.getString(1), Integer.toString(rs.getInt(6)), "Regular"); - String[] identifiersAll = rs.getString(7).split("#!#"); - for (int i = 0; i < 
identifiersAll.length; i++) { - String[] typeIdentifierArray = identifiersAll[i].split("#-#"); - reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); - } - - reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(beginDate), report_dateFormat.format(endDate), - metricTypeValue[0] == true ? rs.getString(2) : null, metricTypeValue[1] == true ? rs.getString(3) : null, metricTypeValue[2] == true ? rs.getString(9) : null, metricTypeValue[3] == true ? rs.getString(10) : null)); - reportItems.add(reportItem); - } + rs = st.executeQuery(); + COUNTER_Dataset_Usage reportItem = null; + boolean[] metricTypeValue = {false, false, false, false}; + if (metricType.contains("Total_Dataset_Requests")) { + metricTypeValue[0] = true; } + if (metricType.contains("Total_Dataset_Investigations")) { + metricTypeValue[1] = true; + } + if (metricType.contains("Unique_Dataset_Requests")) { + metricTypeValue[2] = true; + } + if (metricType.contains("Unique_Dataset_Investigations")) { + metricTypeValue[3] = true; + } + + while (rs.next()) { + String platform = getInstitutionName(rs.getString(8)); + reportItem = new COUNTER_Dataset_Usage(rs.getString(5), "OpenAIRE", rs.getString(8), rs.getString(1), Integer.toString(rs.getInt(6)), rs.getString(11)); + String[] identifiersAll = rs.getString(7).split("#!#"); + for (int i = 0; i < identifiersAll.length; i++) { + String[] typeIdentifierArray = identifiersAll[i].split("#-#"); + reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); + } + + reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(beginDate), report_dateFormat.format(endDate), + metricTypeValue[0] == true ? rs.getString(2) : null, metricTypeValue[1] == true ? rs.getString(3) : null, metricTypeValue[2] == true ? rs.getString(9) : null, metricTypeValue[3] == true ? 
rs.getString(10) : null)); + reportItems.add(reportItem); + } + + //} } else if (granularity.equalsIgnoreCase("monthly")) { - if (dataType.equals("")) { + st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " + + "SELECT rc.type, us.`date`, sum(us.downloads) as downloads, sum(us.views) as views, " + + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, ds.name, " + + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews, " + + "dp.access_method " + + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " + + statsDB + ".result_oids ro, tpd, " + statsDB + ".datasource ds, datasetsusagestats_20210312a.datasetsperformance dp " + + "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=ds.id AND us.result_id=? AND rc.id=us.result_id " + + "AND us.result_id=rs.id AND ro.id=us.result_id AND tpd.id=us.result_id AND rc.type='Dataset' " + + "GROUP BY ro.id, rc.type, rs.title, us.result_id, us.`date`, rs.year, ds.name, dp.access_method ORDER by rc.type, us.`date` ASC;"); + st.setString(1, beginDateStr); + st.setString(2, endDateStr); + st.setString(3, itemIdentifier); - st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " - + "SELECT rc.type, us.`date`, sum(us.downloads) as downloads, sum(us.views) as views, " - + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, ds.name, " - + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews " - + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " - + statsDB + ".result_oids 
ro, tpd, " + statsDB + ".datasource ds " - + "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=ds.id AND us.result_id=? AND rc.id=us.result_id AND us.result_id=rs.id AND ro.id=us.result_id " - + "AND tpd.id=us.result_id " - + "GROUP BY ro.id, rc.type, rs.title, us.result_id, us.`date`, rs.year, ds.name order by rc.type, us.`date` ASC;"); - st.setString(1, beginDateStr); - st.setString(2, endDateStr); - st.setString(3, itemIdentifier); + rs = st.executeQuery(); + String result = ""; + String lastDate = ""; + COUNTER_Dataset_Usage reportItem = null; + boolean[] metricTypeValue = {false, false, false, false}; - rs = st.executeQuery(); - String result = ""; - String lastDate = ""; - COUNTER_Dataset_Usage reportItem = null; - boolean[] metricTypeValue = {false, false, false, false}; + if (metricType.contains("Total_Dataset_Requests")) { + metricTypeValue[0] = true; + } + if (metricType.contains("Total_Dataset_Investigations")) { + metricTypeValue[1] = true; + } + if (metricType.contains("Unique_Dataset_Requests")) { + metricTypeValue[2] = true; + } + if (metricType.contains("Unique_Dataset_Investigations")) { + metricTypeValue[3] = true; + } - if (metricType.contains("Total_Dataset_Requests")) { - metricTypeValue[0] = true; - } - if (metricType.contains("Total_Dataset_Investigations")) { - metricTypeValue[1] = true; - } - if (metricType.contains("Unique_Dataset_Requests")) { - metricTypeValue[2] = true; - } - if (metricType.contains("Unique_Dataset_Investigations")) { - metricTypeValue[3] = true; - } + int ft_total = 0; + int abstr = 0; - int ft_total = 0; - int abstr = 0; + Calendar endCal = Calendar.getInstance(); + endCal.setTime(postgresFormat.parse(endDateStr)); + endCal.add(Calendar.MONTH, 1); + Date endDateForZeros = endCal.getTime(); - Calendar endCal = Calendar.getInstance(); - endCal.setTime(postgresFormat.parse(endDateStr)); - endCal.add(Calendar.MONTH, 1); - Date endDateForZeros = endCal.getTime(); + lastDate = beginDateStr; + String datatype = ""; - 
lastDate = beginDateStr; - String datatype = ""; - while (rs.next()) { - Calendar endC = Calendar.getInstance(); - endC.setTime(postgresFormat.parse(rs.getString(2))); - endC.set(Calendar.DATE, endC.getActualMaximum(Calendar.DATE)); + while (rs.next()) { + Calendar endC = Calendar.getInstance(); + endC.setTime(postgresFormat.parse(rs.getString(2))); + endC.set(Calendar.DATE, endC.getActualMaximum(Calendar.DATE)); // //iterate on data types - if (!datatype.equals(rs.getString(1))) { - datatype = rs.getString(1); - } - reportItem = new COUNTER_Dataset_Usage(rs.getString(6), "OpenAIRE", rs.getString(9), rs.getString(1), Integer.toString(rs.getInt(7)), "Regular"); - String[] identifiersAll = rs.getString(8).split("#!#"); - for (int i = 0; i < identifiersAll.length; i++) { - String[] typeIdentifierArray = identifiersAll[i].split("#-#"); - reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); - } - reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(postgresFormat.parse(rs.getString(2))), report_dateFormat.format(endC.getTime()), - metricTypeValue[0] == true ? rs.getString(3) : null, metricTypeValue[1] == true ? rs.getString(4) : null, metricTypeValue[2] == true ? rs.getString(10) : null, metricTypeValue[3] == true ? 
rs.getString(11) : null)); - reportItems.add(reportItem); - endC.setTime(postgresFormat.parse(rs.getString(2))); - endC.add(Calendar.MONTH, 1); - lastDate = postgresFormat.format(endC.getTime()); - //if (reportItem != null) { - //fillWithZeros(postgresFormat.parse(lastDate), endDateForZeros, reportItem); - } - } else { - st = connection.prepareStatement("WITH tpd as (select id, concat(type,'#-#',pid) type_id from " + statsDB + ".result_pids) " - + "SELECT rc.type, us.`date`, sum(us.downloads) as downloads, sum(us.views) as views, " - + "us.result_id as resultid, rs.title as item, rs.year as yop, group_concat(distinct tpd.type_id,'#!#') as oid, ds.name, " - + "count(case when us.downloads >0 then 1 else null end) as uniquedownloads, count(case when us.views >0 then 1 else null end) as uniqueviews " - + "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc, " + statsDB + ".result rs, " - + statsDB + ".result_oids ro, tpd, " + statsDB + ".datasource ds " - + "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=ds.id AND us.result_id=? AND rc.type=? 
AND rc.id=us.result_id " - + "AND us.result_id=rs.id AND ro.id=us.result_id AND tpd.id=us.result_id " - + "GROUP BY ro.id, rc.type, rs.title, us.result_id, us.`date`, rs.year, ds.name order by rc.type, us.`date` ASC;"); - st.setString(1, beginDateStr); - st.setString(2, endDateStr); - st.setString(3, itemIdentifier); - st.setString(4, dataType); - - rs = st.executeQuery(); - String result = ""; - String lastDate = ""; - COUNTER_Dataset_Usage reportItem = null; - boolean[] metricTypeValue = {false, false, false, false}; - - if (metricType.contains("Total_Dataset_Requests")) { - metricTypeValue[0] = true; - } - if (metricType.contains("Total_Dataset_Investigations")) { - metricTypeValue[1] = true; - } - if (metricType.contains("Unique_Dataset_Requests")) { - metricTypeValue[2] = true; - } - if (metricType.contains("Unique_Dataset_Investigations")) { - metricTypeValue[3] = true; - } - - int ft_total = 0; - int abstr = 0; - - Calendar endCal = Calendar.getInstance(); - endCal.setTime(postgresFormat.parse(endDateStr)); - endCal.add(Calendar.MONTH, 1); - Date endDateForZeros = endCal.getTime(); - - lastDate = beginDateStr; - String datatype = ""; - - while (rs.next()) { - Calendar endC = Calendar.getInstance(); - endC.setTime(postgresFormat.parse(rs.getString(2))); - endC.set(Calendar.DATE, endC.getActualMaximum(Calendar.DATE)); -// //iterate on data types - if (!datatype.equals(rs.getString(1))) { + if (!datatype.equals(rs.getString(1))) { // if (reportItem != null) { // reportItems.add(reportItem); // } // reportItem = new COUNTER_Platform_Usage("", "OpenAIRE", rs.getString(1), "Regular", ""); - datatype = rs.getString(1); - } - reportItem = new COUNTER_Dataset_Usage(rs.getString(6), "OpenAIRE", rs.getString(9), rs.getString(1), Integer.toString(rs.getInt(7)), "Regular"); - String[] identifiersAll = rs.getString(8).split("#!#"); - for (int i = 0; i < identifiersAll.length; i++) { - String[] typeIdentifierArray = identifiersAll[i].split("#-#"); - 
reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); - } - - reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(postgresFormat.parse(rs.getString(2))), report_dateFormat.format(endC.getTime()), - metricTypeValue[0] == true ? rs.getString(3) : null, metricTypeValue[1] == true ? rs.getString(4) : null, metricTypeValue[2] == true ? rs.getString(10) : null, metricTypeValue[3] == true ? rs.getString(11) : null)); - reportItems.add(reportItem); - - endC.setTime(postgresFormat.parse(rs.getString(2))); - endC.add(Calendar.MONTH, 1); - lastDate = postgresFormat.format(endC.getTime()); - //if (reportItem != null) { - //fillWithZeros(postgresFormat.parse(lastDate), endDateForZeros, reportItem); + datatype = rs.getString(1); } + reportItem = new COUNTER_Dataset_Usage(rs.getString(6), "OpenAIRE", rs.getString(9), rs.getString(1), Integer.toString(rs.getInt(7)), rs.getString(12)); + String[] identifiersAll = rs.getString(8).split("#!#"); + for (int i = 0; i < identifiersAll.length; i++) { + String[] typeIdentifierArray = identifiersAll[i].split("#-#"); + reportItem.addIdentifier(new COUNTER_Dataset_Identifiers(typeIdentifierArray[0], typeIdentifierArray[1])); + } + + reportItem.addPerformance(new COUNTER_Dataset_Performance(report_dateFormat.format(postgresFormat.parse(rs.getString(2))), report_dateFormat.format(endC.getTime()), + metricTypeValue[0] == true ? rs.getString(3) : null, metricTypeValue[1] == true ? rs.getString(4) : null, metricTypeValue[2] == true ? rs.getString(10) : null, metricTypeValue[3] == true ? 
rs.getString(11) : null)); + reportItems.add(reportItem); + + endC.setTime(postgresFormat.parse(rs.getString(2))); + endC.add(Calendar.MONTH, 1); + lastDate = postgresFormat.format(endC.getTime()); + //if (reportItem != null) { + //fillWithZeros(postgresFormat.parse(lastDate), endDateForZeros, reportItem); } } /* @@ -3539,7 +3239,7 @@ public class UsageStatsRepository { } } catch (Exception e) { log.info(e); - }finally { + } finally { DbUtils.closeQuietly(connection); } return false; @@ -3567,8 +3267,7 @@ public class UsageStatsRepository { } catch (Exception e) { log.error("No member found " + e); - } - finally { + } finally { DbUtils.closeQuietly(rs); DbUtils.closeQuietly(st); DbUtils.closeQuietly(connection); diff --git a/src/main/java/eu/dnetlib/usagestats/services/SushiLiteService.java b/src/main/java/eu/dnetlib/usagestats/services/SushiLiteService.java index 8e71de0..98eb6b2 100755 --- a/src/main/java/eu/dnetlib/usagestats/services/SushiLiteService.java +++ b/src/main/java/eu/dnetlib/usagestats/services/SushiLiteService.java @@ -3,11 +3,7 @@ package eu.dnetlib.usagestats.services; import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Dataset_Report; import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Item_Report; import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Platform_Report; -import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Title_Report; -import eu.dnetlib.usagestats.sushilite.domain.ReportResponseWrapper; -import eu.dnetlib.usagestats.sushilite.domain.SUSHI_Consortium_Member_List; import eu.dnetlib.usagestats.sushilite.domain.SUSHI_Service_Status; -import java.sql.SQLException; import java.util.ArrayList; import java.util.List; @@ -39,6 +35,6 @@ public interface SushiLiteService { COUNTER_Item_Report buildReportIR(String customerID, String repositoryIdentifier, String itemIdentifier, String beginDate,String endDate, List metricType, String dataType,String granularity) throws Exception; String displayReportIR(String customerID, String 
repositoryIdentifier, String itemIdentifier, String beginDate,String endDate, List metricType, String dataType,String granularity); - COUNTER_Dataset_Report buildReportDSR(String customerID, String repositoryIdentifier, String itemIdentifier, String beginDate,String endDate, List metricType, String dataType,String granularity) throws Exception; - String displayReportDSR(String customerID, String repositoryIdentifier, String itemIdentifier, String beginDate,String endDate, List metricType, String dataType,String granularity); + COUNTER_Dataset_Report buildReportDSR(String customerID, String repositoryIdentifier, String itemIdentifier, String beginDate,String endDate, List metricType, String granularity) throws Exception; + String displayReportDSR(String customerID, String repositoryIdentifier, String itemIdentifier, String beginDate,String endDate, List metricType, String granularity); } diff --git a/src/main/java/eu/dnetlib/usagestats/services/SushiLiteServiceImpl.java b/src/main/java/eu/dnetlib/usagestats/services/SushiLiteServiceImpl.java index 3f1d8b7..4c928a3 100755 --- a/src/main/java/eu/dnetlib/usagestats/services/SushiLiteServiceImpl.java +++ b/src/main/java/eu/dnetlib/usagestats/services/SushiLiteServiceImpl.java @@ -310,12 +310,13 @@ public class SushiLiteServiceImpl implements SushiLiteService { @Override public ArrayList buildReportSupported() { ArrayList reportSupportedList = new ArrayList(); - SUSHI_Report_List r1 = new SUSHI_Report_List("Status Report", "", "5", "Current status of the reporting service supported by this API", "/status"); - SUSHI_Report_List r2 = new SUSHI_Report_List("Members Report", "", "5", "List of UsageCounts members", "/members"); + SUSHI_Report_List r1 = new SUSHI_Report_List("Status Report", "Status Report", "5", "Current status of the reporting service supported by this API", "/status"); + SUSHI_Report_List r2 = new SUSHI_Report_List("Members Report", "Members Report", "5", "List of UsageCounts members", "/members"); 
SUSHI_Report_List r3 = new SUSHI_Report_List("List Of Reports Report", "", "5", "List of reports supported by the API", "/reports/"); SUSHI_Report_List r4 = new SUSHI_Report_List("Platform Master Report PR", "PR", "5", "A customizable report summarizing activity across a provider’s platforms that allows the user to apply filters and select other configuration options for the report. ", "/PR"); SUSHI_Report_List r5 = new SUSHI_Report_List("Platform Usage Report", "PR_P1", "5", "Standard View of the Package Master Report that presents usage for the overall Platform broken down by Metric_Type.", "/PR_1"); - SUSHI_Report_List r6 = new SUSHI_Report_List("Platform Item Report", "IR", "5", "COUNTER 'Item Master Report", "/IR"); + SUSHI_Report_List r6 = new SUSHI_Report_List("Platform Item Report", "IR", "5", "COUNTER Item Master Report", "/IR"); + SUSHI_Report_List r7 = new SUSHI_Report_List("Datasets Report", "DSR", "5", "COUNTER Datasets Report", "/DSR"); reportSupportedList.add(r1); reportSupportedList.add(r2); @@ -323,6 +324,7 @@ public class SushiLiteServiceImpl implements SushiLiteService { reportSupportedList.add(r4); reportSupportedList.add(r5); reportSupportedList.add(r6); + reportSupportedList.add(r7); return reportSupportedList; } @@ -693,7 +695,7 @@ public class SushiLiteServiceImpl implements SushiLiteService { @Override public COUNTER_Dataset_Report buildReportDSR(String customerID, String repositoryIdentifier, String itemIdentifier, String beginDate, - String endDate, List metricType, String dataType, String granularity) throws Exception { + String endDate, List metricType,String granularity) throws Exception { ZonedDateTime dateTime = ZonedDateTime.now(); // Gets the current date and time, with your default time-zone DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'"); @@ -794,7 +796,7 @@ public class SushiLiteServiceImpl implements SushiLiteService { if (!itemid.equals("-1") && !repoid.equals("-1") && beginDateParsed !=
null && endDateParsed != null && beginDateParsed.before(endDateParsed)) { if (!itemid.equals("") && !repoid.equals("")) { //if (dataType.equalsIgnoreCase("") || dataType.equalsIgnoreCase("article")) { - usageStatsRepository.executeItemDSR(reportItems, repoid, itemIdentifier, beginDateParsed, endDateParsed, metricType, dataType, granularity); + usageStatsRepository.executeItemDSR(reportItems, repoid, itemIdentifier, beginDateParsed, endDateParsed, metricType, granularity); if (reportItems.isEmpty()) { reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data")); } @@ -802,14 +804,14 @@ public class SushiLiteServiceImpl implements SushiLiteService { //reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data")); //} } else if (!repoid.equals("")) { - usageStatsRepository.executeBatchItemsDSR(reportItems, repoid, itemIdentifier, beginDateParsed, endDateParsed, metricType, dataType, granularity); + usageStatsRepository.executeBatchItemsDSR(reportItems, repoid, itemIdentifier, beginDateParsed, endDateParsed, metricType, granularity); if (reportItems.isEmpty()) { reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data")); } } } if (repoid.equals("")) { - usageStatsRepository.executeItemDSR(reportItems, null, itemIdentifier, beginDateParsed, endDateParsed, metricType, dataType, granularity); + usageStatsRepository.executeItemDSR(reportItems, null, itemIdentifier, beginDateParsed, endDateParsed, metricType, granularity); if (reportItems.isEmpty()) { reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data")); } @@ -1107,13 +1109,11 @@ public class SushiLiteServiceImpl 
implements SushiLiteService { @Override public String displayReportDSR(String customerID, String repositoryIdentifier, String itemIdentifier, String beginDate, - String endDate, List metricType, - String dataType, String granularity - ) { + String endDate, List metricType,String granularity) { ObjectMapper objectMapper = new ObjectMapper(); try { - COUNTER_Dataset_Report report = buildReportDSR(customerID, repositoryIdentifier, itemIdentifier, beginDate, endDate, metricType, dataType, granularity); + COUNTER_Dataset_Report report = buildReportDSR(customerID, repositoryIdentifier, itemIdentifier, beginDate, endDate, metricType, granularity); if (reportForCompression == false) { return "
" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(report) + "
"; } else { diff --git a/src/main/resources/static/index.html b/src/main/resources/static/index.html index 0059da9..95f1233 100644 --- a/src/main/resources/static/index.html +++ b/src/main/resources/static/index.html @@ -81,6 +81,7 @@
  • COUNTER PR
  • COUNTER PR_P1
  • COUNTER IR
  • +
  • COUNTER Datasets Report

  • diff --git a/src/main/resources/static/sushilite/DSR/index.html b/src/main/resources/static/sushilite/DSR/index.html new file mode 100644 index 0000000..dd5a347 --- /dev/null +++ b/src/main/resources/static/sushilite/DSR/index.html @@ -0,0 +1,201 @@ + + + + + + + + + + OpenAIRE SUSHI Lite Client + + + + + + + + +
    + + + + +
    +
    +
    +
    +
    +
    +
    +
    +
    +

    Report Request

    +
    +
    + Report Name: +
    +
    + +
    +
    +
    +
    + Release: +
    +
    + +
    +
    +
    +
    + Requestor: +
    +
    + +
    +
    +

    Report Filters

    +
    Date range
    +

    Valid date formats are yyyy-mm-dd or yyyy-mm. Default range is the last available month. +

    +
    +
    + Begin Date: +
    +
    + +
    +
    +
    +
    + End Date: +
    +
    + +
    +
    +
    Filters
    +

    Provide either a Repository Identifier or an Item Identifier
    + Identifier format: namespace:value
    + Valid namespace for Repository Identifier: openaire or opendoar.
    + Valid namespace for Dataset Identifier: openaire, doi or oid (for OAI-PMH). +

    +
    +
    + Repository Identifier: +
    +
    + +
    +
    +
    +
    + Dataset Identifier: +
    +
    + +
    +
    +

    Metric Type

    +

    Total_Dataset_Requests

    +

    Total_Dataset_Investigations

    +

    Unique_Dataset_Requests

    +

    Unique_Dataset_Investigations

    + +

    Report Attributes

    +

    Valid Granularity values: Monthly or Totals

    +
    +
    + Granularity: +
    +
    + +
    +
    + + + +
    +
    +
    +
    + + +
    + + diff --git a/src/main/resources/static/sushilite/index.html b/src/main/resources/static/sushilite/index.html index b23ff08..c3d8a2e 100644 --- a/src/main/resources/static/sushilite/index.html +++ b/src/main/resources/static/sushilite/index.html @@ -81,7 +81,7 @@
  • COUNTER PR
  • COUNTER PR_P1
  • COUNTER IR
  • - +
  • COUNTER Datasets Report

  • diff --git a/usageStatsAPI.properties b/usageStatsAPI.properties index fc78772..8fd2cda 100644 --- a/usageStatsAPI.properties +++ b/usageStatsAPI.properties @@ -5,17 +5,17 @@ spring.datasource.driverClassName=com.cloudera.impala.jdbc41.Driver spring.jpa.properties.hibernate.dialect = org.hibernate.dialect.HSQLDialect spring.datasource.hikari.max-lifetime=600000 usagestats.url=jdbc:impala://iis-cdh5-test-gw.ocean.icm.edu.pl:21050/;UseNativeQuery=1;AutoReconnect=1 -usagestats.redis.hostname=10.19.65.40 +usagestats.redis.hostname=localhost usagestats.redis.port=6379 usagestats.redis_scheme=tcp spring.jackson.serialization.INDENT_OUTPUT=true prod.statsdb=openaire_prod_stats prod.usagestatsImpalaDB=openaire_prod_usage_stats compression.max_number_of_records=100 -download.folder=/home/dimitris.pierrakos/Sushilite_R5/DownloadSushiReports -sushi-lite.server=https://beta.services.openaire.eu/usagestats_r5 +download.folder=/Users/dpie/Desktop/DownloadSushiReports +sushi-lite.server=http://localhost/usagestats_r5 server.servlet.context-path=/usagestats_r5 #server.port=8080 #spring.datasource.testWhileIdle = true #spring.datasource.timeBetweenEvictionRunsMillis = 60000 -#spring.datasource.validationQuery = SELECT 1 \ No newline at end of file +#spring.datasource.validationQuery = SELECT 1