Commit 05122021

dimitrispie 2021-01-05 14:22:12 +02:00
parent df98293291
commit f620bd36bd
5 changed files with 405 additions and 291 deletions

View File

@@ -165,7 +165,7 @@
</configuration>
</plugin>
</plugins>
<finalName>usagestats</finalName>
<finalName>usagestats_r5</finalName>
</build>
<repositories>
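Note: the only change in this first file is the Maven finalName, so the packaged artifact is renamed from usagestats to usagestats_r5 (for example target/usagestats_r5.war, assuming the module is packaged as a war); this lines up with the new /sushilite/r5/... request mappings introduced in the controller below.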

View File

@@ -1,29 +1,15 @@
package eu.dnetlib.usagestats.controllers;
import eu.dnetlib.usagestats.services.SushiLiteService;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.util.FileCopyUtils;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestHeader;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
/**
* Created by dimitris.pierrakos on 09/10/2020.
* Created by dpie on 30/12/2020.
*/
@RestController
class SushiLiteController {
@@ -31,62 +17,39 @@ class SushiLiteController {
private final Logger log = Logger.getLogger(this.getClass());
private final SushiLiteService sushiLiteService;
@Value("${download.folder}")
private String download_folder;
public SushiLiteController(SushiLiteService sushiLiteService) {
this.sushiLiteService = sushiLiteService;
}
@RequestMapping(value = "/sushilite/GetReport/", method = RequestMethod.GET)
public ResponseEntity<String> getReport(@RequestParam(value = "Report", defaultValue = "") String reportP, @RequestParam(value = "Release", defaultValue = "4") String release, @RequestParam(value = "RequestorID", defaultValue = "anonymous") String requestorId,
@RequestMapping(value = "/sushilite/r5/GetReport/", method = RequestMethod.GET)
public String getReport(@RequestParam(value = "Report", defaultValue = "") String reportP, @RequestParam(value = "Release", defaultValue = "4") String release, @RequestParam(value = "RequestorID", defaultValue = "anonymous") String requestorId,
@RequestParam(value = "BeginDate", defaultValue = "") String beginDate, @RequestParam(value = "EndDate", defaultValue = "") String endDate, @RequestParam(value = "RepositoryIdentifier", defaultValue = "") String repositoryIdentifier,
@RequestParam(value = "ItemIdentifier", defaultValue = "") String itemIdentifier, @RequestParam(value = "ItemDataType", defaultValue = "") String itemDataType,
@RequestParam(value = "hasDOI", defaultValue = "") String hasDoi, @RequestParam(value = "Granularity", defaultValue = "Monthly") String granularity, @RequestParam(value = "Callback", defaultValue = "") String callback,
@RequestParam(value = "Pretty", defaultValue = "") String pretty,
//added for compression case report
@RequestHeader(value = "User-Agent", required = false) String userAgent) throws InterruptedException, Exception {
@RequestParam(value = "Pretty", defaultValue = "") String pretty) {
log.info("Sushi Report request: " + reportP + " from " + requestorId);
log.info("repository identifier: " + repositoryIdentifier + " - item identifier: " + itemIdentifier);
String report = sushiLiteService.displayReport(reportP, release, requestorId, beginDate, endDate, repositoryIdentifier, itemIdentifier, itemDataType, hasDoi, granularity, callback, pretty, userAgent);
if (report.indexOf(".zip") < 0) {
return new ResponseEntity<>(report, HttpStatus.OK);
} else {
/* URI reportZipURL = new URI(report);
HttpHeaders httpHeaders = new HttpHeaders();
httpHeaders.setLocation(reportZipURL);*/
//report= "report is available. Download it from localhost:8080/download/file.zip";
String content = "<!DOCTYPE html>"
+ "<html>"
+ "<head>"
+ "OpenAIRE SUSHI Lite Client Report"
+ "</head>"
+ "<body>"
+ "<p>"
+ "<a href='" + report + "' target='_blank'>Click to download the report</a>"
+ "</p>"
+ "</body>"
+ "</html>";
log.info(content);
//return new ResponseEntity<>(httpHeaders, HttpStatus.SEE_OTHER);
return new ResponseEntity<>(content, HttpStatus.OK);
}
return sushiLiteService.displayReport(reportP, release, requestorId, beginDate, endDate, repositoryIdentifier, itemIdentifier, itemDataType, hasDoi, granularity, callback, pretty);
}
@RequestMapping(value = "/download/{file_name}", method = RequestMethod.GET)
public void downloadFile(HttpServletResponse response, @PathVariable("file_name") String filetoDownload) throws IOException {
File file = new File(download_folder + "/" + filetoDownload + ".zip");
log.info("File downloaded at " + file.getAbsolutePath());
String mimeType = "application/octet-stream";
response.setContentType(mimeType);
/* "Content-Disposition : attachment" will be directly download, may provide save as popup, based on your browser setting*/
response.setHeader("Content-Disposition", String.format("attachment; filename=\"%s\"", file.getName()));
response.setContentLength((int) file.length());
InputStream inputStream = new BufferedInputStream(new FileInputStream(file));
FileCopyUtils.copy(inputStream, response.getOutputStream());
@RequestMapping(value = "/sushilite/r5/status", method = RequestMethod.GET)
public String getReportStatus() {
log.info("Sushi Report status request ");
return sushiLiteService.displayReportStatus();
}
@RequestMapping(value = "/sushilite/r5/reports", method = RequestMethod.GET)
public String getReportSupported() {
log.info("Sushi Supported Reports request ");
return sushiLiteService.displayReportsSupported();
}
@RequestMapping(value = "/sushilite/r5/reports/pr", method = RequestMethod.GET)
public String getReportPR(@RequestParam(value = "RepositoryIdentifier", defaultValue = "") String repositoryIdentifier,
@RequestParam(value = "BeginDate", defaultValue = "") String beginDate, @RequestParam(value = "EndDate", defaultValue = "") String endDate) {
log.info("Sushi PR Report request for repository "+repositoryIdentifier);
return sushiLiteService.displayReportPR(repositoryIdentifier, beginDate, endDate);
}
}
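The new r5 endpoints delegate straight to SushiLiteService. A minimal sketch of the service surface this controller now assumes, inferred only from the calls above (the interface name and package come from the import at the top of the file; parameter names are illustrative):

package eu.dnetlib.usagestats.services;

public interface SushiLiteService {

    // Backs /sushilite/r5/GetReport/ (generic report dispatcher)
    String displayReport(String report, String release, String requestorId,
            String beginDate, String endDate, String repositoryIdentifier,
            String itemIdentifier, String itemDataType, String hasDoi,
            String granularity, String callback, String pretty);

    // Backs /sushilite/r5/status
    String displayReportStatus();

    // Backs /sushilite/r5/reports
    String displayReportsSupported();

    // Backs /sushilite/r5/reports/pr (platform report per repository)
    String displayReportPR(String repositoryIdentifier, String beginDate, String endDate);
}

With the application deployed, the platform report could then be requested with a plain GET such as /sushilite/r5/reports/pr?RepositoryIdentifier=...&BeginDate=...&EndDate=... (identifier and date formats here are illustrative).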

View File

@@ -50,7 +50,8 @@ public class UsageStatsRepository {
@Value("${prod.usagestatsImpalaDB}")
private String usagestatsImpalaDB;
public UsageStatsRepository(DataSource usageStatsDB, RedisTemplate<String, String> redisTemplate) {
public UsageStatsRepository(DataSource usageStatsDB,
RedisTemplate<String, String> redisTemplate) {
this.usageStatsDB = usageStatsDB;
this.jedis = redisTemplate.opsForHash();
}
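The constructor change above is only a line wrap: the repository still receives the JDBC DataSource plus a RedisTemplate, whose opsForHash() view (the jedis field) is used later in this file as a query cache keyed by the MD5 of the rendered statement (see the redis_key = MD5(st.toString()) and jedis.put(redis_key, "fetchMode", "3") lines below). A minimal sketch of that pattern, assuming the same Spring Data Redis types; only the write side appears in this commit's hunks, so the read path here is purely illustrative:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Map;

import org.springframework.data.redis.core.HashOperations;
import org.springframework.data.redis.core.RedisTemplate;

// Hypothetical helper mirroring the cache keying used by UsageStatsRepository.
class QueryCacheSketch {

    private final HashOperations<String, String, String> jedis;

    QueryCacheSketch(RedisTemplate<String, String> redisTemplate) {
        this.jedis = redisTemplate.opsForHash();
    }

    // Write path, as in the repository: mark a result as cached under the MD5 of the SQL.
    void markCached(String renderedSql) {
        jedis.put(md5(renderedSql), "fetchMode", "3");
    }

    // Illustrative read path (not shown in this commit): check the hash before hitting Impala.
    boolean isCached(String renderedSql) {
        Map<String, String> entry = jedis.entries(md5(renderedSql));
        return entry != null && "3".equals(entry.get("fetchMode"));
    }

    // Stand-in for the MD5(...) helper the repository calls on st.toString().
    private static String md5(String s) {
        try {
            byte[] digest = MessageDigest.getInstance("MD5").digest(s.getBytes(StandardCharsets.UTF_8));
            StringBuilder hex = new StringBuilder();
            for (byte b : digest) {
                hex.append(String.format("%02x", b));
            }
            return hex.toString();
        } catch (NoSuchAlgorithmException e) {
            throw new IllegalStateException(e);
        }
    }
}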
@@ -114,7 +115,7 @@ public class UsageStatsRepository {
jedis.put("test", "fetchMode", "3");
} catch (Exception e) {
System.out.println(e);
}finally {
} finally {
DbUtils.closeQuietly(rs);
DbUtils.closeQuietly(st);
DbUtils.closeQuietly(connection);
@@ -123,7 +124,8 @@ public class UsageStatsRepository {
return montlhyList;
}
public TotalStatsReposViewsDownloads executeTotalStatsReposViewsDownloads(String query) {
public TotalStatsReposViewsDownloads executeTotalStatsReposViewsDownloads(
String query) {
TotalStatsReposViewsDownloads totalStatsReposViewsDownlads = new TotalStatsReposViewsDownloads();
String total_repos = " ";
@@ -156,7 +158,7 @@ public class UsageStatsRepository {
jedis.put(redis_key, "fetchMode", "3");
} catch (Exception e) {
System.out.println(e);
}finally {
} finally {
DbUtils.closeQuietly(rs);
DbUtils.closeQuietly(st);
DbUtils.closeQuietly(connection);
@@ -223,7 +225,8 @@ public class UsageStatsRepository {
return countryListAll;
}
public CountryUsageStats executeCountryUsageStats(String query, String country) {
public CountryUsageStats executeCountryUsageStats(String query,
String country) {
CountryUsageStats countryUsageStats = new CountryUsageStats();
String total_repos = " ";
String views = " ";
@@ -238,7 +241,7 @@ public class UsageStatsRepository {
try {
connection = usageStatsDB.getConnection();
st = connection.prepareStatement(query);
redis_key = MD5(st.toString());
//st.setString(1, country);
log.info(st.toString());
@@ -314,7 +317,8 @@ public class UsageStatsRepository {
return countryReposList;
}
public List<MonthlyUsageStats> executeMontlyUsageStatsForRepo(String query, String datasourceId) {
public List<MonthlyUsageStats> executeMontlyUsageStatsForRepo(String query,
String datasourceId) {
List<MonthlyUsageStats> montlhyList = new ArrayList<MonthlyUsageStats>();
String redis_key = "";
@@ -355,7 +359,8 @@ public class UsageStatsRepository {
return montlhyList;
}
public UsageStats executeUsageStats(String query, List<String> values, String type) {
public UsageStats executeUsageStats(String query, List<String> values,
String type) {
UsageStats usageStats = new UsageStats();
int total_views = 0;
@@ -469,7 +474,7 @@ public class UsageStatsRepository {
try {
connection = usageStatsDB.getConnection();
//st = connection.prepareStatement("SELECT count(distinct d.repository_id) AS repository, count(distinct d.result_id) AS items, sum(d.count) AS downloads, sum(v.count) AS views from public.downloads_stats d FULL OUTER JOIN public.views_stats v ON d.source=v.source AND d.repository_id=v.repository_id AND d.result_id=v.result_id AND d.date=v.date;");
st = connection.prepareStatement("SELECT ndv(distinct repository_id) AS repository, ndv(distinct result_id) AS items, sum(downloads) AS downloads, sum(views) AS views FROM "+usagestatsImpalaDB+".usage_stats;");
st = connection.prepareStatement("SELECT ndv(distinct repository_id) AS repository, ndv(distinct result_id) AS items, sum(downloads) AS downloads, sum(views) AS views FROM " + usagestatsImpalaDB + ".usage_stats;");
rs = st.executeQuery();
rs.next();
totalStats.setRepositories(rs.getInt(1));
@@ -480,7 +485,7 @@ public class UsageStatsRepository {
st.close();
//st = connection.prepareStatement("select coalesce(d.date,v.date) as month, count(distinct d.repository_id) as repository, count(distinct d.result_id) as items, sum(d.count) as downloads, sum(v.count) as views from public.downloads_stats d FULL OUTER JOIN public.views_stats v ON d.source=v.source AND d.repository_id=v.repository_id AND d.result_id=v.result_id AND d.date=v.date group by month order by month;");
st = connection.prepareStatement("SELECT `date`, ndv(distinct repository_id) AS repository, ndv(distinct result_id) AS items, sum(downloads) AS downloads, sum(views) AS views FROM "+usagestatsImpalaDB+".usage_stats GROUP BY `date` ORDER BY `date`;");
st = connection.prepareStatement("SELECT `date`, ndv(distinct repository_id) AS repository, ndv(distinct result_id) AS items, sum(downloads) AS downloads, sum(views) AS views FROM " + usagestatsImpalaDB + ".usage_stats GROUP BY `date` ORDER BY `date`;");
rs = st.executeQuery();
while (rs.next()) {
int year = Integer.parseInt(rs.getString(1).substring(0, 4));
@@ -505,9 +510,9 @@ public class UsageStatsRepository {
st.close();
//st = connection.prepareStatement("SELECT COALESCE(SUBSTRING(d.date FROM 1 FOR 4), SUBSTRING(v.date FROM 1 FOR 4)) AS year, COUNT(DISTINCT d.repository_id) AS repository, COUNT(DISTINCT d.result_id) AS items, SUM(d.count) AS downloads, SUM(v.count) AS views FROM public.downloads_stats d FULL OUTER JOIN public.views_stats v ON d.source=v.source AND d.repository_id=v.repository_id AND d.result_id=v.result_id AND d.date=v.date GROUP BY year ORDER BY year;");
st = connection.prepareStatement("SELECT SUBSTR(`date`,1,4) AS year, ndv(DISTINCT repository_id) AS repository, \n" +
"ndv(DISTINCT result_id) AS items, SUM(downloads) AS downloads, SUM(views) AS views \n" +
"FROM "+usagestatsImpalaDB+".usage_stats GROUP BY year ORDER BY year;");
st = connection.prepareStatement("SELECT SUBSTR(`date`,1,4) AS year, ndv(DISTINCT repository_id) AS repository, \n"
+ "ndv(DISTINCT result_id) AS items, SUM(downloads) AS downloads, SUM(views) AS views \n"
+ "FROM " + usagestatsImpalaDB + ".usage_stats GROUP BY year ORDER BY year;");
rs = st.executeQuery();
List<YearlyStats> yearlyStatsList = new ArrayList<>();
while (rs.next()) {
@@ -544,7 +549,7 @@ public class UsageStatsRepository {
PreparedStatement st = null;
Connection connection = null;
ResultSet rs = null;
log.info("database "+statsDB);
log.info("database " + statsDB);
try {
connection = usageStatsDB.getConnection();
String[] split = repositoryIdentifier.split(":");
@@ -552,10 +557,10 @@ public class UsageStatsRepository {
switch (split[0].toLowerCase()) {
case "openaire":
if (!report.equals("jr1")) {
st = connection.prepareStatement("select id from "+statsDB+".datasource where id=?");
st = connection.prepareStatement("select id from " + statsDB + ".datasource where id=?");
st.setString(1, repositoryIdentifier.replaceFirst(split[0] + ":", ""));
} else {
st = connection.prepareStatement("select id from "+statsDB+".datasource where id=? AND (type='Journal' OR type='Journal Aggregator/Publisher')");
st = connection.prepareStatement("select id from " + statsDB + ".datasource where id=? AND (type='Journal' OR type='Journal Aggregator/Publisher')");
st.setString(1, repositoryIdentifier.replaceFirst(split[0] + ":", ""));
}
@@ -567,10 +572,10 @@ public class UsageStatsRepository {
case "opendoar":
if (!report.equals("jr1")) {
st = connection.prepareStatement("select id from "+statsDB+".datasource_oids where oid=?");
st = connection.prepareStatement("select id from " + statsDB + ".datasource_oids where oid=?");
st.setString(1, "opendoar____::" + repositoryIdentifier.replaceFirst(split[0] + ":", ""));
} else {
st = connection.prepareStatement("select distinct d.id from "+statsDB+".datasource d, "+statsDB+".datasource_oids di where di.oid=? and d.id=di.id and (type='Journal' OR type='Journal Aggregator/Publisher')");
st = connection.prepareStatement("select distinct d.id from " + statsDB + ".datasource d, " + statsDB + ".datasource_oids di where di.oid=? and d.id=di.id and (type='Journal' OR type='Journal Aggregator/Publisher')");
st.setString(1, "opendoar____::" + repositoryIdentifier.replaceFirst(split[0] + ":", ""));
}
@@ -580,7 +585,7 @@ public class UsageStatsRepository {
}
return openaire_id;
case "issn":
st = connection.prepareStatement("select distinct d.id from "+statsDB+".datasource d, "+statsDB+".datasource_oids di, "+statsDB+".datasource_results dr where d.id=dr.id and di.oid like ? and d.id=di.id and (type='Journal' OR type='Journal Aggregator/Publisher')");
st = connection.prepareStatement("select distinct d.id from " + statsDB + ".datasource d, " + statsDB + ".datasource_oids di, " + statsDB + ".datasource_results dr where d.id=dr.id and di.oid like ? and d.id=di.id and (type='Journal' OR type='Journal Aggregator/Publisher')");
st.setString(1, "%" + repositoryIdentifier.replaceFirst(split[0] + ":", "") + "%");
rs = st.executeQuery();
@@ -601,9 +606,11 @@ public class UsageStatsRepository {
return "-1";
}
public void executeItem(List<ReportItem> reportItems, String itemIdentifier, String repositoryIdentifier, String itemDataType, Date beginDate, Date endDate, String granularity) {
public void executeItem(List<ReportItem> reportItems, String itemIdentifier,
String repositoryIdentifier, String itemDataType, Date beginDate,
Date endDate, String granularity) {
String[] split = itemIdentifier.split(":");
switch (split[0].toLowerCase()) {
switch (split[0].toLowerCase()) {
case "oid":
executeOid(reportItems, itemIdentifier.replaceFirst(split[0] + ":", ""), repositoryIdentifier, itemDataType, beginDate, endDate, granularity);
break;
@@ -617,14 +624,16 @@ public class UsageStatsRepository {
}
}
private void executeOid(List<ReportItem> reportItems, String oid, String repositoryIdentifier, String itemDataType, Date beginDate, Date endDate, String granularity) {
private void executeOid(List<ReportItem> reportItems, String oid,
String repositoryIdentifier, String itemDataType, Date beginDate,
Date endDate, String granularity) {
Connection connection = null;
PreparedStatement st = null;
ResultSet rs = null;
try {
connection = usageStatsDB.getConnection();
//st = connection.prepareStatement("SELECT DISTINCT roid.id FROM public.result_oids roid, public.downloads_stats s WHERE s.result_id=roid.id AND roid.orid=? UNION SELECT DISTINCT roid.id FROM public.result_oids roid, public.views_stats s WHERE s.result_id=roid.id AND roid.orid=?");
st = connection.prepareStatement("SELECT DISTINCT roid.id FROM "+statsDB+".result_oids roid, "+usagestatsImpalaDB+".usage_stats us WHERE us.result_id=roid.id AND roid.oid=?");
st = connection.prepareStatement("SELECT DISTINCT roid.id FROM " + statsDB + ".result_oids roid, " + usagestatsImpalaDB + ".usage_stats us WHERE us.result_id=roid.id AND roid.oid=?");
st.setString(1, oid);
//st.setString(2, oid);
@@ -643,14 +652,16 @@ public class UsageStatsRepository {
}
}
private void executeDoi(List<ReportItem> reportItems, String doi, String repositoryIdentifier, String itemDataType, Date beginDate, Date endDate, String granularity) {
private void executeDoi(List<ReportItem> reportItems, String doi,
String repositoryIdentifier, String itemDataType, Date beginDate,
Date endDate, String granularity) {
Connection connection = null;
PreparedStatement st = null;
ResultSet rs = null;
try {
connection = usageStatsDB.getConnection();
//st = connection.prepareStatement("SELECT DISTINCT poid.id FROM public.result_pids poid, public.downloads_stats s WHERE s.result_id=poid.id AND poid.type='doi' AND poid.pid=? UNION SELECT DISTINCT poid.id FROM public.result_pids poid, public.views_stats s WHERE s.result_id=poid.id AND poid.type='doi' AND poid.pid=?");
st = connection.prepareStatement("SELECT DISTINCT poid.id FROM "+statsDB+".result_pids poid, "+usageStatsDB+".usage_stats us WHERE us.result_id=poid.id AND poid.type='Digital Object Identifier' AND poid.pid=?");
st = connection.prepareStatement("SELECT DISTINCT poid.id FROM " + statsDB + ".result_pids poid, " + usageStatsDB + ".usage_stats us WHERE us.result_id=poid.id AND poid.type='Digital Object Identifier' AND poid.pid=?");
st.setString(1, doi);
//st.setString(2, doi);
@@ -668,7 +679,9 @@ public class UsageStatsRepository {
}
}
private void executeOpenaire(List<ReportItem> reportItems, String openaire, String repositoryIdentifier, String itemDataType, Date beginDate, Date endDate, String granularity) {
private void executeOpenaire(List<ReportItem> reportItems, String openaire,
String repositoryIdentifier, String itemDataType, Date beginDate,
Date endDate, String granularity) {
SimpleDateFormat report_dateFormat = new SimpleDateFormat("yyyy-MM-dd");
SimpleDateFormat postgresFormat = new SimpleDateFormat("yyyy/MM");
String beginDateStr = postgresFormat.format(beginDate);
@@ -693,15 +706,15 @@ public class UsageStatsRepository {
//st = connection.prepareStatement("SELECT res.repository_id, r.title, r.publisher, r.source, rc.type, pids.pid, d.name, res.ddate, oids.orid, res.downloads, res.views FROM (SELECT coalesce(ds.repository_id, vs.repository_id) AS repository_id, coalesce(ds.result_id, vs.result_id) AS result_id, coalesce(ds.date, vs.date) AS ddate, coalesce(ds.sum, 0) AS downloads, coalesce(vs.sum,0) AS views FROM (SELECT s.repository_id, s.result_id, s.date, sum(s.count) FROM public.downloads_stats s WHERE s.date>=? AND s.date<=? AND s.result_id=? GROUP BY s.repository_id, s.result_id, s.date) AS ds FULL OUTER JOIN (SELECT s.repository_id, s.result_id, s.date, sum(s.count) FROM public.views_stats s WHERE s.date>=? AND s.date<=? AND s.result_id=? GROUP BY s.repository_id, s.result_id, s.date) AS vs ON ds.result_id=vs.result_id AND ds.date=vs.date) AS res JOIN public.result r ON res.result_id=r.id JOIN public.datasource d ON d.id=res.repository_id JOIN public.result_classifications rc ON rc.id=r.id LEFT JOIN (SELECT pids.id, string_agg(pids.pid, '#!#') AS pid FROM public.result_pids pids WHERE pids.id=? AND type='doi' GROUP BY pids.id) AS pids ON pids.id=r.id LEFT JOIN (SELECT oids.id, string_agg(oids.orid, '#!#') AS orid FROM public.result_oids oids WHERE oids.id=? GROUP BY oids.id) AS oids ON oids.id=r.id ORDER BY res.repository_id, res.ddate;");
//st = connection.prepareStatement("SELECT res.repository_id, r.title, r.publisher, r.source, rc.type, pids.pid, d.name, res.date, oids.orid, res.downloads, res.views FROM (SELECT us.repository_id, us.result_id, us.date, us.downloads, us.views FROM usage_stats us WHERE us.date>=? AND us.date<=? AND us.result_id=?) AS res JOIN public.result r ON res.result_id=r.id JOIN public.datasource d ON d.id=res.repository_id JOIN public.result_classifications rc ON rc.id=r.id LEFT JOIN (SELECT pids.id, string_agg(pids.pid, '#!#') AS pid FROM public.result_pids pids WHERE pids.id=? AND type='doi' GROUP BY pids.id) AS pids ON pids.id=r.id LEFT JOIN (SELECT oids.id, string_agg(oids.orid, '#!#') AS orid FROM public.result_oids oids WHERE oids.id=? GROUP BY oids.id) AS oids ON oids.id=r.id ORDER BY res.repository_id, res.date;");
st = connection.prepareStatement("SELECT distinct res.repository_id, r.title, r.publisher, r.source, rc.type, pids.pid, d.name, res.`date`, oids.oid, res.downloads, res.views "
+ "FROM (SELECT us.repository_id, us.result_id, us.`date`, us.downloads, us.views FROM "+usagestatsImpalaDB+".usage_stats us "
+ "FROM (SELECT us.repository_id, us.result_id, us.`date`, us.downloads, us.views FROM " + usagestatsImpalaDB + ".usage_stats us "
+ "WHERE us.`date`>=? AND us.`date`<=? AND us.result_id=?) AS res "
+ "JOIN "+statsDB+".result r ON res.result_id=r.id "
+ "JOIN "+statsDB+".datasource d ON d.id=res.repository_id "
+ "JOIN (select id, group_concat(type,',') as type FROM "+statsDB+".result_classifications where id=? "
+ "JOIN " + statsDB + ".result r ON res.result_id=r.id "
+ "JOIN " + statsDB + ".datasource d ON d.id=res.repository_id "
+ "JOIN (select id, group_concat(type,',') as type FROM " + statsDB + ".result_classifications where id=? "
+ "GROUP by id) rc ON rc.id=r.id "
+ "LEFT JOIN (SELECT pids.id, group_concat(pids.pid, '#!#') AS pid FROM "+statsDB+".result_pids pids "
+ "LEFT JOIN (SELECT pids.id, group_concat(pids.pid, '#!#') AS pid FROM " + statsDB + ".result_pids pids "
+ "WHERE pids.id=? AND type='Digital Object Identifier' GROUP BY pids.id) AS pids ON pids.id=r.id "
+ "LEFT JOIN (SELECT id, group_concat(oids.oid, '#!#') AS oid FROM "+statsDB+".result_oids oids "
+ "LEFT JOIN (SELECT id, group_concat(oids.oid, '#!#') AS oid FROM " + statsDB + ".result_oids oids "
+ "WHERE oids.id=? GROUP BY oids.id) AS oids ON oids.id=r.id ORDER BY res.repository_id, res.`date`;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -715,16 +728,16 @@ public class UsageStatsRepository {
} else {
//st = connection.prepareStatement("SELECT res.repository_id, r.title, r.publisher, r.source, rc.type, pids.pid, d.name, res.ddate, oids.orid, res.downloads, res.views FROM (SELECT coalesce(ds.repository_id, vs.repository_id) AS repository_id, coalesce(ds.result_id, vs.result_id) AS result_id, coalesce(ds.date, vs.date) AS ddate, coalesce(ds.sum, 0) AS downloads, coalesce(vs.sum,0) AS views FROM (SELECT s.repository_id, s.result_id, s.date, sum(s.count) FROM public.downloads_stats s WHERE s.date>=? AND s.date<=? AND s.result_id=? GROUP BY s.repository_id, s.result_id, s.date) AS ds FULL OUTER JOIN (SELECT s.repository_id, s.result_id, s.date, sum(s.count) FROM public.views_stats s WHERE s.date>=? AND s.date<=? AND s.result_id=? GROUP BY s.repository_id, s.result_id, s.date) AS vs ON ds.result_id=vs.result_id AND ds.date=vs.date) AS res JOIN public.result r ON res.result_id=r.id JOIN public.datasource d ON d.id=res.repository_id JOIN public.result_classifications rc ON rc.id=r.id AND rc.type=? LEFT JOIN (SELECT pids.id, string_agg(pids.pid, '#!#') AS pid FROM public.result_pids pids WHERE pids.id=? AND type='doi' GROUP BY pids.id) AS pids ON pids.id=r.id LEFT JOIN (SELECT oids.id, string_agg(oids.orid, '#!#') AS orid FROM public.result_oids oids WHERE oids.id=? GROUP BY oids.id) AS oids ON oids.id=r.id ORDER BY res.repository_id, res.ddate;");
st = connection.prepareStatement("SELECT distinct res.repository_id, r.title, r.publisher, r.source, rc.type, pids.pid, d.name, res.`date`, oids.oid, res.downloads, "
+ "res.views FROM (SELECT us.repository_id, us.result_id, us.`date`, us.downloads, us.views FROM "+usagestatsImpalaDB+".usage_stats us "
+ "res.views FROM (SELECT us.repository_id, us.result_id, us.`date`, us.downloads, us.views FROM " + usagestatsImpalaDB + ".usage_stats us "
+ "WHERE us.`date`>=? AND us.`date`<=? AND us.result_id=?) AS res "
+ "JOIN "+statsDB+".result r ON res.result_id=r.id "
+ "JOIN "+statsDB+".datasource d ON d.id=res.repository_id "
+ "JOIN "+statsDB+".result_classifications rc ON rc.id=r.id AND rc.type=? "
+ "JOIN " + statsDB + ".result r ON res.result_id=r.id "
+ "JOIN " + statsDB + ".datasource d ON d.id=res.repository_id "
+ "JOIN " + statsDB + ".result_classifications rc ON rc.id=r.id AND rc.type=? "
+ "LEFT JOIN (SELECT pids.id, group_concat(pids.pid, '#!#') AS pid "
+ "FROM "+statsDB+".result_pids pids WHERE pids.id=? "
+ "FROM " + statsDB + ".result_pids pids WHERE pids.id=? "
+ "AND type='Digital Object Identifier' GROUP BY pids.id) AS pids ON pids.id=r.id "
+ "LEFT JOIN (SELECT oids.id, group_concat(oids.oid, '#!#') AS oid "
+ "FROM "+statsDB+".result_oids oids WHERE oids.id=? "
+ "FROM " + statsDB + ".result_oids oids WHERE oids.id=? "
+ "GROUP BY oids.id) AS oids ON oids.id=r.id ORDER BY res.repository_id, res.`date`;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -741,12 +754,12 @@ public class UsageStatsRepository {
//st = connection.prepareStatement("SELECT res.repository_id, r.title, r.publisher, r.source, rc.type, pids.pid, d.name, res.ddate, oids.orid, res.downloads, res.views FROM (SELECT coalesce(ds.repository_id, vs.repository_id) AS repository_id, coalesce(ds.result_id, vs.result_id) AS result_id, coalesce(ds.date, vs.date) AS ddate, coalesce(ds.sum, 0) AS downloads, coalesce(vs.sum,0) AS views FROM (SELECT s.repository_id, s.result_id, s.date, sum(s.count) FROM public.downloads_stats s WHERE s.date>=? AND s.date<=? AND s.result_id=? AND s.repository_id=? GROUP BY s.repository_id, s.result_id, s.date) AS ds FULL OUTER JOIN (SELECT s.repository_id, s.result_id, s.date, sum(s.count) FROM public.views_stats s WHERE s.date>=? AND s.date<=? AND s.result_id=? AND s.repository_id=? GROUP BY s.repository_id, s.result_id, s.date) AS vs ON ds.result_id=vs.result_id AND ds.date=vs.date) AS res JOIN public.result r ON res.result_id=r.id JOIN public.datasource d ON d.id=res.repository_id JOIN public.result_classifications rc ON rc.id=r.id LEFT JOIN (SELECT pids.id, string_agg(pids.pid, '#!#') AS pid FROM public.result_pids pids WHERE pids.id=? AND type='doi' GROUP BY pids.id) AS pids ON pids.id=r.id LEFT JOIN (SELECT oids.id, string_agg(oids.orid, '#!#') AS orid FROM public.result_oids oids WHERE oids.id=? GROUP BY oids.id) AS oids ON oids.id=r.id ORDER BY res.repository_id, res.ddate;");
st = connection.prepareStatement("SELECT distinct res.repository_id, r.title, r.publisher, r.source, rc.type, pids.pid, d.name, res.`date`, oids.oid, res.downloads, res.views "
+ "FROM (SELECT us.repository_id, us.result_id, us.`date`, us.downloads, us.views "
+ "FROM "+usagestatsImpalaDB+".usage_stats us WHERE us.`date`>=? AND us.`date`<=? AND us.result_id=? AND us.repository_id=?) AS res "
+ "JOIN "+statsDB+".result r ON res.result_id=r.id JOIN "+statsDB+".datasource d ON d.id=res.repository_id "
+ "JOIN (select id, group_concat(type,',') as type from "+statsDB+".result_classifications where id=? group by id) rc ON rc.id=r.id "
+ "LEFT JOIN (SELECT pids.id, group_concat(pids.pid, '#!#') AS pid FROM "+statsDB+".result_pids pids "
+ "FROM " + usagestatsImpalaDB + ".usage_stats us WHERE us.`date`>=? AND us.`date`<=? AND us.result_id=? AND us.repository_id=?) AS res "
+ "JOIN " + statsDB + ".result r ON res.result_id=r.id JOIN " + statsDB + ".datasource d ON d.id=res.repository_id "
+ "JOIN (select id, group_concat(type,',') as type from " + statsDB + ".result_classifications where id=? group by id) rc ON rc.id=r.id "
+ "LEFT JOIN (SELECT pids.id, group_concat(pids.pid, '#!#') AS pid FROM " + statsDB + ".result_pids pids "
+ "WHERE pids.id=? AND type='Digital Object Identifier' GROUP BY pids.id) AS pids ON pids.id=r.id "
+ "LEFT JOIN (SELECT oids.id, group_concat(oids.oid, '#!#') AS oid FROM "+statsDB+".result_oids oids "
+ "LEFT JOIN (SELECT oids.id, group_concat(oids.oid, '#!#') AS oid FROM " + statsDB + ".result_oids oids "
+ "WHERE oids.id=? GROUP BY oids.id) AS oids ON oids.id=r.id ORDER BY res.repository_id, res.`date`;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -762,14 +775,14 @@ public class UsageStatsRepository {
} else {
//st = connection.prepareStatement("SELECT res.repository_id, r.title, r.publisher, r.source, rc.type, pids.pid, d.name, res.ddate, oids.orid, res.downloads, res.views FROM (SELECT coalesce(ds.repository_id, vs.repository_id) AS repository_id, coalesce(ds.result_id, vs.result_id) AS result_id, coalesce(ds.date, vs.date) AS ddate, coalesce(ds.sum, 0) AS downloads, coalesce(vs.sum,0) AS views FROM (SELECT s.repository_id, s.result_id, s.date, sum(s.count) FROM public.downloads_stats s WHERE s.date>=? AND s.date<=? AND s.result_id=? AND s.repository_id=? GROUP BY s.repository_id, s.result_id, s.date) AS ds FULL OUTER JOIN (SELECT s.repository_id, s.result_id, s.date, sum(s.count) FROM public.views_stats s WHERE s.date>=? AND s.date<=? AND s.result_id=? AND s.repository_id=? GROUP BY s.repository_id, s.result_id, s.date) AS vs ON ds.result_id=vs.result_id AND ds.date=vs.date) AS res JOIN public.result r ON res.result_id=r.id JOIN public.datasource d ON d.id=res.repository_id JOIN public.result_classifications rc ON rc.id=r.id AND rc.type=? LEFT JOIN (SELECT pids.id, string_agg(pids.pid, '#!#') AS pid FROM public.result_pids pids WHERE pids.id=? AND type='doi' GROUP BY pids.id) AS pids ON pids.id=r.id LEFT JOIN (SELECT oids.id, string_agg(oids.orid, '#!#') AS orid FROM public.result_oids oids WHERE oids.id=? GROUP BY oids.id) AS oids ON oids.id=r.id ORDER BY res.repository_id, res.ddate;");
st = connection.prepareStatement("SELECT distinct res.repository_id, r.title, r.publisher, r.source, rc.type, pids.pid, d.name, res.`date`, oids.oid, res.downloads,res.views "
+ "FROM (SELECT us.repository_id, us.result_id, us.`date`, us.downloads, us.views FROM "+usagestatsImpalaDB+".usage_stats us "
+ "FROM (SELECT us.repository_id, us.result_id, us.`date`, us.downloads, us.views FROM " + usagestatsImpalaDB + ".usage_stats us "
+ "WHERE us.`date`>=? AND us.`date`<=? AND us.result_id=? AND us.repository_id=?) AS res "
+ "JOIN "+statsDB+".result r ON res.result_id=r.id JOIN "+statsDB+".datasource d ON d.id=res.repository_id "
+ "JOIN "+statsDB+".result_classifications rc ON rc.id=r.id AND rc.type=?' "
+ "LEFT JOIN (SELECT pids.id, group_concat(pids.pid, '#!#') AS pid FROM "+statsDB+".result_pids pids "
+ "JOIN " + statsDB + ".result r ON res.result_id=r.id JOIN " + statsDB + ".datasource d ON d.id=res.repository_id "
+ "JOIN " + statsDB + ".result_classifications rc ON rc.id=r.id AND rc.type=?' "
+ "LEFT JOIN (SELECT pids.id, group_concat(pids.pid, '#!#') AS pid FROM " + statsDB + ".result_pids pids "
+ "WHERE pids.id=? AND type='Digital Object Identifier' GROUP BY pids.id) AS pids ON pids.id=r.id "
+ "LEFT JOIN (SELECT oids.id, group_concat(oids.oid, '#!#') AS oid "
+ "FROM "+statsDB+".result_oids oids WHERE oids.id=? "
+ "FROM " + statsDB + ".result_oids oids WHERE oids.id=? "
+ "GROUP BY oids.id) AS oids ON oids.id=r.id ORDER BY res.repository_id, res.`date`;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -800,7 +813,7 @@ public class UsageStatsRepository {
reportItems.add(reportItem);
}
repository = rs.getString(1);
reportItem = new ReportItem(rs.getString(3), rs.getString(7), rs.getString(5), rs.getString(2));
reportItem = new ReportItem(rs.getString(3), rs.getString(7), rs.getString(5), rs.getString(2), "");
reportItem.addIdentifier(new ItemIdentifier("OpenAIRE", openaire));
reportItem.addIdentifier(new ItemIdentifier("URLs", rs.getString(4)));
if (rs.getString(9) != null && !rs.getString(9).equals("")) {
@@ -840,7 +853,7 @@ public class UsageStatsRepository {
}
repository = rs.getString(1);
lastDate = beginDateStr;
reportItem = new ReportItem(rs.getString(3), rs.getString(7), rs.getString(5), rs.getString(2));
reportItem = new ReportItem(rs.getString(3), rs.getString(7), rs.getString(5), rs.getString(2), "");
reportItem.addIdentifier(new ItemIdentifier("OpenAIRE", openaire));
reportItem.addIdentifier(new ItemIdentifier("URLs", rs.getString(4)));
if (rs.getString(9) != null && !rs.getString(9).equals("")) {
@@ -890,7 +903,9 @@ public class UsageStatsRepository {
}
}
public void executeRepo(List<ReportItem> reportItems, String repositoryIdentifier, String itemDataType, Date beginDate, Date endDate, String granularity) {
public void executeRepo(List<ReportItem> reportItems,
String repositoryIdentifier, String itemDataType, Date beginDate,
Date endDate, String granularity) {
SimpleDateFormat report_dateFormat = new SimpleDateFormat("yyyy-MM-dd");
SimpleDateFormat postgresFormat = new SimpleDateFormat("yyyy/MM");
String beginDateStr = postgresFormat.format(beginDate);
@@ -908,9 +923,9 @@ public class UsageStatsRepository {
//st = connection.prepareStatement("SELECT d.id, d.name, d.websiteurl, dois.orid, res.ddate, res.downloads, res.views FROM (SELECT coalesce(ds.source, vs.source), coalesce(ds.repository_id, vs.repository_id) AS repository_id, coalesce(ds.date, vs.date) AS ddate, coalesce(ds.sum, 0) AS downloads, coalesce(vs.sum,0) AS views FROM (SELECT s.source, s.repository_id, s.date, sum(s.count) FROM public.downloads_stats s WHERE s.date>=? AND s.date<=? GROUP BY s.source, s.repository_id, s.date) AS ds FULL OUTER JOIN (SELECT s.source, s.repository_id, s.date, sum(s.count) FROM public.views_stats s WHERE s.date>=? AND s.date<=? GROUP BY s.source, s.repository_id, s.date) AS vs ON ds.source=vs.source AND ds.repository_id=vs.repository_id AND ds.date=vs.date) AS res JOIN public.datasource d ON d.id=res.repository_id JOIN public.datasource_oids dois ON d.id=dois.id WHERE dois.orid LIKE 'opendoar%' ORDER BY d.id, d.name, res.ddate ASC;");
st = connection.prepareStatement("SELECT d.id, d.name, d.websiteurl, dois.oid, res.`date`, res.downloads, res.views "
+ "FROM (SELECT us.source, us.repository_id, us.`date`, sum(us.downloads) AS downloads, sum(us.views) AS views "
+ "FROM "+usagestatsImpalaDB+".usage_stats us WHERE us.`date`>=? AND us.`date`<=? GROUP BY us.source, us.repository_id, us.`date`) "
+ "AS res JOIN "+statsDB+".datasource d ON d.id=res.repository_id "
+ "JOIN "+statsDB+".datasource_oids dois ON d.id=dois.id WHERE dois.oid "
+ "FROM " + usagestatsImpalaDB + ".usage_stats us WHERE us.`date`>=? AND us.`date`<=? GROUP BY us.source, us.repository_id, us.`date`) "
+ "AS res JOIN " + statsDB + ".datasource d ON d.id=res.repository_id "
+ "JOIN " + statsDB + ".datasource_oids dois ON d.id=dois.id WHERE dois.oid "
+ "LIKE 'opendoar%' ORDER BY d.id, d.name, res.`date` ASC;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -920,11 +935,11 @@ public class UsageStatsRepository {
//st = connection.prepareStatement("SELECT d.id, d.name, d.websiteurl, dois.orid, res.ddate, res.downloads, res.views FROM (SELECT coalesce(ds.source, vs.source), coalesce(ds.repository_id, vs.repository_id) AS repository_id, coalesce(ds.date, vs.date) AS ddate, coalesce(ds.sum, 0) AS downloads, coalesce(vs.sum,0) AS views FROM (SELECT s.source, s.repository_id, s.date, sum(s.count) FROM public.downloads_stats s, result_classifications rc WHERE rc.id=s.result_id AND s.date>=? AND s.date<=? AND rc.type=? GROUP BY s.source, s.repository_id, s.date) AS ds FULL OUTER JOIN (SELECT s.source, s.repository_id, s.date, sum(s.count) FROM public.views_stats s, public.result_classifications rc WHERE rc.id=s.result_id AND s.date>=? AND s.date<=? AND rc.type=? GROUP BY s.source, s.repository_id, s.date) AS vs ON ds.source=vs.source AND ds.repository_id=vs.repository_id AND ds.date=vs.date) AS res JOIN public.datasource d ON d.id=res.repository_id JOIN public.datasource_oids dois ON d.id=dois.id WHERE dois.orid LIKE 'opendoar%' ORDER BY d.id, d.name, res.ddate ASC;");
st = connection.prepareStatement("SELECT d.id, d.name, d.websiteurl, dois.oid, res.`date`, res.downloads, res.views "
+ "FROM (SELECT us.source, us.repository_id, us.`date`, sum(us.downloads) AS downloads, sum(us.views) AS views "
+ "FROM "+usagestatsImpalaDB+".usage_stats us, "+statsDB+".result_classifications rc "
+ "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc "
+ "WHERE rc.id=us.result_id AND us.`date`>=? AND us.`date`<=? AND rc.type=? "
+ "GROUP BY us.source, us.repository_id, us.`date`) AS res "
+ "JOIN "+statsDB+".datasource d ON d.id=res.repository_id "
+ "JOIN "+statsDB+".datasource_oids dois ON d.id=dois.id WHERE dois.oid LIKE 'opendoar%' "
+ "JOIN " + statsDB + ".datasource d ON d.id=res.repository_id "
+ "JOIN " + statsDB + ".datasource_oids dois ON d.id=dois.id WHERE dois.oid LIKE 'opendoar%' "
+ "ORDER BY d.id, d.name, res.`date` ASC;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -938,10 +953,10 @@ public class UsageStatsRepository {
//st = connection.prepareStatement("SELECT d.id, d.name, d.websiteurl, dois.orid, res.ddate, res.downloads, res.views FROM (SELECT coalesce(ds.source, vs.source), coalesce(ds.repository_id, vs.repository_id) AS repository_id, coalesce(ds.date, vs.date) AS ddate, coalesce(ds.sum, 0) AS downloads, coalesce(vs.sum,0) AS views FROM (SELECT s.source, s.repository_id, s.date, sum(s.count) FROM public.downloads_stats s WHERE s.date>=? AND s.date<=? AND s.repository_id=? GROUP BY s.source, s.repository_id, s.date) AS ds FULL OUTER JOIN (SELECT s.source, s.repository_id, s.date, sum(s.count) FROM public.views_stats s WHERE s.date>=? AND s.date<=? AND s.repository_id=? GROUP BY s.source, s.repository_id, s.date) AS vs ON ds.source=vs.source AND ds.repository_id=vs.repository_id AND ds.date=vs.date) AS res JOIN public.datasource d ON d.id=res.repository_id JOIN public.datasource_oids dois ON d.id=dois.id WHERE dois.orid LIKE 'opendoar%' ORDER BY d.id, d.name, res.ddate ASC;");
st = connection.prepareStatement("SELECT d.id, d.name, d.websiteurl, dois.oid, res.`date`, res.downloads, res.views "
+ "FROM (SELECT us.source, us.repository_id, us.`date`, sum(us.downloads) AS downloads, sum(us.views) AS views "
+ "FROM "+usagestatsImpalaDB+".usage_stats us WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? "
+ "FROM " + usagestatsImpalaDB + ".usage_stats us WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? "
+ "GROUP BY us.source, us.repository_id, us.`date`) AS res "
+ "JOIN "+statsDB+".datasource d ON d.id=res.repository_id "
+ "JOIN "+statsDB+".datasource_oids dois ON d.id=dois.id "
+ "JOIN " + statsDB + ".datasource d ON d.id=res.repository_id "
+ "JOIN " + statsDB + ".datasource_oids dois ON d.id=dois.id "
+ "WHERE dois.oid LIKE 'opendoar%' ORDER BY d.id, d.name, res.`date` ASC;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -953,11 +968,11 @@ public class UsageStatsRepository {
//st = connection.prepareStatement("SELECT d.id, d.name, d.websiteurl, dois.orid, res.ddate, res.downloads, res.views FROM (SELECT coalesce(ds.source, vs.source), coalesce(ds.repository_id, vs.repository_id) AS repository_id, coalesce(ds.date, vs.date) AS ddate, coalesce(ds.sum, 0) AS downloads, coalesce(vs.sum,0) AS views FROM (SELECT s.source, s.repository_id, s.date, sum(s.count) FROM public.downloads_stats s, public.result_classifications rc WHERE rc.id=s.result_id AND s.date>=? AND s.date<=? AND rc.type=? AND s.repository_id=? GROUP BY s.source, s.repository_id, s.date) AS ds FULL OUTER JOIN (SELECT s.source, s.repository_id, s.date, sum(s.count) FROM public.views_stats s, public.result_classifications rc WHERE rc.id=s.result_id AND s.date>=? AND s.date<=? AND rc.type=? AND s.repository_id=? GROUP BY s.source, s.repository_id, s.date) AS vs ON ds.source=vs.source AND ds.repository_id=vs.repository_id AND ds.date=vs.date) AS res JOIN public.datasource d ON d.id=res.repository_id JOIN public.datasource_oids dois ON d.id=dois.id WHERE dois.orid LIKE 'opendoar%' ORDER BY d.id, d.name, res.ddate ASC;");
st = connection.prepareStatement("SELECT d.id, d.name, d.websiteurl, dois.oid, res.`date`, res.downloads, res.views "
+ "FROM (SELECT us.source, us.repository_id, us.`date`, sum(us.downloads) AS downloads, sum(us.views) AS views "
+ "FROM "+usagestatsImpalaDB+".usage_stats us, "+statsDB+".result_classifications rc "
+ "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc "
+ "WHERE rc.id=us.result_id AND us.`date`>=? AND us.`date`<=? AND rc.type=? "
+ "AND us.repository_id=? GROUP BY us.source, us.repository_id, us.`date`) AS res "
+ "JOIN "+statsDB+".datasource d ON d.id=res.repository_id "
+ "JOIN "+statsDB+".datasource_oids dois ON d.id=dois.id "
+ "JOIN " + statsDB + ".datasource d ON d.id=res.repository_id "
+ "JOIN " + statsDB + ".datasource_oids dois ON d.id=dois.id "
+ "WHERE dois.oid LIKE 'opendoar%' ORDER BY d.id, d.name, res.`date` ASC;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -1002,7 +1017,7 @@ public class UsageStatsRepository {
reportItems.add(reportItem);
}
repository = rs.getString(1);
reportItem = new ReportItem(null, rs.getString(2), "Platform", null);
reportItem = new ReportItem(null, rs.getString(2), "Platform", null, "");
reportItem.addIdentifier(new ItemIdentifier("OpenAIRE", rs.getString(1)));
reportItem.addIdentifier(new ItemIdentifier("OpenDOAR", rs.getString(4).substring(rs.getString(4).lastIndexOf(":") + 1)));
reportItem.addIdentifier(new ItemIdentifier("URL", rs.getString(3)));
@@ -1029,7 +1044,7 @@ public class UsageStatsRepository {
}
repository = rs.getString(1);
lastDate = beginDateStr;
reportItem = new ReportItem(null, rs.getString(2), "Platform", null);
reportItem = new ReportItem(null, rs.getString(2), "Platform", null, "");
reportItem.addIdentifier(new ItemIdentifier("OpenAIRE", rs.getString(1)));
reportItem.addIdentifier(new ItemIdentifier("OpenDOAR", rs.getString(4).substring(rs.getString(4).lastIndexOf(":") + 1)));
reportItem.addIdentifier(new ItemIdentifier("URL", rs.getString(3)));
@@ -1069,7 +1084,9 @@ public class UsageStatsRepository {
}
}
public void executeJournal(List<ReportItem> reportItems, String repositoryIdentifier, String itemDataType, Date beginDate, Date endDate, String granularity) {
public void executeJournal(List<ReportItem> reportItems,
String repositoryIdentifier, String itemDataType, Date beginDate,
Date endDate, String granularity) {
SimpleDateFormat report_dateFormat = new SimpleDateFormat("yyyy-MM-dd");
SimpleDateFormat postgresFormat = new SimpleDateFormat("yyyy/MM");
String beginDateStr = postgresFormat.format(beginDate);
@@ -1087,10 +1104,10 @@ public class UsageStatsRepository {
//st = connection.prepareStatement("SELECT d.id, d.name, d.websiteurl, dois.orid, res.ddate, res.downloads, res.views FROM (SELECT coalesce(ds.source, vs.source), coalesce(ds.repository_id, vs.repository_id) AS repository_id, coalesce(ds.date, vs.date) AS ddate, coalesce(ds.sum, 0) AS downloads, coalesce(vs.sum,0) AS views FROM (SELECT s.source, s.repository_id, s.date, sum(s.count) FROM public.downloads_stats s WHERE s.date>=? AND s.date<=? GROUP BY s.source, s.repository_id, s.date) AS ds FULL OUTER JOIN (SELECT s.source, s.repository_id, s.date, sum(s.count) FROM public.views_stats s WHERE s.date>=? AND s.date<=? GROUP BY s.source, s.repository_id, s.date) AS vs ON ds.source=vs.source AND ds.repository_id=vs.repository_id AND ds.date=vs.date) AS res JOIN public.datasource d ON d.id=res.repository_id JOIN public.datasource_oids dois ON d.id=dois.id WHERE (d.type='Journal' OR d.type='Journal Aggregator/Publisher') ORDER BY d.id, d.name, res.ddate ASC;");
st = connection.prepareStatement("SELECT d.id, d.name, d.websiteurl, dois.oid, res.`date`, res.downloads, res.views "
+ "FROM (SELECT us.source, us.repository_id, us.`date`, sum(us.downloads) as downloads, sum(us.views) as views "
+ "FROM "+usagestatsImpalaDB+".usage_stats us WHERE us.`date`>=? AND us.`date`<=? GROUP BY us.source, us.repository_id, us.`date`) AS res "
+ "JOIN "+statsDB+".datasource d ON d.id=res.repository_id "
+ "JOIN "+statsDB+".datasource_oids dois ON d.id=dois.id "
+ "WHERE (d.type='Journal' OR d.type='Journal Aggregator/Publisher') ORDER BY d.id, d.name, res.`date` ASC;");
+ "FROM " + usagestatsImpalaDB + ".usage_stats us WHERE us.`date`>=? AND us.`date`<=? GROUP BY us.source, us.repository_id, us.`date`) AS res "
+ "JOIN " + statsDB + ".datasource d ON d.id=res.repository_id "
+ "JOIN " + statsDB + ".datasource_oids dois ON d.id=dois.id "
+ "WHERE (d.type='Journal' OR d.type='Journal Aggregator/Publisher') ORDER BY d.id, d.name, res.`date` ASC;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
//st.setString(3, beginDateStr);
@@ -1099,11 +1116,11 @@ public class UsageStatsRepository {
//st = connection.prepareStatement("SELECT d.id, d.name, d.websiteurl, dois.orid, res.ddate, res.downloads, res.views FROM (SELECT coalesce(ds.source, vs.source), coalesce(ds.repository_id, vs.repository_id) AS repository_id, coalesce(ds.date, vs.date) AS ddate, coalesce(ds.sum, 0) AS downloads, coalesce(vs.sum,0) AS views FROM (SELECT s.source, s.repository_id, s.date, sum(s.count) FROM public.downloads_stats s, public.result_classifications rc WHERE rc.id=s.result_id AND s.date>=? AND s.date<=? AND rc.type=? GROUP BY s.source, s.repository_id, s.date) AS ds FULL OUTER JOIN (SELECT s.source, s.repository_id, s.date, sum(s.count) FROM public.views_stats s, public.result_classifications rc WHERE rc.id=s.result_id AND s.date>=? AND s.date<=? AND rc.type=? GROUP BY s.source, s.repository_id, s.date) AS vs ON ds.source=vs.source AND ds.repository_id=vs.repository_id AND ds.date=vs.date) AS res JOIN public.datasource d ON d.id=res.repository_id JOIN public.datasource_oids dois ON d.id=dois.id WHERE (d.type='Journal' OR d.type='Journal Aggregator/Publisher') ORDER BY d.id, d.name, res.ddate ASC;");
st = connection.prepareStatement("SELECT d.id, d.name, d.websiteurl, dois.oid, res.`date`, res.downloads, res.views "
+ "FROM (SELECT us.source, us.repository_id, us.`date`, sum(us.downloads) as downloads, sum(us.views) as views "
+ "FROM "+usagestatsImpalaDB+".usage_stats us, "+statsDB+".result_classifications rc "
+ "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc "
+ "WHERE rc.id=us.result_id AND us.`date`>=? AND us.`date`<=? AND rc.type=? "
+ "GROUP BY us.source, us.repository_id, us.`date`) AS res "
+ "JOIN "+statsDB+".datasource d ON d.id=res.repository_id "
+ "JOIN "+statsDB+".datasource_oids dois ON d.id=dois.id "
+ "JOIN " + statsDB + ".datasource d ON d.id=res.repository_id "
+ "JOIN " + statsDB + ".datasource_oids dois ON d.id=dois.id "
+ "WHERE (d.type='Journal' OR d.type='Journal Aggregator/Publisher') ORDER BY d.id, d.name, res.`date` ASC;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -1117,9 +1134,9 @@ public class UsageStatsRepository {
//st = connection.prepareStatement("SELECT d.id, d.name, d.websiteurl, dois.orid, res.ddate, res.downloads, res.views FROM (SELECT coalesce(ds.source, vs.source), coalesce(ds.repository_id, vs.repository_id) AS repository_id, coalesce(ds.date, vs.date) AS ddate, coalesce(ds.sum, 0) AS downloads, coalesce(vs.sum,0) AS views FROM (SELECT s.source, s.repository_id, s.date, sum(s.count) FROM public.downloads_stats s WHERE s.date>=? AND s.date<=? AND s.repository_id=? GROUP BY s.source, s.repository_id, s.date) AS ds FULL OUTER JOIN (SELECT s.source, s.repository_id, s.date, sum(s.count) FROM public.views_stats s WHERE s.date>=? AND s.date<=? AND s.repository_id=? GROUP BY s.source, s.repository_id, s.date) AS vs ON ds.source=vs.source AND ds.repository_id=vs.repository_id AND ds.date=vs.date) AS res JOIN public.datasource d ON d.id=res.repository_id JOIN public.datasource_oids dois ON d.id=dois.id WHERE (d.type='Journal' OR d.type='Journal Aggregator/Publisher') ORDER BY d.id, d.name, res.ddate ASC;");
st = connection.prepareStatement("SELECT d.id, d.name, d.websiteurl, dois.oid, res.`date`, res.downloads, res.views "
+ "FROM (SELECT us.source, us.repository_id, us.`date`, sum(us.downloads) as downloads, sum(us.views) as views "
+ "FROM "+usagestatsImpalaDB+".usage_stats us WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? "
+ "GROUP BY us.source, us.repository_id, us.`date`) AS res JOIN "+statsDB+".datasource d ON d.id=res.repository_id "
+ "JOIN "+statsDB+".datasource_oids dois ON d.id=dois.id "
+ "FROM " + usagestatsImpalaDB + ".usage_stats us WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? "
+ "GROUP BY us.source, us.repository_id, us.`date`) AS res JOIN " + statsDB + ".datasource d ON d.id=res.repository_id "
+ "JOIN " + statsDB + ".datasource_oids dois ON d.id=dois.id "
+ "WHERE (d.type='Journal' OR d.type='Journal Aggregator/Publisher') ORDER BY d.id, d.name, res.`date` ASC;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -1131,11 +1148,11 @@ public class UsageStatsRepository {
//st = connection.prepareStatement("SELECT d.id, d.name, d.websiteurl, dois.orid, res.ddate, res.downloads, res.views FROM (SELECT coalesce(ds.source, vs.source), coalesce(ds.repository_id, vs.repository_id) AS repository_id, coalesce(ds.date, vs.date) AS ddate, coalesce(ds.sum, 0) AS downloads, coalesce(vs.sum,0) AS views FROM (SELECT s.source, s.repository_id, s.date, sum(s.count) FROM public.downloads_stats s, public.result_classifications rc WHERE rc.id=s.result_id AND s.date>=? AND s.date<=? AND rc.type=? AND s.repository_id=? GROUP BY s.source, s.repository_id, s.date) AS ds FULL OUTER JOIN (SELECT s.source, s.repository_id, s.date, sum(s.count) FROM public.views_stats s, public.result_classifications rc WHERE rc.id=s.result_id AND s.date>=? AND s.date<=? AND rc.type=? AND s.repository_id=? GROUP BY s.source, s.repository_id, s.date) AS vs ON ds.source=vs.source AND ds.repository_id=vs.repository_id AND ds.date=vs.date) AS res JOIN public.datasource d ON d.id=res.repository_id JOIN public.datasource_oids dois ON d.id=dois.id WHERE (d.type='Journal' OR d.type='Journal Aggregator/Publisher') ORDER BY d.id, d.name, res.ddate ASC;");
st = connection.prepareStatement("SELECT d.id, d.name, d.websiteurl, dois.oid, res.`date`, res.downloads, res.views "
+ "FROM (SELECT us.source, us.repository_id, us.`date`, sum(us.downloads) as downloads, sum(us.views) as views "
+ "FROM "+usagestatsImpalaDB+".usage_stats us, "+statsDB+".result_classifications rc WHERE rc.id=us.result_id "
+ "FROM " + usagestatsImpalaDB + ".usage_stats us, " + statsDB + ".result_classifications rc WHERE rc.id=us.result_id "
+ "AND us.`date`>=?' AND us.`date`<=? AND rc.type=? AND us.repository_id=? "
+ "GROUP BY us.source, us.repository_id, us.`date`) AS res "
+ "JOIN "+statsDB+".datasource d ON d.id=res.repository_id "
+ "JOIN "+statsDB+".datasource_oids dois ON d.id=dois.id "
+ "JOIN " + statsDB + ".datasource d ON d.id=res.repository_id "
+ "JOIN " + statsDB + ".datasource_oids dois ON d.id=dois.id "
+ "WHERE (d.type='Journal' OR d.type='Journal Aggregator/Publisher') ORDER BY d.id, d.name, res.`date` ASC;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -1180,7 +1197,7 @@ public class UsageStatsRepository {
reportItems.add(reportItem);
}
repository = rs.getString(1);
reportItem = new ReportItem(null, rs.getString(2), "Platform", null);
reportItem = new ReportItem(null, rs.getString(2), "Platform", null, "");
reportItem.addIdentifier(new ItemIdentifier("OpenAIRE", rs.getString(1)));
reportItem.addIdentifier(new ItemIdentifier("ISSN", rs.getString(4).substring(rs.getString(4).lastIndexOf(":") + 1)));
if (rs.getString(3) != null) {
@@ -1209,7 +1226,7 @@ public class UsageStatsRepository {
}
repository = rs.getString(1);
lastDate = beginDateStr;
reportItem = new ReportItem(null, rs.getString(2), "Platform", null);
reportItem = new ReportItem(null, rs.getString(2), "Platform", null, "");
reportItem.addIdentifier(new ItemIdentifier("OpenAIRE", rs.getString(1)));
reportItem.addIdentifier(new ItemIdentifier("ISSN", rs.getString(4).substring(rs.getString(4).lastIndexOf(":") + 1)));
if (rs.getString(3) != null) {
@@ -1251,7 +1268,9 @@ public class UsageStatsRepository {
}
}
public void executeBatchItems(List<ReportItem> reportItems, String repositoryIdentifier, String itemDataType, Date beginDate, Date endDate, String granularity) {
public void executeBatchItems(List<ReportItem> reportItems,
String repositoryIdentifier, String itemDataType, Date beginDate,
Date endDate, String granularity) {
SimpleDateFormat report_dateFormat = new SimpleDateFormat("yyyy-MM-dd");
SimpleDateFormat postgresFormat = new SimpleDateFormat("yyyy/MM");
String beginDateStr = postgresFormat.format(beginDate);
@@ -1268,17 +1287,17 @@ public class UsageStatsRepository {
//st = connection.prepareStatement("SELECT res.result_id, r.title, r.publisher, r.source, rc.type, pids.pid, d.name, res.ddate, oids.orid, res.downloads, res.views FROM (SELECT coalesce(ds.repository_id, vs.repository_id) AS repository_id, coalesce(ds.result_id, vs.result_id) AS result_id, coalesce(ds.date, vs.date) AS ddate, coalesce(ds.sum, 0) AS downloads, coalesce(vs.sum,0) AS views FROM (SELECT s.repository_id, s.result_id, s.date, sum(s.count) FROM public.downloads_stats s WHERE s.date>=? AND s.date<=? AND s.repository_id=? GROUP BY s.repository_id, s.result_id, s.date) AS ds FULL OUTER JOIN (SELECT s.repository_id, s.result_id, s.date, sum(s.count) FROM public.views_stats s WHERE s.date>=? AND s.date<=? AND s.repository_id=? GROUP BY s.repository_id, s.result_id, s.date) AS vs ON ds.result_id=vs.result_id AND ds.date=vs.date) AS res JOIN public.result r ON res.result_id=r.id JOIN public.datasource d ON d.id=res.repository_id JOIN public.result_classifications rc ON rc.id=r.id LEFT JOIN (SELECT pids.id, string_agg(pids.pid, '#!#') AS pid FROM public.result_pids pids, public.result_datasources rd WHERE rd.id=pids.id AND type='doi' AND rd.datasource=? GROUP BY pids.id) AS pids ON pids.id=r.id LEFT JOIN (SELECT oids.id, string_agg(oids.orid, '#!#') AS orid FROM public.result_oids oids, public.result_datasources rd WHERE rd.id=oids.id AND rd.datasource=? GROUP BY oids.id) AS oids ON oids.id=r.id ORDER BY res.result_id, res.ddate;");
//st = connection.prepareStatement("SELECT res.result_id, r.title, r.publisher, r.source, rc.type, pids.pid, d.name, res.date, oids.orid, res.downloads, res.views FROM (SELECT us.repository_id, us.result_id, us.date, sum(us.downloads) as downloads, sum(us.views) as views FROM public.usage_stats us WHERE us.date>=? AND us.date<=? AND us.repository_id=? GROUP BY us.repository_id, us.result_id, us.date) AS res JOIN public.result r ON res.result_id=r.id JOIN public.datasource d ON d.id=res.repository_id JOIN public.result_classifications rc ON rc.id=r.id LEFT JOIN (SELECT pids.id, string_agg(pids.pid, '#!#') AS pid FROM public.result_pids pids, public.result_datasources rd WHERE rd.id=pids.id AND type='doi' AND rd.datasource=? GROUP BY pids.id) AS pids ON pids.id=r.id LEFT JOIN (SELECT oids.id, string_agg(oids.orid, '#!#') AS orid FROM public.result_oids oids, public.result_datasources rd WHERE rd.id=oids.id AND rd.datasource=? GROUP BY oids.id) AS oids ON oids.id=r.id ORDER BY res.result_id, res.date;");
st = connection.prepareStatement("SELECT distinct res.result_id, r.title, r.publisher, r.source, rc.type, pids.pid, d.name, res.`date`, oids.oid, res.downloads, res.views "
+ "FROM (SELECT us.repository_id, us.result_id, us.`date`, sum(us.downloads) as downloads, sum(us.views) as views FROM "+usagestatsImpalaDB+".usage_stats us "
+ "FROM (SELECT us.repository_id, us.result_id, us.`date`, sum(us.downloads) as downloads, sum(us.views) as views FROM " + usagestatsImpalaDB + ".usage_stats us "
+ "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? "
+ "GROUP BY us.repository_id, us.result_id, us.`date`) AS res JOIN "+statsDB+".result r ON res.result_id=r.id "
+ "JOIN "+statsDB+".datasource d ON d.id=res.repository_id "
+ "JOIN "+statsDB+".result_classifications rc ON rc.id=r.id "
+ "GROUP BY us.repository_id, us.result_id, us.`date`) AS res JOIN " + statsDB + ".result r ON res.result_id=r.id "
+ "JOIN " + statsDB + ".datasource d ON d.id=res.repository_id "
+ "JOIN " + statsDB + ".result_classifications rc ON rc.id=r.id "
+ "LEFT JOIN (SELECT pids.id, group_concat(pids.pid, '#!#') AS pid "
+ "FROM "+statsDB+".result_pids pids, "+statsDB+".result_datasources rd "
+ "FROM " + statsDB + ".result_pids pids, " + statsDB + ".result_datasources rd "
+ "WHERE rd.id=pids.id AND type='Digital Object Identifier' AND rd.datasource=? "
+ "GROUP BY pids.id) AS pids ON pids.id=r.id "
+ "LEFT JOIN (SELECT oids.id, group_concat(oids.oid, '#!#') AS oid "
+ "FROM "+statsDB+".result_oids oids, "+statsDB+".result_datasources rd "
+ "FROM " + statsDB + ".result_oids oids, " + statsDB + ".result_datasources rd "
+ "WHERE rd.id=oids.id AND rd.datasource=? GROUP BY oids.id) "
+ "AS oids ON oids.id=r.id ORDER BY res.result_id, res.`date`;");
st.setString(1, beginDateStr);
@ -1293,16 +1312,16 @@ public class UsageStatsRepository {
//st = connection.prepareStatement("SELECT res.result_id, r.title, r.publisher, r.source, rc.type, pids.pid, d.name, res.ddate, oids.orid, res.downloads, res.views FROM (SELECT coalesce(ds.repository_id, vs.repository_id) AS repository_id, coalesce(ds.result_id, vs.result_id) AS result_id, coalesce(ds.date, vs.date) AS ddate, coalesce(ds.sum, 0) AS downloads, coalesce(vs.sum,0) AS views FROM (SELECT s.repository_id, s.result_id, s.date, sum(s.count) FROM public.downloads_stats s WHERE s.date>=? AND s.date<=? AND s.repository_id=? GROUP BY s.repository_id, s.result_id, s.date) AS ds FULL OUTER JOIN (SELECT s.repository_id, s.result_id, s.date, sum(s.count) FROM public.views_stats s WHERE s.date>=? AND s.date<=? AND s.repository_id=? GROUP BY s.repository_id, s.result_id, s.date) AS vs ON ds.result_id=vs.result_id AND ds.date=vs.date) AS res JOIN public.result r ON res.result_id=r.id JOIN public.datasource d ON d.id=res.repository_id JOIN public.result_classifications rc ON rc.id=r.id LEFT JOIN (SELECT pids.id, string_agg(pids.pid, '#!#') AS pid FROM public.result_pids pids, result_datasources rd WHERE rd.id=pids.id AND type='doi' AND rd.datasource=? GROUP BY pids.id) AS pids ON pids.id=r.id LEFT JOIN (SELECT oids.id, string_agg(oids.orid, '#!#') AS orid FROM public.result_oids oids, public.result_datasources rd WHERE rd.id=oids.id AND rd.datasource=? GROUP BY oids.id) AS oids ON oids.id=r.id WHERE rc.type=? ORDER BY res.result_id, res.ddate;");
st = connection.prepareStatement("SELECT distinct res.result_id, r.title, r.publisher, r.source, rc.type, pids.pid, d.name, res.`date`, oids.oid, res.downloads, res.views "
+ "FROM (SELECT us.repository_id, us.result_id, us.`date`, sum(us.downloads) as downloads, sum(us.views) as views "
+ "FROM "+usagestatsImpalaDB+".usage_stats us WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? "
+ "FROM " + usagestatsImpalaDB + ".usage_stats us WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? "
+ "GROUP BY us.repository_id, us.result_id, us.`date`) AS res "
+ "JOIN "+statsDB+".result r ON res.result_id=r.id JOIN "+statsDB+".datasource d ON d.id=res.repository_id "
+ "JOIN "+statsDB+".result_classifications rc ON rc.id=r.id "
+ "JOIN " + statsDB + ".result r ON res.result_id=r.id JOIN " + statsDB + ".datasource d ON d.id=res.repository_id "
+ "JOIN " + statsDB + ".result_classifications rc ON rc.id=r.id "
+ "LEFT JOIN (SELECT pids.id, group_concat(pids.pid, '#!#') AS pid "
+ "FROM "+statsDB+".result_pids pids, "+statsDB+".result_datasources rd "
+ "FROM " + statsDB + ".result_pids pids, " + statsDB + ".result_datasources rd "
+ "WHERE rd.id=pids.id AND type='Digital Object Identifier' "
+ "AND rd.datasource=? GROUP BY pids.id) AS pids ON pids.id=r.id "
+ "LEFT JOIN (SELECT oids.id, group_concat(oids.oid, '#!#') AS oid "
+ "FROM "+statsDB+".result_oids oids, "+statsDB+".result_datasources rd "
+ "FROM " + statsDB + ".result_oids oids, " + statsDB + ".result_datasources rd "
+ "WHERE rd.id=oids.id AND rd.datasource=? GROUP BY oids.id) AS oids ON oids.id=r.id "
+ "WHERE rc.type=? ORDER BY res.result_id, res.`date`;");
st.setString(1, beginDateStr);
@ -1342,7 +1361,7 @@ public class UsageStatsRepository {
reportItems.add(reportItem);
}
result = rs.getString(1);
reportItem = new ReportItem(rs.getString(3), rs.getString(7), rs.getString(5), rs.getString(2));
reportItem = new ReportItem(rs.getString(3), rs.getString(7), rs.getString(5), rs.getString(2), "");
reportItem.addIdentifier(new ItemIdentifier("OpenAIRE", rs.getString(1)));
reportItem.addIdentifier(new ItemIdentifier("URLs", rs.getString(4)));
if (rs.getString(9) != null && !rs.getString(9).equals("")) {
@ -1382,7 +1401,7 @@ public class UsageStatsRepository {
}
result = rs.getString(1);
lastDate = beginDateStr;
reportItem = new ReportItem(rs.getString(3), rs.getString(7), rs.getString(5), rs.getString(2));
reportItem = new ReportItem(rs.getString(3), rs.getString(7), rs.getString(5), rs.getString(2), "");
reportItem.addIdentifier(new ItemIdentifier("OpenAIRE", rs.getString(1)));
reportItem.addIdentifier(new ItemIdentifier("URLs", rs.getString(4)));
if (rs.getString(9) != null && !rs.getString(9).equals("")) {
@ -1432,6 +1451,96 @@ public class UsageStatsRepository {
}
}
public void executeBatchItemsPR(List<ReportItem> reportItems,
String repositoryIdentifier, Date beginDate,
Date endDate, String granularity) {
SimpleDateFormat report_dateFormat = new SimpleDateFormat("yyyy-MM-dd");
SimpleDateFormat postgresFormat = new SimpleDateFormat("yyyy/MM");
String beginDateStr = postgresFormat.format(beginDate);
String endDateStr = postgresFormat.format(endDate);
Connection connection = null;
PreparedStatement st = null;
ResultSet rs = null;
try {
connection = usageStatsDB.getConnection();
st = connection.prepareStatement("SELECT `date`, sum(us.downloads) as downloads, sum(us.views) as views FROM " + usagestatsImpalaDB + ".usage_stats us "
+ "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? GROUP BY us.repository_id, us.`date` order by us.`date` ASC;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
st.setString(3, repositoryIdentifier);
log.info(st.toString());
rs = st.executeQuery();
String result = "";
String lastDate = "";
ReportItem reportItem = null;
int ft_total = 0;
int abstr = 0;
if (granularity.equalsIgnoreCase("totals")) {
reportItem = new ReportItem(" " + repositoryIdentifier + " Platform", "Platform", "Regular", "", "");
ft_total = 0;
abstr = 0;
while (rs.next()) {
// if (!rs.getString(1).equals(result)) {
// if (reportItem != null) {
// reportItem.addPerformance(new ItemPerformance(report_dateFormat.format(beginDate), report_dateFormat.format(endDate), Integer.toString(ft_total), Integer.toString(abstr)));
// reportItems.add(reportItem);
// }
// //result = rs.getString(1);
// }
ft_total += rs.getInt(2);
abstr += rs.getInt(3);
}
if (reportItem != null) {
reportItem.addPerformance(new ItemPerformance(report_dateFormat.format(beginDate), report_dateFormat.format(endDate), Integer.toString(ft_total), Integer.toString(abstr)));
reportItems.add(reportItem);
}
} else if (granularity.equalsIgnoreCase("monthly")) {
Calendar endCal = Calendar.getInstance();
endCal.setTime(postgresFormat.parse(endDateStr));
endCal.add(Calendar.MONTH, 1);
Date endDateForZeros = endCal.getTime();
reportItem = new ReportItem(" " + repositoryIdentifier + " Platform", "Platform", "Regular", "", "");
lastDate = beginDateStr;
while (rs.next()) {
Calendar endC = Calendar.getInstance();
endC.setTime(postgresFormat.parse(rs.getString(1)));
endC.set(Calendar.DATE, endC.getActualMaximum(Calendar.DATE));
if (reportItem != null) {
reportItem.addPerformance(new ItemPerformance(report_dateFormat.format(postgresFormat.parse(rs.getString(1))), report_dateFormat.format(endC.getTime()), rs.getString(2), rs.getString(3)));
}
endC.setTime(postgresFormat.parse(rs.getString(1)));
endC.add(Calendar.MONTH, 1);
lastDate = postgresFormat.format(endC.getTime());
}
if (reportItem != null) {
fillWithZeros(postgresFormat.parse(lastDate), endDateForZeros, reportItem);
reportItems.add(reportItem);
}
}
/*
jedis.put(redis_key, "persistent", "false");
jedis.put(redis_key, "query", st.toString());
jedis.put(redis_key, "result", toJson(reportItems));
jedis.put(redis_key, "fetchMode", "3");
*/
} catch (Exception e) {
log.error("Batch Item Report failed: ", e);
} finally {
DbUtils.closeQuietly(rs);
DbUtils.closeQuietly(st);
DbUtils.closeQuietly(connection);
}
}
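A minimal sketch of how the new executeBatchItemsPR method might be driven by a caller. Everything below is an illustration only, not part of this commit: the wrapper method, repository id, and date range are assumptions (imports as in this class, plus java.text.ParseException).
// Illustrative only, not part of this commit: exercising executeBatchItemsPR.
// The repository id and date range are placeholders.
private List<ReportItem> samplePlatformUsage(UsageStatsRepository usageStatsRepository) throws ParseException {
    SimpleDateFormat ym = new SimpleDateFormat("yyyy/MM");
    List<ReportItem> items = new ArrayList<>();
    // "Monthly" is expected to yield one ItemPerformance per month in range; "Totals" a single aggregate.
    usageStatsRepository.executeBatchItemsPR(items, "opendoar____::1234",
            ym.parse("2020/01"), ym.parse("2020/12"), "Monthly");
    return items;
}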
private void fillWithZeros(Date from, Date to, ReportItem reportItem) {
SimpleDateFormat report_dateFormat = new SimpleDateFormat("yyyy-MM-dd");

View File

@ -1,14 +1,27 @@
package eu.dnetlib.usagestats.services;
import eu.dnetlib.usagestats.sushilite.domain.ReportPR;
import eu.dnetlib.usagestats.sushilite.domain.ReportResponseWrapper;
import eu.dnetlib.usagestats.sushilite.domain.ReportStatus;
import java.util.ArrayList;
public interface SushiLiteService {
ReportResponseWrapper buildReport(String reportName, String release, String requestorId, String beginDate,
String endDate, String repositoryIdentifier, String itemIdentifier,
String itemDataType, String hasDoi, String granularity, String callback);
String displayReport(String reportName, String release, String requestorId, String beginDate,
String endDate, String repositoryIdentifier, String itemIdentifier,
String itemDataType, String hasDoi, String granularity, String callback, String pretty, String userAgent);
}
String endDate, String repositoryIdentifier, String itemIdentifier,
String itemDataType, String hasDoi, String granularity, String callback, String pretty);
ReportStatus buildReportStatus();
String displayReportStatus();
ArrayList buildReportSupported();
String displayReportsSupported();
ReportPR buildReportPR(String repositoryIdentifier, String beginDate,String endDate);
String displayReportPR(String repositoryIdentifier, String beginDate,String endDate);
}

View File

@ -1,28 +1,29 @@
package eu.dnetlib.usagestats.services;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import eu.dnetlib.usagestats.repositories.UsageStatsRepository;
import eu.dnetlib.usagestats.sushilite.domain.Alert;
import eu.dnetlib.usagestats.sushilite.domain.Filter;
import eu.dnetlib.usagestats.sushilite.domain.InstitutionID;
import eu.dnetlib.usagestats.sushilite.domain.ReportItem;
import eu.dnetlib.usagestats.sushilite.domain.ReportException;
import eu.dnetlib.usagestats.sushilite.domain.ReportPR;
import eu.dnetlib.usagestats.sushilite.domain.ReportResponse;
import eu.dnetlib.usagestats.sushilite.domain.ReportResponseWrapper;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import eu.dnetlib.usagestats.sushilite.domain.ReportStatus;
import eu.dnetlib.usagestats.sushilite.domain.ReportSupported;
import org.springframework.stereotype.Service;
import java.text.SimpleDateFormat;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import java.util.logging.Level;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Value;
@Service
public class SushiLiteServiceImpl implements SushiLiteService {
@ -31,32 +32,16 @@ public class SushiLiteServiceImpl implements SushiLiteService {
private final Logger log = Logger.getLogger(this.getClass());
@Value("${usagestats.redis.hostname}")
private String hostname;
@Value("${usagestats.redis.port}")
private int port;
@Value("${compression.max_number_of_records}")
private int compression_max_number_of_records;
@Value("${download.folder}")
private String download_folder;
@Value("${sushi-lite.server}")
private String sushi_lite_server;
String finalcompressedString = "";
boolean reportForCompression = false;
private int noOfItems = 0;
String userAgent;
public SushiLiteServiceImpl(UsageStatsRepository usageStatsRepository) {
this.usageStatsRepository = usageStatsRepository;
}
@Override
public ReportResponseWrapper buildReport(String reportName, String release,
String requestorId, String beginDate,
String endDate, String repositoryIdentifier, String itemIdentifier,
String itemDataType, String hasDoi, String granularity,
String callback) {
List<ReportItem> reportItems = new ArrayList<>();
List<ReportException> reportExceptions = new ArrayList<>();
@ -226,120 +211,27 @@ public class SushiLiteServiceImpl implements SushiLiteService {
ReportResponse reportResponse = new ReportResponse(reportName, release, requestorId, beginDate, endDate,
repositoryIdentifier, itemIdentifier, itemDataType, hasDoi, granularity, callback, reportItems, reportExceptions);
if (reportItems.size() > compression_max_number_of_records) {
log.info("Compression due to "+reportItems.size());
reportForCompression = true;
}
noOfItems = reportItems.size();
return new ReportResponseWrapper(reportResponse);
}
@Override
public String displayReport(String reportName, String release, String requestorId, String beginDate, String endDate, String repositoryIdentifier, String itemIdentifier, String itemDataType, String hasDoi, String granularity, String callback, String pretty, String userAgent) {
public String displayReport(String reportName, String release,
String requestorId, String beginDate, String endDate,
String repositoryIdentifier, String itemIdentifier,
String itemDataType, String hasDoi, String granularity,
String callback, String pretty) {
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.configure(JsonGenerator.Feature.QUOTE_FIELD_NAMES, false);
objectMapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
java.sql.Timestamp timestamp = new java.sql.Timestamp(System.currentTimeMillis());
//System.out.println("Display report start " + timestamp);
log.info("Starting process...." + timestamp);
this.userAgent = userAgent;
try {
ReportResponseWrapper reportResponseWrapper = buildReport(reportName, release, requestorId, beginDate, endDate, repositoryIdentifier, itemIdentifier, itemDataType, hasDoi, granularity, callback);
if (pretty.equalsIgnoreCase("pretty")) {
if (reportForCompression == false) {
if (userAgent.contains("Mozilla")) {
return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(reportResponseWrapper) + "</pre>";
} else {
return objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(reportResponseWrapper);
}
} else {
if (userAgent.contains("Mozilla")) {
byte[] sourceReport = objectMapper.writeValueAsBytes(reportResponseWrapper);
ObjectWriter writer = objectMapper.writer();
java.sql.Timestamp timestamp1 = new java.sql.Timestamp(System.currentTimeMillis());
//System.out.println("String start " + timestamp1);
log.info("Start building report..." + timestamp1);
String outputname = reportName + "_" + repositoryIdentifier.replace("_", "").replace(":", "") + beginDate.replace("-", "") + "_" + endDate.replace("-", "");
String directory = new File(download_folder).getAbsolutePath();
writer.writeValue(new File(directory + "/" + outputname + ".json"), new String(sourceReport));
FileOutputStream fos = new FileOutputStream(directory + "/" + outputname + ".zip");
ZipOutputStream zipOut = new ZipOutputStream(fos);
File fileToZip = new File(directory + "/" + outputname + ".json");
FileInputStream fis = new FileInputStream(fileToZip);
ZipEntry zipEntry = new ZipEntry(fileToZip.getName());
zipOut.putNextEntry(zipEntry);
byte[] bytes = new byte[1024];
int length;
while ((length = fis.read(bytes)) >= 0) {
zipOut.write(bytes, 0, length);
}
zipOut.close();
fis.close();
fos.close();
fileToZip.delete();
java.sql.Timestamp timestamp2 = new java.sql.Timestamp(System.currentTimeMillis());
//System.out.println("String end " + timestamp2);
log.info("Report created..." + timestamp2);
return new String(sushi_lite_server + "/download/" + outputname + ".zip");
//return "report is available. Download it from localhost:8080/download/"+outputname+".zip";
} else {
byte[] sourceReport = objectMapper.writeValueAsBytes(reportResponseWrapper);
return new String(sourceReport);
}
}
}
if (reportForCompression == false) {
byte[] sourceReport = objectMapper.writeValueAsBytes(reportResponseWrapper);
return new String(sourceReport);
} else if (userAgent.contains("Mozilla")) {
java.sql.Timestamp timestamp3 = new java.sql.Timestamp(System.currentTimeMillis());
//System.out.println("String start " + timestamp3);
log.info("Start building report..." + timestamp3);
byte[] sourceReport = objectMapper.writeValueAsBytes(reportResponseWrapper);
ObjectWriter writer = objectMapper.writer();
String outputname = reportName + "_" + repositoryIdentifier.replace("_", "").replace(":", "") + beginDate.replace("-", "") + "_" + endDate.replace("-", "");
String directory = new File(download_folder).getAbsolutePath();
writer.writeValue(new File(directory + "/" + outputname + ".json"), new String(sourceReport).replaceAll("\\\"", ""));
FileOutputStream fos = new FileOutputStream(directory + "/" + outputname + ".zip");
ZipOutputStream zipOut = new ZipOutputStream(fos);
File fileToZip = new File(directory + "/" + outputname + ".json");
FileInputStream fis = new FileInputStream(fileToZip);
ZipEntry zipEntry = new ZipEntry(fileToZip.getName());
zipOut.putNextEntry(zipEntry);
byte[] bytes = new byte[1024];
int length;
while ((length = fis.read(bytes)) >= 0) {
zipOut.write(bytes, 0, length);
}
zipOut.close();
fis.close();
fos.close();
fileToZip.delete();
java.sql.Timestamp timestamp4 = new java.sql.Timestamp(System.currentTimeMillis());
//System.out.println("String end " + timestamp4);
log.info("Report created..." + timestamp4);
return new String(sushi_lite_server + "/download/" + outputname + ".zip");
} else {
byte[] sourceReport = objectMapper.writeValueAsBytes(reportResponseWrapper);
return new String(sourceReport);
return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(buildReport(reportName, release, requestorId, beginDate, endDate, repositoryIdentifier, itemIdentifier, itemDataType, hasDoi, granularity, callback)).replaceAll("/", "\\\\/") + "</pre>";
}
return objectMapper.writeValueAsString(buildReport(reportName, release, requestorId, beginDate, endDate, repositoryIdentifier, itemIdentifier, itemDataType, hasDoi, granularity, callback)).replaceAll("/", "\\\\/");
} catch (Exception e) {
e.printStackTrace();
return null;
}
//return report.getReport(reportP, release, requestorId, beginDate, endDate, repositoryIdentifier, itemIdentifier, itemDataType, hasDoi, granularity, callback, pretty);
}
private Date tryParse(String dateString) {
@ -354,4 +246,141 @@ public class SushiLiteServiceImpl implements SushiLiteService {
}
return null;
}
@Override
public ReportStatus buildReportStatus() {
ZonedDateTime dateTime = ZonedDateTime.now(); // Gets the current date and time, with your default time-zone
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");
Alert alert1 = new Alert("2020-12-28T13:32:44.632Z", "string");
Alert alert2 = new Alert(dateTime.format(formatter), "string");
ArrayList alertsList = new ArrayList();
alertsList.add(alert1);
alertsList.add(alert2);
ReportStatus reportStatus = new ReportStatus("desctiption", true, "registry_url", "note", alertsList);
return (reportStatus);
}
@Override
public String displayReportStatus() {
ObjectMapper objectMapper = new ObjectMapper();
try {
return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(buildReportStatus()) + "</pre>";
} catch (JsonProcessingException ex) {
java.util.logging.Logger.getLogger(SushiLiteServiceImpl.class.getName()).log(Level.SEVERE, null, ex);
}
return null;
}
@Override
public ArrayList buildReportSupported() {
ArrayList reportSupportedList = new ArrayList();
ReportSupported r1 = new ReportSupported("Report_Name", "Report_ID", "Release", "Report_Description", "Path");
ReportSupported r2 = new ReportSupported("Report_Name2", "Report_ID2", "Release2", "Report_Description2", "Path2");
reportSupportedList.add(r1);
reportSupportedList.add(r2);
return reportSupportedList;
}
@Override
public String displayReportsSupported() {
ObjectMapper objectMapper = new ObjectMapper();
try {
return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(buildReportSupported()) + "</pre>";
} catch (JsonProcessingException ex) {
java.util.logging.Logger.getLogger(SushiLiteServiceImpl.class.getName()).log(Level.SEVERE, null, ex);
}
return null;
}
@Override
public ReportPR buildReportPR(String repositoryIdentifier, String beginDate,
String endDate) {
List<ReportException> reportExceptions = new ArrayList<>();
ZonedDateTime dateTime = ZonedDateTime.now(); // Gets the current date and time, with your default time-zone
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");
//Display Report Created Day
String granularity = "totals";
Date beginDateParsed;
if (!beginDate.equals("")) {
beginDateParsed = tryParse(beginDate);
if (beginDateParsed != null && (granularity.toLowerCase().equals("monthly") || beginDate.length() == 7)) {
Calendar temp = Calendar.getInstance();
temp.setTime(beginDateParsed);
temp.set(Calendar.DAY_OF_MONTH, temp.getActualMinimum(Calendar.DAY_OF_MONTH));
beginDateParsed = temp.getTime();
}
} else {
Calendar temp = Calendar.getInstance();
temp.add(Calendar.MONTH, -1);
temp.set(Calendar.DAY_OF_MONTH, temp.getActualMinimum(Calendar.DAY_OF_MONTH));
beginDateParsed = temp.getTime();
reportExceptions.add(new ReportException("3021", "Warning", "Unspecified Date Arguments", "Begin Date set to default: " + new SimpleDateFormat("yyyy-MM-dd").format(beginDateParsed)));
}
Date endDateParsed;
if (!endDate.equals("")) {
endDateParsed = tryParse(endDate);
if (endDateParsed != null && (granularity.toLowerCase().equals("monthly") || endDate.length() == 7)) {
Calendar temp = Calendar.getInstance();
temp.setTime(endDateParsed);
temp.set(Calendar.DAY_OF_MONTH, temp.getActualMaximum(Calendar.DAY_OF_MONTH));
endDateParsed = temp.getTime();
}
} else {
Calendar temp = Calendar.getInstance();
temp.add(Calendar.MONTH, -1);
temp.set(Calendar.DAY_OF_MONTH, temp.getActualMaximum(Calendar.DAY_OF_MONTH));
endDateParsed = temp.getTime();
reportExceptions.add(new ReportException("3021", "Warning", "Unspecified Date Arguments", "End Date set to default: " + new SimpleDateFormat("yyyy-MM-dd").format(endDateParsed)));
}
//log.error("dates: " + beginDateParsed.toString() + " - " + endDateParsed.toString());
if (beginDateParsed == null) {
reportExceptions.add(new ReportException("3020", "Error", "Invalid Date Arguments", "Begin Date: " + beginDate + " is not a valid date"));
}
if (endDateParsed == null) {
reportExceptions.add(new ReportException("3020", "Error", "Invalid Date Arguments", "End Date: " + endDate + " is not a valid date"));
}
if (beginDateParsed != null && endDateParsed != null && !beginDateParsed.before(endDateParsed)) {
reportExceptions.add(new ReportException("3020", "Error", "Invalid Date Arguments", "BeginDate \'" + new SimpleDateFormat("yyyy-MM-dd").format(beginDateParsed) + "\' is greater than EndDate \'" + new SimpleDateFormat("yyyy-MM-dd").format(endDateParsed) + "\'"));
}
List<Filter> reportFilters = new ArrayList();
reportFilters.add(new Filter("BeginDate", beginDate));
reportFilters.add(new Filter("EndDate", endDate));
String reportID = "Platform";
String reportName = "Plaform Total Views & Downloads";
String insitutionName = "Insititution Name " + repositoryIdentifier;
List<InstitutionID> institutionIdD = new ArrayList();
institutionIdD.add(new InstitutionID("Openaire", repositoryIdentifier));
List<ReportItem> reportItems = new ArrayList();
usageStatsRepository.executeBatchItemsPR(reportItems, repositoryIdentifier, beginDateParsed, endDateParsed, granularity);
if (reportItems.isEmpty()) {
reportExceptions.add(new ReportException("3030", "Error", "No Usage Available for Requested Dates", "Service did not find any data"));
}
ReportPR reportPr = new ReportPR(dateTime.format(formatter), repositoryIdentifier, reportID, reportName, institutionName, institutionIdD, reportExceptions, reportFilters, reportItems);
return reportPr;
}
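When BeginDate and EndDate are empty, the method above defaults them to the first and last day of the previous month. A standalone sketch of that Calendar arithmetic, with a worked example, for illustration only:
// Illustration of the default-date snapping used in buildReportPR (not part of this commit).
private static Date[] defaultPreviousMonthRange() {
    Calendar begin = Calendar.getInstance();
    begin.add(Calendar.MONTH, -1);                                                    // previous month
    begin.set(Calendar.DAY_OF_MONTH, begin.getActualMinimum(Calendar.DAY_OF_MONTH));  // first day
    Calendar end = Calendar.getInstance();
    end.add(Calendar.MONTH, -1);
    end.set(Calendar.DAY_OF_MONTH, end.getActualMaximum(Calendar.DAY_OF_MONTH));      // last day
    // Running on 2021-01-05 gives 2020-12-01 and 2020-12-31, matching the warning messages above.
    return new Date[]{begin.getTime(), end.getTime()};
}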
@Override
public String displayReportPR(String repositoryIdentifier, String beginDate,
String endDate) {
ObjectMapper objectMapper = new ObjectMapper();
log.info((beginDate + " " + endDate));
try {
return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(buildReportPR(repositoryIdentifier, beginDate, endDate)) + "</pre>";
} catch (JsonProcessingException ex) {
java.util.logging.Logger.getLogger(SushiLiteServiceImpl.class.getName()).log(Level.SEVERE, null, ex);
}
return null;
}
}
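For context, a hedged sketch of how a caller might use the new report methods. The wrapper method, repository id, and yyyy-MM date strings are placeholders; whether the yyyy-MM form is accepted depends on tryParse, which is not shown in full here.
// Illustrative client sketch, not part of this commit.
private void sampleClient(SushiLiteService sushiLiteService) {
    // Placeholder repository id; dates assume tryParse accepts the yyyy-MM form.
    String platformReport = sushiLiteService.displayReportPR("opendoar____::1234", "2020-01", "2020-12");
    String status = sushiLiteService.displayReportStatus();        // pretty-printed ReportStatus
    String supported = sushiLiteService.displayReportsSupported(); // pretty-printed list of supported reports
    System.out.println(platformReport + status + supported);
}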