Remove pretty JSON format

dimitrispie 2023-03-24 10:43:42 +02:00
parent e667fed660
commit 901b47ac27
9 changed files with 284 additions and 715 deletions
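
For context, the change this commit applies throughout the SUSHI-Lite service classes below is a switch from Jackson's pretty printer (whose output was wrapped in <pre> tags for browser display) to the default compact writer. A minimal standalone sketch of that difference, assuming only a plain Jackson ObjectMapper and an illustrative payload (neither taken from the repository):

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

public class CompactJsonSketch {
    public static void main(String[] args) throws JsonProcessingException {
        ObjectMapper objectMapper = new ObjectMapper();
        // Illustrative payload; the service serializes COUNTER report objects instead.
        java.util.Map<String, String> payload = java.util.Map.of("Report", "PR", "Status", "OK");

        // Before this commit: indented JSON wrapped in <pre> for the browser.
        String pretty = "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(payload) + "</pre>";

        // After this commit: plain single-line JSON, no <pre> wrapper.
        String compact = objectMapper.writer().writeValueAsString(payload);

        System.out.println(pretty);
        System.out.println(compact);
    }
}

The hunks in SushiLiteServiceImpl below apply exactly this substitution to displayReportStatus and displayReportsSupported.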

pom.xml

@@ -77,6 +77,16 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-redis</artifactId>
<!-- <version>2.2.0.BUILD-SNAPSHOT</version> -->
</dependency>
<dependency>
<groupId>redis.clients</groupId>
<artifactId>jedis</artifactId>
<!-- <version>3.0.1</version> -->
</dependency>
<!--
<dependency>
<groupId>org.springframework.boot</groupId>


@@ -11,11 +11,9 @@ import org.springframework.data.redis.connection.jedis.JedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.StringRedisSerializer;
/**
* Created by tsampikos on 20/4/2017.
*/
@ConfigurationProperties(prefix = "usagestats")
@EnableConfigurationProperties
@Configuration
//@ConfigurationProperties(prefix = "usagestats")
//@EnableConfigurationProperties
public class SpringRedisConfiguration {
private final Logger log = Logger.getLogger(this.getClass());
@@ -26,6 +24,9 @@ public class SpringRedisConfiguration {
@Value("${usagestats.redis.port}")
private int port;
public SpringRedisConfiguration() {
}
@Bean
public JedisConnectionFactory connectionFactory() {
JedisConnectionFactory connectionFactory = new JedisConnectionFactory();


@@ -1,82 +1,47 @@
package eu.dnetlib.usagestats.controllers;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.usagestats.services.SushiLiteService;
import eu.dnetlib.usagestats.services.SushiLiteServiceSample;
import eu.dnetlib.usagestats.sushilite.domain.SUSHI_Error_Model;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.util.FileCopyUtils;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestHeader;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletResponse;
import java.io.*;
import java.util.List;
import java.util.concurrent.*;
/**
* Created by D.Pierrakos
*/
@RestController
class SushiLiteController {
class SushiLiteControllerSample {
private final Logger log = Logger.getLogger(this.getClass());
private final SushiLiteService sushiLiteService;
private final SushiLiteServiceSample sushiLiteServiceSample;
@Value("${download.folder}")
private String download_folder;
public SushiLiteController(SushiLiteService sushiLiteService) {
this.sushiLiteService = sushiLiteService;
public SushiLiteControllerSample(SushiLiteServiceSample sushiLiteService) {
this.sushiLiteServiceSample = sushiLiteService;
}
@RequestMapping(value = "/sushilite/r5/status", method = RequestMethod.GET)
public String getReportStatus() {
log.info("COUNTER Report status request ");
return sushiLiteService.displayReportStatus();
}
// @RequestMapping(value = "/sushilite/r5/reports/pr_p1", method = RequestMethod.GET)
// public String getReportPR_P1(
// @RequestParam(value = "RepositoryIdentifier", defaultValue = "") String repositoryIdentifier,
// @RequestParam(value = "RequestorID", defaultValue = "anonymous") String requestorId,
// @RequestParam(value = "BeginDate", defaultValue = "") String beginDate,
// @RequestParam(value = "EndDate", defaultValue = "") String endDate) {
// log.info("COUNTER PR_P1 Report request for repository " + repositoryIdentifier);
// return sushiLiteServiceSample.displayReportPR_P1(requestorId, repositoryIdentifier, beginDate, endDate);
// }
@RequestMapping(value = "/sushilite/r5/reports", method = RequestMethod.GET)
public String getReportSupported() {
log.info("COUNTER Supported Reports request ");
return sushiLiteService.displayReportsSupported();
}
@RequestMapping(value = "/sushilite/r5/members", method = RequestMethod.GET)
public String getMembers() {
log.info("COUNTER Members request ");
return sushiLiteService.displayConsortiumMemberList();
}
@RequestMapping(value = "/sushilite/r5/reports/pr_p1", method = RequestMethod.GET)
public String getReportPR_P1(
@RequestParam(value = "RepositoryIdentifier", defaultValue = "") String repositoryIdentifier,
@RequestParam(value = "RequestorID", defaultValue = "anonymous") String requestorId,
@RequestParam(value = "BeginDate", defaultValue = "") String beginDate,
@RequestParam(value = "EndDate", defaultValue = "") String endDate) {
log.info("COUNTER PR_P1 Report request for repository " + repositoryIdentifier);
return sushiLiteService.displayReportPR_P1(requestorId, repositoryIdentifier, beginDate, endDate);
}
@RequestMapping(value = "/sushilite/r5/reports/ir", method = RequestMethod.GET)
@RequestMapping(value = "/sushilite/r5/reports/ir_sample", method = RequestMethod.GET)
public ResponseEntity getReportΙR(
@RequestParam(value = "RepositoryIdentifier", defaultValue = "") String repositoryIdentifier,
@RequestParam(value = "ItemIdentifier", defaultValue = "") String itemIdentifier,
@@ -91,7 +56,7 @@ class SushiLiteController {
@Override
public ResponseEntity call() throws Exception {
String report = sushiLiteService.displayReportIR(requestorId, repositoryIdentifier, itemIdentifier, beginDate, endDate, metricType, dataType, granularity);
String report = sushiLiteServiceSample.displayReportIR(requestorId, repositoryIdentifier, itemIdentifier, beginDate, endDate, metricType, dataType, granularity);
if (report.indexOf(".zip") < 0) {
return new ResponseEntity<>(report, HttpStatus.OK);
} else {
@@ -124,18 +89,9 @@ class SushiLiteController {
}
return null;
// String report = sushiLiteService.displayReportIR(requestorId, repositoryIdentifier, itemIdentifier, beginDate, endDate, metricType, dataType, granularity);
//
// if (report.indexOf(".zip") < 0) {
// return new ResponseEntity<>(report, HttpStatus.OK);
// } else {
//
// String compressedOutput = "<pre> {\"Report\":\"IR\", \"Description\":\"Compressed Report Due to large number of records\", \"URL To Download Report from: \":\"" + report + "\"} </pre>";
// return new ResponseEntity<>(compressedOutput, HttpStatus.OK);
// }
}
@RequestMapping(value = "/sushilite/r5/reports/dsr", method = RequestMethod.GET)
@RequestMapping(value = "/sushilite/r5/reports/dsr_sample", method = RequestMethod.GET)
public ResponseEntity getReportDSR(
@RequestParam(value = "RepositoryIdentifier", defaultValue = "") String repositoryIdentifier,
@RequestParam(value = "DatasetIdentifier", defaultValue = "") String itemIdentifier,
@@ -150,7 +106,7 @@ class SushiLiteController {
@Override
public ResponseEntity call() throws Exception {
String report = sushiLiteService.displayReportDSR(requestorId, repositoryIdentifier, itemIdentifier, beginDate, endDate, metricType, granularity);
String report = sushiLiteServiceSample.displayReportDSR(requestorId, repositoryIdentifier, itemIdentifier, beginDate, endDate, metricType, granularity);
if (report.indexOf(".zip") < 0) {
return new ResponseEntity<>(report, HttpStatus.OK);
} else {
@@ -180,49 +136,41 @@ class SushiLiteController {
}
return null;
// String report = sushiLiteService.displayReportDSR(requestorId, repositoryIdentifier, itemIdentifier, beginDate, endDate, metricType, granularity);
}
// @RequestMapping(value = "/download/{file_name}", method = RequestMethod.GET)
// public void downloadFile(HttpServletResponse response, @PathVariable("file_name") String filetoDownload) throws IOException {
// File file = new File(download_folder + "/" + filetoDownload);
// log.info("File downloaded at " + file.getAbsolutePath());
// String mimeType = "application/octet-stream";
//
// response.setContentType(mimeType);
//
// /* "Content-Disposition : attachment" will be directly download, may provide save as popup, based on your browser setting*/
// response.setHeader("Content-Disposition", String.format("attachment; filename=\"%s\"", file.getName()));
// response.setContentLength((int) file.length());
// InputStream inputStream = new BufferedInputStream(new FileInputStream(file));
// FileCopyUtils.copy(inputStream, response.getOutputStream());
// }
// @RequestMapping(value = "/sushilite/r5/reports/pr_sample", method = RequestMethod.GET)
// public ResponseEntity<String> getReportPR(@RequestParam(value = "RepositoryIdentifier", defaultValue = "") String repositoryIdentifier,
// @RequestParam(value = "RequestorID", defaultValue = "anonymous") String requestorId,
// @RequestParam(value = "BeginDate", defaultValue = "") String beginDate,
// @RequestParam(value = "EndDate", defaultValue = "") String endDate,
// @RequestParam(value = "MetricType", defaultValue = "") String metricType,
// @RequestParam(value = "DataType", defaultValue = "") String dataType,
// @RequestParam(value = "Granularity", defaultValue = "Monthly") String granularity) throws InterruptedException, Exception {
//
// String report = sushiLiteServiceSample.displayReportPR(requestorId, repositoryIdentifier, beginDate, endDate, metricType, dataType, granularity);
//
// if (report.indexOf(".zip") < 0) {
// return new ResponseEntity<>(report, HttpStatus.OK);
// } else {
//
// String compressedOutput = "<pre> {\"Report\":\"DSR\", \"Description\":\"Compressed Report Due to large number of records\", \"URL To Download Report from: \":\"" + report + "\"} </pre>";
// return new ResponseEntity<>(compressedOutput, HttpStatus.OK);
// }
}
@RequestMapping(value = "/download/{file_name}", method = RequestMethod.GET)
public void downloadFile(HttpServletResponse response, @PathVariable("file_name") String filetoDownload) throws IOException {
File file = new File(download_folder + "/" + filetoDownload);
log.info("File downloaded at " + file.getAbsolutePath());
String mimeType = "application/octet-stream";
response.setContentType(mimeType);
/* "Content-Disposition : attachment" will be directly download, may provide save as popup, based on your browser setting*/
response.setHeader("Content-Disposition", String.format("attachment; filename=\"%s\"", file.getName()));
response.setContentLength((int) file.length());
InputStream inputStream = new BufferedInputStream(new FileInputStream(file));
FileCopyUtils.copy(inputStream, response.getOutputStream());
}
@RequestMapping(value = "/sushilite/r5/reports/pr", method = RequestMethod.GET)
public ResponseEntity<String> getReportPR(@RequestParam(value = "RepositoryIdentifier", defaultValue = "") String repositoryIdentifier,
@RequestParam(value = "RequestorID", defaultValue = "anonymous") String requestorId,
@RequestParam(value = "BeginDate", defaultValue = "") String beginDate,
@RequestParam(value = "EndDate", defaultValue = "") String endDate,
@RequestParam(value = "MetricType", defaultValue = "") String metricType,
@RequestParam(value = "DataType", defaultValue = "") String dataType,
@RequestParam(value = "Granularity", defaultValue = "Monthly") String granularity) throws InterruptedException, Exception {
String report = sushiLiteService.displayReportPR(requestorId, repositoryIdentifier, beginDate, endDate, metricType, dataType, granularity);
if (report.indexOf(".zip") < 0) {
return new ResponseEntity<>(report, HttpStatus.OK);
} else {
String compreessedOutput = "<pre> {\"Report\":\"PR\", \"Description\":\"Compressed Report Due to large number of records\", \"URL To Download Report from: \":\"" + report + "\"} </pre>";
return new ResponseEntity<>(compreessedOutput, HttpStatus.OK);
}
}
// String compreessedOutput = "<pre> {\"Report\":\"PR\", \"Description\":\"Compressed Report Due to large number of records\", \"URL To Download Report from: \":\"" + report + "\"} </pre>";
//
// return new ResponseEntity<>(compreessedOutput, HttpStatus.OK);
// }
// }
}


@@ -1,53 +1,28 @@
package eu.dnetlib.usagestats.repositories;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.usagestats.portal.*;
import eu.dnetlib.usagestats.sushilite.domain.*;
import org.apache.commons.dbutils.DbUtils;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.redis.core.HashOperations;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Repository;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.usagestats.portal.CountryRepositories;
import eu.dnetlib.usagestats.portal.CountryUsageStats;
import eu.dnetlib.usagestats.portal.CountryUsageStatsAll;
import eu.dnetlib.usagestats.portal.MonthlyStats;
import eu.dnetlib.usagestats.portal.MonthlyUsageStats;
import eu.dnetlib.usagestats.portal.RepositoryStats;
import eu.dnetlib.usagestats.portal.TotalStats;
import eu.dnetlib.usagestats.portal.TotalStatsReposViewsDownloads;
import eu.dnetlib.usagestats.portal.UsageStats;
import eu.dnetlib.usagestats.portal.YearlyStats;
import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Dataset_Identifiers;
import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Dataset_Performance;
import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Dataset_Usage;
import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Item_Identifiers;
import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Item_Performance;
import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Item_Usage;
import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Platform_Usage;
import eu.dnetlib.usagestats.sushilite.domain.SUSHI_Consortium_Member_List;
import eu.dnetlib.usagestats.sushilite.domain.SUSHI_Org_Identifiers;
import org.apache.commons.dbutils.DbUtils;
import javax.sql.DataSource;
import java.security.MessageDigest;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import org.springframework.beans.factory.annotation.Value;
import java.util.*;
/**
* Created by D.Pierrakos
*/
@Repository
public class UsageStatsRepository {
public class UsageStatsRepositorySample {
private final DataSource usageStatsDB;
@@ -59,8 +34,8 @@ public class UsageStatsRepository {
@Value("${prod.usagestatsImpalaDB}")
private String usagestatsImpalaDB;
public UsageStatsRepository(DataSource usageStatsDB,
RedisTemplate<String, String> redisTemplate) {
public UsageStatsRepositorySample(DataSource usageStatsDB,
RedisTemplate<String, String> redisTemplate) {
this.usageStatsDB = usageStatsDB;
this.jedis = redisTemplate.opsForHash();
}
@@ -868,7 +843,7 @@ public class UsageStatsRepository {
+"sum(us.unique_item_requests) as unique_item_requests, "
+"sum(us.unique_item_investigations) as unique_item_investigations "
+ "FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us, "
+ statsDB + ".result_classifications rc WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? AND rc.id=us.result_id GROUP BY rc.type order by rc.type ASC;");
+ statsDB + ".result_classifications rc WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? AND rc.id=us.result_id GROUP BY rc.type order by rc.type ASC limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -902,7 +877,7 @@ public class UsageStatsRepository {
+"sum(us.unique_item_requests) as unique_item_requests, "
+"sum(us.unique_item_investigations) as unique_item_investigations "
+"FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us, "
+ statsDB + ".result_classifications rc WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? AND rc.type=? AND rc.id=us.result_id GROUP BY rc.type order by rc.type ASC;");
+ statsDB + ".result_classifications rc WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? AND rc.type=? AND rc.id=us.result_id GROUP BY rc.type order by rc.type ASC limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -941,7 +916,7 @@ public class UsageStatsRepository {
+"sum(us.unique_item_requests) as unique_item_requests, "
+"sum(us.unique_item_investigations) as unique_item_investigations "
+ "FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us, "
+ statsDB + ".result_classifications rc WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? AND rc.id=us.result_id GROUP BY rc.type, us.`date` order by rc.type, us.`date` ASC;");
+ statsDB + ".result_classifications rc WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? AND rc.id=us.result_id GROUP BY rc.type, us.`date` order by rc.type, us.`date` ASC limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -1007,7 +982,7 @@ public class UsageStatsRepository {
+"sum(us.unique_item_requests) as unique_item_requests, "
+"sum(us.unique_item_investigations) as unique_item_investigations "
+ "FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us, "
+ statsDB + ".result_classifications rc WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? AND rc.type=? AND rc.id=us.result_id GROUP BY rc.type, us.`date` order by rc.type, us.`date` ASC;");
+ statsDB + ".result_classifications rc WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? AND rc.type=? AND rc.id=us.result_id GROUP BY rc.type, us.`date` order by rc.type, us.`date` ASC limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -1104,7 +1079,7 @@ public class UsageStatsRepository {
+"sum(us.unique_item_requests) as unique_item_requests, "
+"sum(us.unique_item_investigations) as unique_item_investigations "
+ "FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us "
+ "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? GROUP BY us.`date` order by us.`date` ASC;");
+ "WHERE us.`date`>=? AND us.`date`<=? AND us.repository_id=? GROUP BY us.`date` order by us.`date` ASC limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -1188,7 +1163,7 @@ public class UsageStatsRepository {
+ "WHERE us.`date`>=? AND us.`date`<=? "
+ "AND rc.id=us.result_id AND us.result_id=rs.id AND us.repository_id=? "
+ "GROUP BY rc.type, rs.title, us.result_id, rs.title, rs.year, us.repository_id) tmp, tpd "
+ "WHERE tpd.id=resultid GROUP BY repo,type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations;");
+ "WHERE tpd.id=resultid GROUP BY repo,type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -1234,7 +1209,7 @@ public class UsageStatsRepository {
+ statsDB + ".result rs WHERE us.`date`>=? AND us.`date`<=? "
+ "AND rc.id=us.result_id AND us.result_id=rs.id AND us.repository_id=? and rc.type=? "
+ "GROUP BY rc.type, rs.title, us.result_id, rs.title, rs.year, us.repository_id) tmp, tpd "
+ "WHERE tpd.id=resultid GROUP BY repo,type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations;");
+ "WHERE tpd.id=resultid GROUP BY repo,type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -1282,7 +1257,7 @@ public class UsageStatsRepository {
+ "FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us,(SELECT distinct id, type FROM " + statsDB + ".result_classifications) rc, "
+ statsDB + ".result rs WHERE us.`date`>=? AND us.`date`<=? AND rc.id=us.result_id AND us.result_id=rs.id AND us.repository_id=? "
+ "GROUP BY us.`date`,rc.type, rs.title, us.result_id, rs.title, rs.year, us.repository_id) tmp1, tpd "
+ "WHERE tpd.id=resultid GROUP BY repo,`date`, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations");
+ "WHERE tpd.id=resultid GROUP BY repo,`date`, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations limit 10;");
st.setString(1, beginDateStr);
@@ -1353,7 +1328,7 @@ public class UsageStatsRepository {
+ statsDB + ".result rs WHERE us.`date`>=? AND us.`date`<=? "
+ "AND rc.id=us.result_id AND us.result_id=rs.id AND us.repository_id=? AND rc.type=? "
+ "GROUP BY us.`date`,rc.type, rs.title, us.result_id, rs.title, rs.year, us.repository_id) tmp, tpd "
+ "WHERE tpd.id=resultid group by repo,`date`, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations;");
+ "WHERE tpd.id=resultid group by repo,`date`, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations limit 10;");
st.setString(1, beginDateStr);
@@ -1453,7 +1428,7 @@ public class UsageStatsRepository {
+ "FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us,(SELECT distinct id, type FROM " + statsDB + ".result_classifications) rc, "
+ statsDB + ".result rs WHERE us.`date`>=? AND us.`date`<=? AND rc.id=us.result_id AND us.result_id=rs.id AND us.repository_id=? "
+ "AND us.result_id=? GROUP BY rc.type, rs.title, us.result_id, rs.title, rs.year, us.repository_id) tmp, tpd WHERE tpd.id=resultid "
+ "GROUP BY repo,type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations;");
+ "GROUP BY repo,type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -1497,7 +1472,7 @@ public class UsageStatsRepository {
+ "FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us,(select distinct id, type FROM " + statsDB + ".result_classifications) rc, "
+ statsDB + ".result rs WHERE us.`date`>=? AND us.`date`<=? AND rc.id=us.result_id AND us.result_id=rs.id "
+ "AND us.repository_id=? AND us.result_id=? AND rc.type=? GROUP BY rc.type, rs.title, us.result_id, rs.title, rs.year, us.repository_id) tmp, tpd "
+ "WHERE tpd.id=resultid group by repo, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations;");
+ "WHERE tpd.id=resultid group by repo, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -1538,7 +1513,7 @@ public class UsageStatsRepository {
+ "FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us,(select distinct id, type FROM " + statsDB + ".result_classifications) rc, "
+ statsDB + ".result rs WHERE us.`date`>=? AND us.`date`<=? AND rc.id=us.result_id AND us.result_id=rs.id AND us.repository_id=? AND us.result_id=? "
+ "GROUP BY us.`date`,rc.type, rs.title, us.result_id, rs.title, rs.year, us.repository_id) tmp, tpd "
+ "WHERE tpd.id=resultid group by repo,`date`, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations;");
+ "WHERE tpd.id=resultid group by repo,`date`, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -1608,7 +1583,7 @@ public class UsageStatsRepository {
+ "WHERE us.`date`>=? AND us.`date`<=? AND rc.id=us.result_id AND us.result_id=rs.id "
+ "AND us.repository_id=? AND us.result_id=? AND rc.type=? "
+ "GROUP BY us.`date`,rc.type, rs.title, us.result_id, rs.title, rs.year, us.repository_id) tmp, tpd "
+ "WHERE tpd.id=resultid group by repo,`date`, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations;");
+ "WHERE tpd.id=resultid group by repo,`date`, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -1719,7 +1694,7 @@ public class UsageStatsRepository {
+ "FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us, (SELECT distinct id, type FROM " + statsDB + ".result_classifications) rc, " + statsDB + ".result rs, "
+ statsDB + ".datasource ds WHERE us.`date`>=? AND us.`date`<=? AND rc.id=us.result_id AND us.result_id=rs.id AND us.result_id=? "
+ "AND us.repository_id=ds.id GROUP BY rc.type, rs.title, us.result_id, rs.title, rs.year, us.repository_id, ds.name) tmp, tpd "
+ "WHERE tpd.id=resultid group by repo,name, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations");
+ "WHERE tpd.id=resultid group by repo,name, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations limit 10;");
st.setString(1, beginDateStr);
@@ -1764,7 +1739,7 @@ public class UsageStatsRepository {
+ "FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us, (SELECT distinct id, type FROM " + statsDB + ".result_classifications) rc, " + statsDB + ".result rs, "
+ statsDB + ".datasource ds WHERE us.`date`>=? AND us.`date`<=? AND rc.id=us.result_id AND us.result_id=rs.id AND us.result_id=? AND rc.type=? "
+ "AND us.repository_id=ds.id GROUP BY rc.type, rs.title, us.result_id, rs.title, rs.year, us.repository_id, ds.name) tmp, tpd "
+ "WHERE tpd.id=resultid group by repo,name, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations");
+ "WHERE tpd.id=resultid group by repo,name, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -1814,7 +1789,7 @@ public class UsageStatsRepository {
+ "FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us, (SELECT distinct id, type FROM " + statsDB + ".result_classifications) rc, " + statsDB + ".result rs, "
+ statsDB + ".datasource ds WHERE us.`date`>=? AND us.`date`<=? AND rc.id=us.result_id AND us.result_id=rs.id AND us.result_id=? AND us.repository_id=ds.id "
+ "GROUP BY us.`date`,rc.type, rs.title, us.result_id, rs.title, rs.year, us.repository_id, ds.name) tmp, tpd "
+ "WHERE tpd.id=resultid group by repo,`date`,name, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations;");
+ "WHERE tpd.id=resultid group by repo,`date`,name, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -1880,7 +1855,7 @@ public class UsageStatsRepository {
+ "FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us, (SELECT distinct id, type FROM " + statsDB + ".result_classifications) rc, " + statsDB + ".result rs, "
+ statsDB + ".datasource ds WHERE us.`date`>=? AND us.`date`<=? AND rc.id=us.result_id AND us.result_id=rs.id AND us.result_id=? AND us.repository_id=ds.id AND rc.type=? "
+ "GROUP BY us.`date`,rc.type, rs.title, us.result_id, rs.title, rs.year, us.repository_id, ds.name) tmp, tpd "
+ "WHERE tpd.id=resultid group by repo,`date`,name, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations;");
+ "WHERE tpd.id=resultid group by repo,`date`,name, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
st.setString(3, itemIdentifier);
@@ -1999,7 +1974,7 @@ public class UsageStatsRepository {
+ "FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us,(SELECT distinct id, type FROM " + statsDB + ".result_classifications) rc, openaire_prod_stats.result rs "
+ "WHERE us.`date`>='? AND us.`date`<=? AND rc.id=us.result_id AND us.result_id=rs.id AND us.repository_id=? "
+ "AND rc.type='Dataset' GROUP BY rc.type, rs.title, us.result_id, rs.title, rs.year, us.repository_id) tmp, tpd "
+ "WHERE tpd.id=resultid GROUP BY repo,type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations;");
+ "WHERE tpd.id=resultid GROUP BY repo,type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
st.setString(3, repositoryIdentifier);
@@ -2053,7 +2028,7 @@ public class UsageStatsRepository {
+ "FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us,(SELECT distinct id, type FROM " + statsDB + ".result_classifications) rc, openaire_prod_stats.result rs "
+ "WHERE us.`date`>=? AND us.`date`<=? AND rc.id=us.result_id AND us.result_id=rs.id AND us.repository_id=? "
+ "AND rc.type='Dataset' GROUP BY us.`date`, rc.type, rs.title, us.result_id, rs.title, rs.year, us.repository_id) tmp, tpd "
+ "WHERE tpd.id=resultid GROUP BY repo,`date`, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations ORDER BY `date` ASC;");
+ "WHERE tpd.id=resultid GROUP BY repo,`date`, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations ORDER BY `date` ASC limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -2156,7 +2131,7 @@ public class UsageStatsRepository {
+ "FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us,(SELECT distinct id, type FROM " + statsDB + ".result_classifications) rc, openaire_prod_stats.result rs "
+ "WHERE us.`date`>=? AND us.`date`<=? AND rc.id=us.result_id AND us.result_id=rs.id AND us.repository_id=? AND us.result_id=? "
+ "AND rc.type='Dataset' GROUP BY rc.type, rs.title, us.result_id, rs.title, rs.year, us.repository_id) tmp, tpd "
+ "WHERE tpd.id=resultid GROUP BY repo,type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations;");
+ "WHERE tpd.id=resultid GROUP BY repo,type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
st.setString(3, repositoryIdentifier);
@@ -2209,7 +2184,7 @@ public class UsageStatsRepository {
+ "FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us,(SELECT distinct id, type FROM " + statsDB + ".result_classifications) rc, openaire_prod_stats.result rs "
+ "WHERE us.`date`>=? AND us.`date`<=? AND rc.id=us.result_id AND us.result_id=rs.id AND us.repository_id=? AND us.result_id=? "
+ "AND rc.type='Dataset' GROUP BY us.`date`, rc.type, rs.title, us.result_id, rs.title, rs.year, us.repository_id, us.`date`) tmp, tpd "
+ "WHERE tpd.id=resultid GROUP BY repo,`date`, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations ORDER BY `date` ASC;");
+ "WHERE tpd.id=resultid GROUP BY repo,`date`, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations ORDER BY `date` ASC limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
st.setString(3, repositoryIdentifier);
@@ -2325,7 +2300,7 @@ public class UsageStatsRepository {
+ "FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us,(SELECT distinct id, type FROM " + statsDB + ".result_classifications) rc, " + statsDB + ".result rs, "
+ statsDB + ".datasource ds WHERE us.`date`>=? AND us.`date`<=? AND rc.id=us.result_id AND us.result_id=rs.id AND us.result_id=? "
+ "AND rc.type='Dataset' AND ds.id=us.repository_id GROUP BY rc.type, rs.title, us.result_id, rs.title, rs.year, us.repository_id, ds.name) tmp, tpd "
+ "WHERE tpd.id=resultid GROUP BY name, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations ");
+ "WHERE tpd.id=resultid GROUP BY name, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
st.setString(3, itemIdentifier);
@@ -2338,7 +2313,7 @@ public class UsageStatsRepository {
+ "dp.access_method access_method FROM openaire_prod_datacite_usage_stats.datasetsperformance_nonarray_view dp, "
+ statsDB + ".result_pids rp, " + statsDB + ".datasource ds "
+ "WHERE dp.period_from>=? AND dp.period_end<=? and rp.pid=ds_type AND rp.id=? "
+ "AND ds.name=dp.platform GROUP BY dp.ds_title, dp.yop, dp.platform, dp.access_method, dp.ds_type,rp.id ");
+ "AND ds.name=dp.platform GROUP BY dp.ds_title, dp.yop, dp.platform, dp.access_method, dp.ds_type,rp.id limit 10");
st1.setString(1, report_dateFormat.format(beginDate));
st1.setString(2, report_dateFormat.format(endDate));
@@ -2408,7 +2383,7 @@ public class UsageStatsRepository {
+ "FROM " + usagestatsImpalaDB + ".counter_r5_stats_with_metrics us,(SELECT distinct id, type FROM " + statsDB + ".result_classifications) rc, openaire_prod_stats.result rs, "
+ statsDB + ".datasource ds WHERE us.`date`>=? AND us.`date`<=? AND rc.id=us.result_id AND us.result_id=rs.id AND ds.id=us.repository_id AND us.result_id=? "
+ "AND rc.type='Dataset' GROUP BY ds.name, us.repository_id, rc.type, us.`date`, rs.title, us.result_id, rs.title, rs.year, us.`date`) tmp, tpd "
+ "WHERE tpd.id=resultid GROUP BY name,`date`, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations ORDER BY `date` ASC;");
+ "WHERE tpd.id=resultid GROUP BY name,`date`, type,resultid,item,yop,total_item_requests,total_item_investigations,unique_item_requests,unique_item_investigations ORDER BY `date` ASC limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
st.setString(3, itemIdentifier);
@@ -2421,7 +2396,7 @@ public class UsageStatsRepository {
+ "dp.access_method access_method FROM openaire_prod_datacite_usage_stats.datasetsperformance_nonarray_view dp, "
+ statsDB + ".result_pids rp, " + statsDB + ".datasource ds "
+ "WHERE dp.period_from>=? AND dp.period_end<=? and rp.pid=ds_type and rp.id=? and ds.name=dp.platform "
+ "GROUP BY dp.ds_title, dp.yop, dp.platform, dp.access_method, dp.ds_type,rp.id, dp.period_from");
+ "GROUP BY dp.ds_title, dp.yop, dp.platform, dp.access_method, dp.ds_type,rp.id, dp.period_from limit 10");
st1.setString(1, report_dateFormat.format(beginDate));
st1.setString(2, report_dateFormat.format(endDate));
st1.setString(3, itemIdentifier);
@@ -2569,7 +2544,7 @@ public class UsageStatsRepository {
+ "dp.access_method access_method, dp.platform FROM openaire_prod_datacite_usage_stats.datasetsperformance_nonarray_view dp, "
+ statsDB + ".result_pids rp, " + statsDB + ".datasource ds "
+ "WHERE dp.period_from>=? AND dp.period_end<=? and rp.pid=ds_type and ds.id=? and ds.name=dp.platform "
+ "GROUP BY dp.ds_title, dp.yop, dp.platform, dp.access_method, dp.ds_type,rp.id");
+ "GROUP BY dp.ds_title, dp.yop, dp.platform, dp.access_method, dp.ds_type,rp.id limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
st.setString(3, repositoryIdentifier);
@@ -2623,7 +2598,7 @@ public class UsageStatsRepository {
+ "dp.access_method access_method, dp.platform FROM openaire_prod_datacite_usage_stats.datasetsperformance_nonarray_view dp, "
+ statsDB + ".result_pids rp, " + statsDB + ".datasource ds "
+ "WHERE dp.period_from>=? AND dp.period_end<=? and rp.pid=ds_type and ds.id=? and ds.name=dp.platform "
+ "GROUP BY dp.ds_title, dp.yop, dp.platform, dp.access_method, dp.ds_type,rp.id, dp.period_from");
+ "GROUP BY dp.ds_title, dp.yop, dp.platform, dp.access_method, dp.ds_type,rp.id, dp.period_from limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
@@ -2726,7 +2701,7 @@ public class UsageStatsRepository {
+ "dp.access_method access_method, dp.platform FROM openaire_prod_datacite_usage_stats.datasetsperformance_nonarray_view dp, "
+ statsDB + ".result_pids rp, " + statsDB + ".datasource ds "
+ "WHERE dp.period_from>=? AND dp.period_end<=? and rp.pid=ds_type and ds.id=? AND rp.id=? and ds.name=dp.platform "
+ "GROUP BY dp.ds_title, dp.yop, dp.platform, dp.access_method, dp.ds_type,rp.id");
+ "GROUP BY dp.ds_title, dp.yop, dp.platform, dp.access_method, dp.ds_type,rp.id limit 10;");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
st.setString(3, repositoryIdentifier);
@@ -2779,7 +2754,7 @@ public class UsageStatsRepository {
+ "dp.access_method access_method, dp.platform FROM openaire_prod_datacite_usage_stats.datasetsperformance_nonarray_view dp, "
+ statsDB + ".result_pids rp, " + statsDB + ".datasource ds "
+ "WHERE dp.period_from>=? AND dp.period_end<=? and rp.pid=ds_type and ds.id=? AND rp.id=? and ds.name=dp.platform "
+ "GROUP BY dp.ds_title, dp.yop, dp.platform, dp.access_method, dp.ds_type,rp.id, dp.period_from");
+ "GROUP BY dp.ds_title, dp.yop, dp.platform, dp.access_method, dp.ds_type,rp.id, dp.period_from limit 10");
st.setString(1, beginDateStr);
st.setString(2, endDateStr);
st.setString(3, repositoryIdentifier);


@@ -57,202 +57,6 @@ public class SushiLiteServiceImpl implements SushiLiteService {
this.usageStatsRepository = usageStatsRepository;
}
// @Override
// public ReportResponseWrapper buildReport(String reportName, String release,
// String requestorId, String beginDate,
// String endDate, String repositoryIdentifier, String itemIdentifier,
// String itemDataType, String hasDoi, String granularity,
// String callback) {
//
// List<COUNTER_Platform_Usage> reportItems = new ArrayList<>();
// List<SUSHI_Error_Model> reportExceptions = new ArrayList<>();
//
// if (!granularity.equalsIgnoreCase("totals") && !granularity.equalsIgnoreCase("monthly")) {
// reportExceptions.add(new SUSHI_Error_Model("3062", "Warning", "Invalid ReportAttribute Value", "usagecounts.openaire.eu", "Granularity: \'" + granularity + "\' unknown. Defaulting to Monthly"));
// granularity = "Monthly";
// }
//
// Date beginDateParsed;
// if (!beginDate.equals("")) {
// beginDateParsed = tryParse(beginDate);
// if (beginDateParsed != null && (granularity.toLowerCase().equals("monthly") || beginDate.length() == 7)) {
// Calendar temp = Calendar.getInstance();
// temp.setTime(beginDateParsed);
// temp.set(Calendar.DAY_OF_MONTH, temp.getActualMinimum(Calendar.DAY_OF_MONTH));
// beginDateParsed = temp.getTime();
// }
// } else {
// Calendar temp = Calendar.getInstance();
// temp.add(Calendar.MONTH, -1);
// temp.set(Calendar.DAY_OF_MONTH, temp.getActualMinimum(Calendar.DAY_OF_MONTH));
// beginDateParsed = temp.getTime();
// reportExceptions.add(new SUSHI_Error_Model("3021", "Warning", "Unspecified Date Arguments", "usagecounts.openaire.eu", "Begin Date set to default: " + new SimpleDateFormat("yyyy-MM-dd").format(beginDateParsed)));
// }
//
// Date endDateParsed;
// if (!endDate.equals("")) {
// endDateParsed = tryParse(endDate);
// if (endDateParsed != null && (granularity.toLowerCase().equals("monthly") || endDate.length() == 7)) {
// Calendar temp = Calendar.getInstance();
// temp.setTime(endDateParsed);
// temp.set(Calendar.DAY_OF_MONTH, temp.getActualMaximum(Calendar.DAY_OF_MONTH));
// endDateParsed = temp.getTime();
// }
// } else {
// Calendar temp = Calendar.getInstance();
// temp.add(Calendar.MONTH, -1);
// temp.set(Calendar.DAY_OF_MONTH, temp.getActualMaximum(Calendar.DAY_OF_MONTH));
// endDateParsed = temp.getTime();
// reportExceptions.add(new SUSHI_Error_Model("3021", "Warning", "Unspecified Date Arguments", "usagecounts.openaire.eu", "End Date set to default: " + new SimpleDateFormat("yyyy-MM-dd").format(endDateParsed)));
// }
// //log.error("dates: " + beginDateParsed.toString() + " - " + endDateParsed.toString());
//
// if (beginDateParsed == null) {
// reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "Invalid Date Arguments", "usagecounts.openaire.eu", "Begin Date: " + beginDate + " is not a valid date"));
// }
// if (endDateParsed == null) {
// reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "Invalid Date Arguments", "usagecounts.openaire.eu", "End Date: " + endDate + " is not a valid date"));
// }
// if (beginDateParsed != null && endDateParsed != null && !beginDateParsed.before(endDateParsed)) {
// reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "Invalid Date Arguments", "usagecounts.openaire.eu", "BeginDate \'" + new SimpleDateFormat("yyyy-MM-dd").format(beginDateParsed) + "\' is greater than EndDate \'" + new SimpleDateFormat("yyyy-MM-dd").format(endDateParsed) + "\'"));
// }
//
// String repoid = "";
// if (!repositoryIdentifier.equals("")) {
// repoid = usageStatsRepository.executeRepoId(repositoryIdentifier, reportName.toLowerCase());
// if (repoid.equals("-1")) {
// reportExceptions.add(new SUSHI_Error_Model("3060", "Error", "Invalid Filter Value", "usagecounts.openaire.eu", "RepositoryIdentifier: " + repositoryIdentifier + " is not valid"));
// }
// }
// String itemid = "";
// if (!itemIdentifier.equals("")) {
// String[] split = itemIdentifier.split(":");
// switch (split[0].toLowerCase()) {
// case "oid":
// itemid = itemIdentifier;
// break;
// case "doi":
// itemid = itemIdentifier;
// break;
// case "openaire":
// itemid = itemIdentifier;
// break;
// default:
// reportExceptions.add(new SUSHI_Error_Model("3060", "Error", "Invalid Filter Value", "usagecounts.openaire.eu", "ItemIdentifier: " + itemIdentifier + " is not valid"));
// itemid = "-1";
// }
// }
// if (itemid.equals("") && repoid.equals("") && !reportName.equalsIgnoreCase("rr1") && !reportName.equalsIgnoreCase("jr1")) {
// reportExceptions.add(new SUSHI_Error_Model("3070", "Error", "Required Filter Missing", "usagecounts.openaire.eu", "ItemIdentifier or RepositoryIdentifier must be supplied"));
// }
// if (reportName.equalsIgnoreCase("ar1")) {
// if (!itemid.equals("-1") && !repoid.equals("-1") && beginDateParsed != null && endDateParsed != null && beginDateParsed.before(endDateParsed)) {
// if (!itemid.equals("")) {
// if (itemDataType.equalsIgnoreCase("") || itemDataType.equalsIgnoreCase("article")) {
// usageStatsRepository.executeItem(reportItems, itemIdentifier, repoid, "Article", beginDateParsed, endDateParsed, granularity);
// if (reportItems.isEmpty()) {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// } else {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// } else if (!repoid.equals("")) {
// usageStatsRepository.executeBatchItems(reportItems, repoid, "Article", beginDateParsed, endDateParsed, granularity);
// if (reportItems.isEmpty()) {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// }
// }
// } else if (reportName.equalsIgnoreCase("br1")) {
// if (!itemid.equals("-1") && !repoid.equals("-1") && beginDateParsed != null && endDateParsed != null && beginDateParsed.before(endDateParsed)) {
// if (!itemid.equals("")) {
// if (itemDataType.equalsIgnoreCase("") || itemDataType.equalsIgnoreCase("book")) {
// usageStatsRepository.executeItem(reportItems, itemIdentifier, repoid, "Book", beginDateParsed, endDateParsed, granularity);
// if (reportItems.isEmpty()) {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// } else {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// } else if (!repoid.equals("")) {
// usageStatsRepository.executeBatchItems(reportItems, repoid, "Book", beginDateParsed, endDateParsed, granularity);
// if (reportItems.isEmpty()) {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// }
// }
// } else if (reportName.equalsIgnoreCase("br2")) {
// if (!itemid.equals("-1") && !repoid.equals("-1") && beginDateParsed != null && endDateParsed != null && beginDateParsed.before(endDateParsed)) {
// if (!itemid.equals("")) {
// if (itemDataType.equalsIgnoreCase("") || itemDataType.equalsIgnoreCase("part of book or chapter of book")) {
// usageStatsRepository.executeItem(reportItems, itemIdentifier, repoid, "Part of book or chapter of book", beginDateParsed, endDateParsed, granularity);
// if (reportItems.isEmpty()) {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// } else {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// } else if (!repoid.equals("")) {
// usageStatsRepository.executeBatchItems(reportItems, repoid, "Part of book or chapter of book", beginDateParsed, endDateParsed, granularity);
// if (reportItems.isEmpty()) {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// }
// }
// } else if (reportName.equalsIgnoreCase("ir1")) {
// if (!itemid.equals("-1") && !repoid.equals("-1") && beginDateParsed != null && endDateParsed != null && beginDateParsed.before(endDateParsed)) {
// if (!itemid.equals("")) {
// usageStatsRepository.executeItem(reportItems, itemIdentifier, repoid, itemDataType, beginDateParsed, endDateParsed, granularity);
// } else if (!repoid.equals("")) {
// usageStatsRepository.executeBatchItems(reportItems, repoid, itemDataType, beginDateParsed, endDateParsed, granularity);
// }
// if (reportItems.isEmpty()) {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// }
// } else if (reportName.equalsIgnoreCase("rr1")) {
// if (!repoid.equals("-1") && beginDateParsed != null && endDateParsed != null && beginDateParsed.before(endDateParsed)) {
// usageStatsRepository.executeRepo(reportItems, repoid, itemDataType, beginDateParsed, endDateParsed, granularity);
// }
// if (reportItems.isEmpty()) {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// } else if (reportName.equalsIgnoreCase("jr1")) {
// if (!repoid.equals("-1") && beginDateParsed != null && endDateParsed != null && beginDateParsed.before(endDateParsed)) {
// usageStatsRepository.executeJournal(reportItems, repoid, itemDataType, beginDateParsed, endDateParsed, granularity);
// }
// if (reportItems.isEmpty()) {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// } else if (reportName.equals("")) {
// reportExceptions.add(new SUSHI_Error_Model("3050", "Error", "usagecounts.openaire.eu", "Report argument is missing", "You must supply a Report argument"));
// } else {
// reportExceptions.add(new SUSHI_Error_Model("3000", "Error", "usagecounts.openaire.eu", "Report " + reportName + " not supported", "Supported reports: AR1, IR1, RR1, BR1, BR2"));
// }
//
// ReportResponse reportResponse = new ReportResponse(reportName, release, requestorId, beginDate, endDate,
// repositoryIdentifier, itemIdentifier, itemDataType, hasDoi, granularity, callback, reportItems, reportExceptions);
//
// return new ReportResponseWrapper(reportResponse);
// }
// @Override
// public String displayReport(String reportName, String release,
// String requestorId, String beginDate, String endDate,
// String repositoryIdentifier, String itemIdentifier,
// String itemDataType, String hasDoi, String granularity,
// String callback, String pretty) {
// ObjectMapper objectMapper = new ObjectMapper();
// try {
// if (pretty.equalsIgnoreCase("pretty")) {
// return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(buildReport(reportName, release, requestorId, beginDate, endDate, repositoryIdentifier, itemIdentifier, itemDataType, hasDoi, granularity, callback)).replaceAll("/", "\\\\/") + "</pre>";
// }
// return objectMapper.writeValueAsString(buildReport(reportName, release, requestorId, beginDate, endDate, repositoryIdentifier, itemIdentifier, itemDataType, hasDoi, granularity, callback)).replaceAll("/", "\\\\/");
// } catch (Exception e) {
// e.printStackTrace();
// return null;
// }
// //return report.getReport(reportP, release, requestorId, beginDate, endDate, repositoryIdentifier, itemIdentifier, itemDataType, hasDoi, granularity, callback, pretty);
// }
private Date tryParse(String dateString) {
try {
if (dateString.length() == 7) {
@@ -294,12 +98,15 @@ public class SushiLiteServiceImpl implements SushiLiteService {
ObjectMapper objectMapper = new ObjectMapper();
try {
if (buildReportStatus() != null) {
return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(buildReportStatus()) + "</pre>";
// return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(buildReportStatus()) + "</pre>";
return objectMapper.writer().writeValueAsString(buildReportStatus());
} else {
ZonedDateTime dateTime = ZonedDateTime.now(); // Gets the current date and time, with your default time-zone
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");
SUSHI_Error_Model errorModel = new SUSHI_Error_Model("1000", "Fatal", "Service Not Available", "usagecounts.openaire.eu", "Request was for: " + dateTime.format(formatter));
return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(errorModel) + "</pre>";
// return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(errorModel) + "</pre>";
return objectMapper.writer().writeValueAsString(errorModel);
}
} catch (JsonProcessingException ex) {
java.util.logging.Logger.getLogger(SushiLiteServiceImpl.class.getName()).log(Level.SEVERE, null, ex);
@@ -333,7 +140,8 @@ public class SushiLiteServiceImpl implements SushiLiteService {
public String displayReportsSupported() {
ObjectMapper objectMapper = new ObjectMapper();
try {
return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(buildReportSupported()) + "</pre>";
// return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(buildReportSupported()) + "</pre>";
return objectMapper.writer().writeValueAsString(buildReportSupported());
} catch (JsonProcessingException ex) {
java.util.logging.Logger.getLogger(SushiLiteServiceImpl.class.getName()).log(Level.SEVERE, null, ex);
}
@@ -425,9 +233,6 @@ public class SushiLiteServiceImpl implements SushiLiteService {
reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
}
// if (reportExceptions.size() == 0) {
// reportExceptions = null;
// }
COUNTER_Platform_Report reportPr = new COUNTER_Platform_Report(dateTime.format(formatter), customerID, reportID, reportName, institutionName, institutionIdD, reportExceptions, reportFilters, reportItems);
log.info("Total report items " + reportItems.size());
@@ -599,31 +404,26 @@ public class SushiLiteServiceImpl implements SushiLiteService {
// Calculate time difference
// in milliseconds
beginDateParsed = tryParse(beginDate);
Calendar temp = Calendar.getInstance();
temp.setTime(beginDateParsed);
temp.set(Calendar.DAY_OF_MONTH, temp.getActualMaximum(Calendar.DAY_OF_MONTH));
beginDateParsed = temp.getTime();
// beginDateParsed = tryParse(beginDate);
// Calendar temp = Calendar.getInstance();
// temp.setTime(beginDateParsed);
// temp.set(Calendar.DAY_OF_MONTH, temp.getActualMaximum(Calendar.DAY_OF_MONTH));
// beginDateParsed = temp.getTime();
//
//
// endDateParsed = tryParse(endDate);
// Calendar temp1 = Calendar.getInstance();
// temp1.setTime(endDateParsed);
// temp1.set(Calendar.DAY_OF_MONTH, temp1.getActualMaximum(Calendar.DAY_OF_MONTH));
// endDateParsed = temp1.getTime();
endDateParsed = tryParse(endDate);
Calendar temp1 = Calendar.getInstance();
temp1.setTime(endDateParsed);
temp1.set(Calendar.DAY_OF_MONTH, temp1.getActualMaximum(Calendar.DAY_OF_MONTH));
endDateParsed = temp1.getTime();
long difference_In_Time
= endDateParsed.getTime() - beginDateParsed.getTime();
long difference_In_Years = (difference_In_Time/ (1000 * 60 * 60 * 24));
long difference_In_Years = (difference_In_Time/ (1000 * 60 * 60 * 24));
if(difference_In_Years>365)
reportExceptions.add(new SUSHI_Error_Model("4000", "Notice", "Requested Period for more than a year not allowed", "usagecounts.openaire.eu", "Contact usagecounts@openaire.eu for longer period"));
// if (beginDateParsed == null) {
// reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "Invalid Date Arguments", "usagecounts.openaire.eu", "Begin Date: " + beginDate + " is not a valid date"));
// }
// if (endDateParsed == null) {
// reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "Invalid Date Arguments", "usagecounts.openaire.eu", "End Date: " + endDate + " is not a valid date"));
// }
reportExceptions.add(new SUSHI_Error_Model("4000", "Notice", "Requested Period for more than a year not allowed", "usagecounts.openaire.eu", "Contact usagecounts@openaire.eu for longer period"));
if (beginDateParsed != null && endDateParsed != null && !beginDateParsed.before(endDateParsed)) {
reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "Invalid Date Arguments", "usagecounts.openaire.eu", "BeginDate \'" + new SimpleDateFormat("yyyy-MM-dd").format(beginDateParsed) + "\' is greater than EndDate \'" + new SimpleDateFormat("yyyy-MM-dd").format(endDateParsed) + "\'"));
}
@@ -634,12 +434,6 @@ public class SushiLiteServiceImpl implements SushiLiteService {
if(repoID.equals(""))
institutionName=null;
//if (!repositoryIdentifier.equals("")) {
// repoid = usageStatsRepository.getInstitutionID(repositoryIdentifier);
// if (repositoryIdentifier.equals("-1")) {
// reportExceptions.add(new SUSHI_Error_Model("3060", "Error", "Invalid Filter Value", "usagecounts.openaire.eu", "RepositoryIdentifier: " + repoID + " is not valid"));
// }
//}
String itemid = "";
if (!itemIdentifier.equals("")) {
String[] split = itemIdentifier.split(":");
@@ -699,12 +493,6 @@ public class SushiLiteServiceImpl implements SushiLiteService {
if (reportItems.isEmpty()) {
reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
}
//} else if (reportName.equals("")) {
// reportExceptions.add(new SUSHI_Error_Model("3050", "Error", "usagecounts.openaire.eu", "Report argument is missing", "You must supply a Report argument"));
//} else {
// reportExceptions.add(new SUSHI_Error_Model("3000", "Error", "usagecounts.openaire.eu", "Report " + reportName + " not supported", "Supported reports: AR1, IR1, RR1, BR1, BR2"));
//}
if (reportExceptions.isEmpty()) {
reportExceptions = null;
}
@@ -847,73 +635,9 @@ public class SushiLiteServiceImpl implements SushiLiteService {
}
}
// }
// //} else if (reportName.equalsIgnoreCase("br1")) {
// if (!itemid.equals("-1") && !repoid.equals("-1") && beginDateParsed != null && endDateParsed != null && beginDateParsed.before(endDateParsed)) {
// if (!itemid.equals("")) {
// if (dataType.equalsIgnoreCase("") || dataType.equalsIgnoreCase("book")) {
// usageStatsRepository.executeItem(reportItems, itemIdentifier, repoid, "Book", beginDateParsed, endDateParsed, granularity);
// if (reportItems.isEmpty()) {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// } else {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// } else if (!repoid.equals("")) {
// usageStatsRepository.executeBatchItems(reportItems, repoid, "Book", beginDateParsed, endDateParsed, granularity);
// if (reportItems.isEmpty()) {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// }
// }
// //}
// if (!itemid.equals("-1") && !repoid.equals("-1") && beginDateParsed != null && endDateParsed != null && beginDateParsed.before(endDateParsed)) {
// if (!itemid.equals("")) {
// if (dataType.equalsIgnoreCase("") || dataType.equalsIgnoreCase("part of book or chapter of book")) {
// usageStatsRepository.executeItem(reportItems, itemIdentifier, repoid, "Part of book or chapter of book", beginDateParsed, endDateParsed, granularity);
// if (reportItems.isEmpty()) {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// } else {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// } else if (!repoid.equals("")) {
// usageStatsRepository.executeBatchItems(reportItems, repoid, "Part of book or chapter of book", beginDateParsed, endDateParsed, granularity);
// if (reportItems.isEmpty()) {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// }
// }
// //else if (reportName.equalsIgnoreCase("ir1")) {
// if (!itemid.equals("-1") && !repoid.equals("-1") && beginDateParsed != null && endDateParsed != null && beginDateParsed.before(endDateParsed)) {
// if (!itemid.equals("")) {
// usageStatsRepository.executeItem(reportItems, itemIdentifier, repoid, dataType, beginDateParsed, endDateParsed, granularity);
// } else if (!repoid.equals("")) {
// usageStatsRepository.executeBatchItems(reportItems, repoid, dataType, beginDateParsed, endDateParsed, granularity);
// }
// if (reportItems.isEmpty()) {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// }
// //} else if (reportName.equalsIgnoreCase("rr1")) {
// if (!repoid.equals("-1") && beginDateParsed != null && endDateParsed != null && beginDateParsed.before(endDateParsed)) {
// usageStatsRepository.executeRepo(reportItems, repoid, dataType, beginDateParsed, endDateParsed, granularity);
// }
// if (reportItems.isEmpty()) {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
// //} else if (reportName.equalsIgnoreCase("jr1")) {
// if (!repoid.equals("-1") && beginDateParsed != null && endDateParsed != null && beginDateParsed.before(endDateParsed)) {
// usageStatsRepository.executeJournal(reportItems, repoid, dataType, beginDateParsed, endDateParsed, granularity);
// }
if (reportItems.isEmpty()) {
reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
}
//} else if (reportName.equals("")) {
// reportExceptions.add(new SUSHI_Error_Model("3050", "Error", "usagecounts.openaire.eu", "Report argument is missing", "You must supply a Report argument"));
//} else {
// reportExceptions.add(new SUSHI_Error_Model("3000", "Error", "usagecounts.openaire.eu", "Report " + reportName + " not supported", "Supported reports: AR1, IR1, RR1, BR1, BR2"));
//}
if (reportExceptions.size() == 0) {
reportExceptions = null;
@ -923,83 +647,6 @@ public class SushiLiteServiceImpl implements SushiLiteService {
return reportResponse;
}
// @Override
// public COUNTER_Title_Report buildReportIR(String customerID, String repositoryIdentifier, String itemIdentifier, String beginDate,
// String endDate, String metricType, String dataType, String granularity) {
// List<SUSHI_Error_Model> reportExceptions = new ArrayList<>();
// ZonedDateTime dateTime = ZonedDateTime.now(); // Gets the current date and time, with your default time-zone
// DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");
// //Display Report Created Day
//
// repositoryIdentifier = usageStatsRepository.getInstitutionID(repositoryIdentifier);
//
// Date beginDateParsed;
// if (!beginDate.equals("")) {
// beginDateParsed = tryParse(beginDate);
// if (beginDateParsed != null && (granularity.toLowerCase().equals("monthly") || beginDate.length() == 7)) {
// Calendar temp = Calendar.getInstance();
// temp.setTime(beginDateParsed);
// temp.set(Calendar.DAY_OF_MONTH, temp.getActualMinimum(Calendar.DAY_OF_MONTH));
// beginDateParsed = temp.getTime();
// }
// } else {
// Calendar temp = Calendar.getInstance();
// temp.add(Calendar.MONTH, -1);
// temp.set(Calendar.DAY_OF_MONTH, temp.getActualMinimum(Calendar.DAY_OF_MONTH));
// beginDateParsed = temp.getTime();
// reportExceptions.add(new SUSHI_Error_Model("3021", "Warning", "usagecounts.openaire.eu", "Unspecified Date Arguments", "Begin Date set to default: " + new SimpleDateFormat("yyyy-MM-dd").format(beginDateParsed)));
// }
//
// Date endDateParsed;
// if (!endDate.equals("")) {
// endDateParsed = tryParse(endDate);
// if (endDateParsed != null && (granularity.toLowerCase().equals("monthly") || endDate.length() == 7)) {
// Calendar temp = Calendar.getInstance();
// temp.setTime(endDateParsed);
// temp.set(Calendar.DAY_OF_MONTH, temp.getActualMaximum(Calendar.DAY_OF_MONTH));
// endDateParsed = temp.getTime();
// }
// } else {
// Calendar temp = Calendar.getInstance();
// temp.add(Calendar.MONTH, -1);
// temp.set(Calendar.DAY_OF_MONTH, temp.getActualMaximum(Calendar.DAY_OF_MONTH));
// endDateParsed = temp.getTime();
// reportExceptions.add(new SUSHI_Error_Model("3021", "Warning", "usagecounts.openaire.eu", "Unspecified Date Arguments", "End Date set to default: " + new SimpleDateFormat("yyyy-MM-dd").format(endDateParsed)));
// }
// //log.error("dates: " + beginDateParsed.toString() + " - " + endDateParsed.toString());
//
// if (beginDateParsed == null) {
// reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "usagecounts.openaire.eu", "Invalid Date Arguments", "Begin Date: " + beginDate + " is not a valid date"));
// }
// if (endDateParsed == null) {
// reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "usagecounts.openaire.eu", "Invalid Date Arguments", "End Date: " + endDate + " is not a valid date"));
// }
// if (beginDateParsed != null && endDateParsed != null && !beginDateParsed.before(endDateParsed)) {
// reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "usagecounts.openaire.eu", "Invalid Date Arguments", "BeginDate \'" + new SimpleDateFormat("yyyy-MM-dd").format(beginDateParsed) + "\' is greater than EndDate \'" + new SimpleDateFormat("yyyy-MM-dd").format(endDateParsed) + "\'"));
// }
//
// List<Filter> reportFilters = new ArrayList();
// reportFilters.add(new Filter("BeginDate", beginDate));
// reportFilters.add(new Filter("EndDate", endDate));
// String reportID = "TR";
// String reportName = "Title Master Report";
// String institutionName = usageStatsRepository.getInstitutionName(repositoryIdentifier);
// List<SUSHI_Org_Identifiers> institutionIdD = new ArrayList();
// institutionIdD.add(new SUSHI_Org_Identifiers("Openaire", repositoryIdentifier));
//
// List<COUNTER_Platform_Usage> reportItems = new ArrayList();
// usageStatsRepository.executeBatchItemsTR(reportItems, repositoryIdentifier, itemIdentifier,beginDateParsed, endDateParsed, metricType, dataType, granularity);
//
// if (reportItems.isEmpty()) {
// reportExceptions.add(new SUSHI_Error_Model("3030", "Error", "usagecounts.openaire.eu", "No Usage Available for Requested Dates", "Service did not find any data"));
// }
//
// if (reportExceptions.size() == 0) {
// reportExceptions = null;
// }
// COUNTER_Title_Report reportTR = new COUNTER_Title_Report(dateTime.format(formatter), customerID, reportID, reportName, institutionName, institutionIdD, reportExceptions, reportFilters, reportItems);
// return reportTR;
// }
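One detail worth noting in the commented-out builder above: the timestamp comes from the default time zone but is formatted with a literal 'Z' suffix. If that pattern is reused, generating the value in UTC keeps the suffix accurate; a small sketch, assuming nothing about the rest of the method:

import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

// Build the report-created timestamp in UTC so the trailing 'Z' is truthful.
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");
String created = ZonedDateTime.now(ZoneOffset.UTC).format(formatter);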
@Override
public String displayReportPR(String customerID, String repositoryIdentifier, String beginDate,
@ -1009,7 +656,8 @@ public class SushiLiteServiceImpl implements SushiLiteService {
try {
if (reportForCompression == false) {
return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(report) + "</pre>";
// return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(report) + "</pre>";
return objectMapper.writer().writeValueAsString(report) ;
} else {
log.info((beginDate + " " + endDate));
try {
@ -1065,8 +713,8 @@ public class SushiLiteServiceImpl implements SushiLiteService {
ObjectMapper objectMapper = new ObjectMapper();
log.info((beginDate + " " + endDate));
try {
return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(buildReportPR_P1(customerID, repositoryIdentifier, beginDate, endDate)) + "</pre>";
// return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(buildReportPR_P1(customerID, repositoryIdentifier, beginDate, endDate)) + "</pre>";
return objectMapper.writer().writeValueAsString(buildReportPR_P1(customerID, repositoryIdentifier, beginDate, endDate));
} catch (JsonProcessingException ex) {
java.util.logging.Logger.getLogger(SushiLiteServiceImpl.class
.getName()).log(Level.SEVERE, null, ex);
@ -1086,7 +734,8 @@ public class SushiLiteServiceImpl implements SushiLiteService {
try {
COUNTER_Item_Report report = buildReportIR(customerID, repositoryIdentifier, itemIdentifier, beginDate, endDate, metricType, dataType, granularity);
if (reportForCompression == false) {
return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(report) + "</pre>";
// return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(report) + "</pre>";
return objectMapper.writer().writeValueAsString(report);
} else {
log.info((beginDate + " " + endDate));
@ -1145,7 +794,8 @@ public class SushiLiteServiceImpl implements SushiLiteService {
try {
COUNTER_Dataset_Report report = buildReportDSR(customerID, repositoryIdentifier, itemIdentifier, beginDate, endDate, metricType, granularity);
if (reportForCompression == false) {
return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(report) + "</pre>";
// return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(report) + "</pre>";
return objectMapper.writer().writeValueAsString(report);
} else {
log.info((beginDate + " " + endDate));
@ -1204,8 +854,8 @@ public class SushiLiteServiceImpl implements SushiLiteService {
public String displayConsortiumMemberList() {
ObjectMapper objectMapper = new ObjectMapper();
try {
return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(buildMembersList()) + "</pre>";
// return "<pre>" + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(buildMembersList()) + "</pre>";
return objectMapper.writer().writeValueAsString(buildMembersList());
} catch (Exception ex) {
java.util.logging.Logger.getLogger(SushiLiteServiceImpl.class
.getName()).log(Level.SEVERE, null, ex);
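The change repeated through these display methods is the commit's core: ObjectMapper.writer() emits compact JSON, whereas the old path wrapped writerWithDefaultPrettyPrinter() output in <pre> tags. A standalone comparison of the two writers; the Member class is a stand-in type, not part of this project:

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

public class CompactJsonDemo {

    // Stand-in payload type, used only to show the two serialization styles.
    public static class Member {
        public String name = "OpenAIRE";
        public int repositories = 3;
    }

    public static void main(String[] args) throws JsonProcessingException {
        ObjectMapper objectMapper = new ObjectMapper();
        Member member = new Member();
        // Compact single-line output: what the service now returns.
        System.out.println(objectMapper.writer().writeValueAsString(member));
        // Indented output: what the removed pretty-printer path produced.
        System.out.println(objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(member));
    }
}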

View File

@ -3,25 +3,13 @@ package eu.dnetlib.usagestats.services;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.usagestats.repositories.UsageStatsRepository;
import eu.dnetlib.usagestats.sushilite.domain.Alert;
import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Dataset_Report;
import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Dataset_Usage;
import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Item_Report;
import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Item_Usage;
import eu.dnetlib.usagestats.sushilite.domain.Filter;
import eu.dnetlib.usagestats.sushilite.domain.SUSHI_Org_Identifiers;
import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Platform_Usage;
import eu.dnetlib.usagestats.sushilite.domain.SUSHI_Error_Model;
import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Platform_Report;
import eu.dnetlib.usagestats.sushilite.domain.SUSHI_Service_Status;
import eu.dnetlib.usagestats.sushilite.domain.SUSHI_Report_List;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import eu.dnetlib.usagestats.repositories.UsageStatsRepositorySample;
import eu.dnetlib.usagestats.sushilite.domain.*;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.io.*;
import java.text.SimpleDateFormat;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
@ -32,13 +20,11 @@ import java.util.List;
import java.util.logging.Level;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Value;
@Service
public class SushiLiteServiceImpl implements SushiLiteService {
public class SushiLiteServiceImplSample implements SushiLiteServiceSample {
private final UsageStatsRepository usageStatsRepository;
private final UsageStatsRepositorySample usageStatsRepository;
private final Logger log = Logger.getLogger(this.getClass());
@ -53,7 +39,7 @@ public class SushiLiteServiceImpl implements SushiLiteService {
boolean reportForCompression = false;
public SushiLiteServiceImpl(UsageStatsRepository usageStatsRepository) {
public SushiLiteServiceImplSample(UsageStatsRepositorySample usageStatsRepository) {
this.usageStatsRepository = usageStatsRepository;
}
@ -359,7 +345,7 @@ public class SushiLiteServiceImpl implements SushiLiteService {
Date beginDateParsed;
if (!beginDate.equals("")) {
beginDateParsed = tryParse(beginDate);
if (beginDateParsed != null && (granularity.toLowerCase().equals("monthly") || beginDate.length() == 7)) {
if (beginDateParsed != null && (granularity.equalsIgnoreCase("monthly") || beginDate.length() == 7)) {
Calendar temp = Calendar.getInstance();
temp.setTime(beginDateParsed);
temp.set(Calendar.DAY_OF_MONTH, temp.getActualMinimum(Calendar.DAY_OF_MONTH));
@ -378,7 +364,7 @@ public class SushiLiteServiceImpl implements SushiLiteService {
Date endDateParsed;
if (!endDate.equals("")) {
endDateParsed = tryParse(endDate);
if (endDateParsed != null && (granularity.toLowerCase().equals("monthly") || endDate.length() == 7)) {
if (endDateParsed != null && (granularity.equalsIgnoreCase("monthly") || endDate.length() == 7)) {
Calendar temp = Calendar.getInstance();
temp.setTime(endDateParsed);
temp.set(Calendar.DAY_OF_MONTH, temp.getActualMaximum(Calendar.DAY_OF_MONTH));
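The Calendar pattern in these hunks expands a YYYY-MM value to full month boundaries: begin dates are pinned to the first day of the month, end dates to the last. A condensed sketch of the same idea; the helper name and the direct SimpleDateFormat parse are illustrative (the service itself goes through tryParse):

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;

// Illustrative helper: expand "2020-03" to 2020-03-01 (firstDay = true) or 2020-03-31 (firstDay = false).
static Date monthBoundary(String yearMonth, boolean firstDay) throws ParseException {
    Date parsed = new SimpleDateFormat("yyyy-MM").parse(yearMonth);
    Calendar temp = Calendar.getInstance();
    temp.setTime(parsed);
    temp.set(Calendar.DAY_OF_MONTH,
            firstDay ? temp.getActualMinimum(Calendar.DAY_OF_MONTH)
                     : temp.getActualMaximum(Calendar.DAY_OF_MONTH));
    return temp.getTime();
}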
@ -402,7 +388,7 @@ public class SushiLiteServiceImpl implements SushiLiteService {
reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "usagecounts.openaire.eu", "Invalid Date Arguments", "End Date: " + endDate + " is not a valid date"));
}
if (beginDateParsed != null && endDateParsed != null && !beginDateParsed.before(endDateParsed)) {
reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "usagecounts.openaire.eu", "Invalid Date Arguments", "BeginDate \'" + new SimpleDateFormat("yyyy-MM-dd").format(beginDateParsed) + "\' is greater than EndDate \'" + new SimpleDateFormat("yyyy-MM-dd").format(endDateParsed) + "\'"));
reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "usagecounts.openaire.eu", "Invalid Date Arguments", "BeginDate '" + new SimpleDateFormat("yyyy-MM-dd").format(beginDateParsed) + "' is greater than EndDate '" + new SimpleDateFormat("yyyy-MM-dd").format(endDateParsed) + "'"));
}
List<Filter> reportFilters = new ArrayList();
@ -459,7 +445,7 @@ public class SushiLiteServiceImpl implements SushiLiteService {
Date beginDateParsed;
if (!beginDate.equals("")) {
beginDateParsed = tryParse(beginDate);
if (beginDateParsed != null && (granularity.toLowerCase().equals("monthly") || beginDate.length() == 7)) {
if (beginDateParsed != null && (granularity.equalsIgnoreCase("monthly") || beginDate.length() == 7)) {
Calendar temp = Calendar.getInstance();
temp.setTime(beginDateParsed);
temp.set(Calendar.DAY_OF_MONTH, temp.getActualMinimum(Calendar.DAY_OF_MONTH));
@ -476,7 +462,7 @@ public class SushiLiteServiceImpl implements SushiLiteService {
Date endDateParsed;
if (!endDate.equals("")) {
endDateParsed = tryParse(endDate);
if (endDateParsed != null && (granularity.toLowerCase().equals("monthly") || endDate.length() == 7)) {
if (endDateParsed != null && (granularity.equalsIgnoreCase("monthly") || endDate.length() == 7)) {
Calendar temp = Calendar.getInstance();
temp.setTime(endDateParsed);
temp.set(Calendar.DAY_OF_MONTH, temp.getActualMaximum(Calendar.DAY_OF_MONTH));
@ -498,7 +484,7 @@ public class SushiLiteServiceImpl implements SushiLiteService {
reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "usagecounts.openaire.eu", "Invalid Date Arguments", "End Date: " + endDate + " is not a valid date"));
}
if (beginDateParsed != null && endDateParsed != null && !beginDateParsed.before(endDateParsed)) {
reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "usagecounts.openaire.eu", "Invalid Date Arguments", "BeginDate \'" + new SimpleDateFormat("yyyy-MM-dd").format(beginDateParsed) + "\' is greater than EndDate \'" + new SimpleDateFormat("yyyy-MM-dd").format(endDateParsed) + "\'"));
reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "usagecounts.openaire.eu", "Invalid Date Arguments", "BeginDate '" + new SimpleDateFormat("yyyy-MM-dd").format(beginDateParsed) + "' is greater than EndDate '" + new SimpleDateFormat("yyyy-MM-dd").format(endDateParsed) + "'"));
}
List<Filter> reportFilters = new ArrayList();
@ -554,14 +540,14 @@ public class SushiLiteServiceImpl implements SushiLiteService {
orgIdentifiers.add(new SUSHI_Org_Identifiers("Openaire", repositoryIdentifier));
if (!granularity.equalsIgnoreCase("totals") && !granularity.equalsIgnoreCase("monthly")) {
reportExceptions.add(new SUSHI_Error_Model("3062", "Warning", "Invalid ReportAttribute Value", "usagecounts.openaire.eu", "Granularity: \'" + granularity + "\' unknown. Defaulting to Monthly"));
reportExceptions.add(new SUSHI_Error_Model("3062", "Warning", "Invalid ReportAttribute Value", "usagecounts.openaire.eu", "Granularity: '" + granularity + "' unknown. Defaulting to Monthly"));
granularity = "Monthly";
}
Date beginDateParsed;
if (!beginDate.equals("")) {
beginDateParsed = tryParse(beginDate);
if (beginDateParsed != null && (granularity.toLowerCase().equals("monthly") || beginDate.length() == 7)) {
if (beginDateParsed != null && (granularity.equalsIgnoreCase("monthly") || beginDate.length() == 7)) {
Calendar temp = Calendar.getInstance();
temp.setTime(beginDateParsed);
temp.set(Calendar.DAY_OF_MONTH, temp.getActualMinimum(Calendar.DAY_OF_MONTH));
@ -580,7 +566,7 @@ public class SushiLiteServiceImpl implements SushiLiteService {
Date endDateParsed;
if (!endDate.equals("")) {
endDateParsed = tryParse(endDate);
if (endDateParsed != null && (granularity.toLowerCase().equals("monthly") || endDate.length() == 7)) {
if (endDateParsed != null && (granularity.equalsIgnoreCase("monthly") || endDate.length() == 7)) {
Calendar temp = Calendar.getInstance();
temp.setTime(endDateParsed);
temp.set(Calendar.DAY_OF_MONTH, temp.getActualMaximum(Calendar.DAY_OF_MONTH));
@ -597,21 +583,21 @@ public class SushiLiteServiceImpl implements SushiLiteService {
}
// Calculate time difference
// in milliseconds
beginDateParsed = tryParse(beginDate);
Calendar temp = Calendar.getInstance();
temp.setTime(beginDateParsed);
temp.set(Calendar.DAY_OF_MONTH, temp.getActualMaximum(Calendar.DAY_OF_MONTH));
beginDateParsed = temp.getTime();
endDateParsed = tryParse(endDate);
Calendar temp1 = Calendar.getInstance();
temp1.setTime(endDateParsed);
temp1.set(Calendar.DAY_OF_MONTH, temp1.getActualMaximum(Calendar.DAY_OF_MONTH));
endDateParsed = temp1.getTime();
// // Calculate time difference
// // in milliseconds
// beginDateParsed = tryParse(beginDate);
// Calendar temp = Calendar.getInstance();
// temp.setTime(beginDateParsed);
// temp.set(Calendar.DAY_OF_MONTH, temp.getActualMaximum(Calendar.DAY_OF_MONTH));
// beginDateParsed = temp.getTime();
//
//
// endDateParsed = tryParse(endDate);
// Calendar temp1 = Calendar.getInstance();
// temp1.setTime(endDateParsed);
// temp1.set(Calendar.DAY_OF_MONTH, temp1.getActualMaximum(Calendar.DAY_OF_MONTH));
// endDateParsed = temp1.getTime();
//
long difference_In_Time
= endDateParsed.getTime() - beginDateParsed.getTime();
long difference_In_Years = (difference_In_Time/ (1000 * 60 * 60 * 24));
@ -625,7 +611,7 @@ public class SushiLiteServiceImpl implements SushiLiteService {
// reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "Invalid Date Arguments", "usagecounts.openaire.eu", "End Date: " + endDate + " is not a valid date"));
// }
if (beginDateParsed != null && endDateParsed != null && !beginDateParsed.before(endDateParsed)) {
reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "Invalid Date Arguments", "usagecounts.openaire.eu", "BeginDate \'" + new SimpleDateFormat("yyyy-MM-dd").format(beginDateParsed) + "\' is greater than EndDate \'" + new SimpleDateFormat("yyyy-MM-dd").format(endDateParsed) + "\'"));
reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "Invalid Date Arguments", "usagecounts.openaire.eu", "BeginDate '" + new SimpleDateFormat("yyyy-MM-dd").format(beginDateParsed) + "' is greater than EndDate '" + new SimpleDateFormat("yyyy-MM-dd").format(endDateParsed) + "'"));
}
String institutionName=null;
@ -741,14 +727,14 @@ public class SushiLiteServiceImpl implements SushiLiteService {
orgIdentifiers.add(new SUSHI_Org_Identifiers("Openaire", repositoryIdentifier));
if (!granularity.equalsIgnoreCase("totals") && !granularity.equalsIgnoreCase("monthly")) {
reportExceptions.add(new SUSHI_Error_Model("3062", "Warning", "Invalid ReportAttribute Value", "usagecounts.openaire.eu", "Granularity: \'" + granularity + "\' unknown. Defaulting to Monthly"));
reportExceptions.add(new SUSHI_Error_Model("3062", "Warning", "Invalid ReportAttribute Value", "usagecounts.openaire.eu", "Granularity: '" + granularity + "' unknown. Defaulting to Monthly"));
granularity = "Monthly";
}
Date beginDateParsed;
if (!beginDate.equals("")) {
beginDateParsed = tryParse(beginDate);
if (beginDateParsed != null && (granularity.toLowerCase().equals("monthly") || beginDate.length() == 7)) {
if (beginDateParsed != null && (granularity.equalsIgnoreCase("monthly") || beginDate.length() == 7)) {
Calendar temp = Calendar.getInstance();
temp.setTime(beginDateParsed);
temp.set(Calendar.DAY_OF_MONTH, temp.getActualMinimum(Calendar.DAY_OF_MONTH));
@ -765,7 +751,7 @@ public class SushiLiteServiceImpl implements SushiLiteService {
Date endDateParsed;
if (!endDate.equals("")) {
endDateParsed = tryParse(endDate);
if (endDateParsed != null && (granularity.toLowerCase().equals("monthly") || endDate.length() == 7)) {
if (endDateParsed != null && (granularity.equalsIgnoreCase("monthly") || endDate.length() == 7)) {
Calendar temp = Calendar.getInstance();
temp.setTime(endDateParsed);
temp.set(Calendar.DAY_OF_MONTH, temp.getActualMaximum(Calendar.DAY_OF_MONTH));
@ -786,7 +772,7 @@ public class SushiLiteServiceImpl implements SushiLiteService {
reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "Invalid Date Arguments", "usagecounts.openaire.eu", "End Date: " + endDate + " is not a valid date"));
}
if (beginDateParsed != null && endDateParsed != null && !beginDateParsed.before(endDateParsed)) {
reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "Invalid Date Arguments", "usagecounts.openaire.eu", "BeginDate \'" + new SimpleDateFormat("yyyy-MM-dd").format(beginDateParsed) + "\' is greater than EndDate \'" + new SimpleDateFormat("yyyy-MM-dd").format(endDateParsed) + "\'"));
reportExceptions.add(new SUSHI_Error_Model("3020", "Error", "Invalid Date Arguments", "usagecounts.openaire.eu", "BeginDate '" + new SimpleDateFormat("yyyy-MM-dd").format(beginDateParsed) + "' is greater than EndDate '" + new SimpleDateFormat("yyyy-MM-dd").format(endDateParsed) + "'"));
}
String repoid = repositoryIdentifier;
@ -1045,7 +1031,7 @@ public class SushiLiteServiceImpl implements SushiLiteService {
log.info("Report created..." + timestamp2);
reportForCompression = false;
return new String(sushi_lite_server + "/download/" + outputname + ".zip");
return sushi_lite_server + "/download/" + outputname + ".zip";
} catch (JsonProcessingException ex) {
java.util.logging.Logger.getLogger(SushiLiteServiceImpl.class.getName()).log(Level.SEVERE, null, ex);
}
@ -1120,7 +1106,7 @@ public class SushiLiteServiceImpl implements SushiLiteService {
//System.out.println("String end " + timestamp2);
log.info("Report created..." + timestamp2);
reportForCompression = false;
return new String(sushi_lite_server + "/download/" + outputname + ".zip");
return sushi_lite_server + "/download/" + outputname + ".zip";
} catch (Exception ex) {
java.util.logging.Logger.getLogger(SushiLiteServiceImpl.class
@ -1179,7 +1165,7 @@ public class SushiLiteServiceImpl implements SushiLiteService {
//System.out.println("String end " + timestamp2);
log.info("Report created..." + timestamp2);
reportForCompression = false;
return new String(sushi_lite_server + "/download/" + outputname + ".zip");
return sushi_lite_server + "/download/" + outputname + ".zip";
} catch (Exception ex) {
java.util.logging.Logger.getLogger(SushiLiteServiceImpl.class
@ -1215,95 +1201,95 @@ public class SushiLiteServiceImpl implements SushiLiteService {
}
}
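The compressed-report branches above all finish the same way: a "Report created" log line and a return of a URL pointing at a zip under the download folder. Writing that zip is not shown in these hunks, so the sketch below only assumes a typical ZipOutputStream approach; the file layout and names are placeholders, not the exact ones used by the service:

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

// Write the serialized report into <downloadFolder>/<outputName>.zip and return its public URL.
static String zipReport(String reportJson, String downloadFolder, String outputName,
                        String sushiLiteServer) throws IOException {
    String zipPath = downloadFolder + "/" + outputName + ".zip";
    try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(zipPath))) {
        zos.putNextEntry(new ZipEntry(outputName + ".json"));
        zos.write(reportJson.getBytes(StandardCharsets.UTF_8));
        zos.closeEntry();
    }
    return sushiLiteServer + "/download/" + outputName + ".zip";
}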
class JSONUtil {
public static String escape(String input) {
StringBuilder output = new StringBuilder();
for (int i = 0; i < input.length(); i++) {
char ch = input.charAt(i);
int chx = (int) ch;
// let's not put any nulls in our strings
assert (chx != 0);
if (ch == '\n') {
output.append("\\n");
} else if (ch == '\t') {
output.append("\\t");
} else if (ch == '\r') {
output.append("\\r");
} else if (ch == '\\') {
output.append("\\\\");
} else if (ch == '"') {
output.append("\\\"");
} else if (ch == '\b') {
output.append("\\b");
} else if (ch == '\f') {
output.append("\\f");
} else if (chx >= 0x10000) {
assert false : "Java stores as u16, so it should never give us a character that's bigger than 2 bytes. It literally can't.";
} else if (chx > 127) {
output.append(String.format("\\u%04x", chx));
} else {
output.append(ch);
}
}
return output.toString();
}
public static String unescape(String input) {
StringBuilder builder = new StringBuilder();
int i = 0;
while (i < input.length()) {
char delimiter = input.charAt(i);
i++; // consume letter or backslash
if (delimiter == '\\' && i < input.length()) {
// consume first after backslash
char ch = input.charAt(i);
i++;
if (ch == '\\' || ch == '/' || ch == '"' || ch == '\'') {
builder.append(ch);
} else if (ch == 'n') {
builder.append('\n');
} else if (ch == 'r') {
builder.append('\r');
} else if (ch == 't') {
builder.append('\t');
} else if (ch == 'b') {
builder.append('\b');
} else if (ch == 'f') {
builder.append('\f');
} else if (ch == 'u') {
StringBuilder hex = new StringBuilder();
// expect 4 digits
if (i + 4 > input.length()) {
throw new RuntimeException("Not enough unicode digits! ");
}
for (char x : input.substring(i, i + 4).toCharArray()) {
if (!Character.isLetterOrDigit(x)) {
throw new RuntimeException("Bad character in unicode escape.");
}
hex.append(Character.toLowerCase(x));
}
i += 4; // consume those four digits.
int code = Integer.parseInt(hex.toString(), 16);
builder.append((char) code);
} else {
throw new RuntimeException("Illegal escape sequence: \\" + ch);
}
} else { // it's not a backslash, or it's the last character.
builder.append(delimiter);
}
}
return builder.toString();
}
}
//class JSONUtil {
//
// public static String escape(String input) {
// StringBuilder output = new StringBuilder();
//
// for (int i = 0; i < input.length(); i++) {
// char ch = input.charAt(i);
// int chx = ch;
//
// // let's not put any nulls in our strings
// assert (chx != 0);
//
// if (ch == '\n') {
// output.append("\\n");
// } else if (ch == '\t') {
// output.append("\\t");
// } else if (ch == '\r') {
// output.append("\\r");
// } else if (ch == '\\') {
// output.append("\\\\");
// } else if (ch == '"') {
// output.append("\\\"");
// } else if (ch == '\b') {
// output.append("\\b");
// } else if (ch == '\f') {
// output.append("\\f");
// } else if (chx >= 0x10000) {
// assert false : "Java stores as u16, so it should never give us a character that's bigger than 2 bytes. It literally can't.";
// } else if (chx > 127) {
// output.append(String.format("\\u%04x", chx));
// } else {
// output.append(ch);
// }
// }
//
// return output.toString();
// }
//
// public static String unescape(String input) {
// StringBuilder builder = new StringBuilder();
//
// int i = 0;
// while (i < input.length()) {
// char delimiter = input.charAt(i);
// i++; // consume letter or backslash
//
// if (delimiter == '\\' && i < input.length()) {
//
// // consume first after backslash
// char ch = input.charAt(i);
// i++;
//
// if (ch == '\\' || ch == '/' || ch == '"' || ch == '\'') {
// builder.append(ch);
// } else if (ch == 'n') {
// builder.append('\n');
// } else if (ch == 'r') {
// builder.append('\r');
// } else if (ch == 't') {
// builder.append('\t');
// } else if (ch == 'b') {
// builder.append('\b');
// } else if (ch == 'f') {
// builder.append('\f');
// } else if (ch == 'u') {
//
// StringBuilder hex = new StringBuilder();
//
// // expect 4 digits
// if (i + 4 > input.length()) {
// throw new RuntimeException("Not enough unicode digits! ");
// }
// for (char x : input.substring(i, i + 4).toCharArray()) {
// if (!Character.isLetterOrDigit(x)) {
// throw new RuntimeException("Bad character in unicode escape.");
// }
// hex.append(Character.toLowerCase(x));
// }
// i += 4; // consume those four digits.
//
// int code = Integer.parseInt(hex.toString(), 16);
// builder.append((char) code);
// } else {
// throw new RuntimeException("Illegal escape sequence: \\" + ch);
// }
// } else { // it's not a backslash, or it's the last character.
// builder.append(delimiter);
// }
// }
// return builder.toString();
// }
//}
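Commenting out JSONUtil fits the rest of the change: Jackson already escapes control characters, quotes and backslashes when it serializes strings, so a hand-rolled escape/unescape pair is not needed on the JSON output path. A quick check; the Payload class is a stand-in, not a project type:

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

public class EscapeCheck {

    // Stand-in type holding characters that need JSON escaping.
    public static class Payload {
        public String text = "line1\nline2 with \"quotes\" and a backslash \\";
    }

    public static void main(String[] args) throws JsonProcessingException {
        // Jackson escapes the newline, the quotes and the backslash in the emitted JSON.
        System.out.println(new ObjectMapper().writer().writeValueAsString(new Payload()));
    }
}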

View File

@ -4,10 +4,11 @@ import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Dataset_Report;
import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Item_Report;
import eu.dnetlib.usagestats.sushilite.domain.COUNTER_Platform_Report;
import eu.dnetlib.usagestats.sushilite.domain.SUSHI_Service_Status;
import java.util.ArrayList;
import java.util.List;
public interface SushiLiteService {
public interface SushiLiteServiceSample {
// ReportResponseWrapper buildReport(String reportName, String release, String requestorId, String beginDate,
// String endDate, String repositoryIdentifier, String itemIdentifier,

View File

@ -132,14 +132,14 @@ public class UsageStatsServiceImpl implements UsageStatsService {
String query = "SELECT c.name, count(distinct repository_id) as total_repos, sum(views) as views, sum(downloads) as downloads "
+ "FROM "+statsDB+".organization t, "+statsDB+".organization_datasources o, "
+ ""+statsDB+".country c, "+usagestatsImpalaDB+".usage_stats "
+ "WHERE o.datasource=t.id AND c.code=t.country AND o.id=repository_id GROUP BY c.name";
+ "WHERE o.id=t.id AND c.code=t.country AND o.datasource=repository_id GROUP BY c.name";
return usageStatsRepository.executeCountryUsageStats(query);
}
@Override
public CountryUsageStats getCountryUsageStats(String country) {
String query = "SELECT count(distinct repository_id) as total_repos, sum(views) as views, sum(downloads) as downloads "
+ "from "+statsDB+".organization t, "+statsDB+".organization_datasources o, "
+ ""+statsDB+".country c, "+usagestatsImpalaDB+".usage_stats where o.datasource=t.id and c.code=t.country and o.id=repository_id "
+ ""+statsDB+".country c, "+usagestatsImpalaDB+".usage_stats where o.id=t.id and c.code=t.country and o.datasource=repository_id "
+ "and c.name='"+country+"'";
log.info("Country Usage Stats query "+query);
return usageStatsRepository.executeCountryUsageStats(query, country);
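The country filter above is concatenated directly into the SQL string, and the value comes from the caller, so binding it as a parameter is the safer pattern. How UsageStatsRepository actually executes queries is not shown here; the JdbcTemplate version below is only an assumption about one way to do it:

import java.util.Map;
import org.springframework.jdbc.core.JdbcTemplate;

// Assumed approach: bind the country name instead of concatenating it into the SQL.
static Map<String, Object> countryUsage(JdbcTemplate jdbcTemplate, String statsDB,
                                        String usagestatsImpalaDB, String country) {
    String query = "SELECT count(distinct repository_id) as total_repos, sum(views) as views, sum(downloads) as downloads "
            + "FROM " + statsDB + ".organization t, " + statsDB + ".organization_datasources o, "
            + statsDB + ".country c, " + usagestatsImpalaDB + ".usage_stats "
            + "WHERE o.id=t.id AND c.code=t.country AND o.datasource=repository_id AND c.name=?";
    return jdbcTemplate.queryForMap(query, country);
}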
@ -150,7 +150,7 @@ public class UsageStatsServiceImpl implements UsageStatsService {
public List<CountryRepositories> getCountryRepositories() {
String query = "SELECT c.name, d.name FROM "+statsDB+".datasource d, "+statsDB+".organization t, "
+ statsDB+".organization_datasources o, "+statsDB+".country c, "+usagestatsImpalaDB+".usage_stats "
+ "WHERE o.datasource=t.id AND c.code=t.country AND o.id=repository_id and repository_id=d.id "
+ "WHERE o.datasource=t.id AND c.code=t.country AND o.datasource=repository_id and repository_id=d.datasource "
+ "GROUP BY d.name, c.name ORDER BY c.name";
return usageStatsRepository.executeCountryRepositories(query);
}

View File

@ -10,9 +10,8 @@ log4j.additivity.org.springframework = false
log4j.appender.R=org.apache.log4j.RollingFileAppender
#log4j.appender.R.File=/var/log/dnet/usageStatsAPI/usageStatsAPI_R5.log
#log4j.appender.R.File=/home/dimitris.pierrakos/Sushilite_R5/usageStatsAPI_R5.log
log4j.appender.R.File=/Users/dpie/OneDrive/Research/Projects/OpenAIRE Usage Statistics Service/Code/code-repo.d4science.org/Sushilite-R5/usageStatsAPI_R5.log
#log4j.appender.R.File=/srv/springboot/9880/usageStatsAPI_R5.log
#log4j.appender.R.File=/home/dpie/Desktop/usageStatsAPI_R5.log
#log4j.appender.R.File=/Users/dpie/OneDrive/Research/Projects/OpenAIRE Usage Statistics Service/Code/code-repo.d4science.org/Sushilite-R5/usageStatsAPI_R5.log
log4j.appender.R.File=/srv/springboot/9880/usageStatsAPI_R5.log
log4j.appender.R.MaxFileSize=10MB
log4j.appender.R.MaxBackupIndex=10
log4j.appender.R.layout=org.apache.log4j.PatternLayout
@ -20,9 +19,8 @@ log4j.appender.R.layout.ConversionPattern= %d %p %t [%c] - %m%n
log4j.appender.S=org.apache.log4j.RollingFileAppender
#log4j.appender.S.File=/var/log/dnet/usageStatsAPI/usageStatsAPI_R5-spring.log
#log4j.appender.S.File=/home/dimitris.pierrakos/Sushilite_R5/usageStatsAPI_R5-spring.log
log4j.appender.S.File=/Users/dpie/OneDrive/Research/Projects/OpenAIRE Usage Statistics Service/Code/code-repo.d4science.org/Sushilite-R5/usageStatsAPI_R5-spring.log
#log4j.appender.S.File=/Volumes/Zeus/dpie/Desktop/usageStatsAPI_R5-spring.log
#log4j.appender.S.File=/srv/springboot/9880/usageStatsAPI_R5-spring.log
#log4j.appender.S.File=/Users/dpie/OneDrive/Research/Projects/OpenAIRE Usage Statistics Service/Code/code-repo.d4science.org/Sushilite-R5/usageStatsAPI_R5-spring.log
log4j.appender.S.File=/srv/springboot/9880/usageStatsAPI_R5-spring.log
#log4j.appender.S.File=/home/dpie/Desktop/usageStatsAPI_R5-spring.log
log4j.appender.S.MaxFileSize=10MB
log4j.appender.S.MaxBackupIndex=10