package eu.dnetlib.oa.graph.usagestats.export;

import java.io.*;
import java.net.URL;
import java.net.URLConnection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Downloads IRUS-UK SUSHI-Lite usage reports to HDFS and loads them into Hive.
 *
 * @author D. Pierrakos, S. Zoupanos
 */
public class IrusStats {
private String irusUKURL ;
2020-10-02 15:25:21 +02:00
private static final Logger logger = LoggerFactory . getLogger ( IrusStats . class ) ;
2020-05-07 18:00:03 +02:00
public IrusStats ( String irusUKURL ) throws Exception {
this . irusUKURL = irusUKURL ;
2020-09-13 13:51:45 +02:00
// The following may not be needed - It will be created when JSON tables are created
2020-07-22 18:22:04 +02:00
// createTmpTables();
2020-05-07 18:00:03 +02:00
}
2020-10-06 22:44:25 +02:00
public void reCreateLogDirs ( ) throws Exception {
FileSystem dfs = FileSystem . get ( new Configuration ( ) ) ;
logger . info ( " Deleting irusUKReport directory: " + ExecuteWorkflow . irusUKReportPath ) ;
dfs . delete ( new Path ( ExecuteWorkflow . irusUKReportPath ) , true ) ;
2020-05-07 18:00:03 +02:00
2020-10-06 22:44:25 +02:00
logger . info ( " Creating irusUKReport directory: " + ExecuteWorkflow . irusUKReportPath ) ;
dfs . mkdirs ( new Path ( ExecuteWorkflow . irusUKReportPath ) ) ;
}
public void createTables ( ) throws Exception {
try {
2020-10-02 15:25:21 +02:00
logger . info ( " Creating sushilog " ) ;
2020-09-27 12:19:45 +02:00
Statement stmt = ConnectDB . getHiveConnection ( ) . createStatement ( ) ;
2020-09-13 13:51:45 +02:00
String sqlCreateTableSushiLog = " CREATE TABLE IF NOT EXISTS " + ConnectDB . getUsageStatsDBSchema ( )
+ " .sushilog(source STRING, " +
" repository STRING, rid STRING, date STRING, metric_type STRING, count INT) clustered by (source, " +
" repository, rid, date, metric_type) into 100 buckets stored as orc tblproperties('transactional'='true') " ;
2020-05-07 18:00:03 +02:00
stmt . executeUpdate ( sqlCreateTableSushiLog ) ;
2020-10-02 15:25:21 +02:00
logger . info ( " Created sushilog " ) ;
2020-09-13 13:51:45 +02:00
// To see how to apply to the ignore duplicate rules and indexes
// stmt.executeUpdate(sqlCreateTableSushiLog);
// String sqlcreateRuleSushiLog = "CREATE OR REPLACE RULE ignore_duplicate_inserts AS "
// + " ON INSERT TO sushilog "
// + " WHERE (EXISTS ( SELECT sushilog.source, sushilog.repository,"
// + "sushilog.rid, sushilog.date "
// + "FROM sushilog "
// + "WHERE sushilog.source = new.source AND sushilog.repository = new.repository AND sushilog.rid = new.rid AND sushilog.date = new.date AND sushilog.metric_type = new.metric_type)) DO INSTEAD NOTHING;";
// stmt.executeUpdate(sqlcreateRuleSushiLog);
// String createSushiIndex = "create index if not exists sushilog_duplicates on sushilog(source, repository, rid, date, metric_type);";
// stmt.executeUpdate(createSushiIndex);
2020-05-07 18:00:03 +02:00
stmt . close ( ) ;
2020-09-27 12:19:45 +02:00
ConnectDB . getHiveConnection ( ) . close ( ) ;
2020-10-02 15:25:21 +02:00
logger . info ( " Sushi Tables Created " ) ;
2020-05-07 18:00:03 +02:00
} catch ( Exception e ) {
2020-10-02 15:25:21 +02:00
logger . error ( " Failed to create tables: " + e ) ;
2020-05-07 18:00:03 +02:00
throw new Exception ( " Failed to create tables: " + e . toString ( ) , e ) ;
}
}
2020-09-13 15:00:40 +02:00
// // The following may not be needed - It will be created when JSON tables are created
// private void createTmpTables() throws Exception {
// try {
//
// Statement stmt = ConnectDB.getConnection().createStatement();
// String sqlCreateTableSushiLog = "CREATE TABLE IF NOT EXISTS sushilogtmp(source TEXT, repository TEXT, rid TEXT, date TEXT, metric_type TEXT, count INT, PRIMARY KEY(source, repository, rid, date, metric_type));";
// stmt.executeUpdate(sqlCreateTableSushiLog);
//
// // stmt.executeUpdate("CREATE TABLE IF NOT EXISTS public.sushilog AS TABLE sushilog;");
// // String sqlCopyPublicSushiLog = "INSERT INTO sushilog SELECT * FROM public.sushilog;";
// // stmt.executeUpdate(sqlCopyPublicSushiLog);
// String sqlcreateRuleSushiLog = "CREATE OR REPLACE RULE ignore_duplicate_inserts AS "
// + " ON INSERT TO sushilogtmp "
// + " WHERE (EXISTS ( SELECT sushilogtmp.source, sushilogtmp.repository,"
// + "sushilogtmp.rid, sushilogtmp.date "
// + "FROM sushilogtmp "
// + "WHERE sushilogtmp.source = new.source AND sushilogtmp.repository = new.repository AND sushilogtmp.rid = new.rid AND sushilogtmp.date = new.date AND sushilogtmp.metric_type = new.metric_type)) DO INSTEAD NOTHING;";
// stmt.executeUpdate(sqlcreateRuleSushiLog);
//
// stmt.close();
// ConnectDB.getConnection().close();
// log.info("Sushi Tmp Tables Created");
// } catch (Exception e) {
// log.error("Failed to create tables: " + e);
// throw new Exception("Failed to create tables: " + e.toString(), e);
// }
// }
public void processIrusStats ( ) throws Exception {
2020-09-27 12:19:45 +02:00
Statement stmt = ConnectDB . getHiveConnection ( ) . createStatement ( ) ;
ConnectDB . getHiveConnection ( ) . setAutoCommit ( false ) ;
2020-05-07 18:00:03 +02:00
2020-10-02 15:25:21 +02:00
logger . info ( " Adding JSON Serde jar " ) ;
2020-09-13 15:00:40 +02:00
stmt . executeUpdate ( " add jar /usr/share/cmf/common_jars/hive-hcatalog-core-1.1.0-cdh5.14.0.jar " ) ;
2020-10-02 15:25:21 +02:00
logger . info ( " Added JSON Serde jar " ) ;
2020-09-14 19:10:53 +02:00
2020-10-02 15:25:21 +02:00
logger . info ( " Dropping sushilogtmp_json table " ) ;
2020-09-13 15:00:40 +02:00
String drop_sushilogtmp_json = " DROP TABLE IF EXISTS " +
ConnectDB . getUsageStatsDBSchema ( ) +
" .sushilogtmp_json " ;
stmt . executeUpdate ( drop_sushilogtmp_json ) ;
2020-10-02 15:25:21 +02:00
logger . info ( " Dropped sushilogtmp_json table " ) ;
2020-09-13 15:00:40 +02:00
2020-10-02 15:25:21 +02:00
logger . info ( " Creating sushilogtmp_json table " ) ;
2020-09-13 15:00:40 +02:00
String create_sushilogtmp_json = " CREATE EXTERNAL TABLE IF NOT EXISTS " +
ConnectDB . getUsageStatsDBSchema ( ) + " .sushilogtmp_json( \ n " +
" `ItemIdentifier` ARRAY< \ n " +
" struct< \ n " +
" Type: STRING, \ n " +
" Value: STRING \ n " +
" > \ n " +
" >, \ n " +
" `ItemPerformance` ARRAY< \ n " +
" struct< \ n " +
" `Period`: struct< \ n " +
" `Begin`: STRING, \ n " +
" `End`: STRING \ n " +
" >, \ n " +
" `Instance`: struct< \ n " +
" `Count`: STRING, \ n " +
" `MetricType`: STRING \ n " +
" > \ n " +
" > \ n " +
" > \ n " +
" ) \ n " +
" ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe' \ n " +
2020-10-01 22:24:40 +02:00
" LOCATION ' " + ExecuteWorkflow . irusUKReportPath + " ' \ n " +
2020-09-13 15:00:40 +02:00
" TBLPROPERTIES ( \" transactional \" = \" false \" ) " ;
stmt . executeUpdate ( create_sushilogtmp_json ) ;
2020-10-02 15:25:21 +02:00
logger . info ( " Created sushilogtmp_json table " ) ;
2020-09-13 15:00:40 +02:00
2020-10-02 15:25:21 +02:00
logger . info ( " Dropping sushilogtmp table " ) ;
2020-09-13 15:00:40 +02:00
String drop_sushilogtmp = " DROP TABLE IF EXISTS " +
ConnectDB . getUsageStatsDBSchema ( ) +
" .sushilogtmp " ;
stmt . executeUpdate ( drop_sushilogtmp ) ;
2020-10-02 15:25:21 +02:00
logger . info ( " Dropped sushilogtmp table " ) ;
2020-09-13 15:00:40 +02:00
2020-10-02 15:25:21 +02:00
logger . info ( " Creating sushilogtmp table " ) ;
2020-09-13 15:00:40 +02:00
String create_sushilogtmp = " CREATE TABLE " + ConnectDB . getUsageStatsDBSchema ( )
+ " .sushilogtmp(source STRING, repository STRING, " +
" rid STRING, date STRING, metric_type STRING, count INT) clustered by (source) into 100 buckets stored as orc "
+
" tblproperties('transactional'='true') " ;
stmt . executeUpdate ( create_sushilogtmp ) ;
2020-10-02 15:25:21 +02:00
logger . info ( " Created sushilogtmp table " ) ;
2020-09-13 15:00:40 +02:00
2020-10-02 15:25:21 +02:00
logger . info ( " Inserting to sushilogtmp table " ) ;
2020-09-13 15:00:40 +02:00
String insert_sushilogtmp = " INSERT INTO " + ConnectDB . getUsageStatsDBSchema ( ) + " .sushilogtmp " +
" SELECT 'IRUS-UK', 'opendoar____::', `ItemIdent`.`Value`, `ItemPerf`.`Period`.`Begin`, " +
" `ItemPerf`.`Instance`.`MetricType`, `ItemPerf`.`Instance`.`Count` " +
" FROM " + ConnectDB . getUsageStatsDBSchema ( ) + " .sushilogtmp_json " +
" LATERAL VIEW posexplode(ItemIdentifier) ItemIdentifierTable AS seqi, ItemIdent " +
" LATERAL VIEW posexplode(ItemPerformance) ItemPerformanceTable AS seqp, ItemPerf " +
" WHERE `ItemIdent`.`Type`= 'OAI' " ;
stmt . executeUpdate ( insert_sushilogtmp ) ;
2020-10-02 15:25:21 +02:00
logger . info ( " Inserted to sushilogtmp table " ) ;
2020-09-13 15:00:40 +02:00
2020-09-27 12:19:45 +02:00
ConnectDB . getHiveConnection ( ) . close ( ) ;
2020-05-07 18:00:03 +02:00
2020-09-14 19:10:53 +02:00
// // !!!!!!!!!!!!!!!!!!!!!
// // To do the following
// // !!!!!!!!!!!!!!!!!!!!!
//
// // String sql = "INSERT INTO sushi_result_downloads SELECT s.source, d.id AS repository, ro.id, s.date, s.count
// // FROM sushilog s, datasource_oids d, result_oids ro WHERE s.repository=d.orid AND s.oai=ro.orid AND
// // metric_type='ft_total'";
// // String sql = "SELECT s.source, d.id AS repository_id, ro.id as result_id, extract('year' from s.date::date)
// // ||'/'|| LPAD(CAST(extract('month' from s.date::date) AS VARCHAR), 2, '0') as date, s.count INTO
// // downloads_stats FROM sushilog s, datasource_oids d, result_oids ro WHERE s.repository=d.orid AND
// // s.oai=ro.orid AND metric_type='ft_total'";
// // String sql = "INSERT INTO downloads_stats SELECT s.source, d.id AS repository_id, ro.id as result_id,
// // extract('year' from s.date::date) ||'/'|| LPAD(CAST(extract('month' from s.date::date) AS VARCHAR), 2, '0')
// // as date, s.count FROM sushilog s, datasource_oids d, result_oids ro WHERE s.repository=d.orid AND
// // s.oai=ro.orid AND metric_type='ft_total';";
// String sql = "INSERT INTO downloads_stats SELECT s.source, d.id AS repository_id, ro.id as result_id, extract('year' from s.date::date) ||'/'|| LPAD(CAST(extract('month' from s.date::date) AS VARCHAR), 2, '0') as date, s.count, '0' FROM sushilogtmp s, public.datasource_oids d, public.result_oids ro WHERE s.repository=d.orid AND s.rid=ro.orid AND metric_type='ft_total' AND s.source='IRUS-UK';";
//
// stmt.executeUpdate(sql);
//
// sql = "Insert into sushilog select * from sushilogtmp;";
// stmt.executeUpdate(sql);
//
// ConnectDB.getConnection().close();
2020-05-07 18:00:03 +02:00
}
2020-09-13 14:01:29 +02:00
public void getIrusRRReport ( String irusUKReportPath ) throws Exception {
2020-10-06 22:44:25 +02:00
SimpleDateFormat sdf = new SimpleDateFormat ( " YYYY-MM " ) ;
// Setting the starting period
Calendar start = ( Calendar ) ExecuteWorkflow . startingLogPeriod . clone ( ) ;
logger . info ( " Starting period for log download: " + sdf . format ( start . getTime ( ) ) ) ;
// Setting the ending period (last day of the month)
Calendar end = ( Calendar ) ExecuteWorkflow . endingLogPeriod . clone ( ) ;
end . add ( Calendar . MONTH , + 1 ) ;
end . add ( Calendar . DAY_OF_MONTH , - 1 ) ;
logger . info ( " Ending period for log download: " + sdf . format ( end . getTime ( ) ) ) ;
String reportUrl = irusUKURL + " GetReport/?Report=RR1&Release=4&RequestorID=OpenAIRE&BeginDate= " +
sdf . format ( start . getTime ( ) ) + " &EndDate= " + sdf . format ( end . getTime ( ) ) +
" &RepositoryIdentifier=&ItemDataType=&NewJiscBand=&Granularity=Monthly&Callback= " ;
2020-05-07 18:00:03 +02:00
2020-10-02 15:25:21 +02:00
logger . info ( " (processIrusRRReport) Getting report: " + reportUrl ) ;
2020-05-07 18:00:03 +02:00
String text = getJson ( reportUrl , " " , " " ) ;
2020-10-06 22:44:25 +02:00
List < String > opendoarsToVisit = new ArrayList < String > ( ) ;
2020-05-07 18:00:03 +02:00
JSONParser parser = new JSONParser ( ) ;
JSONObject jsonObject = ( JSONObject ) parser . parse ( text ) ;
jsonObject = ( JSONObject ) jsonObject . get ( " ReportResponse " ) ;
jsonObject = ( JSONObject ) jsonObject . get ( " Report " ) ;
jsonObject = ( JSONObject ) jsonObject . get ( " Report " ) ;
jsonObject = ( JSONObject ) jsonObject . get ( " Customer " ) ;
JSONArray jsonArray = ( JSONArray ) jsonObject . get ( " ReportItems " ) ;
int i = 0 ;
for ( Object aJsonArray : jsonArray ) {
JSONObject jsonObjectRow = ( JSONObject ) aJsonArray ;
JSONArray itemIdentifier = ( JSONArray ) jsonObjectRow . get ( " ItemIdentifier " ) ;
for ( Object identifier : itemIdentifier ) {
JSONObject opendoar = ( JSONObject ) identifier ;
if ( opendoar . get ( " Type " ) . toString ( ) . equals ( " OpenDOAR " ) ) {
i + + ;
2020-10-06 22:44:25 +02:00
opendoarsToVisit . add ( opendoar . get ( " Value " ) . toString ( ) ) ;
2020-09-13 14:01:29 +02:00
getIrusIRReport ( opendoar . get ( " Value " ) . toString ( ) , irusUKReportPath ) ;
2020-05-07 18:00:03 +02:00
break ;
}
}
// break;
}
2020-09-13 15:00:40 +02:00
2020-10-06 22:44:25 +02:00
logger . info ( " Found the following opendoars for download: " + opendoarsToVisit ) ;
if ( ExecuteWorkflow . irusNumberOfOpendoarsToDownload > 0 & &
ExecuteWorkflow . irusNumberOfOpendoarsToDownload < = opendoarsToVisit . size ( ) ) {
logger . info ( " Trimming siteIds list to the size of: " + ExecuteWorkflow . irusNumberOfOpendoarsToDownload ) ;
opendoarsToVisit = opendoarsToVisit . subList ( 0 , ExecuteWorkflow . irusNumberOfOpendoarsToDownload ) ;
}
logger . info ( " Downloading the followins opendoars: " + opendoarsToVisit ) ;
for ( String opendoar : opendoarsToVisit ) {
logger . info ( " Now working on piwikId: " + opendoar ) ;
this . getIrusIRReport ( opendoar , irusUKReportPath ) ;
}
logger . info ( " Finished with report: " + reportUrl ) ;
2020-05-07 18:00:03 +02:00
}
2020-09-13 14:01:29 +02:00
private void getIrusIRReport ( String opendoar , String irusUKReportPath ) throws Exception {
2020-09-13 13:51:45 +02:00
2020-10-02 15:25:21 +02:00
logger . info ( " (processIrusIRReport) Getting report(s) with opendoar: " + opendoar ) ;
2020-09-13 13:51:45 +02:00
2020-09-27 12:19:45 +02:00
ConnectDB . getHiveConnection ( ) . setAutoCommit ( false ) ;
2020-05-07 18:00:03 +02:00
SimpleDateFormat simpleDateFormat = new SimpleDateFormat ( " YYYY-MM " ) ;
2020-10-06 22:44:25 +02:00
// Setting the starting period
Calendar start = ( Calendar ) ExecuteWorkflow . startingLogPeriod . clone ( ) ;
logger . info ( " Starting period for log download: " + simpleDateFormat . format ( start . getTime ( ) ) ) ;
2020-05-07 18:00:03 +02:00
2020-10-06 22:44:25 +02:00
// Setting the ending period (last day of the month)
Calendar end = ( Calendar ) ExecuteWorkflow . endingLogPeriod . clone ( ) ;
end . add ( Calendar . MONTH , + 1 ) ;
2020-05-07 18:00:03 +02:00
end . add ( Calendar . DAY_OF_MONTH , - 1 ) ;
2020-10-06 22:44:25 +02:00
logger . info ( " Ending period for log download: " + simpleDateFormat . format ( end . getTime ( ) ) ) ;
2020-05-07 18:00:03 +02:00
SimpleDateFormat sdf = new SimpleDateFormat ( " yyyy-MM-dd " ) ;
PreparedStatement st = ConnectDB
2020-09-27 12:19:45 +02:00
. getHiveConnection ( )
2020-09-13 13:51:45 +02:00
. prepareStatement (
" SELECT max(date) FROM " + ConnectDB . getUsageStatsDBSchema ( ) + " .sushilog WHERE repository=? " ) ;
2020-05-07 18:00:03 +02:00
st . setString ( 1 , " opendoar____:: " + opendoar ) ;
ResultSet rs_date = st . executeQuery ( ) ;
while ( rs_date . next ( ) ) {
if ( rs_date . getString ( 1 ) ! = null & & ! rs_date . getString ( 1 ) . equals ( " null " )
& & ! rs_date . getString ( 1 ) . equals ( " " ) ) {
start . setTime ( sdf . parse ( rs_date . getString ( 1 ) ) ) ;
}
}
rs_date . close ( ) ;
int batch_size = 0 ;
while ( start . before ( end ) ) {
// log.info("date: " + simpleDateFormat.format(start.getTime()));
String reportUrl = this . irusUKURL + " GetReport/?Report=IR1&Release=4&RequestorID=OpenAIRE&BeginDate= "
+ simpleDateFormat . format ( start . getTime ( ) ) + " &EndDate= " + simpleDateFormat . format ( start . getTime ( ) )
+ " &RepositoryIdentifier=opendoar%3A " + opendoar
+ " &ItemIdentifier=&ItemDataType=&hasDOI=&Granularity=Monthly&Callback= " ;
start . add ( Calendar . MONTH , 1 ) ;
2020-07-22 18:22:04 +02:00
System . out . println ( " Downloading file: " + reportUrl ) ;
2020-05-07 18:00:03 +02:00
String text = getJson ( reportUrl , " " , " " ) ;
if ( text = = null ) {
continue ;
}
2020-07-22 18:22:04 +02:00
FileSystem fs = FileSystem . get ( new Configuration ( ) ) ;
String filePath = irusUKReportPath + " / " + " IrusIRReport_ " +
opendoar + " _ " + simpleDateFormat . format ( start . getTime ( ) ) + " .json " ;
System . out . println ( " Storing to file: " + filePath ) ;
FSDataOutputStream fin = fs . create ( new Path ( filePath ) , true ) ;
2020-05-07 18:00:03 +02:00
JSONParser parser = new JSONParser ( ) ;
JSONObject jsonObject = ( JSONObject ) parser . parse ( text ) ;
jsonObject = ( JSONObject ) jsonObject . get ( " ReportResponse " ) ;
jsonObject = ( JSONObject ) jsonObject . get ( " Report " ) ;
jsonObject = ( JSONObject ) jsonObject . get ( " Report " ) ;
jsonObject = ( JSONObject ) jsonObject . get ( " Customer " ) ;
JSONArray jsonArray = ( JSONArray ) jsonObject . get ( " ReportItems " ) ;
if ( jsonArray = = null ) {
continue ;
}
String oai = " " ;
for ( Object aJsonArray : jsonArray ) {
JSONObject jsonObjectRow = ( JSONObject ) aJsonArray ;
2020-07-22 18:22:04 +02:00
fin . write ( jsonObjectRow . toJSONString ( ) . getBytes ( ) ) ;
fin . writeChar ( '\n' ) ;
2020-05-07 18:00:03 +02:00
}
2020-07-22 18:22:04 +02:00
fin . close ( ) ;
2020-05-07 18:00:03 +02:00
}
2020-09-27 12:19:45 +02:00
ConnectDB . getHiveConnection ( ) . close ( ) ;
2020-09-13 15:00:40 +02:00
2020-10-02 15:25:21 +02:00
logger . info ( " (processIrusIRReport) Finished downloading report(s) with opendoar: " + opendoar ) ;
2020-05-07 18:00:03 +02:00
}
2020-07-22 18:22:04 +02:00
private String getJson ( String url ) throws Exception {
try {
System . out . println ( " ===> Connecting to: " + url ) ;
URL website = new URL ( url ) ;
System . out . println ( " Connection url -----> " + url ) ;
URLConnection connection = website . openConnection ( ) ;
2020-05-07 18:00:03 +02:00
2020-07-22 18:22:04 +02:00
// connection.setRequestProperty ("Authorization", "Basic "+encoded);
StringBuilder response ;
try ( BufferedReader in = new BufferedReader ( new InputStreamReader ( connection . getInputStream ( ) ) ) ) {
response = new StringBuilder ( ) ;
String inputLine ;
while ( ( inputLine = in . readLine ( ) ) ! = null ) {
response . append ( inputLine ) ;
// response.append("\n");
}
2020-05-07 18:00:03 +02:00
}
2020-07-22 18:22:04 +02:00
System . out . println ( " response ====> " + response . toString ( ) ) ;
2020-05-07 18:00:03 +02:00
2020-07-22 18:22:04 +02:00
return response . toString ( ) ;
} catch ( Exception e ) {
2020-10-02 15:25:21 +02:00
logger . error ( " Failed to get URL: " + e ) ;
2020-07-22 18:22:04 +02:00
System . out . println ( " Failed to get URL: " + e ) ;
throw new Exception ( " Failed to get URL: " + e . toString ( ) , e ) ;
2020-05-07 18:00:03 +02:00
}
}
private String getJson ( String url , String username , String password ) throws Exception {
// String cred=username+":"+password;
// String encoded = new sun.misc.BASE64Encoder().encode (cred.getBytes());
try {
URL website = new URL ( url ) ;
URLConnection connection = website . openConnection ( ) ;
// connection.setRequestProperty ("Authorization", "Basic "+encoded);
StringBuilder response ;
try ( BufferedReader in = new BufferedReader ( new InputStreamReader ( connection . getInputStream ( ) ) ) ) {
response = new StringBuilder ( ) ;
String inputLine ;
while ( ( inputLine = in . readLine ( ) ) ! = null ) {
response . append ( inputLine ) ;
response . append ( " \ n " ) ;
}
}
return response . toString ( ) ;
} catch ( Exception e ) {
2020-10-02 15:25:21 +02:00
logger . error ( " Failed to get URL " , e ) ;
2020-05-07 18:00:03 +02:00
return null ;
}
}
}