forked from D-Net/dnet-hadoop
Passing via properties the DB parameters
This commit is contained in:
parent 07e750939f
commit 2e2e2b8b29
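
In outline, the commit replaces hardcoded Hive/Impala connection details with values passed through the workflow: the Oozie workflow hands four parameters to ExecuteWorkflow, which stores them in static fields that ConnectDB.init() then reads. Below is a minimal, self-contained sketch of that pattern; it mirrors the class and field names in the diff, but the Properties-based stand-in for the argument parser is an assumption for illustration only.

import java.util.Properties;

class ExecuteWorkflowSketch {
	// mirrors the static fields added to ExecuteWorkflow in this commit
	static String dbHiveUrl;
	static String dbImpalaUrl;
	static String usageStatsDBSchema;
	static String statsDBSchema;

	public static void main(String[] args) throws Exception {
		// stand-in for parser.get(...) in the real ExecuteWorkflow.main
		Properties p = new Properties();
		p.setProperty("dbHiveUrl", "jdbc:hive2://example:10000/;UseNativeQuery=1");
		p.setProperty("dbImpalaUrl", "jdbc:hive2://example:21050/;auth=noSasl");
		p.setProperty("usageStatsDBSchema", "usagestats");
		p.setProperty("statsDBSchema", "stats");

		dbHiveUrl = p.getProperty("dbHiveUrl");
		dbImpalaUrl = p.getProperty("dbImpalaUrl");
		usageStatsDBSchema = p.getProperty("usageStatsDBSchema");
		statsDBSchema = p.getProperty("statsDBSchema");

		ConnectDBSketch.init(); // the DB layer copies the statics, as in ConnectDB.init()
	}
}

class ConnectDBSketch {
	private static String dbHiveUrl;
	private static String usageStatsDBSchema;

	static void init() {
		// configuration is copied from ExecuteWorkflow instead of being hardcoded
		dbHiveUrl = ExecuteWorkflowSketch.dbHiveUrl;
		usageStatsDBSchema = ExecuteWorkflowSketch.usageStatsDBSchema;
	}
}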
@@ -1,7 +0,0 @@
-stats_db_name=openaire_beta_20200618_stats
-openaire_db_name=openaire_beta_20200618
-external_stats_db_name=stats_ext
-stats_db_shadow_name=openaire_beta_20200618_stats_shadow
-hive_timeout=3000
-hive_spark_client_timeout=100000
-hive_spark_client_server_timeout=100000
@@ -7,11 +7,8 @@
 package eu.dnetlib.oa.graph.usagestats.export;
 
-/**
- *
- * @author D. Pierrakos, S. Zoupanos
- *
- */
+/**
+ * @author D. Pierrakos, S. Zoupanos
+ */
@@ -30,18 +27,16 @@ public abstract class ConnectDB {
 
 	private static String dbHiveUrl;
 	private static String dbImpalaUrl;
-	private static String dbUsername;
-	private static String dbPassword;
 	private static String usageStatsDBSchema;
 	private static String statsDBSchema;
 	private final static Logger log = Logger.getLogger(ConnectDB.class);
 
-	static void init(Properties properties) throws ClassNotFoundException {
+	static void init() throws ClassNotFoundException {
 
-		dbHiveUrl = "jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000/;UseNativeQuery=1";
-		dbImpalaUrl = "jdbc:hive2://iis-cdh5-test-gw.ocean.icm.edu.pl:21050/;auth=noSasl";
-		usageStatsDBSchema = "usagestats_20200913";
-		statsDBSchema = "openaire_prod_stats_shadow_20200821";
+		dbHiveUrl = ExecuteWorkflow.dbHiveUrl;
+		dbImpalaUrl = ExecuteWorkflow.dbImpalaUrl;
+		usageStatsDBSchema = ExecuteWorkflow.usageStatsDBSchema;
+		statsDBSchema = ExecuteWorkflow.statsDBSchema;
 
 		Class.forName("org.apache.hive.jdbc.HiveDriver");
 	}
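ConnectDB.getHiveConnection(), used by PiwikStatsDB further down, is not part of this diff; a plausible shape for it, consuming the URL that init() now receives from ExecuteWorkflow, is sketched below. This accessor is an assumption for illustration, not the committed code.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

abstract class ConnectDBAccessSketch {
	private static String dbHiveUrl; // set by init(), as in the hunk above

	// hypothetical accessor mirroring the getHiveConnection() call sites
	public static Connection getHiveConnection() throws SQLException {
		return DriverManager.getConnection(dbHiveUrl);
	}
}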
@@ -27,6 +27,10 @@ public class ExecuteWorkflow {
 	static String lareferenciaLogPath;
 	static String lareferenciaBaseURL;
 	static String lareferenciaAuthToken;
+	static String dbHiveUrl;
+	static String dbImpalaUrl;
+	static String usageStatsDBSchema;
+	static String statsDBSchema;
 
 	public static void main(String args[]) throws Exception {
 
@@ -52,6 +56,11 @@ public class ExecuteWorkflow {
 		lareferenciaBaseURL = parser.get("lareferenciaBaseURL");
 		lareferenciaAuthToken = parser.get("lareferenciaAuthToken");
 
+		dbHiveUrl = parser.get("dbHiveUrl");
+		dbImpalaUrl = parser.get("dbImpalaUrl");
+		usageStatsDBSchema = parser.get("usageStatsDBSchema");
+		statsDBSchema = parser.get("statsDBSchema");
+
 		System.out.println("====> Printing parsed variables");
 		System.out.println(ExecuteWorkflow.matomoAuthToken);
 		System.out.println(ExecuteWorkflow.matomoBaseURL);
@@ -64,6 +73,10 @@ public class ExecuteWorkflow {
 		System.out.println(ExecuteWorkflow.lareferenciaLogPath);
 		System.out.println(ExecuteWorkflow.lareferenciaBaseURL);
 		System.out.println(ExecuteWorkflow.lareferenciaAuthToken);
+		System.out.println(ExecuteWorkflow.dbHiveUrl);
+		System.out.println(ExecuteWorkflow.dbImpalaUrl);
+		System.out.println(ExecuteWorkflow.usageStatsDBSchema);
+		System.out.println(ExecuteWorkflow.statsDBSchema);
 
 		UsageStatsExporter usagestatsExport = new UsageStatsExporter();
 		usagestatsExport.export();
@@ -78,6 +78,12 @@ public class PiwikStatsDB {
 	private void createDatabase() throws Exception {
 		try {
 			stmt = ConnectDB.getHiveConnection().createStatement();
+
+			logger.info("Dropping usagestats DB");
+			String dropDatabase = "DROP DATABASE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + " CASCADE";
+			stmt.executeUpdate(dropDatabase);
+
+			logger.info("Creating usagestats DB");
 			String createDatabase = "CREATE DATABASE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema();
 			stmt.executeUpdate(createDatabase);
 
@@ -7,11 +7,8 @@
 package eu.dnetlib.oa.graph.usagestats.export;
 
-/**
- *
- * @author D. Pierrakos, S. Zoupanos
- *
- */
+/**
+ * @author D. Pierrakos, S. Zoupanos
+ */
@@ -43,12 +43,18 @@ public class UsageStatsExporter {
 
 	public void export() throws Exception {
 
+		logger.info("Initialising DB properties");
+		ConnectDB.init();
+
+		// System.exit(0);
+
+		// runImpalaQuery();
 
 		// Create DB tables - they are also needed to download the statistics too
 		logger.info("Creating database and tables");
 		PiwikStatsDB piwikstatsdb = new PiwikStatsDB(ExecuteWorkflow.repoLogPath, ExecuteWorkflow.portalLogPath);
 		//
 
 		// // Download the statistics - The following 2 lines are not needed after the download - Commenting them out for
 		// // the moment
 		logger.info("Initializing the download logs module");
@@ -88,7 +94,7 @@ public class UsageStatsExporter {
 		// log.info("sarc done");
 
 		// // finalize usagestats
-		// piwikstatsdb.finalizeStats();
+		piwikstatsdb.finalizeStats();
 		// log.info("finalized stats");
 	}
 }
@@ -70,9 +70,29 @@
 		"paramLongName": "lareferenciaAuthToken",
 		"paramDescription": "the authentication token for the LaReferencia API",
 		"paramRequired": true
-	}
+	},
+	{
+		"paramName": "dbhu",
+		"paramLongName": "dbHiveUrl",
+		"paramDescription": "the JDBC URL of the Hive server",
+		"paramRequired": true
+	},
+	{
+		"paramName": "dbiu",
+		"paramLongName": "dbImpalaUrl",
+		"paramDescription": "the JDBC URL of the Impala server",
+		"paramRequired": true
+	},
+	{
+		"paramName": "usdbs",
+		"paramLongName": "usageStatsDBSchema",
+		"paramDescription": "the schema name of the usage stats database",
+		"paramRequired": true
+	},
+	{
+		"paramName": "sdbs",
+		"paramLongName": "statsDBSchema",
+		"paramDescription": "the schema name of the stats database",
+		"paramRequired": true
+	}
 ]
@@ -21,7 +21,11 @@
 	</property>
 	<property>
 		<name>hiveJdbcUrl</name>
-		<value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000</value>
+		<value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000/;UseNativeQuery=1</value>
 	</property>
+	<property>
+		<name>impalaJdbcUrl</name>
+		<value>jdbc:hive2://iis-cdh5-test-gw.ocean.icm.edu.pl:21050/;auth=noSasl;</value>
+	</property>
 	<property>
 		<name>oozie.wf.workflow.notification.url</name>
@@ -2,11 +2,15 @@
 <parameters>
 	<property>
 		<name>hiveMetastoreUris</name>
-		<description>hive server metastore URIs</description>
+		<description>Hive server metastore URIs</description>
 	</property>
 	<property>
 		<name>hiveJdbcUrl</name>
-		<description>hive server jdbc url</description>
+		<description>Hive server jdbc url</description>
 	</property>
+	<property>
+		<name>impalaJdbcUrl</name>
+		<description>Impala server jdbc url</description>
+	</property>
 </parameters>
@@ -50,6 +54,10 @@
 			<arg>--lareferenciaLogPath</arg><arg>${lareferenciaLogPath}</arg>
 			<arg>--lareferenciaBaseURL</arg><arg>${lareferenciaBaseURL}</arg>
 			<arg>--lareferenciaAuthToken</arg><arg>${lareferenciaAuthToken}</arg>
+			<arg>--dbHiveUrl</arg><arg>${hiveJdbcUrl}</arg>
+			<arg>--dbImpalaUrl</arg><arg>${impalaJdbcUrl}</arg>
+			<arg>--usageStatsDBSchema</arg><arg>${usageStatsDBSchema}</arg>
+			<arg>--statsDBSchema</arg><arg>${statsDBSchema}</arg>
 			<capture-output/>
 		</java>
 		<ok to="End" />
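
With the new <arg> entries above, the four values are expected to arrive through the Oozie job configuration. A hypothetical job.properties fragment supplying them might look as follows; the JDBC URLs are the ones from the config defaults above, while the schema names are placeholders, not values taken from this commit.

# hypothetical job.properties entries for the new parameters
hiveJdbcUrl=jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000/;UseNativeQuery=1
impalaJdbcUrl=jdbc:hive2://iis-cdh5-test-gw.ocean.icm.edu.pl:21050/;auth=noSasl
usageStatsDBSchema=usagestats
statsDBSchema=openaire_prod_stats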