Simple java action added.

Simple java connection to hive db + basic statements added
This commit is contained in:
Spyros Zoupanos 2020-05-09 13:20:41 +03:00
parent cabe92d155
commit c0b509abfb
22 changed files with 460 additions and 28 deletions

View File

@ -0,0 +1,35 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>dhp-workflows</artifactId>
<groupId>eu.dnetlib.dhp</groupId>
<version>1.1.7-SNAPSHOT</version>
</parent>
<groupId>eu.dnetlib</groupId>
<modelVersion>4.0.0</modelVersion>
<artifactId>dhp-java-action-dbconnection</artifactId>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<cdh.hive.version>0.13.1-cdh5.2.1</cdh.hive.version>
<cdh.hadoop.version>2.5.0-cdh5.2.1</cdh.hadoop.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>${cdh.hive.version}</version>
<!-- <version>3.1.2</version> -->
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${cdh.hadoop.version}</version>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,99 @@
package eu.dnetlib.oa.graph.usagestats.export;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Properties;
/**
 * Minimal end-to-end check of a Hive JDBC connection, intended to run as an
 * Oozie java action: executes a SELECT, then DROP/CREATE/INSERT/SELECT against
 * the test_db_spyros schema, printing progress so failures are easy to localise.
 */
public class Simple {

	public static void main(final String[] args) throws Exception {
		System.out.println("=============================================");
		System.out.println("Starting");
		System.out.println("=============================================");

		// Connection string that works OK (credentials redacted):
		// "jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000/stats_wf_db_galexiou_oozie_beta;UID=spyros;PWD=XXXXXX;UseNativeQuery=1"
		// The following connection string also works (no explicit database or credentials).
		final String connectionUrl = "jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000/;UseNativeQuery=1";
		final String jdbcDriverName = "org.apache.hive.jdbc.HiveDriver";

		System.out.println("\n=============================================");
		System.out.println("Hive JDBC Example");
		System.out.println("Using Connection URL: " + connectionUrl);
		System.out.println("Using JDBC Driver " + jdbcDriverName);

		try {
			Class.forName(jdbcDriverName);

			// try-with-resources closes ResultSet/Statement/Connection in every case,
			// fixing the Statement/ResultSet leak and the NPE risk of the old
			// finally block (con.close() when the connection was never established).
			try (Connection con = DriverManager.getConnection(connectionUrl);
				Statement stmt = con.createStatement()) {

				try (ResultSet rs = stmt.executeQuery("select personid, lastname from test_db_spyros.persons")) {
					System.out.println("\n== Begin Query Results ======================");
					// print the results to the console
					while (rs.next()) {
						System.out.println(rs.getString(1) + " | " + rs.getString(2));
					}
					System.out.println("== End Query Results =======================\n\n");
				}

				// Drop table if exists
				stmt.execute("DROP TABLE IF EXISTS test_db_spyros.Persons2");
				// Create table
				stmt
					.execute(
						"CREATE TABLE test_db_spyros.Persons2 (PersonID int, LastName varchar(255), FirstName varchar(255))");
				// Insert
				stmt
					.execute(
						"INSERT INTO test_db_spyros.persons2 (personid, lastname, firstname) VALUES ('1', 'ZoupZoup', 'Spyros')");

				// Select the inserted values back and print them
				try (ResultSet rs = stmt.executeQuery("select personid, lastname, firstname from test_db_spyros.persons2")) {
					while (rs.next()) {
						System.out.println(rs.getString(1) + " | " + rs.getString(2) + " | " + rs.getString(3));
					}
				}
			}
		} catch (SQLException e) {
			// SQL-level failure: connectivity, permissions, bad statement
			e.printStackTrace();
			System.out.println(e.getMessage());
		} catch (Exception e) {
			// anything else, e.g. the JDBC driver class not being on the classpath
			e.printStackTrace();
			System.out.println(e.getMessage());
		}

		System.out.println("=============================================");
		System.out.println("Ending");
		System.out.println("=============================================");
	}
}

View File

@ -27,4 +27,8 @@
<name>oozie.wf.workflow.notification.url</name>
<value>{serviceUrl}/v1/oozieNotification/jobUpdate?jobId=$jobId%26status=$status</value>
</property>
<property>
<name>oozie.use.system.libpath</name>
<value>true</value>
</property>
</configuration>

View File

@ -0,0 +1,48 @@
<workflow-app name="java_action_dbconnection" xmlns="uri:oozie:workflow:0.5">
<parameters>
<property>
<name>hiveMetastoreUris</name>
<description>hive server metastore URIs</description>
</property>
<property>
<name>hiveJdbcUrl</name>
<description>hive server jdbc url</description>
</property>
</parameters>
<global>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>hive.metastore.uris</name>
<value>${hiveMetastoreUris}</value>
</property>
<property>
<name>mapreduce.job.queuename</name>
<value>${queueName}</value>
</property>
<property>
<name>oozie.launcher.mapred.job.queue.name</name>
<value>${oozieLauncherQueueName}</value>
</property>
</configuration>
</global>
<start to="Step1"/>
<kill name="Kill">
<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<action name='Step1'>
<java>
<main-class>eu.dnetlib.oa.graph.usagestats.export.Simple</main-class>
</java>
<ok to="End" />
<error to="Kill" />
</action>
<end name="End"/>
</workflow-app>

View File

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>dhp-workflows</artifactId>
<groupId>eu.dnetlib.dhp</groupId>
<version>1.1.7-SNAPSHOT</version>
</parent>
<groupId>eu.dnetlib</groupId>
<modelVersion>4.0.0</modelVersion>
<artifactId>dhp-java-action-simple</artifactId>
</project>

View File

@ -0,0 +1,12 @@
package eu.dnetlib.oa.graph.usagestats.export;
/**
 * Trivial "hello world" class used to verify that a plain Oozie java action
 * can execute a main method from this module.
 */
public class Simple {

	public static void main(final String[] args) throws Exception {
		// Reuse one separator constant for the framed greeting.
		final String frame = "=============================================";
		System.out.println(frame);
		System.out.println("Hello world");
		System.out.println(frame);
	}
}

View File

@ -0,0 +1,34 @@
<configuration>
<property>
<name>jobTracker</name>
<value>${jobTracker}</value>
</property>
<property>
<name>nameNode</name>
<value>${nameNode}</value>
</property>
<property>
<name>oozie.use.system.libpath</name>
<value>true</value>
</property>
<property>
<name>oozie.action.sharelib.for.spark</name>
<value>spark2</value>
</property>
<property>
<name>hiveMetastoreUris</name>
<value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
</property>
<property>
<name>hiveJdbcUrl</name>
<value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000</value>
</property>
<property>
<name>oozie.wf.workflow.notification.url</name>
<value>{serviceUrl}/v1/oozieNotification/jobUpdate?jobId=$jobId%26status=$status</value>
</property>
<!-- duplicate oozie.use.system.libpath property removed; it is already declared above with the same value (true) -->
</configuration>

View File

@ -0,0 +1,48 @@
<workflow-app name="java_action_simple" xmlns="uri:oozie:workflow:0.5">
<parameters>
<property>
<name>hiveMetastoreUris</name>
<description>hive server metastore URIs</description>
</property>
<property>
<name>hiveJdbcUrl</name>
<description>hive server jdbc url</description>
</property>
</parameters>
<global>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>hive.metastore.uris</name>
<value>${hiveMetastoreUris}</value>
</property>
<property>
<name>mapreduce.job.queuename</name>
<value>${queueName}</value>
</property>
<property>
<name>oozie.launcher.mapred.job.queue.name</name>
<value>${oozieLauncherQueueName}</value>
</property>
</configuration>
</global>
<start to="Step1"/>
<kill name="Kill">
<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<action name='Step1'>
<java>
<main-class>eu.dnetlib.oa.graph.usagestats.export.Simple</main-class>
</java>
<ok to="End" />
<error to="Kill" />
</action>
<end name="End"/>
</workflow-app>

View File

@ -50,8 +50,8 @@
</plugin>
</plugins>
</build>
<properties>
<!-- <properties>
<maven.compiler.source>1.7</maven.compiler.source>
<maven.compiler.target>1.7</maven.compiler.target>
</properties>
</properties> -->
</project>

View File

@ -0,0 +1,60 @@
package eu.dnetlib.oa.graph.usagestats.export;
/**
 * Scratch/reference class for connecting to Hive (or Impala) over JDBC.
 * All of the JDBC logic is commented out; the only live behavior is printing
 * a single banner line. The live equivalent of this example is the Simple class.
 *
 * SECURITY NOTE(review): the original commented-out connection string embedded
 * a real password; it is redacted below. That credential should be rotated if
 * it was ever valid, since it was committed to version control.
 */
public class ClouderaImpalaJdbcExample {

	public static void main(String[] args) {
		System.out.println("\n=============================================");

		// Disabled reference snippet (kept deliberately as documentation of the
		// connection strings that were tried; credentials redacted):
		//
		// String sqlStatement = "select count(*) from fundref;";
		// String connectionUrl =
		//     "jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000/stats_wf_db_galexiou_oozie_beta;UID=spyros;PWD=XXXXXX;UseNativeQuery=1";
		// // alternatives that were tried:
		// //   "jdbc:hive2://172.16.10.74:21050/emay_data;auth=noSasl"
		// //   "jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000"
		// //   "jdbc:impala://iis-cdh5-test-gw.ocean.icm.edu.pl:21050/...;UID=...;PWD=XXXXX;UseNativeQuery=1"
		// String jdbcDriverName = "org.apache.hive.jdbc.HiveDriver";
		//
		// Class.forName(jdbcDriverName);
		// try (Connection con = DriverManager.getConnection(connectionUrl);
		//     Statement stmt = con.createStatement();
		//     ResultSet rs = stmt.executeQuery(sqlStatement)) {
		//     System.out.println("\n== Begin Query Results ======================");
		//     while (rs.next()) {
		//         System.out.println(rs.getString(1));
		//     }
		//     System.out.println("== End Query Results =======================\n\n");
		// } catch (SQLException e) {
		//     e.printStackTrace();
		// }
	}
}

View File

@ -1,43 +1,43 @@
<html>
<head>
<title>Revision 58415: /dnet45/modules/dnet-openaire-usage-stats-export-wf/trunk/dnet-openaire-usage-stats-export/src/main/java/eu/dnetlib/usagestats/export</title>
<head>
<title>Revision 58415: /dnet45/modules/dnet-openaire-usage-stats-export-wf/trunk/dnet-openaire-usage-stats-export/src/main/java/eu/dnetlib/usagestats/export</title>
</head>
<body>
<h2>Revision 58415: /dnet45/modules/dnet-openaire-usage-stats-export-wf/trunk/dnet-openaire-usage-stats-export/src/main/java/eu/dnetlib/usagestats/export</h2>
<ul>
<li>
<a href="../">..</a>
<li>
<a href="../">..</a>
</li>
<li>
<a href="ConnectDB.java">ConnectDB.java</a>
<li>
<a href="ConnectDB.java">ConnectDB.java</a>
</li>
<li>
<a href="ExecuteWorkflow.java">ExecuteWorkflow.java</a>
<li>
<a href="ExecuteWorkflow.java">ExecuteWorkflow.java</a>
</li>
<li>
<a href="IrusStats.java">IrusStats.java</a>
<li>
<a href="IrusStats.java">IrusStats.java</a>
</li>
<li>
<a href="PiwikDownloadLogs.java">PiwikDownloadLogs.java</a>
<li>
<a href="PiwikDownloadLogs.java">PiwikDownloadLogs.java</a>
</li>
<li>
<a href="PiwikStatsDB.java">PiwikStatsDB.java</a>
<li>
<a href="PiwikStatsDB.java">PiwikStatsDB.java</a>
</li>
<li>
<a href="ReadCounterRobotsList.java">ReadCounterRobotsList.java</a>
<li>
<a href="ReadCounterRobotsList.java">ReadCounterRobotsList.java</a>
</li>
<li>
<a href="SarcStats.java">SarcStats.java</a>
<li>
<a href="SarcStats.java">SarcStats.java</a>
</li>
<li>
<a href="UsageStatsExporter.java">UsageStatsExporter.java</a>
<li>
<a href="UsageStatsExporter.java">UsageStatsExporter.java</a>
</li>
</ul>
<hr noshade>
<hr noshade>
<em>
Powered by
Powered by
<a href="http://subversion.tigris.org/">Subversion</a>
version 1.4.4 (r25188).
</em>
version 1.4.4 (r25188).
</em>
</body>
</html>

View File

@ -0,0 +1,34 @@
<configuration>
<property>
<name>jobTracker</name>
<value>${jobTracker}</value>
</property>
<property>
<name>nameNode</name>
<value>${nameNode}</value>
</property>
<property>
<name>oozie.use.system.libpath</name>
<value>true</value>
</property>
<property>
<name>oozie.action.sharelib.for.spark</name>
<value>spark2</value>
</property>
<property>
<name>hiveMetastoreUris</name>
<value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
</property>
<property>
<name>hiveJdbcUrl</name>
<value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000</value>
</property>
<property>
<name>oozie.wf.workflow.notification.url</name>
<value>{serviceUrl}/v1/oozieNotification/jobUpdate?jobId=$jobId%26status=$status</value>
</property>
<!-- duplicate oozie.use.system.libpath property removed; it is already declared above with the same value (true) -->
</configuration>

View File

@ -31,15 +31,58 @@
<name>hive.metastore.uris</name>
<value>${hiveMetastoreUris}</value>
</property>
<property>
<name>mapreduce.job.queuename</name>
<value>${queueName}</value>
</property>
<property>
<name>oozie.launcher.mapred.job.queue.name</name>
<value>${oozieLauncherQueueName}</value>
</property>
</configuration>
</global>
<start to="Step1"/>
<start to="Step11"/>
<kill name="Kill">
<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<action name='Step11'>
<java>
<prepare>
<delete path="${jobOutput}"/>
</prepare>
<configuration>
<property>
<name>mapred.queue.name</name>
<value>default</value>
</property>
</configuration>
<main-class>eu.dnetlib.oa.graph.usagestats.export.ClouderaImpalaJdbcExample</main-class>
<capture-output/>
</java>
<ok to="End" />
<error to="fail" />
</action>
<action name="ImportDB_claims">
<java>
<prepare>
<delete path="${contentPath}/db_claims"/>
</prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
<arg>--hdfsPath</arg><arg>${contentPath}/db_claims</arg>
<arg>--postgresUrl</arg><arg>${postgresURL}</arg>
<arg>--postgresUser</arg><arg>${postgresUser}</arg>
<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
<arg>--action</arg><arg>claims</arg>
</java>
<ok to="ImportODF_claims"/>
<error to="Kill"/>
</action>
<action name='Step1'>
<java>
<job-tracker>${jobTracker}</job-tracker>
@ -50,7 +93,7 @@
<value>${queueName}</value>
</property>
</configuration>
<main-class>eu.dnetlib.oa.graph.usage-stats.export.UsageStatsExporter</main-class>
<main-class>eu.dnetlib.oa.graph.usagestats.export.UsageStatsExporter</main-class>
<!-- second main-class disabled: an Oozie java action allows exactly one main-class element -->
<!-- <main-class>org.apache.oozie.test.MyTest</main-class> -->
<arg>${outputFileName}</arg>
<capture-output/>

View File

@ -27,6 +27,8 @@
<module>dhp-graph-provision-scholexplorer</module>
<module>dhp-stats-update</module>
<module>dhp-usage-stats-update</module>
<module>dhp-java-action-simple</module>
<module>dhp-java-action-dbconnection</module>
<module>dhp-broker-events</module>
</modules>