small adjustments in the graph2hive workflow

Claudio Atzori 2020-04-21 18:52:23 +02:00
parent 0b55795d4d
commit c891661822
3 changed files with 15 additions and 17 deletions

GraphHiveImporterJob.java

@@ -19,6 +19,8 @@ public class GraphHiveImporterJob {
     private static final Logger log = LoggerFactory.getLogger(GraphHiveImporterJob.class);
 
+    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
     public static void main(String[] args) throws Exception {
         final ArgumentApplicationParser parser =
@@ -37,12 +39,12 @@ public class GraphHiveImporterJob {
         String inputPath = parser.get("inputPath");
         log.info("inputPath: {}", inputPath);
 
-        String hiveMetastoreUris = parser.get("hiveMetastoreUris");
-        log.info("hiveMetastoreUris: {}", hiveMetastoreUris);
-
         String hiveDbName = parser.get("hiveDbName");
         log.info("hiveDbName: {}", hiveDbName);
 
+        String hiveMetastoreUris = parser.get("hiveMetastoreUris");
+        log.info("hiveMetastoreUris: {}", hiveMetastoreUris);
+
         SparkConf conf = new SparkConf();
         conf.set("hive.metastore.uris", hiveMetastoreUris);
@@ -58,13 +60,13 @@ public class GraphHiveImporterJob {
             spark.sql(String.format("DROP DATABASE IF EXISTS %s CASCADE", hiveDbName));
             spark.sql(String.format("CREATE DATABASE IF NOT EXISTS %s", hiveDbName));
 
-            final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
+            final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 
             // Read the input file and convert it into RDD of serializable object
             ModelSupport.oafTypes.forEach(
                 (name, clazz) ->
                     spark.createDataset(
                         sc.textFile(inputPath + "/" + name)
-                            .map(s -> new ObjectMapper().readValue(s, clazz))
+                            .map(s -> OBJECT_MAPPER.readValue(s, clazz))
                             .rdd(),
                         Encoders.bean(clazz))
                         .write()
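
Two details of this change are easy to miss. The shared OBJECT_MAPPER replaces a new ObjectMapper() that was being allocated inside the map closure for every input line; Jackson's ObjectMapper is thread-safe once configured, so a single instance can serve the whole job and its internal caches are built only once. The switch to JavaSparkContext.fromSparkContext is cosmetic: both it and the constructor wrap an existing SparkContext, the factory form is simply the more idiomatic spelling. Below is a minimal sketch of the mapper-reuse pattern, independent of Spark; the Point bean and class names are illustrative, not from the repository.

import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.IOException;

public class MapperReuseSketch {

    // One shared instance: ObjectMapper is thread-safe after configuration,
    // and reusing it avoids rebuilding its internal caches per record.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    // Simple bean standing in for the Oaf model classes the job deserializes.
    public static class Point {
        private int x;
        private int y;
        public int getX() { return x; }
        public void setX(int x) { this.x = x; }
        public int getY() { return y; }
        public void setY(int y) { this.y = y; }
    }

    public static void main(String[] args) throws IOException {
        // Same call shape as the job's map closure: one readValue per line,
        // but a single mapper for the whole run.
        Point p = OBJECT_MAPPER.readValue("{\"x\":1,\"y\":2}", Point.class);
        System.out.println(p.getX() + "," + p.getY()); // prints 1,2
    }
}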

Workflow configuration XML (Oozie job properties)

@@ -12,11 +12,7 @@
         <value>true</value>
     </property>
     <property>
-        <name>oozie.action.sharelib.for.spark</name>
-        <value>spark2</value>
-    </property>
-    <property>
-        <name>hive_metastore_uris</name>
+        <name>hiveMetastoreUris</name>
         <value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
     </property>
     <property>
@@ -24,7 +20,7 @@
         <value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000</value>
     </property>
     <property>
-        <name>hive_db_name</name>
+        <name>hiveDbName</name>
         <value>openaire</value>
     </property>
 </configuration>
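
The snake_case-to-camelCase rename only works because every layer agrees on the name: the property defined here fills the ${hiveMetastoreUris} placeholder in the workflow below, the workflow forwards it as a --hiveMetastoreUris argument, and the job reads it back with parser.get("hiveMetastoreUris"). A toy sketch of that last link follows, using a hypothetical stand-in for the repository's ArgumentApplicationParser.

import java.util.HashMap;
import java.util.Map;

public class ArgNameSketch {

    // Hypothetical stand-in for ArgumentApplicationParser: collects
    // "--name value" pairs so they can be looked up by name later.
    static Map<String, String> parseArgs(String[] args) {
        Map<String, String> parsed = new HashMap<>();
        for (int i = 0; i + 1 < args.length; i += 2) {
            parsed.put(args[i].replaceFirst("^--", ""), args[i + 1]);
        }
        return parsed;
    }

    public static void main(String[] args) {
        // These pairs mirror the <arg> elements the workflow passes to the job.
        Map<String, String> parsed = parseArgs(new String[] {
            "--inputPath", "/tmp/graph",
            "--hiveDbName", "openaire",
            "--hiveMetastoreUris", "thrift://example-host:9083"
        });
        // The lookup key must match the --name exactly, hence the rename.
        System.out.println(parsed.get("hiveDbName")); // prints openaire
    }
}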

Oozie workflow definition (workflow.xml)

@@ -2,11 +2,11 @@
 <parameters>
     <property>
-        <name>sourcePath</name>
+        <name>inputPath</name>
         <description>the source path</description>
     </property>
     <property>
-        <name>hive_db_name</name>
+        <name>hiveDbName</name>
         <description>the target hive database name</description>
     </property>
     <property>
@@ -87,9 +87,9 @@
                 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                 --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
             </spark-opts>
-            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
-            <arg>--hive_db_name</arg><arg>${hive_db_name}</arg>
-            <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+            <arg>--inputPath</arg><arg>${inputPath}</arg>
+            <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
+            <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
         </spark>
         <ok to="PostProcessing"/>
         <error to="Kill"/>
@@ -102,7 +102,7 @@
             <configuration>
                 <property>
                     <name>hive.metastore.uris</name>
-                    <value>${hive_metastore_uris}</value>
+                    <value>${hiveMetastoreUris}</value>
                 </property>
             </configuration>
             <jdbc-url>${hive_jdbc_url}/${hive_db_name}</jdbc-url>
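
For context on where hiveMetastoreUris ends up: the job puts it on the SparkConf as hive.metastore.uris so that its spark.sql(...) statements run against the intended metastore, and this post-processing action needs the same value to reach the freshly created database over JDBC. Here is a self-contained sketch of that session setup, assuming a Hive-enabled Spark build and a placeholder URI in place of the production one.

import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

public class HiveSessionSketch {

    public static void main(String[] args) {
        // Placeholder URI; the real job receives ${hiveMetastoreUris} instead.
        String hiveMetastoreUris = "thrift://example-metastore:9083";

        SparkConf conf = new SparkConf()
                .setAppName("graph2hive-sketch")
                .setMaster("local[*]") // local master for the sketch only
                .set("hive.metastore.uris", hiveMetastoreUris);

        SparkSession spark = SparkSession.builder()
                .config(conf)
                .enableHiveSupport() // lets spark.sql(...) reach the Hive metastore
                .getOrCreate();

        // The same statements the job issues against the target database.
        spark.sql("DROP DATABASE IF EXISTS openaire_demo CASCADE");
        spark.sql("CREATE DATABASE IF NOT EXISTS openaire_demo");

        spark.stop();
    }
}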