forked from D-Net/dnet-hadoop
small adjustments in the graph2hive workflow
parent 0b55795d4d
commit c891661822
@@ -19,6 +19,8 @@ public class GraphHiveImporterJob {
 
     private static final Logger log = LoggerFactory.getLogger(GraphHiveImporterJob.class);
 
+    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
     public static void main(String[] args) throws Exception {
 
         final ArgumentApplicationParser parser =
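The new OBJECT_MAPPER constant hoists Jackson's mapper into a shared static field; ObjectMapper is thread-safe once configured, so a single instance can serve every record parsed later in the job (see the third hunk below) instead of allocating a fresh mapper per line. A minimal sketch of the reuse pattern, with a hypothetical Record bean standing in for the Oaf model classes:

import com.fasterxml.jackson.databind.ObjectMapper;

public class MapperReuseSketch {

    // one shared instance: ObjectMapper is thread-safe after configuration,
    // so a per-record "new ObjectMapper()" only adds allocation overhead
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    // hypothetical bean standing in for the Oaf model classes
    public static class Record {
        public String id;
    }

    public static void main(String[] args) throws Exception {
        Record r = OBJECT_MAPPER.readValue("{\"id\":\"50|doi_123\"}", Record.class);
        System.out.println(r.id); // prints: 50|doi_123
    }
}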
@@ -37,12 +39,12 @@ public class GraphHiveImporterJob {
         String inputPath = parser.get("inputPath");
         log.info("inputPath: {}", inputPath);
 
+        String hiveMetastoreUris = parser.get("hiveMetastoreUris");
+        log.info("hiveMetastoreUris: {}", hiveMetastoreUris);
+
         String hiveDbName = parser.get("hiveDbName");
         log.info("hiveDbName: {}", hiveDbName);
 
-        String hiveMetastoreUris = parser.get("hiveMetastoreUris");
-        log.info("hiveMetastoreUris: {}", hiveMetastoreUris);
-
         SparkConf conf = new SparkConf();
         conf.set("hive.metastore.uris", hiveMetastoreUris);
 
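This hunk only reorders the parameter reads: hiveMetastoreUris now comes before hiveDbName, so the value is already in hand when the SparkConf is populated immediately afterwards. A minimal sketch of that setup; the builder chain is an assumption, since the hunk ends before the session construction is visible:

import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

public class HiveSessionSketch {

    // hive.metastore.uris must be on the conf before the Hive-enabled session
    // is created, which is why the job reads hiveMetastoreUris first
    static SparkSession buildSession(String hiveMetastoreUris) {
        SparkConf conf = new SparkConf();
        conf.set("hive.metastore.uris", hiveMetastoreUris); // e.g. thrift://host:9083

        // assumption: the construction below the visible hunk looks roughly like this
        return SparkSession
                .builder()
                .appName("GraphHiveImporterJob")
                .config(conf)
                .enableHiveSupport()
                .getOrCreate();
    }
}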
@@ -58,13 +60,13 @@ public class GraphHiveImporterJob {
                     spark.sql(String.format("DROP DATABASE IF EXISTS %s CASCADE", hiveDbName));
                     spark.sql(String.format("CREATE DATABASE IF NOT EXISTS %s", hiveDbName));
 
-                    final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
+                    final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
                     // Read the input file and convert it into RDD of serializable object
                     ModelSupport.oafTypes.forEach(
                             (name, clazz) ->
                                     spark.createDataset(
                                             sc.textFile(inputPath + "/" + name)
-                                                    .map(s -> new ObjectMapper().readValue(s, clazz))
+                                                    .map(s -> OBJECT_MAPPER.readValue(s, clazz))
                                                     .rdd(),
                                             Encoders.bean(clazz))
                                             .write()
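Two substitutions in the import loop: JavaSparkContext.fromSparkContext wraps the session's existing SparkContext instead of constructing a second JavaSparkContext around it, and the per-record new ObjectMapper() becomes the shared OBJECT_MAPPER declared above (a static field referenced from the lambda is resolved on the executor, so nothing non-serializable is captured). A condensed, self-contained sketch of the pattern; the SaveMode and saveAsTable tail are assumptions, since the hunk is truncated right after .write():

import java.util.Map;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

public class HiveImportSketch {

    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    public static void importGraph(SparkSession spark, String inputPath,
                                   String hiveDbName, Map<String, Class<?>> oafTypes) {
        // wrap the session's existing context instead of creating a second one
        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
        oafTypes.forEach((name, clazz) -> importOne(spark, sc, inputPath, hiveDbName, name, clazz));
    }

    private static <T> void importOne(SparkSession spark, JavaSparkContext sc,
                                      String inputPath, String hiveDbName,
                                      String name, Class<T> clazz) {
        spark.createDataset(
                sc.textFile(inputPath + "/" + name)
                        .map(s -> OBJECT_MAPPER.readValue(s, clazz)) // shared mapper, no per-record allocation
                        .rdd(),
                Encoders.bean(clazz))
                .write()
                // assumption: the elided tail writes one Hive table per Oaf type
                .mode(SaveMode.Overwrite)
                .saveAsTable(hiveDbName + "." + name);
    }
}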
@@ -12,11 +12,7 @@
         <value>true</value>
     </property>
-    <property>
-        <name>oozie.action.sharelib.for.spark</name>
-        <value>spark2</value>
-    </property>
     <property>
-        <name>hive_metastore_uris</name>
+        <name>hiveMetastoreUris</name>
         <value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
     </property>
     <property>
@@ -24,7 +20,7 @@
         <value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000</value>
     </property>
     <property>
-        <name>hive_db_name</name>
+        <name>hiveDbName</name>
         <value>openaire</value>
     </property>
 </configuration>
@@ -2,11 +2,11 @@
 
 <parameters>
     <property>
-        <name>sourcePath</name>
+        <name>inputPath</name>
         <description>the source path</description>
     </property>
     <property>
-        <name>hive_db_name</name>
+        <name>hiveDbName</name>
         <description>the target hive database name</description>
     </property>
     <property>
@@ -87,9 +87,9 @@
                 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                 --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
             </spark-opts>
-            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
-            <arg>--hive_db_name</arg><arg>${hive_db_name}</arg>
-            <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+            <arg>--inputPath</arg><arg>${inputPath}</arg>
+            <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
+            <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
         </spark>
         <ok to="PostProcessing"/>
         <error to="Kill"/>
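The renamed <arg> switches only take effect if they match the paramLongName entries the job's ArgumentApplicationParser is configured with, and the ${...} placeholders only resolve if the workflow parameters above (plus any config-default overrides) use the same camelCase names. A hedged sketch of the consuming side, patterned on how dnet-hadoop jobs usually wire their parameters; the JSON resource path is a hypothetical placeholder, not the real file name:

import org.apache.commons.io.IOUtils;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;

public class ParamWiringSketch {

    public static void main(String[] args) throws Exception {
        // the JSON resource describes the accepted switches; "--inputPath",
        // "--hiveDbName" and "--hiveMetastoreUris" must appear there as
        // paramLongName entries, or parser.get(...) has nothing to return
        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
                IOUtils.toString(ParamWiringSketch.class.getResourceAsStream(
                        "/eu/dnetlib/dhp/graph/input_graph_hive_parameters.json"))); // hypothetical path
        parser.parseArgument(args);

        System.out.println(parser.get("inputPath"));
        System.out.println(parser.get("hiveDbName"));
        System.out.println(parser.get("hiveMetastoreUris"));
    }
}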
@@ -102,7 +102,7 @@
             <configuration>
                 <property>
                     <name>hive.metastore.uris</name>
-                    <value>${hive_metastore_uris}</value>
+                    <value>${hiveMetastoreUris}</value>
                 </property>
             </configuration>
             <jdbc-url>${hive_jdbc_url}/${hive_db_name}</jdbc-url>