<workflow-app name="import_graph_as_hive_DB" xmlns="uri:oozie:workflow:0.5">
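
    <!-- Imports a graph dump from HDFS into a Hive database: resets the target DB,
         loads each entity type in parallel with Spark, then runs SQL post-processing. -->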
    <parameters>
        <property>
            <name>inputPath</name>
            <description>the source path</description>
        </property>
        <property>
            <name>hiveDbName</name>
            <description>the target hive database name</description>
        </property>
        <property>
            <name>hiveJdbcUrl</name>
            <description>hive server jdbc url</description>
        </property>
        <property>
            <name>hiveMetastoreUris</name>
            <description>hive server metastore URIs</description>
        </property>
        <property>
            <name>sparkDriverMemory</name>
            <description>memory for driver process</description>
        </property>
        <property>
            <name>sparkExecutorMemory</name>
            <description>memory for individual executor</description>
        </property>
        <property>
            <name>sparkExecutorCores</name>
            <description>number of cores used by single executor</description>
        </property>
        <property>
            <name>oozieActionShareLibForSpark2</name>
            <description>oozie action sharelib for spark 2.*</description>
        </property>
        <property>
            <name>spark2ExtraListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
            <description>spark 2.* extra listeners classname</description>
        </property>
        <property>
            <name>spark2SqlQueryExecutionListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
            <description>spark 2.* sql query execution listeners classname</description>
        </property>
        <property>
            <name>spark2YarnHistoryServerAddress</name>
            <description>spark 2.* yarn history server address</description>
        </property>
        <property>
            <name>spark2EventLogDir</name>
            <description>spark 2.* event log dir location</description>
        </property>
    </parameters>
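
    <!-- Defaults applied to every action: job queues and the Spark 2 sharelib. -->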
    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>mapreduce.job.queuename</name>
                <value>${queueName}</value>
            </property>
            <property>
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${oozieLauncherQueueName}</value>
            </property>
            <property>
                <name>oozie.action.sharelib.for.spark</name>
                <value>${oozieActionShareLibForSpark2}</value>
            </property>
        </configuration>
    </global>

    <start to="reset_DB"/>
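
    <!-- Generic error handler: reports the message of the node that failed. -->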
    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>
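
    <!-- Runs lib/scripts/reset_db.sql over Hive JDBC to (re)create the target database before the import starts. -->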
    <action name="reset_DB">
        <hive2 xmlns="uri:oozie:hive2-action:0.1">
            <configuration>
                <property>
                    <name>hive.metastore.uris</name>
                    <value>${hiveMetastoreUris}</value>
                </property>
            </configuration>
            <jdbc-url>${hiveJdbcUrl}/${hiveDbName}</jdbc-url>
            <script>lib/scripts/reset_db.sql</script>
            <param>hiveDbName=${hiveDbName}</param>
        </hive2>
        <ok to="fork_import"/>
        <error to="Kill"/>
    </action>
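
    <!-- Imports the eight graph entity tables in parallel, one Spark action per table. -->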
    <fork name="fork_import">
        <path start="import_publication"/>
        <path start="import_dataset"/>
        <path start="import_orp"/>
        <path start="import_software"/>
        <path start="import_datasource"/>
        <path start="import_organization"/>
        <path start="import_project"/>
        <path start="import_relation"/>
    </fork>
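
    <!-- Each import action runs GraphHiveTableImporterJob, mapping ${inputPath}/<entity>
         onto a table of ${hiveDbName}; the actions below differ only in the input
         subpath and the OAF class name passed via the className argument. -->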
    <action name="import_publication">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Import table publication</name>
            <class>eu.dnetlib.dhp.oa.graph.hive.GraphHiveTableImporterJob</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--inputPath</arg><arg>${inputPath}/publication</arg>
            <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
            <arg>--className</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
            <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
        </spark>
        <ok to="join_import"/>
        <error to="Kill"/>
    </action>

    <action name="import_dataset">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Import table dataset</name>
            <class>eu.dnetlib.dhp.oa.graph.hive.GraphHiveTableImporterJob</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--inputPath</arg><arg>${inputPath}/dataset</arg>
            <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
            <arg>--className</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
            <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
        </spark>
        <ok to="join_import"/>
        <error to="Kill"/>
    </action>

    <action name="import_orp">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Import table otherresearchproduct</name>
            <class>eu.dnetlib.dhp.oa.graph.hive.GraphHiveTableImporterJob</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--inputPath</arg><arg>${inputPath}/otherresearchproduct</arg>
            <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
            <arg>--className</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
            <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
        </spark>
        <ok to="join_import"/>
        <error to="Kill"/>
    </action>

    <action name="import_software">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Import table software</name>
            <class>eu.dnetlib.dhp.oa.graph.hive.GraphHiveTableImporterJob</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--inputPath</arg><arg>${inputPath}/software</arg>
            <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
            <arg>--className</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
            <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
        </spark>
        <ok to="join_import"/>
        <error to="Kill"/>
    </action>

    <action name="import_datasource">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Import table datasource</name>
            <class>eu.dnetlib.dhp.oa.graph.hive.GraphHiveTableImporterJob</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--inputPath</arg><arg>${inputPath}/datasource</arg>
            <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
            <arg>--className</arg><arg>eu.dnetlib.dhp.schema.oaf.Datasource</arg>
            <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
        </spark>
        <ok to="join_import"/>
        <error to="Kill"/>
    </action>

    <action name="import_organization">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Import table organization</name>
            <class>eu.dnetlib.dhp.oa.graph.hive.GraphHiveTableImporterJob</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--inputPath</arg><arg>${inputPath}/organization</arg>
            <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
            <arg>--className</arg><arg>eu.dnetlib.dhp.schema.oaf.Organization</arg>
            <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
        </spark>
        <ok to="join_import"/>
        <error to="Kill"/>
    </action>
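
    <!-- import_project additionally sets the numPartitions argument (100) to bound the number of output partitions. -->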
    <action name="import_project">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Import table project</name>
            <class>eu.dnetlib.dhp.oa.graph.hive.GraphHiveTableImporterJob</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--inputPath</arg><arg>${inputPath}/project</arg>
            <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
            <arg>--className</arg><arg>eu.dnetlib.dhp.schema.oaf.Project</arg>
            <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
            <arg>--numPartitions</arg><arg>100</arg>
        </spark>
        <ok to="join_import"/>
        <error to="Kill"/>
    </action>

    <action name="import_relation">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Import table relation</name>
            <class>eu.dnetlib.dhp.oa.graph.hive.GraphHiveTableImporterJob</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--inputPath</arg><arg>${inputPath}/relation</arg>
            <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
            <arg>--className</arg><arg>eu.dnetlib.dhp.schema.oaf.Relation</arg>
            <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
        </spark>
        <ok to="join_import"/>
        <error to="Kill"/>
    </action>

    <join name="join_import" to="PostProcessing"/>
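
    <!-- Once all imports complete, runs lib/scripts/postprocessing.sql over Hive JDBC to finalize the database. -->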
<action name="PostProcessing">
|
|
<hive2 xmlns="uri:oozie:hive2-action:0.1">
|
|
<configuration>
|
|
<property>
|
|
<name>hive.metastore.uris</name>
|
|
<value>${hiveMetastoreUris}</value>
|
|
</property>
|
|
</configuration>
|
|
<jdbc-url>${hiveJdbcUrl}/${hiveDbName}</jdbc-url>
|
|
<script>lib/scripts/postprocessing.sql</script>
|
|
<param>hiveDbName=${hiveDbName}</param>
|
|
</hive2>
|
|
<ok to="End"/>
|
|
<error to="Kill"/>
|
|
</action>

    <end name="End"/>

</workflow-app>