<workflow-app name="create broker events" xmlns="uri:oozie:workflow:0.5">
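    <!--
        Broker events generation workflow: a chain of Spark jobs that prepares the graph
        entities and their related datasets, projects, publications and softwares, joins
        and groups them, and finally generates the broker events
        (eu.dnetlib.dhp.broker.oa.GenerateEventsJob).
    -->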

    <parameters>
        <property>
            <name>graphInputPath</name>
            <description>the path where the graph is stored</description>
        </property>
        <property>
            <name>workingPath</name>
            <description>the path where the generated data will be stored</description>
        </property>
        <property>
            <name>isLookupUrl</name>
            <description>the address of the lookUp service</description>
        </property>
        <property>
            <name>dedupConfProfId</name>
            <description>the id of a valid Dedup Configuration Profile</description>
        </property>

        <property>
            <name>sparkDriverMemory</name>
            <description>memory for driver process</description>
        </property>
        <property>
            <name>sparkExecutorMemory</name>
            <description>memory for individual executor</description>
        </property>
        <property>
            <name>sparkExecutorCores</name>
            <description>number of cores used by single executor</description>
        </property>
        <property>
            <name>oozieActionShareLibForSpark2</name>
            <description>oozie action sharelib for spark 2.*</description>
        </property>
        <property>
            <name>spark2ExtraListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
            <description>spark 2.* extra listeners classname</description>
        </property>
        <property>
            <name>spark2SqlQueryExecutionListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
            <description>spark 2.* sql query execution listeners classname</description>
        </property>
        <property>
            <name>spark2YarnHistoryServerAddress</name>
            <description>spark 2.* yarn history server address</description>
        </property>
        <property>
            <name>spark2EventLogDir</name>
            <description>spark 2.* event log dir location</description>
        </property>
    </parameters>
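    <!-- Settings inherited by every action: job tracker, name node, queue names and the Spark 2 sharelib. -->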

    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>mapreduce.job.queuename</name>
                <value>${queueName}</value>
            </property>
            <property>
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${oozieLauncherQueueName}</value>
            </property>
            <property>
                <name>oozie.action.sharelib.for.spark</name>
                <value>${oozieActionShareLibForSpark2}</value>
            </property>
        </configuration>
    </global>

    <start to="ensure_working_path"/>
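    <!-- Generic error handler: any failing action routes here and aborts the workflow. -->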

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <action name="ensure_working_path">
        <fs>
            <mkdir path='${workingPath}'/>
        </fs>
        <ok to="start_entities_and_rels"/>
        <error to="Kill"/>
    </action>
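    <!-- The five preparation jobs below are independent and run in parallel. -->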

    <fork name="start_entities_and_rels">
        <path start="prepare_simple_entities"/>
        <path start="prepare_related_softwares"/>
        <path start="prepare_related_datasets"/>
        <path start="prepare_related_projects"/>
        <path start="prepare_related_publications"/>
    </fork>

    <action name="prepare_simple_entities">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>PrepareSimpleEntititiesJob</name>
            <class>eu.dnetlib.dhp.broker.oa.PrepareSimpleEntititiesJob</class>
            <jar>dhp-broker-events-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCores}
                --executor-memory=${sparkExecutorMemory}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=3840
            </spark-opts>
            <arg>--graphPath</arg><arg>${graphInputPath}</arg>
            <arg>--workingPath</arg><arg>${workingPath}</arg>
        </spark>
        <ok to="wait_entities_and_rels"/>
        <error to="Kill"/>
    </action>
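    <!-- The four related-entity preparation jobs below use identical Spark settings and arguments; they differ only in the job class being run. -->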

    <action name="prepare_related_datasets">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>PrepareRelatedDatasetsJob</name>
            <class>eu.dnetlib.dhp.broker.oa.PrepareRelatedDatasetsJob</class>
            <jar>dhp-broker-events-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCores}
                --executor-memory=${sparkExecutorMemory}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=3840
            </spark-opts>
            <arg>--graphPath</arg><arg>${graphInputPath}</arg>
            <arg>--workingPath</arg><arg>${workingPath}</arg>
        </spark>
        <ok to="wait_entities_and_rels"/>
        <error to="Kill"/>
    </action>

    <action name="prepare_related_projects">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>PrepareRelatedProjectsJob</name>
            <class>eu.dnetlib.dhp.broker.oa.PrepareRelatedProjectsJob</class>
            <jar>dhp-broker-events-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCores}
                --executor-memory=${sparkExecutorMemory}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=3840
            </spark-opts>
            <arg>--graphPath</arg><arg>${graphInputPath}</arg>
            <arg>--workingPath</arg><arg>${workingPath}</arg>
        </spark>
        <ok to="wait_entities_and_rels"/>
        <error to="Kill"/>
    </action>

    <action name="prepare_related_publications">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>PrepareRelatedPublicationsJob</name>
            <class>eu.dnetlib.dhp.broker.oa.PrepareRelatedPublicationsJob</class>
            <jar>dhp-broker-events-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCores}
                --executor-memory=${sparkExecutorMemory}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=3840
            </spark-opts>
            <arg>--graphPath</arg><arg>${graphInputPath}</arg>
            <arg>--workingPath</arg><arg>${workingPath}</arg>
        </spark>
        <ok to="wait_entities_and_rels"/>
        <error to="Kill"/>
    </action>

    <action name="prepare_related_softwares">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>PrepareRelatedSoftwaresJob</name>
            <class>eu.dnetlib.dhp.broker.oa.PrepareRelatedSoftwaresJob</class>
            <jar>dhp-broker-events-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCores}
                --executor-memory=${sparkExecutorMemory}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=3840
            </spark-opts>
            <arg>--graphPath</arg><arg>${graphInputPath}</arg>
            <arg>--workingPath</arg><arg>${workingPath}</arg>
        </spark>
        <ok to="wait_entities_and_rels"/>
        <error to="Kill"/>
    </action>
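    <!-- Barrier: all five preparation branches join here before their outputs are combined. -->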

    <join name="wait_entities_and_rels" to="join_entities"/>

    <action name="join_entities">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>JoinEntitiesJob</name>
            <class>eu.dnetlib.dhp.broker.oa.JoinEntitiesJob</class>
            <jar>dhp-broker-events-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCores}
                --executor-memory=${sparkExecutorMemory}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=3840
            </spark-opts>
            <arg>--graphPath</arg><arg>${graphInputPath}</arg>
            <arg>--workingPath</arg><arg>${workingPath}</arg>
        </spark>
        <ok to="prepare_groups"/>
        <error to="Kill"/>
    </action>
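    <!-- Groups the joined records (PrepareGroupsJob) before event generation. -->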

    <action name="prepare_groups">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>PrepareGroupsJob</name>
            <class>eu.dnetlib.dhp.broker.oa.PrepareGroupsJob</class>
            <jar>dhp-broker-events-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCores}
                --executor-memory=${sparkExecutorMemory}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=3840
            </spark-opts>
            <arg>--graphPath</arg><arg>${graphInputPath}</arg>
            <arg>--workingPath</arg><arg>${workingPath}</arg>
        </spark>
        <ok to="generate_events"/>
        <error to="Kill"/>
    </action>
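    <!-- Final step: generates the broker events from the prepared data, using the IS lookup service and the given dedup configuration profile. -->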

    <action name="generate_events">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>GenerateEventsJob</name>
            <class>eu.dnetlib.dhp.broker.oa.GenerateEventsJob</class>
            <jar>dhp-broker-events-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCores}
                --executor-memory=${sparkExecutorMemory}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=3840
            </spark-opts>
            <arg>--workingPath</arg><arg>${workingPath}</arg>
            <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
            <arg>--dedupConfProfile</arg><arg>${dedupConfProfId}</arg>
        </spark>
        <ok to="End"/>
        <error to="Kill"/>
    </action>

    <end name="End"/>

</workflow-app>