<workflow-app name="Build Root Records" xmlns="uri:oozie:workflow:0.5">
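
    <!-- Workflow parameters: graph paths, the lookUp service address, the actionSet id and Spark resource settings. -->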
    <parameters>
        <property>
            <name>graphBasePath</name>
            <description>the raw graph base path</description>
        </property>
        <property>
            <name>isLookUpUrl</name>
            <description>the address of the lookUp service</description>
        </property>
        <property>
            <name>actionSetId</name>
            <description>id of the actionSet</description>
        </property>
        <property>
            <name>workingPath</name>
            <description>path of the working directory</description>
        </property>
        <property>
            <name>dedupGraphPath</name>
            <description>path of the dedup graph</description>
        </property>
        <property>
            <name>sparkDriverMemory</name>
            <description>memory for driver process</description>
        </property>
        <property>
            <name>sparkExecutorMemory</name>
            <description>memory for individual executor</description>
        </property>
        <property>
            <name>sparkExecutorCores</name>
            <description>number of cores used by single executor</description>
        </property>
    </parameters>
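
    <!-- Entry point: the workflow starts by clearing previous outputs from the working directory; any failed action routes to the Kill node. -->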
    <start to="DeleteWorkingPath"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>
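
    <!-- Removes *_mergerel and *_deduprecord data left over from a previous run under ${workingPath}/${actionSetId}. -->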
<action name="DeleteWorkingPath">
|
|
<fs>
|
|
<delete path='${workingPath}/${actionSetId}/*_mergerel'/>
|
|
<delete path='${workingPath}/${actionSetId}/*_deduprecord'/>
|
|
</fs>
|
|
<ok to="CreateMergeRel"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
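
    <!-- Runs SparkCreateConnectedComponent to build the merge relations used by the deduplication. -->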
<action name="CreateMergeRel">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<job-tracker>${jobTracker}</job-tracker>
|
|
<name-node>${nameNode}</name-node>
|
|
<master>yarn-cluster</master>
|
|
<mode>cluster</mode>
|
|
<name>Create Merge Relations</name>
|
|
<class>eu.dnetlib.dhp.dedup.SparkCreateConnectedComponent</class>
|
|
<jar>dhp-dedup-${projectVersion}.jar</jar>
|
|
<spark-opts>--executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory} --conf
|
|
spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf
|
|
spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf
|
|
spark.sql.warehouse.dir="/user/hive/warehouse"
|
|
</spark-opts>
|
|
<arg>-mt</arg><arg>yarn-cluster</arg>
|
|
<arg>--i</arg><arg>${graphBasePath}</arg>
|
|
<arg>--w</arg><arg>${workingPath}</arg>
|
|
<arg>--la</arg><arg>${isLookUpUrl}</arg>
|
|
<arg>--asi</arg><arg>${actionSetId}</arg>
|
|
</spark>
|
|
<ok to="CreateDedupRecord"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
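
    <!-- Runs SparkCreateDedupRecord to build the representative (root) record for each group of duplicates. -->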
<action name="CreateDedupRecord">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<job-tracker>${jobTracker}</job-tracker>
|
|
<name-node>${nameNode}</name-node>
|
|
<master>yarn-cluster</master>
|
|
<mode>cluster</mode>
|
|
<name>Create Dedup Record</name>
|
|
<class>eu.dnetlib.dhp.dedup.SparkCreateDedupRecord</class>
|
|
<jar>dhp-dedup-${projectVersion}.jar</jar>
|
|
<spark-opts>--executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory} --conf
|
|
spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf
|
|
spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf
|
|
spark.sql.warehouse.dir="/user/hive/warehouse"
|
|
</spark-opts>
|
|
<arg>-mt</arg><arg>yarn-cluster</arg>
|
|
<arg>--i</arg><arg>${graphBasePath}</arg>
|
|
<arg>--w</arg><arg>${workingPath}</arg>
|
|
<arg>--la</arg><arg>${isLookUpUrl}</arg>
|
|
<arg>--asi</arg><arg>${actionSetId}</arg>
|
|
</spark>
|
|
<ok to="UpdateEntity"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
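
    <!-- Runs SparkUpdateEntity to write the updated entities into the dedup graph at ${dedupGraphPath}. -->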
<action name="UpdateEntity">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<job-tracker>${jobTracker}</job-tracker>
|
|
<name-node>${nameNode}</name-node>
|
|
<master>yarn-cluster</master>
|
|
<mode>cluster</mode>
|
|
<name>Create Dedup Record</name>
|
|
<class>eu.dnetlib.dhp.dedup.SparkUpdateEntity</class>
|
|
<jar>dhp-dedup-${projectVersion}.jar</jar>
|
|
<spark-opts>--executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory} --conf
|
|
spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf
|
|
spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf
|
|
spark.sql.warehouse.dir="/user/hive/warehouse"
|
|
</spark-opts>
|
|
<arg>-mt</arg><arg>yarn-cluster</arg>
|
|
<arg>--i</arg><arg>${graphBasePath}</arg>
|
|
<arg>--w</arg><arg>${workingPath}</arg>
|
|
<arg>--la</arg><arg>${isLookUpUrl}</arg>
|
|
<arg>--asi</arg><arg>${actionSetId}</arg>
|
|
<arg>--o</arg><arg>${dedupGraphPath}</arg>
|
|
</spark>
|
|
<ok to="End"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|

    <end name="End"/>

</workflow-app>