structure of the workflows updated

miconis 2020-03-23 11:43:49 +01:00
parent a4c52661a0
commit c20e179f5a
3 changed files with 27 additions and 39 deletions
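
All three dedup workflows are restructured the same way: the standalone DeleteWorkingPath <fs> actions (and the <start> transitions that pointed at them) are removed, and each Spark action now cleans its own working directory through an Oozie <prepare> block, which Oozie executes before launching the job. A minimal sketch of the pattern, using a hypothetical SparkJob action name and an illustrative path; the real action names and paths appear in the diffs below:

    <!-- before: a separate fs action deletes the output, then hands off to the Spark action -->
    <start to="DeleteWorkingPath"/>
    <action name="DeleteWorkingPath">
        <fs>
            <delete path='${workingPath}/output'/>
        </fs>
        <ok to="SparkJob"/>
        <error to="Kill"/>
    </action>

    <!-- after (sketch): the workflow starts at the Spark action, which cleans its own output in a prepare block -->
    <start to="SparkJob"/>
    <action name="SparkJob">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <prepare>
                <delete path='${workingPath}/output'/>
            </prepare>
            <!-- job-tracker, name-node, master, class, jar, spark-opts and args as in the real workflows -->
        </spark>
        <ok to="NextStep"/>  <!-- hypothetical transition name -->
        <error to="Kill"/>
    </action>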

View File

@@ -26,22 +26,17 @@
         </property>
     </parameters>
-    <start to="DeleteWorkingPath"/>
+    <start to="PropagateRelation"/>
     <kill name="Kill">
         <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
     </kill>
-    <action name="DeleteWorkingPath">
-        <fs>
-            <delete path='${dedupGraphPath}/relation'/>
-        </fs>
-        <ok to="DuplicateScan"/>
-        <error to="Kill"/>
-    </action>
-    <action name="DuplicateScan">
+    <action name="PropagateRelation">
         <spark xmlns="uri:oozie:spark-action:0.2">
+            <prepare>
+                <delete path='${dedupGraphPath}/relation'/>
+            </prepare>
             <job-tracker>${jobTracker}</job-tracker>
             <name-node>${nameNode}</name-node>
             <master>yarn-cluster</master>
@@ -49,11 +44,13 @@
             <name>Update Relations</name>
             <class>eu.dnetlib.dhp.dedup.SparkPropagateRelation</class>
             <jar>dhp-dedup-${projectVersion}.jar</jar>
-            <spark-opts>--executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores}
-                --driver-memory=${sparkDriverMemory} --conf
-                spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf
-                spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf
-                spark.sql.warehouse.dir="/user/hive/warehouse"
+            <spark-opts>
+                --executor-memory ${sparkExecutorMemory}
+                --executor-cores ${sparkExecutorCores}
+                --driver-memory=${sparkDriverMemory}
+                --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener"
+                --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener"
+                --conf spark.sql.warehouse.dir="/user/hive/warehouse"
             </spark-opts>
             <arg>-mt</arg><arg>yarn-cluster</arg>
             <arg>--i</arg><arg>${graphBasePath}</arg>

View File

@@ -34,23 +34,17 @@
         </property>
     </parameters>
-    <start to="DeleteWorkingPath"/>
+    <start to="CreateMergeRel"/>
     <kill name="Kill">
         <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
     </kill>
-    <action name="DeleteWorkingPath">
-        <fs>
-            <delete path='${workingPath}/${actionSetId}/*_mergerel'/>
-            <delete path='${workingPath}/${actionSetId}/*_deduprecord'/>
-        </fs>
-        <ok to="CreateMergeRel"/>
-        <error to="Kill"/>
-    </action>
     <action name="CreateMergeRel">
         <spark xmlns="uri:oozie:spark-action:0.2">
+            <prepare>
+                <delete path='${workingPath}/${actionSetId}/*_mergerel'/>
+            </prepare>
             <job-tracker>${jobTracker}</job-tracker>
             <name-node>${nameNode}</name-node>
             <master>yarn-cluster</master>
@@ -58,11 +52,13 @@
             <name>Create Merge Relations</name>
             <class>eu.dnetlib.dhp.dedup.SparkCreateConnectedComponent</class>
             <jar>dhp-dedup-${projectVersion}.jar</jar>
-            <spark-opts>--executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores}
-                --driver-memory=${sparkDriverMemory} --conf
-                spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf
-                spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf
-                spark.sql.warehouse.dir="/user/hive/warehouse"
+            <spark-opts>
+                --executor-memory ${sparkExecutorMemory}
+                --executor-cores ${sparkExecutorCores}
+                --driver-memory=${sparkDriverMemory}
+                --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener"
+                --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener"
+                --conf spark.sql.warehouse.dir="/user/hive/warehouse"
             </spark-opts>
             <arg>-mt</arg><arg>yarn-cluster</arg>
             <arg>--i</arg><arg>${graphBasePath}</arg>
@@ -76,6 +72,9 @@
     <action name="CreateDedupRecord">
         <spark xmlns="uri:oozie:spark-action:0.2">
+            <prepare>
+                <delete path='${workingPath}/${actionSetId}/*_deduprecord'/>
+            </prepare>
             <job-tracker>${jobTracker}</job-tracker>
             <name-node>${nameNode}</name-node>
             <master>yarn-cluster</master>
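
In this workflow the old up-front cleanup is split between the two stages: CreateMergeRel now deletes only the *_mergerel output in its prepare block, and CreateDedupRecord gets its own prepare block for *_deduprecord, so each action clears exactly the data it regenerates. The two blocks introduced by the hunks above, shown in isolation (surrounding elements elided):

    <action name="CreateMergeRel">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <prepare>
                <delete path='${workingPath}/${actionSetId}/*_mergerel'/>
            </prepare>
            ...

    <action name="CreateDedupRecord">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <prepare>
                <delete path='${workingPath}/${actionSetId}/*_deduprecord'/>
            </prepare>
            ...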

View File

@@ -49,20 +49,12 @@
         </configuration>
     </global>
-    <start to="DeleteWorkingPath"/>
+    <start to="DuplicateScan"/>
     <kill name="Kill">
         <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
     </kill>
-    <action name="DeleteWorkingPath">
-        <fs>
-            <delete path='${workingPath}/${actionSetId}/*_simrel'/>
-        </fs>
-        <ok to="DuplicateScan"/>
-        <error to="Kill"/>
-    </action>
     <action name="DuplicateScan">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <prepare>
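
The visible hunk for this third workflow stops at the DuplicateScan action's existing <prepare> element, so the destination of the removed *_simrel cleanup is not shown here; presumably it now sits in that prepare block, along the lines of the following sketch (an assumption, not confirmed by the truncated hunk):

    <action name="DuplicateScan">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <prepare>
                <!-- assumed: cleanup moved here from the removed DeleteWorkingPath action -->
                <delete path='${workingPath}/${actionSetId}/*_simrel'/>
            </prepare>
            ...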