
simplified reset_outputpath nodes across the workflows, applied common xml formatting

Claudio Atzori 2020-05-08 12:33:31 +02:00
parent 9b4c0d4b3a
commit b2192fdcdc
7 changed files with 252 additions and 228 deletions
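
The change is the same in every workflow: the reset action, renamed from reset-outputpath to reset_outputpath, replaces the list of per-entity deletes with a single delete and mkdir on the whole output directory, so each node now reads:

    <action name="reset_outputpath">
        <fs>
            <delete path="${outputPath}"/>
            <mkdir path="${outputPath}"/>
        </fs>
        <ok to="copy_entities"/>
        <error to="Kill"/>
    </action>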

View File

@@ -1,39 +1,33 @@
 <workflow-app name="bulk_tagging" xmlns="uri:oozie:workflow:0.5">
     <parameters>
         <property>
             <name>sourcePath</name>
             <description>the source path</description>
         </property>
         <property>
             <name>isLookUpUrl</name>
             <description>the isLookup service endpoint</description>
         </property>
         <property>
             <name>pathMap</name>
             <description>the json path associated to each selection field</description>
         </property>
         <property>
             <name>outputPath</name>
             <description>the output path</description>
         </property>
     </parameters>
-    <start to="reset-outputpath"/>
+    <start to="reset_outputpath"/>
     <kill name="Kill">
         <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
     </kill>
-    <action name="reset-outputpath">
+    <action name="reset_outputpath">
         <fs>
-            <delete path='${outputPath}/relation'/>
-            <delete path='${outputPath}/dataset'/>
-            <delete path='${outputPath}/software'/>
-            <delete path='${outputPath}/publication'/>
-            <delete path='${outputPath}/otherresearchproduct'/>
-            <delete path='${outputPath}/project'/>
-            <delete path='${outputPath}/organization'/>
-            <delete path='${outputPath}/datasource'/>
+            <delete path="${outputPath}"/>
+            <mkdir path="${outputPath}"/>
         </fs>
         <ok to="copy_entities"/>
         <error to="Kill"/>
@@ -67,6 +61,7 @@
         <ok to="copy_wait"/>
         <error to="Kill"/>
     </action>
     <action name="copy_projects">
         <distcp xmlns="uri:oozie:distcp-action:0.2">
             <job-tracker>${jobTracker}</job-tracker>
@@ -91,14 +86,13 @@
     <join name="copy_wait" to="fork_exec_bulktag"/>
     <fork name="fork_exec_bulktag">
         <path start="join_bulktag_publication"/>
         <path start="join_bulktag_dataset"/>
         <path start="join_bulktag_otherresearchproduct"/>
         <path start="join_bulktag_software"/>
     </fork>
     <action name="join_bulktag_publication">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <job-tracker>${jobTracker}</job-tracker>
@@ -127,6 +121,7 @@
         <ok to="wait"/>
         <error to="Kill"/>
     </action>
     <action name="join_bulktag_dataset">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <job-tracker>${jobTracker}</job-tracker>
@@ -155,6 +150,7 @@
         <ok to="wait"/>
         <error to="Kill"/>
     </action>
     <action name="join_bulktag_otherresearchproduct">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <job-tracker>${jobTracker}</job-tracker>
@@ -183,6 +179,7 @@
         <ok to="wait"/>
         <error to="Kill"/>
     </action>
     <action name="join_bulktag_software">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <job-tracker>${jobTracker}</job-tracker>
@@ -211,6 +208,9 @@
         <ok to="wait"/>
         <error to="Kill"/>
     </action>
     <join name="wait" to="End"/>
     <end name="End"/>
 </workflow-app>

View File

@@ -27,14 +27,8 @@
     <action name="reset_outputpath">
         <fs>
-            <delete path="${outputPath}/relation"/>
-            <delete path="${outputPath}/dataset"/>
-            <delete path="${outputPath}/software"/>
-            <delete path="${outputPath}/publication"/>
-            <delete path="${outputPath}/otherresearchproduct"/>
-            <delete path="${outputPath}/project"/>
-            <delete path="${outputPath}/organization"/>
-            <delete path="${outputPath}/datasource"/>
+            <delete path="${outputPath}"/>
+            <mkdir path="${outputPath}"/>
         </fs>
         <ok to="copy_entities"/>
         <error to="Kill"/>

View File

@@ -1,37 +1,29 @@
 <workflow-app name="orcid_to_result_from_semrel_propagation" xmlns="uri:oozie:workflow:0.5">
     <parameters>
         <property>
             <name>sourcePath</name>
             <description>the source path</description>
         </property>
         <property>
             <name>allowedsemrels</name>
             <description>the semantic relationships allowed for propagation</description>
         </property>
         <property>
             <name>outputPath</name>
             <description>the output path</description>
         </property>
     </parameters>
-    <start to="reset-outputpath"/>
-    <!-- <start to="fork_prepare_assoc_step1"/>-->
+    <start to="reset_outputpath"/>
     <kill name="Kill">
         <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
     </kill>
-    <action name="reset-outputpath">
+    <action name="reset_outputpath">
         <fs>
-            <delete path='${workingDir}/preparedInfo'/>
-            <delete path='${outputPath}/relation'/>
-            <delete path='${outputPath}/dataset'/>
-            <delete path='${outputPath}/software'/>
-            <delete path='${outputPath}/publication'/>
-            <delete path='${outputPath}/otherresearchproduct'/>
-            <delete path='${outputPath}/project'/>
-            <delete path='${outputPath}/organization'/>
-            <delete path='${outputPath}/datasource'/>
+            <delete path="${outputPath}"/>
+            <mkdir path="${outputPath}"/>
         </fs>
         <ok to="copy_entities"/>
         <error to="Kill"/>
@@ -119,11 +111,16 @@
                 --conf spark.hadoop.mapreduce.map.speculative=false
                 --conf spark.hadoop.mapreduce.reduce.speculative=false
             </spark-opts>
-            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
-            <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
-            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
-            <arg>--outputPath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-            <arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
+            <arg>--sourcePath</arg>
+            <arg>${sourcePath}</arg>
+            <arg>--hive_metastore_uris</arg>
+            <arg>${hive_metastore_uris}</arg>
+            <arg>--resultTableName</arg>
+            <arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
+            <arg>--outputPath</arg>
+            <arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+            <arg>--allowedsemrels</arg>
+            <arg>${allowedsemrels}</arg>
         </spark>
         <ok to="wait"/>
         <error to="Kill"/>
@@ -147,11 +144,16 @@
                 --conf spark.dynamicAllocation.enabled=true
                 --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
             </spark-opts>
-            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
-            <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
-            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
-            <arg>--outputPath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-            <arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
+            <arg>--sourcePath</arg>
+            <arg>${sourcePath}</arg>
+            <arg>--hive_metastore_uris</arg>
+            <arg>${hive_metastore_uris}</arg>
+            <arg>--resultTableName</arg>
+            <arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
+            <arg>--outputPath</arg>
+            <arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+            <arg>--allowedsemrels</arg>
+            <arg>${allowedsemrels}</arg>
         </spark>
         <ok to="wait"/>
         <error to="Kill"/>
@@ -175,11 +177,16 @@
                 --conf spark.dynamicAllocation.enabled=true
                 --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
             </spark-opts>
-            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
-            <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
-            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
-            <arg>--outputPath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-            <arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
+            <arg>--sourcePath</arg>
+            <arg>${sourcePath}</arg>
+            <arg>--hive_metastore_uris</arg>
+            <arg>${hive_metastore_uris}</arg>
+            <arg>--resultTableName</arg>
+            <arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
+            <arg>--outputPath</arg>
+            <arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+            <arg>--allowedsemrels</arg>
+            <arg>${allowedsemrels}</arg>
         </spark>
         <ok to="wait"/>
         <error to="Kill"/>
@@ -203,11 +210,16 @@
                 --conf spark.dynamicAllocation.enabled=true
                 --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
             </spark-opts>
-            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
-            <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
-            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
-            <arg>--outputPath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-            <arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
+            <arg>--sourcePath</arg>
+            <arg>${sourcePath}</arg>
+            <arg>--hive_metastore_uris</arg>
+            <arg>${hive_metastore_uris}</arg>
+            <arg>--resultTableName</arg>
+            <arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
+            <arg>--outputPath</arg>
+            <arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+            <arg>--allowedsemrels</arg>
+            <arg>${allowedsemrels}</arg>
         </spark>
         <ok to="wait"/>
         <error to="Kill"/>
@@ -233,11 +245,13 @@
                 --conf spark.dynamicAllocation.enabled=true
                 --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
             </spark-opts>
-            <arg>--sourcePath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-            <arg>--outputPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+            <arg>--sourcePath</arg>
+            <arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+            <arg>--outputPath</arg>
+            <arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
         </spark>
         <ok to="fork-join-exec-propagation"/>
         <!-- <ok to="End"/>-->
         <error to="Kill"/>
     </action>
@@ -270,12 +284,18 @@
                 --conf spark.hadoop.mapreduce.reduce.speculative=false
                 --conf spark.sql.shuffle.partitions=3840
             </spark-opts>
-            <arg>--possibleUpdatesPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
-            <arg>--sourcePath</arg><arg>${sourcePath}/publication</arg>
-            <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
-            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
-            <arg>--outputPath</arg><arg>${outputPath}/publication</arg>
-            <arg>--saveGraph</arg><arg>${saveGraph}</arg>
+            <arg>--possibleUpdatesPath</arg>
+            <arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+            <arg>--sourcePath</arg>
+            <arg>${sourcePath}/publication</arg>
+            <arg>--hive_metastore_uris</arg>
+            <arg>${hive_metastore_uris}</arg>
+            <arg>--resultTableName</arg>
+            <arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
+            <arg>--outputPath</arg>
+            <arg>${outputPath}/publication</arg>
+            <arg>--saveGraph</arg>
+            <arg>${saveGraph}</arg>
         </spark>
         <ok to="wait2"/>
         <error to="Kill"/>
@@ -301,12 +321,18 @@
                 --conf spark.hadoop.mapreduce.map.speculative=false
                 --conf spark.hadoop.mapreduce.reduce.speculative=false
             </spark-opts>
-            <arg>--possibleUpdatesPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
-            <arg>--sourcePath</arg><arg>${sourcePath}/dataset</arg>
-            <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
-            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
-            <arg>--outputPath</arg><arg>${outputPath}/dataset</arg>
-            <arg>--saveGraph</arg><arg>${saveGraph}</arg>
+            <arg>--possibleUpdatesPath</arg>
+            <arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+            <arg>--sourcePath</arg>
+            <arg>${sourcePath}/dataset</arg>
+            <arg>--hive_metastore_uris</arg>
+            <arg>${hive_metastore_uris}</arg>
+            <arg>--resultTableName</arg>
+            <arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
+            <arg>--outputPath</arg>
+            <arg>${outputPath}/dataset</arg>
+            <arg>--saveGraph</arg>
+            <arg>${saveGraph}</arg>
         </spark>
         <ok to="wait2"/>
         <error to="Kill"/>
@@ -332,12 +358,18 @@
                 --conf spark.hadoop.mapreduce.map.speculative=false
                 --conf spark.hadoop.mapreduce.reduce.speculative=false
             </spark-opts>
-            <arg>--possibleUpdatesPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
-            <arg>--sourcePath</arg><arg>${sourcePath}/otherresearchproduct</arg>
-            <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
-            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
-            <arg>--outputPath</arg><arg>${outputPath}/otherresearchproduct</arg>
-            <arg>--saveGraph</arg><arg>${saveGraph}</arg>
+            <arg>--possibleUpdatesPath</arg>
+            <arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+            <arg>--sourcePath</arg>
+            <arg>${sourcePath}/otherresearchproduct</arg>
+            <arg>--hive_metastore_uris</arg>
+            <arg>${hive_metastore_uris}</arg>
+            <arg>--resultTableName</arg>
+            <arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
+            <arg>--outputPath</arg>
+            <arg>${outputPath}/otherresearchproduct</arg>
+            <arg>--saveGraph</arg>
+            <arg>${saveGraph}</arg>
         </spark>
         <ok to="wait2"/>
         <error to="Kill"/>
@@ -363,16 +395,22 @@
                 --conf spark.hadoop.mapreduce.map.speculative=false
                 --conf spark.hadoop.mapreduce.reduce.speculative=false
             </spark-opts>
-            <arg>--possibleUpdatesPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
-            <arg>--sourcePath</arg><arg>${sourcePath}/software</arg>
-            <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
-            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
-            <arg>--outputPath</arg><arg>${outputPath}/software</arg>
-            <arg>--saveGraph</arg><arg>${saveGraph}</arg>
+            <arg>--possibleUpdatesPath</arg>
+            <arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+            <arg>--sourcePath</arg>
+            <arg>${sourcePath}/software</arg>
+            <arg>--hive_metastore_uris</arg>
+            <arg>${hive_metastore_uris}</arg>
+            <arg>--resultTableName</arg>
+            <arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
+            <arg>--outputPath</arg>
+            <arg>${outputPath}/software</arg>
+            <arg>--saveGraph</arg>
+            <arg>${saveGraph}</arg>
         </spark>
         <ok to="wait2"/>
         <error to="Kill"/>
     </action>
     <join name="wait2" to="End"/>
     <end name="End"/>
 </workflow-app>

View File

@@ -14,27 +14,21 @@
         </property>
     </parameters>
-    <start to="reset-outputpath"/>
-    <!-- <start to="apply_propagation"/>-->
+    <start to="reset_outputpath"/>
     <kill name="Kill">
         <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
     </kill>
-    <action name="reset-outputpath">
+    <action name="reset_outputpath">
         <fs>
-            <delete path='${workingDir}/preparedInfo'/>
-            <delete path='${outputPath}/relation'/>
-            <delete path='${outputPath}/dataset'/>
-            <delete path='${outputPath}/software'/>
-            <delete path='${outputPath}/publication'/>
-            <delete path='${outputPath}/otherresearchproduct'/>
-            <delete path='${outputPath}/project'/>
-            <delete path='${outputPath}/organization'/>
-            <delete path='${outputPath}/datasource'/>
+            <delete path="${outputPath}"/>
+            <mkdir path="${outputPath}"/>
         </fs>
         <ok to="copy_entities"/>
         <error to="Kill"/>
     </action>
     <fork name="copy_entities">
         <path start="copy_relation"/>
         <path start="copy_publication"/>
@@ -190,4 +184,5 @@
     </action>
     <end name="End"/>
 </workflow-app>

View File

@@ -14,23 +14,16 @@
         </property>
     </parameters>
-    <start to="reset-outputpath"/>
+    <start to="reset_outputpath"/>
     <kill name="Kill">
         <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
     </kill>
-    <action name="reset-outputpath">
+    <action name="reset_outputpath">
         <fs>
-            <delete path='${outputPath}/relation'/>
-            <delete path='${outputPath}/datasource'/>
-            <delete path='${outputPath}/organization'/>
-            <delete path='${outputPath}/project'/>
-            <delete path='${outputPath}/publication'/>
-            <delete path='${outputPath}/dataset'/>
-            <delete path='${outputPath}/software'/>
-            <delete path='${outputPath}/otherresearchproduct'/>
-            <delete path='${workingDir}/preparedInfo'/>
+            <delete path="${outputPath}"/>
+            <mkdir path="${outputPath}"/>
         </fs>
         <ok to="copy_entities"/>
         <error to="Kill"/>
@@ -64,6 +57,7 @@
         <ok to="copy_wait"/>
         <error to="Kill"/>
     </action>
     <action name="copy_projects">
         <distcp xmlns="uri:oozie:distcp-action:0.2">
             <job-tracker>${jobTracker}</job-tracker>
@@ -85,6 +79,7 @@
         <ok to="copy_wait"/>
         <error to="Kill"/>
     </action>
     <join name="copy_wait" to="prepare_result_communitylist"/>
     <action name="prepare_result_communitylist">
@@ -111,7 +106,6 @@
             <arg>--organizationtoresultcommunitymap</arg><arg>${organizationtoresultcommunitymap}</arg>
         </spark>
         <ok to="fork-join-exec-propagation"/>
-        <!-- <ok to="End"/>-->
         <error to="Kill"/>
     </action>
@@ -150,6 +144,7 @@
         <ok to="wait2"/>
         <error to="Kill"/>
     </action>
     <action name="join_propagate_dataset">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
@@ -178,6 +173,7 @@
         <ok to="wait2"/>
         <error to="Kill"/>
     </action>
     <action name="join_propagate_otherresearchproduct">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
@@ -206,6 +202,7 @@
         <ok to="wait2"/>
         <error to="Kill"/>
     </action>
     <action name="join_propagate_software">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
@@ -238,4 +235,5 @@
     <join name="wait2" to="End"/>
     <end name="End"/>
 </workflow-app>

View File

@@ -1,43 +1,38 @@
 <workflow-app name="result_to_community_from_semrel_propagation" xmlns="uri:oozie:workflow:0.5">
     <parameters>
         <property>
             <name>sourcePath</name>
             <description>the source path</description>
         </property>
         <property>
             <name>allowedsemrels</name>
             <description>the semantic relationships allowed for propagation</description>
         </property>
         <property>
             <name>isLookUpUrl</name>
             <description>the isLookup service endpoint</description>
         </property>
         <property>
             <name>outputPath</name>
             <description>the output path</description>
         </property>
     </parameters>
-    <start to="reset-outputpath"/>
+    <start to="reset_outputpath"/>
     <kill name="Kill">
         <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
     </kill>
-    <action name="reset-outputpath">
+    <action name="reset_outputpath">
         <fs>
-            <delete path='${outputPath}/relation'/>
-            <delete path='${outputPath}/datasource'/>
-            <delete path='${outputPath}/organization'/>
-            <delete path='${outputPath}/project'/>
-            <delete path='${outputPath}/publication'/>
-            <delete path='${outputPath}/dataset'/>
-            <delete path='${outputPath}/software'/>
-            <delete path='${outputPath}/otherresearchproduct'/>
-            <delete path='${workingDir}/preparedInfo'/>
+            <delete path="${outputPath}"/>
+            <mkdir path="${outputPath}"/>
         </fs>
         <ok to="copy_entities"/>
         <error to="Kill"/>
     </action>
     <fork name="copy_entities">
         <path start="copy_relation"/>
         <path start="copy_organization"/>
@@ -126,6 +121,7 @@
         <ok to="wait"/>
         <error to="Kill"/>
     </action>
     <action name="join_prepare_dataset">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
@@ -182,6 +178,7 @@
         <ok to="wait"/>
         <error to="Kill"/>
     </action>
     <action name="join_prepare_software">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
@@ -214,29 +211,29 @@
     <join name="wait" to="prepare_assoc_step2"/>
     <action name="prepare_assoc_step2">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
             <mode>cluster</mode>
             <name>ResultToCommunityEmRelPropagation-PreparePhase2</name>
             <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.PrepareResultCommunitySetStep2</class>
             <jar>dhp-propagation-${projectVersion}.jar</jar>
             <spark-opts>
                 --executor-cores=${sparkExecutorCores}
                 --executor-memory=${sparkExecutorMemory}
                 --driver-memory=${sparkDriverMemory}
                 --conf spark.extraListeners=${spark2ExtraListeners}
                 --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                 --conf spark.dynamicAllocation.enabled=true
                 --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
             </spark-opts>
             <arg>--sourcePath</arg><arg>${workingDir}/preparedInfo/targetCommunityAssoc</arg>
             <arg>--outputPath</arg><arg>${workingDir}/preparedInfo/mergedCommunityAssoc</arg>
         </spark>
         <ok to="fork-join-exec-propagation"/>
         <!-- <ok to="End"/>-->
         <error to="Kill"/>
     </action>
@@ -275,6 +272,7 @@
         <ok to="wait2"/>
         <error to="Kill"/>
     </action>
     <action name="join_propagate_dataset">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
@@ -303,6 +301,7 @@
         <ok to="wait2"/>
         <error to="Kill"/>
     </action>
     <action name="join_propagate_otherresearchproduct">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
@@ -331,6 +330,7 @@
         <ok to="wait2"/>
         <error to="Kill"/>
     </action>
     <action name="join_propagate_software">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
@@ -362,5 +362,6 @@
     <join name="wait2" to="End"/>
     <end name="End"/>
 </workflow-app>

View File

@@ -1,32 +1,25 @@
 <workflow-app name="affiliation_from_instrepo_propagation" xmlns="uri:oozie:workflow:0.5">
     <parameters>
         <property>
             <name>sourcePath</name>
             <description>the source path</description>
         </property>
         <property>
             <name>outputPath</name>
             <description>sets the outputPath</description>
         </property>
     </parameters>
-    <start to="reset-outputpath"/>
-    <!--<start to="prepare_result_organization_association"/>-->
+    <start to="reset_outputpath"/>
     <kill name="Kill">
         <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
     </kill>
-    <action name="reset-outputpath">
+    <action name="reset_outputpath">
         <fs>
-            <delete path='${outputPath}/relation'/>
-            <delete path='${outputPath}/datasource'/>
-            <delete path='${outputPath}/organization'/>
-            <delete path='${outputPath}/project'/>
-            <delete path='${outputPath}/publication'/>
-            <delete path='${outputPath}/dataset'/>
-            <delete path='${outputPath}/software'/>
-            <delete path='${outputPath}/otherresearchproduct'/>
-            <delete path='${workingDir}/preparedInfo'/>
+            <delete path="${outputPath}"/>
+            <mkdir path="${outputPath}"/>
         </fs>
         <ok to="copy_entities"/>
         <error to="Kill"/>
@@ -53,6 +46,7 @@
         <ok to="wait"/>
         <error to="Kill"/>
     </action>
     <action name="copy_publication">
         <distcp xmlns="uri:oozie:distcp-action:0.2">
             <job-tracker>${jobTracker}</job-tracker>
@@ -85,6 +79,7 @@
         <ok to="wait"/>
         <error to="Kill"/>
     </action>
     <action name="copy_software">
         <distcp xmlns="uri:oozie:distcp-action:0.2">
             <job-tracker>${jobTracker}</job-tracker>
@@ -95,6 +90,7 @@
         <ok to="wait"/>
         <error to="Kill"/>
     </action>
     <action name="copy_organization">
         <distcp xmlns="uri:oozie:distcp-action:0.2">
             <job-tracker>${jobTracker}</job-tracker>
@@ -151,9 +147,9 @@
             <arg>--alreadyLinkedPath</arg><arg>${workingDir}/preparedInfo/alreadyLinked</arg>
         </spark>
         <ok to="fork_join_apply_resulttoorganization_propagation"/>
-        <!-- <ok to="End"/>-->
         <error to="Kill"/>
     </action>
     <fork name="fork_join_apply_resulttoorganization_propagation">
         <path start="join_propagation_publication"/>
         <path start="join_propagation_dataset"/>
@@ -220,6 +216,7 @@
         <ok to="wait2"/>
         <error to="Kill"/>
     </action>
     <action name="join_propagation_otherresearchproduct">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
@@ -281,6 +278,7 @@
     </action>
     <join name="wait2" to="End"/>
     <end name="End"/>
 </workflow-app>