fixed duplicate action name in the workflow

This commit is contained in:
Miriam Baglioni 2020-04-27 10:52:30 +02:00
parent f9ee343fc0
commit d30e710165
1 changed file with 5 additions and 36 deletions

View File

@ -149,7 +149,7 @@
<arg>--datasourceOrganizationPath</arg><arg>${workingDir}/resulttoorganization_propagation/preparedInfo/datasourceOrganization</arg>
<arg>--alreadyLinkedPath</arg><arg>${workingDir}/resulttoorganization_propagation/preparedInfo/alreadyLinked</arg>
</spark>
<ok to="wait"/>
<ok to="wait2"/>
<error to="Kill"/>
</action>
@ -180,7 +180,7 @@
<arg>--datasourceOrganizationPath</arg><arg>${workingDir}/resulttoorganization_propagation/preparedInfo/datasourceOrganization</arg>
<arg>--alreadyLinkedPath</arg><arg>${workingDir}/resulttoorganization_propagation/preparedInfo/alreadyLinked</arg>
</spark>
<ok to="wait"/>
<ok to="wait2"/>
<error to="Kill"/>
</action>
@ -211,7 +211,7 @@
<arg>--datasourceOrganizationPath</arg><arg>${workingDir}/resulttoorganization_propagation/preparedInfo/datasourceOrganization</arg>
<arg>--alreadyLinkedPath</arg><arg>${workingDir}/resulttoorganization_propagation/preparedInfo/alreadyLinked</arg>
</spark>
<ok to="wait"/>
<ok to="wait2"/>
<error to="Kill"/>
</action>
@ -242,42 +242,11 @@
<arg>--datasourceOrganizationPath</arg><arg>${workingDir}/resulttoorganization_propagation/preparedInfo/datasourceOrganization</arg>
<arg>--alreadyLinkedPath</arg><arg>${workingDir}/resulttoorganization_propagation/preparedInfo/alreadyLinked</arg>
</spark>
<ok to="wait"/>
<ok to="wait2"/>
<error to="Kill"/>
</action>
<join name="wait" to="End"/>
<join name="wait2" to="End"/>
<end name="End"/>
<!-- <action name="AffiliationPropagation">-->
<!-- <spark xmlns="uri:oozie:spark-action:0.2">-->
<!-- <job-tracker>${jobTracker}</job-tracker>-->
<!-- <name-node>${nameNode}</name-node>-->
<!-- <master>yarn-cluster</master>-->
<!-- <mode>cluster</mode>-->
<!-- <name>AffiliationPropagation</name>-->
<!-- <class>eu.dnetlib.dhp.resulttoorganizationfrominstrepo.SparkResultToOrganizationFromIstRepoJob</class>-->
<!-- <jar>dhp-propagation-${projectVersion}.jar</jar>-->
<!-- <spark-opts>-->
<!-- &#45;&#45;num-executors=${sparkExecutorNumber}-->
<!-- &#45;&#45;executor-memory=${sparkExecutorMemory}-->
<!-- &#45;&#45;executor-cores=${sparkExecutorCores}-->
<!-- &#45;&#45;driver-memory=${sparkDriverMemory}-->
<!-- &#45;&#45;conf spark.extraListeners=${spark2ExtraListeners}-->
<!-- &#45;&#45;conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}-->
<!-- &#45;&#45;conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}-->
<!-- &#45;&#45;conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}-->
<!-- &#45;&#45;conf spark.dynamicAllocation.enabled=true-->
<!-- &#45;&#45;conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}-->
<!-- </spark-opts>-->
<!-- <arg>-mt</arg> <arg>yarn-cluster</arg>-->
<!-- <arg>&#45;&#45;sourcePath</arg><arg>${sourcePath}</arg>-->
<!-- <arg>&#45;&#45;hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>-->
<!-- <arg>&#45;&#45;writeUpdate</arg><arg>${writeUpdate}</arg>-->
<!-- <arg>&#45;&#45;saveGraph</arg><arg>${saveGraph}</arg>-->
<!-- </spark>-->
<!-- <ok to="End"/>-->
<!-- <error to="Kill"/>-->
<!--</action>-->
</workflow-app>