<?xml version="1.0" encoding="UTF-8"?>
<workflow-app xmlns="uri:oozie:workflow:0.5" name="ranking-wf">
|
|
|
|
|
2023-05-15 21:28:48 +02:00
|
|
|
<!-- Global params -->
|
|
|
|
<global>
|
|
|
|
<job-tracker>${jobTracker}</job-tracker>
|
|
|
|
<name-node>${nameNode}</name-node>
|
|
|
|
<configuration>
|
|
|
|
<property>
|
|
|
|
<name>oozie.action.sharelib.for.spark</name>
|
|
|
|
<value>${oozieActionShareLibForSpark2}</value>
|
|
|
|
</property>
|
2023-08-29 09:03:03 +02:00
|
|
|
<property>
|
|
|
|
<name>projectImpactIndicatorsOutput</name>
|
|
|
|
<value>${nameNode}${workingDir}/project_indicators</value>
|
|
|
|
</property>
|
|
|
|
<property>
|
|
|
|
<name>openaireGraphInputPath</name>
|
|
|
|
<value>${nameNode}/${workingDir}/openaire_id_graph</value>
|
|
|
|
</property>
|
|
|
|
<property>
|
|
|
|
<name>synonymFolder</name>
|
|
|
|
<value>${nameNode}/${workingDir}/openaireid_to_dois/</value>
|
|
|
|
</property>
|
|
|
|
<property>
|
|
|
|
<name>checkpointDir</name>
|
|
|
|
<value>${nameNode}/${workingDir}/check/</value>
|
|
|
|
</property>
|
|
|
|
<property>
|
|
|
|
<name>bipScorePath</name>
|
|
|
|
<value>${nameNode}${workingDir}/openaire_universe_scores/</value>
|
|
|
|
</property>
|
2023-05-15 21:28:48 +02:00
|
|
|
</configuration>
|
|
|
|
</global>
|
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- start using a decision node, so as to determine from which point onwards a job will continue -->
|
|
|
|
<start to="entry-point-decision" />
|
2023-05-15 16:52:20 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<decision name="entry-point-decision">
|
|
|
|
<switch>
|
|
|
|
<!-- The default will be set as the normal start, a.k.a. get-doi-synonyms -->
|
|
|
|
<!-- If any different condition is set, go to the corresponding start -->
|
2023-09-12 21:31:50 +02:00
|
|
|
<case to="spark-cc">${wf:conf('resume') eq "cc"}</case>
|
|
|
|
<case to="spark-ram">${wf:conf('resume') eq "ram"}</case>
|
2023-05-15 21:28:48 +02:00
|
|
|
<case to="spark-impulse">${wf:conf('resume') eq "impulse"}</case>
|
2023-07-06 12:47:51 +02:00
|
|
|
<case to="spark-pagerank">${wf:conf('resume') eq "pagerank"}</case>
|
|
|
|
<case to="spark-attrank">${wf:conf('resume') eq "attrank"}</case>
|
|
|
|
<!-- <case to="iterative-rankings">${wf:conf('resume') eq "rankings-iterative"}</case> -->
|
2023-05-15 21:28:48 +02:00
|
|
|
<case to="get-file-names">${wf:conf('resume') eq "format-results"}</case>
|
|
|
|
<case to="map-openaire-to-doi">${wf:conf('resume') eq "map-ids"}</case>
|
|
|
|
<case to="map-scores-to-dois">${wf:conf('resume') eq "map-scores"}</case>
|
|
|
|
<case to="create-openaire-ranking-graph">${wf:conf('resume') eq "start"}</case>
|
2023-07-21 14:26:50 +02:00
|
|
|
|
|
|
|
<!-- Aggregation of impact scores on the project level -->
|
2023-05-15 21:28:48 +02:00
|
|
|
<case to="project-impact-indicators">${wf:conf('resume') eq "projects-impact"}</case>
|
2023-08-11 14:56:53 +02:00
|
|
|
<case to="create-actionset">${wf:conf('resume') eq "create-actionset"}</case>
|
2023-04-07 15:30:12 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<default to="create-openaire-ranking-graph" />
|
|
|
|
</switch>
|
|
|
|
</decision>
|
2023-06-23 11:51:50 +02:00
|
|
|
|
2023-07-21 15:07:10 +02:00
|
|
|
<!-- initial step: create citation network -->
|
2023-03-14 18:28:27 +01:00
|
|
|
<action name="create-openaire-ranking-graph">
|
|
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
2023-05-15 21:28:48 +02:00
|
|
|
|
2023-06-23 11:51:50 +02:00
|
|
|
<master>yarn-cluster</master>
|
2023-03-14 18:28:27 +01:00
|
|
|
<mode>cluster</mode>
|
2023-07-21 15:07:10 +02:00
|
|
|
<name>OpenAIRE Ranking Graph Creation</name>
|
2023-03-14 18:28:27 +01:00
|
|
|
<jar>create_openaire_ranking_graph.py</jar>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-05-15 14:50:23 +02:00
|
|
|
<spark-opts>
|
|
|
|
--executor-memory=${sparkHighExecutorMemory}
|
|
|
|
--executor-cores=${sparkExecutorCores}
|
|
|
|
--driver-memory=${sparkHighDriverMemory}
|
|
|
|
--conf spark.sql.shuffle.partitions=${sparkShufflePartitions}
|
|
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
|
|
</spark-opts>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- Script arguments here -->
|
|
|
|
<!-- The openaire graph data from which to read relations and objects -->
|
|
|
|
<arg>${openaireDataInput}</arg>
|
2023-05-15 14:53:12 +02:00
|
|
|
<!-- Year for filtering entries w/ larger values / empty -->
|
2023-03-14 18:28:27 +01:00
|
|
|
<arg>${currentYear}</arg>
|
|
|
|
<!-- number of partitions to be used on joins -->
|
2023-05-15 14:50:23 +02:00
|
|
|
<arg>${sparkShufflePartitions}</arg>
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- The output of the graph should be the openaire input graph for ranking-->
|
2023-08-29 09:03:03 +02:00
|
|
|
<arg>${nameNode}${workingDir}/openaire_id_graph</arg>
|
|
|
|
<file>${nameNode}${wfAppPath}/create_openaire_ranking_graph.py#create_openaire_ranking_graph.py</file>
|
2023-03-14 18:28:27 +01:00
|
|
|
</spark>
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-09-12 21:31:50 +02:00
|
|
|
<ok to="spark-cc"/>
|
2023-03-14 18:28:27 +01:00
|
|
|
<error to="openaire-graph-error" />
|
2023-05-15 14:53:12 +02:00
|
|
|
|
|
|
|
</action>
|
|
|
|
|
2023-07-21 15:07:10 +02:00
|
|
|
<!-- Run Citation Count calculation -->
|
2023-03-14 18:28:27 +01:00
|
|
|
<action name="spark-cc">
|
|
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
|
|
|
2023-04-26 19:40:06 +02:00
|
|
|
<master>yarn-cluster</master>
|
2023-03-14 18:28:27 +01:00
|
|
|
<mode>cluster</mode>
|
2023-07-21 15:07:10 +02:00
|
|
|
<name>Citation Count calculation</name>
|
2023-03-14 18:28:27 +01:00
|
|
|
<jar>CC.py</jar>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-05-15 14:50:23 +02:00
|
|
|
<spark-opts>
|
|
|
|
--executor-memory=${sparkHighExecutorMemory}
|
|
|
|
--executor-cores=${sparkExecutorCores}
|
|
|
|
--driver-memory=${sparkNormalDriverMemory}
|
|
|
|
--conf spark.sql.shuffle.partitions=${sparkShufflePartitions}
|
|
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
|
|
</spark-opts>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- Script arguments here -->
|
2023-08-29 09:03:03 +02:00
|
|
|
<arg>${nameNode}/${workingDir}/openaire_id_graph</arg>
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- number of partitions to be used on joins -->
|
2023-05-15 14:50:23 +02:00
|
|
|
<arg>${sparkShufflePartitions}</arg>
|
2023-07-21 15:07:10 +02:00
|
|
|
|
2023-05-16 15:28:48 +02:00
|
|
|
<file>${wfAppPath}/bip-ranker/CC.py#CC.py</file>
|
2023-03-14 18:28:27 +01:00
|
|
|
</spark>
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-09-12 21:31:50 +02:00
|
|
|
<ok to="spark-ram" />
|
2023-03-14 18:28:27 +01:00
|
|
|
<error to="cc-fail" />
|
2023-05-15 14:53:12 +02:00
|
|
|
|
|
|
|
</action>
|
2023-03-14 18:28:27 +01:00
|
|
|
|
2023-07-21 15:07:10 +02:00
|
|
|
<!-- RAM calculation -->
|
2023-03-14 18:28:27 +01:00
|
|
|
<action name="spark-ram">
|
|
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-06-23 11:51:50 +02:00
|
|
|
<master>yarn-cluster</master>
|
2023-03-14 18:28:27 +01:00
|
|
|
<mode>cluster</mode>
|
2023-07-21 15:07:10 +02:00
|
|
|
<name>RAM calculation</name>
|
2023-03-14 18:28:27 +01:00
|
|
|
<jar>TAR.py</jar>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-05-15 14:50:23 +02:00
|
|
|
<spark-opts>
|
|
|
|
--executor-memory=${sparkHighExecutorMemory}
|
|
|
|
--executor-cores=${sparkExecutorCores}
|
|
|
|
--driver-memory=${sparkNormalDriverMemory}
|
|
|
|
--conf spark.sql.shuffle.partitions=${sparkShufflePartitions}
|
|
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
|
|
</spark-opts>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- Script arguments here -->
|
2023-08-29 09:03:03 +02:00
|
|
|
<arg>${nameNode}/${workingDir}/openaire_id_graph</arg>
|
2023-03-14 18:28:27 +01:00
|
|
|
<arg>${ramGamma}</arg>
|
|
|
|
<arg>${currentYear}</arg>
|
|
|
|
<arg>RAM</arg>
|
2023-05-15 14:50:23 +02:00
|
|
|
<arg>${sparkShufflePartitions}</arg>
|
2023-04-28 12:09:13 +02:00
|
|
|
<arg>${checkpointDir}</arg>
|
2023-07-21 15:07:10 +02:00
|
|
|
|
2023-05-16 15:28:48 +02:00
|
|
|
<file>${wfAppPath}/bip-ranker/TAR.py#TAR.py</file>
|
2023-03-14 18:28:27 +01:00
|
|
|
</spark>
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-09-12 21:31:50 +02:00
|
|
|
<ok to="spark-impulse" />
|
2023-03-14 18:28:27 +01:00
|
|
|
<error to="ram-fail" />
|
2023-05-15 14:53:12 +02:00
|
|
|
|
|
|
|
</action>
|
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<action name="spark-impulse">
|
|
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
|
|
|
2023-04-26 19:40:06 +02:00
|
|
|
<master>yarn-cluster</master>
|
2023-03-14 18:28:27 +01:00
|
|
|
<mode>cluster</mode>
|
2023-07-21 15:07:10 +02:00
|
|
|
<name>Impulse calculation</name>
|
2023-03-14 18:28:27 +01:00
|
|
|
<jar>CC.py</jar>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-05-15 14:50:23 +02:00
|
|
|
<spark-opts>
|
|
|
|
--executor-memory=${sparkHighExecutorMemory}
|
|
|
|
--executor-cores=${sparkExecutorCores}
|
|
|
|
--driver-memory=${sparkNormalDriverMemory}
|
|
|
|
--conf spark.sql.shuffle.partitions=${sparkShufflePartitions}
|
|
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
|
|
</spark-opts>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- Script arguments here -->
|
2023-08-29 09:03:03 +02:00
|
|
|
<arg>${nameNode}/${workingDir}/openaire_id_graph</arg>
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- number of partitions to be used on joins -->
|
2023-05-15 14:50:23 +02:00
|
|
|
<arg>${sparkShufflePartitions}</arg>
|
2023-03-14 18:28:27 +01:00
|
|
|
<arg>3</arg>
|
2023-07-21 15:07:10 +02:00
|
|
|
|
2023-05-16 15:28:48 +02:00
|
|
|
<file>${wfAppPath}/bip-ranker/CC.py#CC.py</file>
|
2023-03-14 18:28:27 +01:00
|
|
|
</spark>
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-07-06 12:47:51 +02:00
|
|
|
<ok to="spark-pagerank" />
|
2023-03-14 18:28:27 +01:00
|
|
|
<error to="impulse-fail" />
|
2023-05-15 14:53:12 +02:00
|
|
|
|
|
|
|
</action>
|
2023-03-14 18:28:27 +01:00
|
|
|
|
|
|
|
<action name="spark-pagerank">
|
|
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-06-23 11:51:50 +02:00
|
|
|
<master>yarn-cluster</master>
|
2023-03-14 18:28:27 +01:00
|
|
|
<mode>cluster</mode>
|
2023-07-21 15:07:10 +02:00
|
|
|
<name>Pagerank calculation</name>
|
2023-03-14 18:28:27 +01:00
|
|
|
<jar>PageRank.py</jar>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-05-15 14:50:23 +02:00
|
|
|
<spark-opts>
|
|
|
|
--executor-memory=${sparkHighExecutorMemory}
|
|
|
|
--executor-cores=${sparkExecutorCores}
|
|
|
|
--driver-memory=${sparkNormalDriverMemory}
|
|
|
|
--conf spark.sql.shuffle.partitions=${sparkShufflePartitions}
|
|
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
|
|
</spark-opts>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- Script arguments here -->
|
2023-08-29 09:03:03 +02:00
|
|
|
<arg>${nameNode}/${workingDir}/openaire_id_graph</arg>
|
2023-03-14 18:28:27 +01:00
|
|
|
<arg>${pageRankAlpha}</arg>
|
|
|
|
<arg>${convergenceError}</arg>
|
|
|
|
<arg>${checkpointDir}</arg>
|
|
|
|
<!-- number of partitions to be used on joins -->
|
2023-05-15 14:50:23 +02:00
|
|
|
<arg>${sparkShufflePartitions}</arg>
|
2023-03-14 18:28:27 +01:00
|
|
|
<arg>dfs</arg>
|
2023-07-21 15:07:10 +02:00
|
|
|
|
2023-05-16 15:28:48 +02:00
|
|
|
<file>${wfAppPath}/bip-ranker/PageRank.py#PageRank.py</file>
|
2023-03-14 18:28:27 +01:00
|
|
|
</spark>
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-07-06 12:47:51 +02:00
|
|
|
<ok to="spark-attrank" />
|
2023-03-14 18:28:27 +01:00
|
|
|
<error to="pagerank-fail" />
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
</action>
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<action name="spark-attrank">
|
|
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-06-23 11:51:50 +02:00
|
|
|
<master>yarn-cluster</master>
|
2023-03-14 18:28:27 +01:00
|
|
|
<mode>cluster</mode>
|
2023-07-21 15:07:10 +02:00
|
|
|
<name>AttRank calculation</name>
|
2023-03-14 18:28:27 +01:00
|
|
|
<jar>AttRank.py</jar>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-05-15 14:50:23 +02:00
|
|
|
<spark-opts>
|
|
|
|
--executor-memory=${sparkHighExecutorMemory}
|
|
|
|
--executor-cores=${sparkExecutorCores}
|
|
|
|
--driver-memory=${sparkNormalDriverMemory}
|
|
|
|
--conf spark.sql.shuffle.partitions=${sparkShufflePartitions}
|
|
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
|
|
</spark-opts>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- Script arguments here -->
|
2023-08-29 09:03:03 +02:00
|
|
|
<arg>${nameNode}/${workingDir}/openaire_id_graph</arg>
|
2023-03-14 18:28:27 +01:00
|
|
|
<arg>${attrankAlpha}</arg>
|
|
|
|
<arg>${attrankBeta}</arg>
|
|
|
|
<arg>${attrankGamma}</arg>
|
|
|
|
<arg>${attrankRho}</arg>
|
|
|
|
<arg>${currentYear}</arg>
|
|
|
|
<arg>${attrankStartYear}</arg>
|
|
|
|
<arg>${convergenceError}</arg>
|
|
|
|
<arg>${checkpointDir}</arg>
|
|
|
|
<!-- number of partitions to be used on joins -->
|
2023-05-15 14:50:23 +02:00
|
|
|
<arg>${sparkShufflePartitions}</arg>
|
2023-03-14 18:28:27 +01:00
|
|
|
<arg>dfs</arg>
|
2023-07-21 15:07:10 +02:00
|
|
|
|
2023-05-16 15:28:48 +02:00
|
|
|
<file>${wfAppPath}/bip-ranker/AttRank.py#AttRank.py</file>
|
2023-03-14 18:28:27 +01:00
|
|
|
</spark>
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-07-06 12:47:51 +02:00
|
|
|
<ok to="get-file-names" />
|
2023-03-14 18:28:27 +01:00
|
|
|
<error to="attrank-fail" />
|
2023-05-15 14:53:12 +02:00
|
|
|
|
|
|
|
</action>
|
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<action name="get-file-names">
|
|
|
|
<shell xmlns="uri:oozie:shell-action:0.3">
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-04-26 19:40:06 +02:00
|
|
|
<!-- Exec is needed for shell commands - points to type of shell command -->
|
|
|
|
<exec>/usr/bin/bash</exec>
|
|
|
|
<!-- name of script to run -->
|
|
|
|
<argument>get_ranking_files.sh</argument>
|
|
|
|
<!-- We only pass the directory where we expect to find the rankings -->
|
2023-06-23 11:51:50 +02:00
|
|
|
<argument>${workingDir}</argument>
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<file>${wfAppPath}/get_ranking_files.sh#get_ranking_files.sh</file>
|
|
|
|
<!-- Get the output in order to be usable by following actions -->
|
|
|
|
<capture-output/>
|
|
|
|
</shell>
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<ok to="format-result-files" />
|
|
|
|
<error to="filename-getting-error" />
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
</action>
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- Now we will run in parallel the formatting of ranking files for BiP! DB and openaire (json files) -->
|
|
|
|
<fork name="format-result-files">
|
|
|
|
<path start="format-bip-files"/>
|
|
|
|
<path start="format-json-files"/>
|
|
|
|
</fork>
|
2023-05-15 14:53:12 +02:00
|
|
|
|
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- Format json files -->
|
|
|
|
<!-- Two parts: a) format files b) make the file endings .json.gz -->
|
|
|
|
<action name="format-json-files">
|
|
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-04-26 19:40:06 +02:00
|
|
|
<master>yarn-cluster</master>
|
2023-03-14 18:28:27 +01:00
|
|
|
<mode>cluster</mode>
|
|
|
|
<name>Format Ranking Results JSON</name>
|
|
|
|
<jar>format_ranking_results.py</jar>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-05-15 14:50:23 +02:00
|
|
|
<spark-opts>
|
|
|
|
--executor-memory=${sparkNormalExecutorMemory}
|
|
|
|
--executor-cores=${sparkExecutorCores}
|
|
|
|
--driver-memory=${sparkNormalDriverMemory}
|
|
|
|
--conf spark.sql.shuffle.partitions=${sparkShufflePartitions}
|
|
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
|
|
</spark-opts>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- Script arguments here -->
|
2023-05-11 13:42:25 +02:00
|
|
|
<arg>json-5-way</arg>
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- Input files must be identified dynamically -->
|
2023-05-15 14:50:23 +02:00
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['pr_file']}</arg>
|
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['attrank_file']}</arg>
|
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['cc_file']}</arg>
|
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['impulse_file']}</arg>
|
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['ram_file']}</arg>
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- Num partitions -->
|
2023-05-15 14:50:23 +02:00
|
|
|
<arg>${sparkShufflePartitions}</arg>
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- Type of data to be produced [bip (dois) / openaire (openaire-ids) ] -->
|
|
|
|
<arg>openaire</arg>
|
2023-07-21 15:07:10 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<file>${wfAppPath}/format_ranking_results.py#format_ranking_results.py</file>
|
2023-05-15 14:53:12 +02:00
|
|
|
</spark>
|
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<ok to="join-file-formatting" />
|
|
|
|
<error to="json-formatting-fail" />
|
2023-05-15 14:53:12 +02:00
|
|
|
</action>
|
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- This is the second line of parallel workflow execution where we create the BiP! DB files -->
|
|
|
|
<action name="format-bip-files">
|
|
|
|
<!-- This is required as a tag for spark jobs, regardless of programming language -->
|
|
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-04-26 19:40:06 +02:00
|
|
|
<!-- using configs from an example on openaire -->
|
|
|
|
<master>yarn-cluster</master>
|
2023-03-14 18:28:27 +01:00
|
|
|
<mode>cluster</mode>
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- This is the name of our job -->
|
|
|
|
<name>Format Ranking Results BiP! DB</name>
|
|
|
|
<!-- Script name goes here -->
|
|
|
|
<jar>format_ranking_results.py</jar>
|
|
|
|
<!-- spark configuration options: I've taken most of them from an example from dhp workflows / Master value stolen from sandro -->
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-05-15 14:50:23 +02:00
|
|
|
<spark-opts>
|
|
|
|
--executor-memory=${sparkNormalExecutorMemory}
|
|
|
|
--executor-cores=${sparkExecutorCores}
|
|
|
|
--driver-memory=${sparkNormalDriverMemory}
|
|
|
|
--conf spark.sql.shuffle.partitions=${sparkShufflePartitions}
|
|
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
|
|
</spark-opts>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- Script arguments here -->
|
|
|
|
<arg>zenodo</arg>
|
|
|
|
<!-- Input files must be identified dynamically -->
|
2023-05-15 14:50:23 +02:00
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['pr_file']}</arg>
|
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['attrank_file']}</arg>
|
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['cc_file']}</arg>
|
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['impulse_file']}</arg>
|
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['ram_file']}</arg>
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- Num partitions -->
|
2023-05-15 14:50:23 +02:00
|
|
|
<arg>${sparkShufflePartitions}</arg>
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- Type of data to be produced [bip (dois) / openaire (openaire-ids) ] -->
|
|
|
|
<arg>openaire</arg>
|
|
|
|
<!-- This needs to point to the file on the hdfs i think -->
|
|
|
|
<file>${wfAppPath}/format_ranking_results.py#format_ranking_results.py</file>
|
2023-05-15 14:53:12 +02:00
|
|
|
</spark>
|
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<ok to="join-file-formatting" />
|
|
|
|
<error to="bip-formatting-fail" />
|
2023-05-15 14:53:12 +02:00
|
|
|
</action>
|
|
|
|
|
2023-07-21 15:07:10 +02:00
|
|
|
<!-- Finish formatting jobs -->
|
2023-05-15 14:53:12 +02:00
|
|
|
<join name="join-file-formatting" to="map-openaire-to-doi"/>
|
2023-03-14 18:28:27 +01:00
|
|
|
|
2023-07-21 15:07:10 +02:00
|
|
|
<!-- maps openaire ids to DOIs -->
|
2023-03-14 18:28:27 +01:00
|
|
|
<action name="map-openaire-to-doi">
|
|
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
2023-05-15 21:28:48 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- Delete previously created doi synonym folder -->
|
|
|
|
<prepare>
|
|
|
|
<delete path="${synonymFolder}"/>
|
|
|
|
</prepare>
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-06-23 11:51:50 +02:00
|
|
|
<master>yarn-cluster</master>
|
2023-03-14 18:28:27 +01:00
|
|
|
<mode>cluster</mode>
|
|
|
|
<name>Openaire-DOI synonym collection</name>
|
|
|
|
<jar>map_openaire_ids_to_dois.py</jar>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-05-15 14:50:23 +02:00
|
|
|
<spark-opts>
|
|
|
|
--executor-memory=${sparkHighExecutorMemory}
|
|
|
|
--executor-cores=${sparkExecutorCores}
|
|
|
|
--driver-memory=${sparkHighDriverMemory}
|
|
|
|
--conf spark.sql.shuffle.partitions=${sparkShufflePartitions}
|
|
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
|
|
</spark-opts>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- Script arguments here -->
|
2023-06-23 11:51:50 +02:00
|
|
|
<arg>${openaireDataInput}/</arg>
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- number of partitions to be used on joins -->
|
|
|
|
<arg>${synonymFolder}</arg>
|
2023-07-21 15:07:10 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<file>${wfAppPath}/map_openaire_ids_to_dois.py#map_openaire_ids_to_dois.py</file>
|
|
|
|
</spark>
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<ok to="map-scores-to-dois" />
|
|
|
|
<error to="synonym-collection-fail" />
|
2023-05-15 14:53:12 +02:00
|
|
|
|
|
|
|
</action>
|
2023-03-14 18:28:27 +01:00
|
|
|
|
2023-07-21 15:07:10 +02:00
|
|
|
<!-- mapping openaire scores to DOIs -->
|
2023-03-14 18:28:27 +01:00
|
|
|
<action name="map-scores-to-dois">
|
|
|
|
<!-- This is required as a tag for spark jobs, regardless of programming language -->
|
|
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
|
|
|
2023-06-23 11:51:50 +02:00
|
|
|
<!-- using configs from an example on openaire -->
|
|
|
|
<master>yarn-cluster</master>
|
2023-03-14 18:28:27 +01:00
|
|
|
<mode>cluster</mode>
|
|
|
|
<name>Mapping Openaire Scores to DOIs</name>
|
|
|
|
<jar>map_scores_to_dois.py</jar>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-05-15 14:50:23 +02:00
|
|
|
<spark-opts>
|
|
|
|
--executor-memory=${sparkHighExecutorMemory}
|
|
|
|
--executor-cores=${sparkExecutorCores}
|
|
|
|
--driver-memory=${sparkHighDriverMemory}
|
|
|
|
--conf spark.sql.shuffle.partitions=${sparkShufflePartitions}
|
|
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
|
|
</spark-opts>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- Script arguments here -->
|
|
|
|
<arg>${synonymFolder}</arg>
|
|
|
|
<!-- Number of partitions -->
|
2023-05-15 14:50:23 +02:00
|
|
|
<arg>${sparkShufflePartitions}</arg>
|
2023-03-14 18:28:27 +01:00
|
|
|
<!-- The remaining input are the ranking files fproduced for bip db-->
|
2023-05-15 14:50:23 +02:00
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['pr_file']}</arg>
|
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['attrank_file']}</arg>
|
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['cc_file']}</arg>
|
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['impulse_file']}</arg>
|
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['ram_file']}</arg>
|
2023-03-14 18:28:27 +01:00
|
|
|
|
|
|
|
<file>${wfAppPath}/map_scores_to_dois.py#map_scores_to_dois.py</file>
|
|
|
|
</spark>
|
2023-05-15 14:53:12 +02:00
|
|
|
|
2023-08-11 14:56:53 +02:00
|
|
|
<ok to="project-impact-indicators" />
|
2023-03-14 18:28:27 +01:00
|
|
|
<error to="map-scores-fail" />
|
2023-05-15 14:53:12 +02:00
|
|
|
|
|
|
|
</action>
|
2023-03-21 15:14:15 +01:00
|
|
|
|
2023-04-07 15:30:12 +02:00
|
|
|
<action name="project-impact-indicators">
|
|
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
2023-05-15 21:28:48 +02:00
|
|
|
|
2023-04-07 15:30:12 +02:00
|
|
|
<master>yarn-cluster</master>
|
|
|
|
<mode>cluster</mode>
|
2023-07-21 15:07:10 +02:00
|
|
|
<name>Project Impact Indicators calculation</name>
|
2023-04-18 11:26:01 +02:00
|
|
|
<jar>projects_impact.py</jar>
|
2023-05-15 14:59:51 +02:00
|
|
|
|
2023-05-15 14:50:23 +02:00
|
|
|
<spark-opts>
|
|
|
|
--executor-memory=${sparkHighExecutorMemory}
|
|
|
|
--executor-cores=${sparkExecutorCores}
|
|
|
|
--driver-memory=${sparkNormalDriverMemory}
|
|
|
|
--conf spark.sql.shuffle.partitions=${sparkShufflePartitions}
|
2023-04-07 15:30:12 +02:00
|
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
2023-05-15 14:50:23 +02:00
|
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
|
|
</spark-opts>
|
2023-04-07 15:30:12 +02:00
|
|
|
|
|
|
|
<!-- Script arguments here -->
|
|
|
|
<!-- graph data folder from which to read relations -->
|
2023-07-21 14:26:50 +02:00
|
|
|
<arg>${openaireDataInput}/relation</arg>
|
2023-04-07 15:30:12 +02:00
|
|
|
|
|
|
|
<!-- input files with impact indicators for results -->
|
2023-05-15 14:50:23 +02:00
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['pr_file']}</arg>
|
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['attrank_file']}</arg>
|
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['cc_file']}</arg>
|
|
|
|
<arg>${nameNode}/${workingDir}/${wf:actionData('get-file-names')['impulse_file']}</arg>
|
2023-04-07 15:30:12 +02:00
|
|
|
|
|
|
|
<!-- number of partitions to be used on joins -->
|
2023-05-15 14:50:23 +02:00
|
|
|
<arg>${sparkShufflePartitions}</arg>
|
2023-04-07 15:30:12 +02:00
|
|
|
|
|
|
|
<arg>${projectImpactIndicatorsOutput}</arg>
|
|
|
|
<file>${wfAppPath}/projects_impact.py#projects_impact.py</file>
|
|
|
|
</spark>
|
|
|
|
|
2023-08-11 14:56:53 +02:00
|
|
|
<ok to="delete-output-path-for-actionset" />
|
2023-04-07 15:30:12 +02:00
|
|
|
<error to="project-impact-indicators-fail" />
|
2023-08-11 14:56:53 +02:00
|
|
|
</action>
|
2023-04-07 15:30:12 +02:00
|
|
|
|
2023-08-11 14:56:53 +02:00
|
|
|
<!-- Re-create folder for actionsets -->
|
|
|
|
<action name="delete-output-path-for-actionset">
|
|
|
|
<fs>
|
|
|
|
<delete path="${actionSetOutputPath}"/>
|
|
|
|
<mkdir path="${actionSetOutputPath}"/>
|
|
|
|
</fs>
|
|
|
|
<ok to="create-actionset"/>
|
|
|
|
<error to="actionset-delete-fail"/>
|
2023-04-07 15:30:12 +02:00
|
|
|
</action>
|
2023-03-14 18:28:27 +01:00
|
|
|
|
2023-08-11 14:56:53 +02:00
|
|
|
<action name="create-actionset">
|
2023-04-26 19:40:06 +02:00
|
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
2023-07-21 15:07:10 +02:00
|
|
|
|
2023-07-21 14:26:50 +02:00
|
|
|
<master>yarn-cluster</master>
|
2023-04-26 19:40:06 +02:00
|
|
|
<mode>cluster</mode>
|
2023-08-11 14:56:53 +02:00
|
|
|
<name>Produces the atomic action with the bip finder scores</name>
|
2023-04-26 19:40:06 +02:00
|
|
|
<class>eu.dnetlib.dhp.actionmanager.bipfinder.SparkAtomicActionScoreJob</class>
|
|
|
|
<jar>dhp-aggregation-${projectVersion}.jar</jar>
|
2023-07-21 15:07:10 +02:00
|
|
|
|
2023-04-26 19:40:06 +02:00
|
|
|
<spark-opts>
|
2023-05-15 14:50:23 +02:00
|
|
|
--executor-memory=${sparkNormalExecutorMemory}
|
2023-04-26 19:40:06 +02:00
|
|
|
--executor-cores=${sparkExecutorCores}
|
2023-05-15 14:50:23 +02:00
|
|
|
--driver-memory=${sparkNormalDriverMemory}
|
2023-04-26 19:40:06 +02:00
|
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
|
|
</spark-opts>
|
2023-07-21 15:07:10 +02:00
|
|
|
|
2023-08-11 14:56:53 +02:00
|
|
|
<arg>--resultsInputPath</arg><arg>${bipScorePath}</arg>
|
|
|
|
<arg>--projectsInputPath</arg><arg>${projectImpactIndicatorsOutput}</arg>
|
|
|
|
<arg>--outputPath</arg><arg>${actionSetOutputPath}</arg>
|
2023-04-26 19:40:06 +02:00
|
|
|
</spark>
|
2023-07-21 15:07:10 +02:00
|
|
|
|
2023-04-26 19:40:06 +02:00
|
|
|
<ok to="end"/>
|
2023-08-11 14:56:53 +02:00
|
|
|
<error to="actionset-creation-fail"/>
|
2023-04-26 19:40:06 +02:00
|
|
|
</action>
|
2023-03-14 18:28:27 +01:00
|
|
|
|
2023-05-15 16:52:20 +02:00
|
|
|
<!-- Definitions of failure messages -->
|
2023-05-15 18:55:35 +02:00
|
|
|
<kill name="openaire-graph-error">
|
|
|
|
<message>Creation of openaire-graph failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
|
2023-05-15 16:52:20 +02:00
|
|
|
</kill>
|
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<kill name="cc-fail">
|
|
|
|
<message>CC failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
|
|
|
|
</kill>
|
2023-05-15 16:52:20 +02:00
|
|
|
|
2023-05-15 18:55:35 +02:00
|
|
|
<kill name="ram-fail">
|
|
|
|
<message>RAM failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
|
|
|
|
</kill>
|
|
|
|
|
2023-03-14 18:28:27 +01:00
|
|
|
<kill name="impulse-fail">
|
|
|
|
<message>Impulse failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
|
2023-05-15 16:52:20 +02:00
|
|
|
</kill>
|
|
|
|
|
2023-05-15 18:55:35 +02:00
|
|
|
<kill name="pagerank-fail">
|
|
|
|
<message>PageRank failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
|
2023-05-15 16:52:20 +02:00
|
|
|
</kill>
|
2023-03-14 18:28:27 +01:00
|
|
|
|
2023-05-15 18:55:35 +02:00
|
|
|
<kill name="attrank-fail">
|
|
|
|
<message>AttRank failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
|
|
|
|
</kill>
|
|
|
|
|
|
|
|
<kill name="filename-getting-error">
|
|
|
|
<message>Error getting key-value pairs for output files, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
|
|
|
|
</kill>
|
|
|
|
|
|
|
|
<kill name="json-formatting-fail">
|
|
|
|
<message>Error formatting json files, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
|
|
|
|
</kill>
|
|
|
|
|
|
|
|
<kill name="bip-formatting-fail">
|
|
|
|
<message>Error formatting BIP files, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
|
2023-05-15 16:52:20 +02:00
|
|
|
</kill>
|
2023-03-14 18:28:27 +01:00
|
|
|
|
|
|
|
<kill name="synonym-collection-fail">
|
|
|
|
<message>Synonym collection failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
|
2023-05-15 16:52:20 +02:00
|
|
|
</kill>
|
2023-03-14 18:28:27 +01:00
|
|
|
|
|
|
|
<kill name="map-scores-fail">
|
|
|
|
<message>Mapping scores to DOIs failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
|
2023-05-15 16:52:20 +02:00
|
|
|
</kill>
|
2023-03-14 18:28:27 +01:00
|
|
|
|
2023-03-21 15:14:15 +01:00
|
|
|
<kill name="actionset-delete-fail">
|
|
|
|
<message>Deleting output path for actionsets failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
|
2023-05-15 16:52:20 +02:00
|
|
|
</kill>
|
2023-03-21 15:14:15 +01:00
|
|
|
|
|
|
|
<kill name="actionset-creation-fail">
|
2023-04-26 19:40:06 +02:00
|
|
|
<message>ActionSet creation for results failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
|
2023-04-07 15:30:12 +02:00
|
|
|
</kill>
|
2023-03-21 15:14:15 +01:00
|
|
|
|
2023-04-07 15:30:12 +02:00
|
|
|
<kill name="project-impact-indicators-fail">
|
|
|
|
<message>Calculating project impact indicators failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
|
|
|
|
</kill>
|
2023-03-21 17:24:12 +01:00
|
|
|
|
2023-05-15 16:52:20 +02:00
|
|
|
<!-- Define ending node -->
|
|
|
|
<end name="end" />
|
|
|
|
|
</workflow-app>