<workflow-app name="H2020Programme" xmlns="uri:oozie:workflow:0.5">
    <parameters>
        <property>
            <name>projectFileURL</name>
            <description>the URL from which to download the projects file</description>
        </property>
        <property>
            <name>programmeFileURL</name>
            <description>the URL from which to download the programme file</description>
        </property>
        <property>
            <name>outputPath</name>
            <description>the path where to store the action set</description>
        </property>
    </parameters>
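
    <!-- Example submission properties (hypothetical values, shown only to illustrate
         how the parameters above might be supplied via job.properties):
           projectFileURL=https://example.org/h2020/projects.csv
           programmeFileURL=https://example.org/h2020/programme.csv
           outputPath=/tmp/h2020/actionset
    -->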

    <start to="deleteoutputpath"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>
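
    <!-- Remove any previous output and recreate the directory, so reruns start from a clean state. -->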
    <action name="deleteoutputpath">
        <fs>
            <delete path='${outputPath}'/>
            <mkdir path='${outputPath}'/>
        </fs>
        <ok to="get_project_file"/>
        <error to="Kill"/>
    </action>
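
    <!-- Download the H2020 projects file from projectFileURL and store it on HDFS as projects.csv. -->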
    <action name="get_project_file">
        <java>
            <main-class>eu.dnetlib.dhp.actionset.h2020programme.GetFile</main-class>
            <arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
            <arg>--fileUrl</arg><arg>${projectFileURL}</arg>
            <arg>--hdfsPath</arg><arg>${workingDir}/projects.csv</arg>
        </java>
        <ok to="get_programme_file"/>
        <error to="Kill"/>
    </action>
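
    <!-- Download the H2020 programme file from programmeFileURL and store it on HDFS as programme.csv. -->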
    <action name="get_programme_file">
        <java>
            <main-class>eu.dnetlib.dhp.actionset.h2020programme.GetFile</main-class>
            <arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
            <arg>--fileUrl</arg><arg>${programmeFileURL}</arg>
            <arg>--hdfsPath</arg><arg>${workingDir}/programme.csv</arg>
        </java>
        <ok to="End"/>
        <error to="Kill"/>
    </action>
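
    <!-- The Spark action below, which would build the action set from the downloaded files,
         is currently disabled; the workflow ends after the two downloads. -->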
    <!-- <action name="create_actionset">-->
    <!--     <spark xmlns="uri:oozie:spark-action:0.2">-->
    <!--         <master>yarn</master>-->
    <!--         <mode>cluster</mode>-->
    <!--         <name>ProjectProgrammeAS</name>-->
    <!--         <class>eu.dnetlib.dhp.actionset.h2020programme</class>-->
    <!--         <jar>dhp-aggregation-${projectVersion}.jar</jar>-->
    <!--         <spark-opts>-->
    <!--             --executor-cores=${sparkExecutorCores}-->
    <!--             --executor-memory=${sparkExecutorMemory}-->
    <!--             --driver-memory=${sparkDriverMemory}-->
    <!--             --conf spark.extraListeners=${spark2ExtraListeners}-->
    <!--             --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}-->
    <!--             --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}-->
    <!--             --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}-->
    <!--             --conf spark.sql.shuffle.partitions=3840-->
    <!--         </spark-opts>-->
    <!--         <arg>--sourcePath</arg><arg>${sourcePath}/relation</arg>-->
    <!--         <arg>--outputPath</arg><arg>${outputPath}/relation</arg>-->
    <!--         <arg>--hdfsPath</arg><arg>${workingDir}/blacklist</arg>-->
    <!--         <arg>--mergesPath</arg><arg>${workingDir}/mergesRelation</arg>-->
    <!--     </spark>-->
    <!--     <ok to="End"/>-->
    <!--     <error to="Kill"/>-->
    <!-- </action>-->

    <end name="End"/>
</workflow-app>