<workflow-app name="patch_refereed" xmlns="uri:oozie:workflow:0.5">
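    <!--
        Patches the refereed information of the result instances in the PRODUCTION graph
        (inputPathProd) with the values prepared from the BETA graph (inputPathBeta),
        writing the patched graph to outputPath.
    -->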
    <parameters>
        <property>
            <name>inputPathBeta</name>
            <description>the source path of the BETA graph</description>
        </property>
        <property>
            <name>inputPathProd</name>
            <description>the source path of the PRODUCTION graph</description>
        </property>
        <property>
            <name>outputPath</name>
            <description>the path where the patched graph is stored</description>
        </property>
    </parameters>

    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>oozie.action.sharelib.for.spark</name>
                <value>${oozieActionShareLibForSpark2}</value>
            </property>
        </configuration>
    </global>

    <start to="reset_outputpath"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>
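
    <!-- Removes and recreates the output path, so the workflow can be re-run from scratch. -->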
    <action name="reset_outputpath">
        <fs>
            <delete path="${outputPath}"/>
            <mkdir path="${outputPath}"/>
        </fs>
        <ok to="copy_entities"/>
        <error to="Kill"/>
    </action>
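
    <!-- Entities that are not patched (relations, organizations, projects, datasources) are copied verbatim from the PRODUCTION graph. -->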
    <fork name="copy_entities">
        <path start="copy_relation"/>
        <path start="copy_organization"/>
        <path start="copy_projects"/>
        <path start="copy_datasources"/>
    </fork>

    <action name="copy_relation">
        <distcp xmlns="uri:oozie:distcp-action:0.2">
            <arg>${nameNode}/${inputPathProd}/relation</arg>
            <arg>${nameNode}/${outputPath}/relation</arg>
        </distcp>
        <ok to="copy_wait"/>
        <error to="Kill"/>
    </action>

    <action name="copy_organization">
        <distcp xmlns="uri:oozie:distcp-action:0.2">
            <arg>${nameNode}/${inputPathProd}/organization</arg>
            <arg>${nameNode}/${outputPath}/organization</arg>
        </distcp>
        <ok to="copy_wait"/>
        <error to="Kill"/>
    </action>

    <action name="copy_projects">
        <distcp xmlns="uri:oozie:distcp-action:0.2">
            <arg>${nameNode}/${inputPathProd}/project</arg>
            <arg>${nameNode}/${outputPath}/project</arg>
        </distcp>
        <ok to="copy_wait"/>
        <error to="Kill"/>
    </action>

    <action name="copy_datasources">
        <distcp xmlns="uri:oozie:distcp-action:0.2">
            <arg>${nameNode}/${inputPathProd}/datasource</arg>
            <arg>${nameNode}/${outputPath}/datasource</arg>
        </distcp>
        <ok to="copy_wait"/>
        <error to="Kill"/>
    </action>

    <join name="copy_wait" to="prepare_info"/>
    <fork name="prepare_info">
        <path start="prepare_publication"/>
        <path start="prepare_dataset"/>
        <path start="prepare_software"/>
        <path start="prepare_orp"/>
    </fork>

    <action name="prepare_publication">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>prepareRefereed-publication</name>
            <class>eu.dnetlib.dhp.patchrefereed.SparkPrepareResultInstanceList</class>
            <jar>dhp-patch-${projectVersion}.jar</jar>
            <spark-opts>
                --num-executors=${sparkExecutorNumber}
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--inputPath</arg><arg>${inputPathBeta}/publication</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
            <arg>--outputPath</arg><arg>${workingDir}/publication</arg>
        </spark>
        <ok to="wait"/>
        <error to="Kill"/>
    </action>

    <action name="prepare_dataset">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>prepareRefereed-dataset</name>
            <class>eu.dnetlib.dhp.patchrefereed.SparkPrepareResultInstanceList</class>
            <jar>dhp-patch-${projectVersion}.jar</jar>
            <spark-opts>
                --num-executors=${sparkExecutorNumber}
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--inputPath</arg><arg>${inputPathBeta}/dataset</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
            <arg>--outputPath</arg><arg>${workingDir}/dataset</arg>
        </spark>
        <ok to="wait"/>
        <error to="Kill"/>
    </action>

    <action name="prepare_orp">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>prepareRefereed-other</name>
            <class>eu.dnetlib.dhp.patchrefereed.SparkPrepareResultInstanceList</class>
            <jar>dhp-patch-${projectVersion}.jar</jar>
            <spark-opts>
                --num-executors=${sparkExecutorNumber}
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--inputPath</arg><arg>${inputPathBeta}/otherresearchproduct</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
            <arg>--outputPath</arg><arg>${workingDir}/other</arg>
        </spark>
        <ok to="wait"/>
        <error to="Kill"/>
    </action>

    <action name="prepare_software">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>prepareRefereed-software</name>
            <class>eu.dnetlib.dhp.patchrefereed.SparkPrepareResultInstanceList</class>
            <jar>dhp-patch-${projectVersion}.jar</jar>
            <spark-opts>
                --num-executors=${sparkExecutorNumber}
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--inputPath</arg><arg>${inputPathBeta}/software</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
            <arg>--outputPath</arg><arg>${workingDir}/software</arg>
        </spark>
        <ok to="wait"/>
        <error to="Kill"/>
    </action>

    <join name="wait" to="patch_refereed"/>
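
    <!-- For each result type, applies the prepared instance information to the PRODUCTION records and writes the patched results to ${outputPath}. -->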
    <fork name="patch_refereed">
        <path start="patch_publication"/>
        <path start="patch_dataset"/>
        <path start="patch_software"/>
        <path start="patch_orp"/>
    </fork>

    <action name="patch_publication">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>patchRefereed-publication</name>
            <class>eu.dnetlib.dhp.patchrefereed.SparkPatchRefereed</class>
            <jar>dhp-patch-${projectVersion}.jar</jar>
            <spark-opts>
                --num-executors=${sparkExecutorNumber}
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.speculation=false
                --conf spark.hadoop.mapreduce.map.speculative=false
                --conf spark.hadoop.mapreduce.reduce.speculative=false
            </spark-opts>
            <arg>--inputPath</arg><arg>${inputPathProd}/publication</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/publication</arg>
            <arg>--outputPath</arg><arg>${outputPath}/publication</arg>
        </spark>
        <ok to="wait_patch"/>
        <error to="Kill"/>
    </action>

    <action name="patch_dataset">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>patchRefereed-dataset</name>
            <class>eu.dnetlib.dhp.patchrefereed.SparkPatchRefereed</class>
            <jar>dhp-patch-${projectVersion}.jar</jar>
            <spark-opts>
                --num-executors=${sparkExecutorNumber}
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.speculation=false
                --conf spark.hadoop.mapreduce.map.speculative=false
                --conf spark.hadoop.mapreduce.reduce.speculative=false
            </spark-opts>
            <arg>--inputPath</arg><arg>${inputPathProd}/dataset</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/dataset</arg>
            <arg>--outputPath</arg><arg>${outputPath}/dataset</arg>
        </spark>
        <ok to="wait_patch"/>
        <error to="Kill"/>
    </action>

    <action name="patch_software">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>patchRefereed-software</name>
            <class>eu.dnetlib.dhp.patchrefereed.SparkPatchRefereed</class>
            <jar>dhp-patch-${projectVersion}.jar</jar>
            <spark-opts>
                --num-executors=${sparkExecutorNumber}
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.speculation=false
                --conf spark.hadoop.mapreduce.map.speculative=false
                --conf spark.hadoop.mapreduce.reduce.speculative=false
            </spark-opts>
            <arg>--inputPath</arg><arg>${inputPathProd}/software</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/software</arg>
            <arg>--outputPath</arg><arg>${outputPath}/software</arg>
        </spark>
        <ok to="wait_patch"/>
        <error to="Kill"/>
    </action>

    <action name="patch_orp">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>patchRefereed-orp</name>
            <class>eu.dnetlib.dhp.patchrefereed.SparkPatchRefereed</class>
            <jar>dhp-patch-${projectVersion}.jar</jar>
            <spark-opts>
                --num-executors=${sparkExecutorNumber}
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.speculation=false
                --conf spark.hadoop.mapreduce.map.speculative=false
                --conf spark.hadoop.mapreduce.reduce.speculative=false
            </spark-opts>
            <arg>--inputPath</arg><arg>${inputPathProd}/otherresearchproduct</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/other</arg>
            <arg>--outputPath</arg><arg>${outputPath}/otherresearchproduct</arg>
        </spark>
        <ok to="wait_patch"/>
        <error to="Kill"/>
    </action>
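
    <!-- Once all the patch branches have completed, the workflow ends. -->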
    <join name="wait_patch" to="End"/>

    <end name="End"/>

</workflow-app>