dnet-hadoop/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/sx/graph/bio/oozie_app/workflow.xml

<workflow-app name="Transform_BioEntity_Workflow" xmlns="uri:oozie:workflow:0.5">
    <parameters>
        <property>
            <name>PDBPath</name>
            <description>the PDB database working path</description>
        </property>
        <property>
            <name>UNIPROTDBPath</name>
            <description>the UNIPROT database working path</description>
        </property>
        <property>
            <name>EBIDataset</name>
            <description>the EBI links dataset path</description>
        </property>
        <property>
            <name>ScholixResolvedDBPath</name>
            <description>the resolved Scholix dataset path</description>
        </property>
        <property>
            <name>CrossrefLinksPath</name>
            <description>the Crossref links path</description>
        </property>
        <property>
            <name>targetPath</name>
            <description>the target working directory path</description>
        </property>
    </parameters>
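    <!--
        The workflow is a linear chain: ConvertPDB -> ConvertUNIPROT -> ConvertEBILinks
        -> ConvertScholixResolved -> ConvertCrossrefLinks -> End. Each action routes
        failures to the Kill node below.
    -->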
    <start to="ConvertPDB"/>
    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>
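    <!--
        Transforms the PDB dump (PDBPath) into an OAF dataset under targetPath/pdb_OAF.
        All five Spark actions share the same spark-opts block (executor and driver sizing
        from job properties, spark.sql.shuffle.partitions fixed at 2000); they differ only
        in input path, driver class or database switch, and output path.
    -->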
<action name="ConvertPDB">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>Convert PDB to OAF Dataset</name>
<class>eu.dnetlib.dhp.sx.graph.bio.SparkTransformBioDatabaseToOAF</class>
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
<spark-opts>
--executor-memory=${sparkExecutorMemory}
--executor-cores=${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.shuffle.partitions=2000
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts>
<arg>--master</arg><arg>yarn</arg>
<arg>--dbPath</arg><arg>${PDBPath}</arg>
<arg>--database</arg><arg>PDB</arg>
<arg>--targetPath</arg><arg>${targetPath}/pdb_OAF</arg>
</spark>
<ok to="ConvertUNIPROT"/>
<error to="Kill"/>
</action>
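    <!-- Same transformer as ConvertPDB, switched to the UNIPROT parser. -->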
<action name="ConvertUNIPROT">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>Convert UNIPROT to OAF Dataset</name>
<class>eu.dnetlib.dhp.sx.graph.bio.SparkTransformBioDatabaseToOAF</class>
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
<spark-opts>
--executor-memory=${sparkExecutorMemory}
--executor-cores=${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.shuffle.partitions=2000
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts>
<arg>--master</arg><arg>yarn</arg>
<arg>--dbPath</arg><arg>${UNIPROTDBPath}</arg>
<arg>--database</arg><arg>UNIPROT</arg>
<arg>--targetPath</arg><arg>${targetPath}/uniprot_OAF</arg>
</spark>
<ok to="ConvertEBILinks"/>
<error to="Kill"/>
</action>
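    <!--
        EBI links are converted by a dedicated driver (SparkEBILinksToOaf), which takes
        a plain sourcePath argument instead of the dbPath/database pair used by the
        other actions.
    -->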
<action name="ConvertEBILinks">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>Convert EBI Links to OAF Dataset</name>
<class>eu.dnetlib.dhp.sx.graph.ebi.SparkEBILinksToOaf</class>
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
<spark-opts>
--executor-memory=${sparkExecutorMemory}
--executor-cores=${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.shuffle.partitions=2000
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts>
<arg>--master</arg><arg>yarn</arg>
<arg>--sourcePath</arg><arg>${EBIDataset}</arg>
<arg>--targetPath</arg><arg>${targetPath}/ebi_OAF</arg>
</spark>
<ok to="ConvertScholixResolved"/>
<error to="Kill"/>
</action>
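    <!-- Reuses the generic bio transformer with the SCHOLIX switch to map the resolved Scholix dataset. -->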
<action name="ConvertScholixResolved">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>Convert Scholix to OAF Dataset</name>
<class>eu.dnetlib.dhp.sx.graph.bio.SparkTransformBioDatabaseToOAF</class>
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
<spark-opts>
--executor-memory=${sparkExecutorMemory}
--executor-cores=${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.shuffle.partitions=2000
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts>
<arg>--master</arg><arg>yarn</arg>
<arg>--dbPath</arg><arg>${ScholixResolvedDBPath}</arg>
<arg>--database</arg><arg>SCHOLIX</arg>
<arg>--targetPath</arg><arg>${targetPath}/scholix_resolved_OAF</arg>
</spark>
<ok to="ConvertCrossrefLinks"/>
<error to="Kill"/>
</action>
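    <!--
        Maps Crossref links via the CROSSREF_LINKS switch; the output path marks the
        resulting relations as unresolved (crossref_unresolved_relation_OAF).
    -->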
<action name="ConvertCrossrefLinks">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>Convert Crossref Links to OAF Dataset</name>
<class>eu.dnetlib.dhp.sx.graph.bio.SparkTransformBioDatabaseToOAF</class>
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
<spark-opts>
--executor-memory=${sparkExecutorMemory}
--executor-cores=${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.shuffle.partitions=2000
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts>
<arg>--master</arg><arg>yarn</arg>
<arg>--dbPath</arg><arg>${CrossrefLinksPath}</arg>
<arg>--database</arg><arg>CROSSREF_LINKS</arg>
<arg>--targetPath</arg><arg>${targetPath}/crossref_unresolved_relation_OAF</arg>
</spark>
<ok to="End"/>
<error to="Kill"/>
</action>
    <end name="End"/>
</workflow-app>
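<!--
    Illustrative launch sketch (assumed host and property file name, not part of
    this workflow definition):

        oozie job -oozie http://oozie-host:11000/oozie -config job.properties -run

    where job.properties would supply PDBPath, UNIPROTDBPath, EBIDataset,
    ScholixResolvedDBPath, CrossrefLinksPath and targetPath, plus the Spark
    tuning values referenced above (sparkExecutorMemory, sparkExecutorCores,
    sparkDriverMemory, the spark2* listener/history settings and nameNode).
-->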