@@ -1,34 +1,4 @@
<workflow-app name="bulk_tagging" xmlns="uri:oozie:workflow:0.5">
|
|
|
|
|
<parameters>
|
|
|
|
|
<property>
|
|
|
|
|
<name>sourcePath</name>
|
|
|
|
|
<description>the source path</description>
|
|
|
|
|
</property>
|
|
|
|
|
<property>
|
|
|
|
|
<name>sparkDriverMemory</name>
|
|
|
|
|
<description>memory for driver process</description>
|
|
|
|
|
</property>
|
|
|
|
|
<property>
|
|
|
|
|
<name>sparkExecutorMemory</name>
|
|
|
|
|
<description>memory for individual executor</description>
|
|
|
|
|
</property>
|
|
|
|
|
<property>
|
|
|
|
|
<name>sparkExecutorCores</name>
|
|
|
|
|
<description>number of cores used by single executor</description>
|
|
|
|
|
</property>
|
|
|
|
|
<property>
|
|
|
|
|
<name>isLookUpUrl</name>
|
|
|
|
|
<description>the isLookup service endpoint</description>
|
|
|
|
|
</property>
|
|
|
|
|
<property>
|
|
|
|
|
<name>pathMap</name>
|
|
|
|
|
<description>the json path associated to each selection field</description>
|
|
|
|
|
</property>
|
|
|
|
|
<property>
|
|
|
|
|
<name>outputPath</name>
|
|
|
|
|
<description>the output path</description>
|
|
|
|
|
</property>
|
|
|
|
|
</parameters>
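    <!--
        Illustrative note, not part of the original workflow: pathMap is expected to hold a JSON object
        mapping each selection field used by the bulk tagging rules to the JSONPath at which that field
        can be read from a result record, e.g. (hypothetical keys and paths):
            { "author": "$['author'][*]['fullname']", "title": "$['title'][*]['value']" }
        The actual keys depend on the tagging configuration served by the isLookUp endpoint.
    -->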

<workflow-app name="blacklsting" xmlns="uri:oozie:workflow:0.5">

    <start to="reset-outputpath"/>

@@ -38,190 +8,26 @@
<action name="reset-outputpath">
|
|
|
|
|
<fs>
|
|
|
|
|
<delete path='${outputPath}/relation'/>
|
|
|
|
|
<delete path='${outputPath}/publication'/>
|
|
|
|
|
<delete path='${outputPath}/dataset'/>
|
|
|
|
|
<delete path='${outputPath}/otherresearchproduct'/>
|
|
|
|
|
<delete path='${outputPath}/software'/>
|
|
|
|
|
<delete path='${outputPath}/organization'/>
|
|
|
|
|
<delete path='${outputPath}/project'/>
|
|
|
|
|
<delete path='${outputPath}/datasource'/>
|
|
|
|
|
<delete path='${hdfsPath}'/>
|
|
|
|
|
</fs>
|
|
|
|
|
<ok to="copy_entities"/>
|
|
|
|
|
<ok to="read_blacklist"/>
|
|
|
|
|
<error to="Kill"/>
|
|
|
|
|
</action>
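    <!--
        Note: this action clears any output left by a previous run so the workflow starts from a clean state.
        An Oozie action admits a single <ok> transition; the two targets above reflect the two variants in
        this changeset (copy_entities for the bulk tagging flow, read_blacklist for the blacklisting flow).
    -->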

    <fork name="copy_entities">
        <path start="copy_relation"/>
        <path start="copy_organization"/>
        <path start="copy_projects"/>
        <path start="copy_datasources"/>
    </fork>
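    <!--
        The fork runs the four distcp copy actions below in parallel; the copy_wait join resumes only after
        all of them have completed. Relations, organizations, projects and datasources are copied to the
        output unchanged, since only the result entities (publication, dataset, otherresearchproduct,
        software) are processed by the Spark bulk tagging jobs.
    -->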

    <action name="copy_relation">
        <distcp xmlns="uri:oozie:distcp-action:0.2">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <arg>${nameNode}/${sourcePath}/relation</arg>
            <arg>${nameNode}/${outputPath}/relation</arg>
        </distcp>
        <ok to="copy_wait"/>
        <error to="Kill"/>
    </action>

    <action name="read_blacklist">
        <java>
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <main-class>eu.dnetlib.dhp.blacklist.ReadBlacklistFromDB</main-class>
            <arg>--hdfsPath</arg><arg>${workingDir}/blacklist</arg>
            <arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
            <arg>--postgresUrl</arg><arg>${postgresUrl}</arg>
            <arg>--postgresUser</arg><arg>${postgresUser}</arg>
            <arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
        </java>
        <ok to="End"/>
        <error to="Kill"/>
    </action>
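    <!--
        Descriptive note: read_blacklist runs eu.dnetlib.dhp.blacklist.ReadBlacklistFromDB which, judging
        from its arguments, reads the blacklist from the PostgreSQL database identified by postgresUrl,
        postgresUser and postgresPassword and materialises it on HDFS under ${workingDir}/blacklist.
    -->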

    <action name="copy_organization">
        <distcp xmlns="uri:oozie:distcp-action:0.2">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <arg>${nameNode}/${sourcePath}/organization</arg>
            <arg>${nameNode}/${outputPath}/organization</arg>
        </distcp>
        <ok to="copy_wait"/>
        <error to="Kill"/>
    </action>

    <action name="copy_projects">
        <distcp xmlns="uri:oozie:distcp-action:0.2">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <arg>${nameNode}/${sourcePath}/project</arg>
            <arg>${nameNode}/${outputPath}/project</arg>
        </distcp>
        <ok to="copy_wait"/>
        <error to="Kill"/>
    </action>

    <action name="copy_datasources">
        <distcp xmlns="uri:oozie:distcp-action:0.2">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <arg>${nameNode}/${sourcePath}/datasource</arg>
            <arg>${nameNode}/${outputPath}/datasource</arg>
        </distcp>
        <ok to="copy_wait"/>
        <error to="Kill"/>
    </action>

    <join name="copy_wait" to="fork_exec_bulktag"/>

    <fork name="fork_exec_bulktag">
        <path start="join_bulktag_publication"/>
        <path start="join_bulktag_dataset"/>
        <path start="join_bulktag_otherresearchproduct"/>
        <path start="join_bulktag_software"/>
    </fork>

    <action name="join_bulktag_publication">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>bulkTagging-publication</name>
            <class>eu.dnetlib.dhp.bulktag.SparkBulkTagJob2</class>
            <jar>dhp-bulktag-${projectVersion}.jar</jar>
            <spark-opts>
                --num-executors=${sparkExecutorNumber}
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}/publication</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
            <arg>--outputPath</arg><arg>${outputPath}/publication</arg>
            <arg>--pathMap</arg><arg>${pathMap}</arg>
            <arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
        </spark>
        <ok to="wait"/>
        <error to="Kill"/>
    </action>
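    <!--
        Illustrative note (assumption, not stated in the workflow): each spark action in this fork is the
        Oozie equivalent of a spark-submit in yarn cluster mode running eu.dnetlib.dhp.bulktag.SparkBulkTagJob2
        from dhp-bulktag-${projectVersion}.jar with the spark-opts and arguments listed above; only the name,
        sourcePath, outputPath and resultTableName differ between the four actions. The properties
        sparkExecutorNumber, spark2ExtraListeners, spark2SqlQueryExecutionListeners,
        spark2YarnHistoryServerAddress and spark2EventLogDir are referenced in spark-opts but are not declared
        in the <parameters> block, so they are presumably provided via job.properties or the global
        configuration.
    -->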

    <action name="join_bulktag_dataset">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>bulkTagging-dataset</name>
            <class>eu.dnetlib.dhp.bulktag.SparkBulkTagJob2</class>
            <jar>dhp-bulktag-${projectVersion}.jar</jar>
            <spark-opts>
                --num-executors=${sparkExecutorNumber}
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}/dataset</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
            <arg>--outputPath</arg><arg>${outputPath}/dataset</arg>
            <arg>--pathMap</arg><arg>${pathMap}</arg>
            <arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
        </spark>
        <ok to="wait"/>
        <error to="Kill"/>
    </action>

    <action name="join_bulktag_otherresearchproduct">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>bulkTagging-orp</name>
            <class>eu.dnetlib.dhp.bulktag.SparkBulkTagJob2</class>
            <jar>dhp-bulktag-${projectVersion}.jar</jar>
            <spark-opts>
                --num-executors=${sparkExecutorNumber}
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}/otherresearchproduct</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
            <arg>--outputPath</arg><arg>${outputPath}/otherresearchproduct</arg>
            <arg>--pathMap</arg><arg>${pathMap}</arg>
            <arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
        </spark>
        <ok to="wait"/>
        <error to="Kill"/>
    </action>

    <action name="join_bulktag_software">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>bulkTagging-software</name>
            <class>eu.dnetlib.dhp.bulktag.SparkBulkTagJob2</class>
            <jar>dhp-bulktag-${projectVersion}.jar</jar>
            <spark-opts>
                --num-executors=${sparkExecutorNumber}
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}/software</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
            <arg>--outputPath</arg><arg>${outputPath}/software</arg>
            <arg>--pathMap</arg><arg>${pathMap}</arg>
            <arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
        </spark>
        <ok to="wait"/>
        <error to="Kill"/>
    </action>

    <join name="wait" to="End"/>

    <end name="End"/>

</workflow-app>