<workflow-app name="Openorgs Dedup" xmlns="uri:oozie:workflow:0.5">
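
    <!--
        Overview (inferred from the actions below): deduplication of organizations against OpenOrgs.
        The workflow clears the organization simrel/mergerel directories in the working path, computes
        similarity relations among organizations, copies the similarity relations provided by OpenOrgs,
        groups them into merge relations, and finally prepares organization relations and new
        organizations, writing them to the database referenced by dbUrl/dbTable.
    -->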
    <parameters>
        <property>
            <name>graphBasePath</name>
            <description>the raw graph base path</description>
        </property>
        <property>
            <name>isLookUpUrl</name>
            <description>the address of the lookUp service</description>
        </property>
        <property>
            <name>actionSetId</name>
            <description>id of the actionSet</description>
        </property>
        <property>
            <name>workingPath</name>
            <description>path for the working directory</description>
        </property>
        <property>
            <name>dedupGraphPath</name>
            <description>path for the output graph</description>
        </property>
        <property>
            <name>cutConnectedComponent</name>
            <description>max number of elements in a connected component</description>
        </property>
        <property>
            <name>sparkDriverMemory</name>
            <description>memory for driver process</description>
        </property>
        <property>
            <name>sparkExecutorMemory</name>
            <description>memory for individual executor</description>
        </property>
        <property>
            <name>sparkExecutorCores</name>
            <description>number of cores used by single executor</description>
        </property>
        <property>
            <name>oozieActionShareLibForSpark2</name>
            <description>oozie action sharelib for spark 2.*</description>
        </property>
        <property>
            <name>spark2ExtraListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
            <description>spark 2.* extra listeners classname</description>
        </property>
        <property>
            <name>spark2SqlQueryExecutionListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
            <description>spark 2.* sql query execution listeners classname</description>
        </property>
        <property>
            <name>spark2YarnHistoryServerAddress</name>
            <description>spark 2.* yarn history server address</description>
        </property>
        <property>
            <name>spark2EventLogDir</name>
            <description>spark 2.* event log dir location</description>
        </property>
    </parameters>
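
    <!-- Hadoop and Oozie settings shared by every action in this workflow -->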
    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>mapreduce.job.queuename</name>
                <value>${queueName}</value>
            </property>
            <property>
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${oozieLauncherQueueName}</value>
            </property>
            <property>
                <name>oozie.action.sharelib.for.spark</name>
                <value>${oozieActionShareLibForSpark2}</value>
            </property>
        </configuration>
    </global>

    <start to="resetOrgSimRels"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>
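
    <!-- delete the organization simrel and mergerel directories under ${workingPath}/${actionSetIdOpenorgs}, so the run starts from a clean state -->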
<action name="resetOrgSimRels">
|
|
|
|
<fs>
|
2021-03-19 16:57:40 +01:00
|
|
|
<delete path="${workingPath}/${actionSetIdOpenorgs}/organization_simrel"/>
|
|
|
|
<delete path="${workingPath}/${actionSetIdOpenorgs}/organization_mergerel"/>
|
2020-12-21 11:58:21 +01:00
|
|
|
</fs>
|
2021-02-26 10:19:28 +01:00
|
|
|
<ok to="CreateSimRels"/>
|
2020-12-21 11:58:21 +01:00
|
|
|
<error to="Kill"/>
|
|
|
|
</action>
|
|
|
|
|
2021-02-10 11:51:50 +01:00
|
|
|
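
    <!-- compute the similarity relations (simrels) between organizations in the raw graph; the dedup configuration is resolved via isLookUpUrl using actionSetIdOpenorgs -->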
<action name="CreateSimRels">
|
2020-12-21 11:58:21 +01:00
|
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
|
|
<master>yarn</master>
|
|
|
|
<mode>cluster</mode>
|
2021-02-10 11:51:50 +01:00
|
|
|
<name>Create Similarity Relations</name>
|
|
|
|
<class>eu.dnetlib.dhp.oa.dedup.SparkCreateSimRels</class>
|
2020-12-21 11:58:21 +01:00
|
|
|
<jar>dhp-dedup-openaire-${projectVersion}.jar</jar>
|
|
|
|
<spark-opts>
|
|
|
|
--executor-memory=${sparkExecutorMemory}
|
|
|
|
--executor-cores=${sparkExecutorCores}
|
|
|
|
--driver-memory=${sparkDriverMemory}
|
|
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
|
|
--conf spark.sql.shuffle.partitions=3840
|
|
|
|
</spark-opts>
|
|
|
|
<arg>--graphBasePath</arg><arg>${graphBasePath}</arg>
|
2021-02-10 11:51:50 +01:00
|
|
|
<arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
|
2021-03-19 16:57:40 +01:00
|
|
|
<arg>--actionSetId</arg><arg>${actionSetIdOpenorgs}</arg>
|
2021-02-10 11:51:50 +01:00
|
|
|
<arg>--workingPath</arg><arg>${workingPath}</arg>
|
2020-12-21 11:58:21 +01:00
|
|
|
<arg>--numPartitions</arg><arg>8000</arg>
|
|
|
|
</spark>
|
2021-02-10 11:51:50 +01:00
|
|
|
<ok to="CopyOpenorgsSimRels"/>
|
2020-12-21 11:58:21 +01:00
|
|
|
<error to="Kill"/>
|
|
|
|
</action>

    <!-- copy the similarity relations coming from OpenOrgs -->
<action name="CopyOpenorgsSimRels">
|
2020-12-21 11:58:21 +01:00
|
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
|
|
<master>yarn</master>
|
|
|
|
<mode>cluster</mode>
|
2021-02-10 11:51:50 +01:00
|
|
|
<name>Copy OpenOrgs Sim Rels</name>
|
|
|
|
<class>eu.dnetlib.dhp.oa.dedup.SparkCopyOpenorgsSimRels</class>
|
2020-12-21 11:58:21 +01:00
|
|
|
<jar>dhp-dedup-openaire-${projectVersion}.jar</jar>
|
|
|
|
<spark-opts>
|
|
|
|
--executor-memory=${sparkExecutorMemory}
|
|
|
|
--executor-cores=${sparkExecutorCores}
|
|
|
|
--driver-memory=${sparkDriverMemory}
|
|
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
|
|
--conf spark.sql.shuffle.partitions=3840
|
|
|
|
</spark-opts>
|
|
|
|
<arg>--graphBasePath</arg><arg>${graphBasePath}</arg>
|
2021-02-26 10:19:28 +01:00
|
|
|
<arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
|
2020-12-21 11:58:21 +01:00
|
|
|
<arg>--workingPath</arg><arg>${workingPath}</arg>
|
2021-03-19 16:57:40 +01:00
|
|
|
<arg>--actionSetId</arg><arg>${actionSetIdOpenorgs}</arg>
|
2020-12-21 11:58:21 +01:00
|
|
|
<arg>--numPartitions</arg><arg>8000</arg>
|
|
|
|
</spark>
|
|
|
|
<ok to="CreateMergeRels"/>
|
|
|
|
<error to="Kill"/>
|
|
|
|
</action>
|
|
|
|
|
|
|
|
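
    <!-- build the merge relations (mergerels) from the similarity relations; cutConnectedComponent caps the number of elements allowed in a connected component -->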
<action name="CreateMergeRels">
|
|
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
|
|
<master>yarn</master>
|
|
|
|
<mode>cluster</mode>
|
|
|
|
<name>Create Merge Relations</name>
|
|
|
|
<class>eu.dnetlib.dhp.oa.dedup.SparkCreateMergeRels</class>
|
|
|
|
<jar>dhp-dedup-openaire-${projectVersion}.jar</jar>
|
|
|
|
<spark-opts>
|
|
|
|
--executor-memory=${sparkExecutorMemory}
|
|
|
|
--executor-cores=${sparkExecutorCores}
|
|
|
|
--driver-memory=${sparkDriverMemory}
|
|
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
|
|
--conf spark.sql.shuffle.partitions=3840
|
|
|
|
</spark-opts>
|
|
|
|
<arg>--graphBasePath</arg><arg>${graphBasePath}</arg>
|
|
|
|
<arg>--workingPath</arg><arg>${workingPath}</arg>
|
|
|
|
<arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
|
2021-03-19 16:57:40 +01:00
|
|
|
<arg>--actionSetId</arg><arg>${actionSetIdOpenorgs}</arg>
|
2020-12-21 11:58:21 +01:00
|
|
|
<arg>--cutConnectedComponent</arg><arg>${cutConnectedComponent}</arg>
|
|
|
|
</spark>
|
|
|
|
<ok to="PrepareOrgRels"/>
|
|
|
|
<error to="Kill"/>
|
|
|
|
</action>
|
|
|
|
|
|
|
|
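
    <!-- prepare the organization relations produced by the deduplication and write them to the database identified by dbUrl/dbTable -->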
<action name="PrepareOrgRels">
|
|
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
|
|
<master>yarn</master>
|
|
|
|
<mode>cluster</mode>
|
|
|
|
<name>Prepare Organization Relations</name>
|
|
|
|
<class>eu.dnetlib.dhp.oa.dedup.SparkPrepareOrgRels</class>
|
|
|
|
<jar>dhp-dedup-openaire-${projectVersion}.jar</jar>
|
|
|
|
<spark-opts>
|
|
|
|
--executor-memory=${sparkExecutorMemory}
|
|
|
|
--executor-cores=${sparkExecutorCores}
|
|
|
|
--driver-memory=${sparkDriverMemory}
|
|
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
|
|
--conf spark.sql.shuffle.partitions=3840
|
|
|
|
</spark-opts>
|
|
|
|
<arg>--graphBasePath</arg><arg>${graphBasePath}</arg>
|
|
|
|
<arg>--workingPath</arg><arg>${workingPath}</arg>
|
|
|
|
<arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
|
2021-03-19 16:57:40 +01:00
|
|
|
<arg>--actionSetId</arg><arg>${actionSetIdOpenorgs}</arg>
|
2020-12-21 11:58:21 +01:00
|
|
|
<arg>--dbUrl</arg><arg>${dbUrl}</arg>
|
|
|
|
<arg>--dbTable</arg><arg>${dbTable}</arg>
|
|
|
|
<arg>--dbUser</arg><arg>${dbUser}</arg>
|
|
|
|
<arg>--dbPwd</arg><arg>${dbPwd}</arg>
|
|
|
|
<arg>--numConnections</arg><arg>20</arg>
|
|
|
|
</spark>
|
|
|
|
<ok to="PrepareNewOrgs"/>
|
|
|
|
<error to="Kill"/>
|
|
|
|
</action>
|
|
|
|
|
|
|
|
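
    <!-- prepare the new organizations (those not matched to an existing OpenOrgs entry, as the action name suggests) and write them to the database identified by dbUrl/dbTable -->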
<action name="PrepareNewOrgs">
|
|
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
|
|
<master>yarn</master>
|
|
|
|
<mode>cluster</mode>
|
|
|
|
<name>Prepare New Organizations</name>
|
|
|
|
<class>eu.dnetlib.dhp.oa.dedup.SparkPrepareNewOrgs</class>
|
|
|
|
<jar>dhp-dedup-openaire-${projectVersion}.jar</jar>
|
|
|
|
<spark-opts>
|
|
|
|
--executor-memory=${sparkExecutorMemory}
|
|
|
|
--executor-cores=${sparkExecutorCores}
|
|
|
|
--driver-memory=${sparkDriverMemory}
|
|
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
|
|
--conf spark.sql.shuffle.partitions=3840
|
|
|
|
</spark-opts>
|
|
|
|
<arg>--graphBasePath</arg><arg>${graphBasePath}</arg>
|
|
|
|
<arg>--workingPath</arg><arg>${workingPath}</arg>
|
|
|
|
<arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
|
2021-03-19 16:57:40 +01:00
|
|
|
<arg>--actionSetId</arg><arg>${actionSetIdOpenorgs}</arg>
|
2020-12-21 11:58:21 +01:00
|
|
|
<arg>--apiUrl</arg><arg>${apiUrl}</arg>
|
|
|
|
<arg>--dbUrl</arg><arg>${dbUrl}</arg>
|
|
|
|
<arg>--dbTable</arg><arg>${dbTable}</arg>
|
|
|
|
<arg>--dbUser</arg><arg>${dbUser}</arg>
|
|
|
|
<arg>--dbPwd</arg><arg>${dbPwd}</arg>
|
|
|
|
<arg>--numConnections</arg><arg>20</arg>
|
|
|
|
</spark>
|
|
|
|
<ok to="End"/>
|
|
|
|
<error to="Kill"/>
|
|
|
|
</action>
|
|
|
|
|
|
|
|
<end name="End"/>
|
|
|
|
</workflow-app>
|