<workflow-app name="sub-dump_subset" xmlns="uri:oozie:workflow:0.5">

    <parameters>
        <property>
            <name>sourcePath</name>
            <description>the source path</description>
        </property>
        <property>
            <name>outputPath</name>
            <description>the output path</description>
        </property>
        <property>
            <name>organizationCommunityMap</name>
            <description>the organization community map</description>
        </property>
        <property>
            <name>pathMap</name>
            <description>the JSON paths locating the elements involved in the selection constraints</description>
        </property>
        <property>
            <name>selectionCriteria</name>
            <description>the selection criteria used to select the results</description>
        </property>
        <property>
            <name>mapAs</name>
            <description>the type of model used for the dump: community or complete</description>
        </property>
        <property>
            <name>hiveDbName</name>
            <description>the target hive database name</description>
        </property>
        <property>
            <name>hiveJdbcUrl</name>
            <description>the hive server JDBC URL</description>
        </property>
        <property>
            <name>hiveMetastoreUris</name>
            <description>the hive server metastore URIs</description>
        </property>
        <property>
            <name>sparkDriverMemory</name>
            <description>memory for the driver process</description>
        </property>
        <property>
            <name>sparkExecutorMemory</name>
            <description>memory for each executor</description>
        </property>
        <property>
            <name>sparkExecutorCores</name>
            <description>number of cores used by a single executor</description>
        </property>
        <property>
            <name>oozieActionShareLibForSpark2</name>
            <description>oozie action sharelib for spark 2.*</description>
        </property>
        <property>
            <name>spark2ExtraListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
            <description>spark 2.* extra listeners classname</description>
        </property>
        <property>
            <name>spark2SqlQueryExecutionListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
            <description>spark 2.* sql query execution listeners classname</description>
        </property>
        <property>
            <name>spark2YarnHistoryServerAddress</name>
            <description>spark 2.* yarn history server address</description>
        </property>
        <property>
            <name>spark2EventLogDir</name>
            <description>spark 2.* event log dir location</description>
        </property>
    </parameters>
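
    <!-- Configuration shared by all actions: job queues and the Spark 2 sharelib. -->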

    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>mapreduce.job.queuename</name>
                <value>${queueName}</value>
            </property>
            <property>
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${oozieLauncherQueueName}</value>
            </property>
            <property>
                <name>oozie.action.sharelib.for.spark</name>
                <value>${oozieActionShareLibForSpark2}</value>
            </property>
        </configuration>
    </global>

    <start to="get_master_duplicate" />
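
    <!-- Phase 1: read the (duplicate, master) datasource id pairs from the Postgres database and store them under ${workingDir}/masterduplicate for later use. -->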
<action name="get_master_duplicate">
|
|
<java>
|
|
<main-class>eu.dnetlib.dhp.oa.graph.dump.subset.ReadMasterDuplicateFromDB</main-class>
|
|
<arg>--hdfsPath</arg><arg>${workingDir}/masterduplicate</arg>
|
|
<arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
|
|
<arg>--postgresUrl</arg><arg>${postgresURL}</arg>
|
|
<arg>--postgresUser</arg><arg>${postgresUser}</arg>
|
|
<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
|
|
</java>
|
|
<ok to="fork_select_and_dump"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
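
    <!-- Phase 2: four parallel SparkDumpResult jobs, one per result type, each selecting the results that satisfy the selection criteria evaluated against the path map. -->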
<fork name="fork_select_and_dump">
|
|
<path start="select_and_dump_publication"/>
|
|
<path start="select_and_dump_dataset"/>
|
|
<path start="select_and_dump_orp"/>
|
|
<path start="select_and_dump_software"/>
|
|
</fork>
|
|
|
|
<action name="select_and_dump_publication">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table publication </name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.subset.SparkDumpResult</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${sourcePath}/publication</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
|
|
<arg>--outputPath</arg><arg>${outputPath}</arg>
|
|
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
|
<arg>--pathMap</arg><arg>${pathMap}</arg>
|
|
<arg>--selectionCriteria</arg><arg>${selectionCriteria}</arg>
|
|
<arg>--resultType</arg><arg>publication</arg>
|
|
<arg>--masterDuplicatePath</arg><arg>${workingDir}/masterduplicate</arg>
|
|
</spark>
|
|
<ok to="join_dump"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
|
|
<action name="select_and_dump_dataset">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table dataset </name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.subset.SparkDumpResult</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${sourcePath}/dataset</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
|
|
<arg>--outputPath</arg><arg>${outputPath}</arg>
|
|
<arg>--pathMap</arg><arg>${pathMap}</arg>
|
|
<arg>--selectionCriteria</arg><arg>${selectionCriteria}</arg>
|
|
<arg>--resultType</arg><arg>dataset</arg>
|
|
<arg>--masterDuplicatePath</arg><arg>${workingDir}/masterduplicate</arg>
|
|
</spark>
|
|
<ok to="join_dump"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="select_and_dump_orp">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table ORP </name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.subset.SparkDumpResult</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${sourcePath}/otherresearchproduct</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
|
|
<arg>--outputPath</arg><arg>${outputPath}</arg>
|
|
<arg>--pathMap</arg><arg>${pathMap}</arg>
|
|
<arg>--selectionCriteria</arg><arg>${selectionCriteria}</arg>
|
|
<arg>--resultType</arg><arg>otherresearchproduct</arg>
|
|
<arg>--masterDuplicatePath</arg><arg>${workingDir}/masterduplicate</arg>
|
|
</spark>
|
|
<ok to="join_dump"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="select_and_dump_software">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table software </name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.subset.SparkDumpResult</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${sourcePath}/software</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
|
|
<arg>--outputPath</arg><arg>${outputPath}</arg>
|
|
<arg>--pathMap</arg><arg>${pathMap}</arg>
|
|
<arg>--selectionCriteria</arg><arg>${selectionCriteria}</arg>
|
|
<arg>--resultType</arg><arg>software</arg>
|
|
<arg>--masterDuplicatePath</arg><arg>${workingDir}/masterduplicate</arg>
|
|
</spark>
|
|
<ok to="join_dump"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
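
    <!-- Phase 3: once the four branches have joined, SparkSelectSubset picks the subset of the graph related to the selected results; removeSet lists relations to be excluded. -->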
<join name="join_dump" to="select_subset"/>
|
|
|
|
<action name="select_subset">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Select valid table relation </name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.subset.SparkSelectSubset</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
--conf spark.sql.shuffle.partitions=3840
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${sourcePath}</arg>
|
|
<arg>--outputPath</arg><arg>${outputPath}</arg>
|
|
<arg>--removeSet</arg><arg>${removeSet}</arg>
|
|
|
|
</spark>
|
|
<ok to="dumpModel"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
|
|
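
    <!-- The mapAs parameter drives the model of the dump: "community" follows the community model branch, anything else the complete model branch. -->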
<decision name="dumpModel">
|
|
<switch>
|
|
<case to="fork_dump_community">${wf:conf('mapAs') eq "community"}</case>
|
|
<default to="fork_dump_otherentities"/>
|
|
</switch>
|
|
</decision>
|
|
|
|
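
    <!-- Community model: re-map the four selected result tables with SparkDumpCommunityProducts, then enrich them with project information. -->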
<fork name="fork_dump_community">
|
|
<path start="dump_publication"/>
|
|
<path start="dump_dataset"/>
|
|
<path start="dump_orp"/>
|
|
<path start="dump_software"/>
|
|
</fork>
|
|
|
|
<action name="dump_publication">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table publication for community/funder related products</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${outputPath}/original/publication</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/dump/publication</arg>
|
|
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
|
</spark>
|
|
<ok to="join_dump_comm"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="dump_dataset">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table dataset for community/funder related products</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${outputPath}/original/dataset</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/dump/dataset</arg>
|
|
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
|
</spark>
|
|
<ok to="join_dump_comm"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="dump_orp">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table ORP for community related products</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${outputPath}/original/otherresearchproduct</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/dump/otherresearchproduct</arg>
|
|
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
|
</spark>
|
|
<ok to="join_dump_comm"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="dump_software">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table software for community related products</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${outputPath}/original/software</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/dump/software</arg>
|
|
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
|
</spark>
|
|
<ok to="join_dump_comm"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
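
    <!-- SparkPrepareResultProject computes, for each result, the set of projects it is connected to, storing the association under ${workingDir}/preparedInfo. -->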
<join name="join_dump_comm" to="prepareResultProject"/>
|
|
|
|
<action name="prepareResultProject">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Prepare association result subset of project info</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.community.SparkPrepareResultProject</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${outputPath}/original</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/preparedInfo</arg>
|
|
</spark>
|
|
<ok to="fork_extendWithProject"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
|
|
|
|
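
    <!-- Four parallel SparkUpdateProjectInfo jobs merge the prepared project information into the dumped records, producing the final community model dump. -->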
<fork name="fork_extendWithProject">
|
|
<path start="extend_publication"/>
|
|
<path start="extend_dataset"/>
|
|
<path start="extend_orp"/>
|
|
<path start="extend_software"/>
|
|
</fork>
|
|
|
|
<action name="extend_publication">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Extend dumped publications with information about project</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${workingDir}/dump/publication</arg>
|
|
<arg>--outputPath</arg><arg>${outputPath}/dump/publication</arg>
|
|
<arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
|
|
</spark>
|
|
<ok to="join_extend"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="extend_dataset">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Extend dumped dataset with information about project</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${workingDir}/dump/dataset</arg>
|
|
<arg>--outputPath</arg><arg>${outputPath}/dump/dataset</arg>
|
|
<arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
|
|
</spark>
|
|
<ok to="join_extend"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="extend_orp">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Extend dumped ORP with information about project</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${workingDir}/dump/otherresearchproduct</arg>
|
|
<arg>--outputPath</arg><arg>${outputPath}/dump/otherresearchproduct</arg>
|
|
<arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
|
|
</spark>
|
|
<ok to="join_extend"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="extend_software">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Extend dumped software with information about project</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${workingDir}/dump/software</arg>
|
|
<arg>--outputPath</arg><arg>${outputPath}/dump/software</arg>
|
|
<arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
|
|
</spark>
|
|
<ok to="join_extend"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
|
|
<join name="join_extend" to="End"/>
|
|
|
|
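
    <!-- Complete model: dump the organization, project and datasource tables in parallel via SparkDumpEntitiesJob. -->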
<fork name="fork_dump_otherentities">
|
|
<path start="dump_organization"/>
|
|
<path start="dump_project"/>
|
|
<path start="dump_datasource"/>
|
|
</fork>
|
|
|
|
<action name="dump_organization">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table organization </name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${outputPath}/original/organization</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Organization</arg>
|
|
<arg>--outputPath</arg><arg>${outputPath}/dump/organization</arg>
|
|
</spark>
|
|
<ok to="join_dump_otherentities"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="dump_project">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table project </name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${outputPath}/original/project</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Project</arg>
|
|
<arg>--outputPath</arg><arg>${outputPath}/dump/project</arg>
|
|
</spark>
|
|
<ok to="join_dump_otherentities"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="dump_datasource">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table datasource </name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${outputPath}/original/datasource</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Datasource</arg>
|
|
<arg>--outputPath</arg><arg>${outputPath}/dump/datasource</arg>
|
|
</spark>
|
|
<ok to="join_dump_otherentities"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
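
    <!-- Context handling: create the community/research infrastructure entities and their relations, keep only the contexts valid for the selected subset, and derive relations from the organization community map. -->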
<join name="join_dump_otherentities" to="fork_context"/>
|
|
|
|
<fork name="fork_context">
|
|
<path start="create_entities_fromcontext"/>
|
|
<path start="create_relation_fromcontext"/>
|
|
<path start="create_relation_fromorgs"/>
|
|
</fork>
|
|
|
|
<action name="create_entities_fromcontext">
|
|
<java>
|
|
<main-class>eu.dnetlib.dhp.oa.graph.dump.complete.CreateContextEntities</main-class>
|
|
<arg>--hdfsPath</arg><arg>${workingDir}/context/community_infrastructure.json.gz</arg>
|
|
<arg>--nameNode</arg><arg>${nameNode}</arg>
|
|
</java>
|
|
<ok to="select_valid_context"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="select_valid_context">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table software </name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.subset.SparkSelectValidContext</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${outputPath}/original</arg>
|
|
<arg>--contextPath</arg><arg>${workingDir}/context/community_infrastructure.json.gz</arg>
|
|
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
|
<arg>--outputPath</arg><arg>${outputPath}/dump/communities_infrastructures</arg>
|
|
</spark>
|
|
<ok to="join_context"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
<action name="create_relation_fromcontext">
|
|
<java>
|
|
<main-class>eu.dnetlib.dhp.oa.graph.dump.complete.CreateContextRelation</main-class>
|
|
<arg>--hdfsPath</arg><arg>${workingDir}/dump/relation/context</arg>
|
|
<arg>--nameNode</arg><arg>${nameNode}</arg>
|
|
</java>
|
|
<ok to="join_context"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="create_relation_fromorgs">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table relation </name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkOrganizationRelation</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${sourcePath}/relation</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/dump/relation/contextOrg</arg>
|
|
<arg>--organizationCommunityMap</arg><arg>${organizationCommunityMap}</arg>
|
|
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
|
</spark>
|
|
<ok to="join_context"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<join name="join_context" to="filter_relation_context"/>
|
|
|
|
<action name="filter_relation_context">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table software </name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.subset.SparkSelectValidRelationContext</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${outputPath}/dump</arg>
|
|
<arg>--contextRelationPath</arg><arg>${workingDir}/dump/relation</arg> <!-- new relations from context -->
|
|
|
|
</spark>
|
|
<ok to="dump_relation"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
|
|
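
    <!-- Relations: dump the original relations (minus the removeSet), extract the relations embedded in each result type, and finally keep only the relations linking entities that are part of the dump. -->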
<action name="dump_relation">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table relation </name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpRelationJob</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${outputPath}/original/relation</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/relation</arg>
|
|
<arg>--removeSet</arg><arg>${removeSet}</arg>
|
|
</spark>
|
|
<ok to="rels_from_pubs"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="rels_from_pubs">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Extract Relations from publication </name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkExtractRelationFromEntities</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${outputPath}/original/publication</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/relation</arg>
|
|
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
|
</spark>
|
|
<ok to="rels_from_dats"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="rels_from_dats">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table dataset </name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkExtractRelationFromEntities</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${outputPath}/original/dataset</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/relation</arg>
|
|
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
|
</spark>
|
|
<ok to="rels_from_orp"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="rels_from_orp">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table ORP </name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkExtractRelationFromEntities</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${outputPath}/original/otherresearchproduct</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/relation</arg>
|
|
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
|
</spark>
|
|
<ok to="rels_from_sw"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="rels_from_sw">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table software </name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkExtractRelationFromEntities</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${outputPath}/original/software</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/relation</arg>
|
|
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
|
</spark>
|
|
<ok to="filter_relation"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="filter_relation">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Select valid relations</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.subset.SparkSelectValidRelation</class>
|
|
<jar>dump-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${outputPath}/dump</arg>
|
|
<arg>--relationPath</arg><arg>${workingDir}/relation</arg> <!-- new relations from context -->
|
|
</spark>
|
|
<ok to="End"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
<kill name="Kill">
|
|
<message>Sub-workflow dump complete failed with error message ${wf:errorMessage()}
|
|
</message>
|
|
</kill>
|
|
|
|
<end name="End" />
|
|
</workflow-app> |