<!-- forked from D-Net/dnet-hadoop -->
<workflow-app name="import_graph_as_hive_DB" xmlns="uri:oozie:workflow:0.5">

    <parameters>
        <property>
            <name>sourcePath</name>
            <description>the source path</description>
        </property>
        <property>
            <name>isLookUpUrl</name>
            <description>the isLookup service endpoint</description>
        </property>
        <property>
            <name>outputPath</name>
            <description>the output path</description>
        </property>
        <property>
            <name>hiveDbName</name>
            <description>the target hive database name</description>
        </property>
        <property>
            <name>hiveJdbcUrl</name>
            <description>hive server jdbc url</description>
        </property>
        <property>
            <name>hiveMetastoreUris</name>
            <description>hive server metastore URIs</description>
        </property>
        <property>
            <name>sparkDriverMemory</name>
            <description>memory for driver process</description>
        </property>
        <property>
            <name>sparkExecutorMemory</name>
            <description>memory for individual executor</description>
        </property>
        <property>
            <name>sparkExecutorCores</name>
            <description>number of cores used by single executor</description>
        </property>
        <property>
            <name>oozieActionShareLibForSpark2</name>
            <description>oozie action sharelib for spark 2.*</description>
        </property>
        <property>
            <name>spark2ExtraListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
            <description>spark 2.* extra listeners classname</description>
        </property>
        <property>
            <name>spark2SqlQueryExecutionListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
            <description>spark 2.* sql query execution listeners classname</description>
        </property>
        <property>
            <name>spark2YarnHistoryServerAddress</name>
            <description>spark 2.* yarn history server address</description>
        </property>
        <property>
            <name>spark2EventLogDir</name>
            <description>spark 2.* event log dir location</description>
        </property>
    </parameters>

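    <!-- Global settings inherited by every action: job tracker, name node, queue names, and the Spark 2 sharelib. -->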
    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>mapreduce.job.queuename</name>
                <value>${queueName}</value>
            </property>
            <property>
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${oozieLauncherQueueName}</value>
            </property>
            <property>
                <name>oozie.action.sharelib.for.spark</name>
                <value>${oozieActionShareLibForSpark2}</value>
            </property>
        </configuration>
    </global>

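    <!-- Entry point: clear and recreate the output directory before dumping. -->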
    <start to="reset_outputpath"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <action name="reset_outputpath">
        <fs>
            <delete path="${outputPath}"/>
            <mkdir path="${outputPath}"/>
        </fs>
        <ok to="fork_dump"/>
        <error to="Kill"/>
    </action>

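    <!-- First phase: dump each result type (publication, dataset, other research product, software) in parallel. -->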
<fork name="fork_dump">
|
|
<path start="dump_publication"/>
|
|
<path start="dump_dataset"/>
|
|
<path start="dump_orp"/>
|
|
<path start="dump_software"/>
|
|
</fork>
|
|
|
|
<action name="dump_publication">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table publication for community related products</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.SparkDumpCommunityProducts</class>
|
|
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${inputPath}/publication</arg>
|
|
<arg>--inputType</arg><arg>publication</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
|
|
<arg>--dumpTableName</arg><arg>eu.dnetlib.dhp.schema.dump.oaf.Publication</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/publication</arg>
|
|
<arg>--isLookUpUrl</arg><arg>${isLoohUpUrl}</arg>
|
|
</spark>
|
|
<ok to="join_dump"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="dump_dataset">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table dataset for community related products</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.SparkDumpCommunityProducts</class>
|
|
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${inputPath}/dataset</arg>
|
|
<arg>--inputType</arg><arg>dataset</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
|
|
<arg>--dumpTableName</arg><arg>eu.dnetlib.dhp.schema.dump.oaf.Dataset</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/dataset</arg>
|
|
<arg>--isLookUpUrl</arg><arg>${isLoohUpUrl}</arg>
|
|
</spark>
|
|
<ok to="join_dump"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="dump_orp">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table ORP for community related products</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.SparkDumpCommunityProducts</class>
|
|
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${inputPath}/otherresearchproduct</arg>
|
|
<arg>--inputType</arg><arg>otherresearchproduct</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
|
|
<arg>--dumpTableName</arg><arg>eu.dnetlib.dhp.schema.dump.oaf.OtherResearchProduct</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/otherresearchproduct</arg>
|
|
<arg>--isLookUpUrl</arg><arg>${isLoohUpUrl}</arg>
|
|
</spark>
|
|
<ok to="join_dump"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="dump_software">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Dump table software for community related products</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.SparkDumpCommunityProducts</class>
|
|
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${inputPath}/software</arg>
|
|
<arg>--inputType</arg><arg>software</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
|
|
<arg>--dumpTableName</arg><arg>eu.dnetlib.dhp.schema.dump.oaf.Software</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/software</arg>
|
|
<arg>--isLookUpUrl</arg><arg>${isLoohUpUrl}</arg>
|
|
</spark>
|
|
<ok to="join_dump"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<join name="join_dump" to="prepareResultProject"/>
|
|
|
|
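    <!-- Second phase: compute, for each result, the subset of project information later used to enrich the dumped records. -->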
<action name="prepareResultProject">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Prepare association result subset of project info</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.SparkPrepareResultProject</class>
|
|
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${inputPath}</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/preparedInfo</arg>
|
|
</spark>
|
|
<ok to="fork_extendWithProject"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
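    <!-- Third phase: extend each dumped result type with the prepared project information, again in parallel. -->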
<fork name="fork_extendWithProject">
|
|
<path start="extend_publication"/>
|
|
<path start="extend_dataset"/>
|
|
<path start="extend_orp"/>
|
|
<path start="extend_software"/>
|
|
</fork>
|
|
|
|
<action name="extend_publication">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Extend dumped publications with information about projects</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.SparkUpdateProjectInfo</class>
|
|
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${workingDir}/publication</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/ext/publication</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.dump.oaf.Publication</arg>
|
|
<arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
|
|
</spark>
|
|
<ok to="join_extend"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="extend_dataset">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Extend dumped dataset with information about projects</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.SparkUpdateProjectInfo</class>
|
|
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${workingDir}/dataset</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/ext/dataset</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.dump.oaf.Dataset</arg>
|
|
<arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
|
|
</spark>
|
|
<ok to="join_extend"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
<action name="extend_orp">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Extend dumped ORP with information about projects</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.SparkUpdateProjectInfo</class>
|
|
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${workingDir}/otherresearchproduct</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/ext/otherresearchproduct</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.dump.oaf.OtherResearchProduct</arg>
|
|
<arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
|
|
</spark>
|
|
<ok to="join_extend"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
<action name="extend_software">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Extend dumped software with information about projects</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.SparkUpdateProjectInfo</class>
|
|
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${workingDir}/software</arg>
|
|
<arg>--outputPath</arg><arg>${workingDir}/ext/software</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.dump.oaf.Software</arg>
|
|
<arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
|
|
</spark>
|
|
<ok to="join_extend"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
|
|
<join name="join_extend" to="fork_splitForCommunities"/>
|
|
|
|
|
|
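    <!-- Fourth phase: split the extended dumps into per-community subsets under the final output path, in parallel. -->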
<fork name="fork_splitForCommunities">
|
|
<path start="split_publication"/>
|
|
<path start="split_dataset"/>
|
|
<path start="split_orp"/>
|
|
<path start="split_software"/>
|
|
</fork>
|
|
|
|
<action name="split_publication">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Split dumped result for community</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.SparkSplitForCommunity</class>
|
|
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${workingDir}/ext/publication</arg>
|
|
<arg>--outputPath</arg><arg>${outputPath}</arg>
|
|
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.dump.oaf.Software</arg>
|
|
<arg>--isLookUpUrl</arg><arg>${isLoohUpUrl}</arg>
|
|
</spark>
|
|
<ok to="join_split"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<action name="split_dataset">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Split dumped result for community</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.SparkSplitForCommunity</class>
|
|
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${workingDir}/ext/dataset</arg>
|
|
<arg>--outputPath</arg><arg>${outputPath}</arg>
|
|
<arg>--className</arg><arg>eu.dnetlib.dhp.schema.dump.oaf.Dataset</arg>
|
|
<arg>--isLookUpUrl</arg><arg>${isLoohUpUrl}</arg>
|
|
</spark>
|
|
<ok to="join_split"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
<action name="split_orp">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Split dumped result for community</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.SparkSplitForCommunity</class>
|
|
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${workingDir}/ext/orp</arg>
|
|
<arg>--outputPath</arg><arg>${outputPath}</arg>
|
|
<arg>--className</arg><arg>eu.dnetlib.dhp.schema.dump.oaf.OtherResearchProduct</arg>
|
|
<arg>--isLookUpUrl</arg><arg>${isLoohUpUrl}</arg>
|
|
</spark>
|
|
<ok to="join_split"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
<action name="split_software">
|
|
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
<master>yarn</master>
|
|
<mode>cluster</mode>
|
|
<name>Split dumped result for community</name>
|
|
<class>eu.dnetlib.dhp.oa.graph.dump.SparkSplitForCommunity</class>
|
|
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
<spark-opts>
|
|
--executor-memory=${sparkExecutorMemory}
|
|
--executor-cores=${sparkExecutorCores}
|
|
--driver-memory=${sparkDriverMemory}
|
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
</spark-opts>
|
|
<arg>--sourcePath</arg><arg>${workingDir}/ext/software</arg>
|
|
<arg>--outputPath</arg><arg>${outputPath}</arg>
|
|
<arg>--className</arg><arg>eu.dnetlib.dhp.schema.dump.oaf.Software</arg>
|
|
<arg>--isLookUpUrl</arg><arg>${isLoohUpUrl}</arg>
|
|
</spark>
|
|
<ok to="join_split"/>
|
|
<error to="Kill"/>
|
|
</action>
|
|
|
|
<!-- <join name="join_split" to="loadInZenodo"/>-->
|
|
<join name="join_split" to="End"/>
|
|
|
|
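    <!-- The Zenodo upload step below is disabled; the workflow currently ends after the split. -->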
<!-- <action name="loadInZenodo">-->
|
|
<!-- <spark xmlns="uri:oozie:spark-action:0.2">-->
|
|
<!-- <master>yarn</master>-->
|
|
<!-- <mode>cluster</mode>-->
|
|
<!-- <name>Import table software</name>-->
|
|
<!-- <class>eu.dnetlib.dhp.oa.graph.hive.GraphHiveTableImporterJob</class>-->
|
|
<!-- <jar>dhp-graph-mapper-${projectVersion}.jar</jar>-->
|
|
<!-- <spark-opts>-->
|
|
<!-- --executor-memory=${sparkExecutorMemory}-->
|
|
<!-- --executor-cores=${sparkExecutorCores}-->
|
|
<!-- --driver-memory=${sparkDriverMemory}-->
|
|
<!-- --conf spark.extraListeners=${spark2ExtraListeners}-->
|
|
<!-- --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}-->
|
|
<!-- --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}-->
|
|
<!-- --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}-->
|
|
<!-- --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}-->
|
|
<!-- </spark-opts>-->
|
|
<!-- <arg>--inputPath</arg><arg>${workingDir}/ext/publication</arg>-->
|
|
<!-- <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>-->
|
|
<!-- <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.dump.oaf.Publication</arg>-->
|
|
<!-- -->
|
|
<!-- </spark>-->
|
|
<!-- <ok to="End"/>-->
|
|
<!-- <error to="Kill"/>-->
|
|
<!-- </action>-->
|
|
|
|
<end name="End"/>
|
|
|
|
</workflow-app> |