<workflow-app name="create RAW Graph (all steps)" xmlns="uri:oozie:workflow:0.5">
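
    <!--
        Creates the raw graph: imports entity and claim records from the aggregator (a PostgreSQL
        database plus ODF/OAF MongoDB mdstores) into ${contentPath}, maps them to graph entities,
        and merges the resulting raw and claims graphs, table by table, into ${graphOutputPath}.

        Minimal submission sketch (the server URL and file name are placeholders, not values
        defined by this workflow):

            oozie job -oozie http://oozie-host:11000/oozie -config job.properties -run

        Illustrative job.properties (every value below is a placeholder; parameters declared
        without a <value> in this file have no default and must be supplied at submission time):

            jobTracker=yarnRM
            nameNode=hdfs://namenode:8020
            queueName=default
            oozieLauncherQueueName=default
            workingDir=/tmp/graph/working
            graphOutputPath=/tmp/graph/raw
            contentPath=/tmp/aggregator/content
            postgresURL=jdbc:postgresql://postgres-host:5432/dnet
            postgresUser=dnet
            postgresPassword=***
            mongoURL=mongodb://mongo-host:27017
            mongoDb=mdstore
            isLookupUrl=http://services-host:8280/is/services/isLookUp?wsdl
            sparkDriverMemory=4G
            sparkExecutorMemory=8G
            sparkExecutorCores=4
            oozieActionShareLibForSpark2=spark2
            spark2YarnHistoryServerAddress=http://history-host:18089
            spark2EventLogDir=/user/spark/spark2ApplicationHistory
    -->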
    <parameters>
        <property>
            <name>graphOutputPath</name>
            <description>the target path to store the raw graph</description>
        </property>
        <property>
            <name>reuseContent</name>
            <value>false</value>
            <description>whether to reuse content from a previous run instead of importing it from the aggregator</description>
        </property>
        <property>
            <name>contentPath</name>
            <description>path where the content imported from the aggregator is stored (or reused)</description>
        </property>
        <property>
            <name>postgresURL</name>
            <description>the postgres URL to access the database</description>
        </property>
        <property>
            <name>postgresUser</name>
            <description>the postgres user</description>
        </property>
        <property>
            <name>postgresPassword</name>
            <description>the postgres password</description>
        </property>
        <property>
            <name>mongoURL</name>
            <description>mongoDB URL, example: mongodb://[username:password@]host[:port]</description>
        </property>
        <property>
            <name>mongoDb</name>
            <description>the mongo database name</description>
        </property>
        <property>
            <name>isLookupUrl</name>
            <description>the address of the lookUp service</description>
        </property>
        <property>
            <name>sparkDriverMemory</name>
            <description>memory for the driver process</description>
        </property>
        <property>
            <name>sparkExecutorMemory</name>
            <description>memory for each executor</description>
        </property>
        <property>
            <name>sparkExecutorCores</name>
            <description>number of cores used by a single executor</description>
        </property>
        <property>
            <name>oozieActionShareLibForSpark2</name>
            <description>oozie action sharelib for spark 2.*</description>
        </property>
        <property>
            <name>spark2ExtraListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
            <description>spark 2.* extra listeners classname</description>
        </property>
        <property>
            <name>spark2SqlQueryExecutionListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
            <description>spark 2.* sql query execution listeners classname</description>
        </property>
        <property>
            <name>spark2YarnHistoryServerAddress</name>
            <description>spark 2.* yarn history server address</description>
        </property>
        <property>
            <name>spark2EventLogDir</name>
            <description>spark 2.* event log dir location</description>
        </property>
    </parameters>
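
    <!--
        Settings inherited by every action: job tracker, name node, queue assignments, and the
        sharelib override that makes Oozie run the spark actions with the Spark 2 sharelib.
    -->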
    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>mapreduce.job.queuename</name>
                <value>${queueName}</value>
            </property>
            <property>
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${oozieLauncherQueueName}</value>
            </property>
            <property>
                <name>oozie.action.sharelib.for.spark</name>
                <value>${oozieActionShareLibForSpark2}</value>
            </property>
        </configuration>
    </global>
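
    <!--
        Any failing action routes to the Kill node, which surfaces the error message of the last
        node in error.
    -->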
    <start to="reuse_aggregator_content"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>
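
    <!--
        The import phase is skipped when reuseContent is true, jumping straight to entity
        generation over the content already available under ${contentPath}; the default branch
        imports from the aggregator.
    -->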
    <decision name="reuse_aggregator_content">
        <switch>
            <case to="start_import">${wf:conf('reuseContent') eq false}</case>
            <case to="fork_generate_entities">${wf:conf('reuseContent') eq true}</case>
            <default to="start_import"/>
        </switch>
    </decision>
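
    <!--
        The import fans out into two parallel branches: ImportDB loads the entity records, while
        ImportDB_claims, ImportODF_claims and ImportOAF_claims run in sequence to load the claim
        records. Both branches meet at the wait_import join.
    -->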
    <fork name="start_import">
        <path start="ImportDB"/>
        <path start="ImportDB_claims"/>
    </fork>

    <action name="ImportDB_claims">
        <java>
            <prepare>
                <delete path="${contentPath}/db_claims"/>
            </prepare>
            <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
            <arg>--hdfsPath</arg><arg>${contentPath}/db_claims</arg>
            <arg>--postgresUrl</arg><arg>${postgresURL}</arg>
            <arg>--postgresUser</arg><arg>${postgresUser}</arg>
            <arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
            <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
            <arg>--action</arg><arg>claims</arg>
        </java>
        <ok to="ImportODF_claims"/>
        <error to="Kill"/>
    </action>
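
    <!--
        The next two actions pull the ODF and OAF claim records from the MongoDB mdstores; their
        interpretation is 'claim', while the regular record imports further below use 'cleaned'.
    -->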
    <action name="ImportODF_claims">
        <java>
            <prepare>
                <delete path="${contentPath}/odf_claims"/>
            </prepare>
            <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
            <arg>--hdfsPath</arg><arg>${contentPath}/odf_claims</arg>
            <arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
            <arg>--mongoDb</arg><arg>${mongoDb}</arg>
            <arg>--mdFormat</arg><arg>ODF</arg>
            <arg>--mdLayout</arg><arg>store</arg>
            <arg>--mdInterpretation</arg><arg>claim</arg>
        </java>
        <ok to="ImportOAF_claims"/>
        <error to="Kill"/>
    </action>

    <action name="ImportOAF_claims">
        <java>
            <prepare>
                <delete path="${contentPath}/oaf_claims"/>
            </prepare>
            <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
            <arg>--hdfsPath</arg><arg>${contentPath}/oaf_claims</arg>
            <arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
            <arg>--mongoDb</arg><arg>${mongoDb}</arg>
            <arg>--mdFormat</arg><arg>OAF</arg>
            <arg>--mdLayout</arg><arg>store</arg>
            <arg>--mdInterpretation</arg><arg>claim</arg>
        </java>
        <ok to="wait_import"/>
        <error to="Kill"/>
    </action>

    <action name="ImportDB">
        <java>
            <prepare>
                <delete path="${contentPath}/db_records"/>
            </prepare>
            <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
            <arg>--hdfsPath</arg><arg>${contentPath}/db_records</arg>
            <arg>--postgresUrl</arg><arg>${postgresURL}</arg>
            <arg>--postgresUser</arg><arg>${postgresUser}</arg>
            <arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
            <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
        </java>
        <ok to="ImportODF"/>
        <error to="Kill"/>
    </action>
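
    <!--
        ImportODF and ImportOAF mirror the claim imports above, but read the regular metadata
        records (interpretation 'cleaned') into ${contentPath}/odf_records and
        ${contentPath}/oaf_records.
    -->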
    <action name="ImportODF">
        <java>
            <prepare>
                <delete path="${contentPath}/odf_records"/>
            </prepare>
            <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
            <arg>--hdfsPath</arg><arg>${contentPath}/odf_records</arg>
            <arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
            <arg>--mongoDb</arg><arg>${mongoDb}</arg>
            <arg>--mdFormat</arg><arg>ODF</arg>
            <arg>--mdLayout</arg><arg>store</arg>
            <arg>--mdInterpretation</arg><arg>cleaned</arg>
        </java>
        <ok to="ImportOAF"/>
        <error to="Kill"/>
    </action>

    <action name="ImportOAF">
        <java>
            <prepare>
                <delete path="${contentPath}/oaf_records"/>
            </prepare>
            <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
            <arg>--hdfsPath</arg><arg>${contentPath}/oaf_records</arg>
            <arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
            <arg>--mongoDb</arg><arg>${mongoDb}</arg>
            <arg>--mdFormat</arg><arg>OAF</arg>
            <arg>--mdLayout</arg><arg>store</arg>
            <arg>--mdInterpretation</arg><arg>cleaned</arg>
        </java>
        <ok to="wait_import"/>
        <error to="Kill"/>
    </action>

    <join name="wait_import" to="fork_generate_entities"/>
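
    <!--
        Entity generation also runs as two parallel branches, one for claims and one for regular
        records; this is the node the reuse_aggregator_content decision jumps to when the import
        phase is skipped.
    -->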
    <fork name="fork_generate_entities">
        <path start="GenerateEntities_claim"/>
        <path start="GenerateEntities"/>
    </fork>

    <action name="GenerateEntities_claim">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>GenerateEntities_claim</name>
            <class>eu.dnetlib.dhp.oa.graph.raw.GenerateEntitiesApplication</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory ${sparkExecutorMemory}
                --executor-cores ${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--sourcePaths</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims</arg>
            <arg>--targetPath</arg><arg>${workingDir}/entities_claim</arg>
            <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
        </spark>
        <ok to="GenerateGraph_claims"/>
        <error to="Kill"/>
    </action>
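
    <!--
        GenerateGraph_claims runs DispatchEntitiesApplication, which partitions the mixed entity
        records produced above into one output per graph table under the given raw path.
    -->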
    <action name="GenerateGraph_claims">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>GenerateGraph_claims</name>
            <class>eu.dnetlib.dhp.oa.graph.raw.DispatchEntitiesApplication</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory ${sparkExecutorMemory}
                --executor-cores ${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/entities_claim</arg>
            <arg>--graphRawPath</arg><arg>${workingDir}/graph_claims</arg>
        </spark>
        <ok to="wait_graphs"/>
        <error to="Kill"/>
    </action>

    <action name="GenerateEntities">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>GenerateEntities</name>
            <class>eu.dnetlib.dhp.oa.graph.raw.GenerateEntitiesApplication</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory ${sparkExecutorMemory}
                --executor-cores ${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--sourcePaths</arg><arg>${contentPath}/db_records,${contentPath}/oaf_records,${contentPath}/odf_records</arg>
            <arg>--targetPath</arg><arg>${workingDir}/entities</arg>
            <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
        </spark>
        <ok to="GenerateGraph"/>
        <error to="Kill"/>
    </action>

    <action name="GenerateGraph">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>GenerateGraph</name>
            <class>eu.dnetlib.dhp.oa.graph.raw.DispatchEntitiesApplication</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory ${sparkExecutorMemory}
                --executor-cores ${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=7680
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/entities</arg>
            <arg>--graphRawPath</arg><arg>${workingDir}/graph_raw</arg>
        </spark>
        <ok to="wait_graphs"/>
        <error to="Kill"/>
    </action>

    <join name="wait_graphs" to="fork_merge_claims"/>
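
    <!--
        The merge phase combines, per graph table, the raw graph with the claims graph into
        ${graphOutputPath}. The spark.sql.shuffle.partitions values are sized per table: 7680 for
        the large publication and dataset tables, 3840 for relations, 1920 for software and other
        research products, and the Spark default of 200 for the small entity tables.
    -->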
    <fork name="fork_merge_claims">
        <path start="merge_claims_publication"/>
        <path start="merge_claims_dataset"/>
        <path start="merge_claims_software"/>
        <path start="merge_claims_otherresearchproduct"/>
        <path start="merge_claims_datasource"/>
        <path start="merge_claims_organization"/>
        <path start="merge_claims_project"/>
        <path start="merge_claims_relation"/>
    </fork>

    <action name="merge_claims_publication">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>MergeClaims_publication</name>
            <class>eu.dnetlib.dhp.oa.graph.raw.MergeClaimsApplication</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory ${sparkExecutorMemory}
                --executor-cores ${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=7680
            </spark-opts>
            <arg>--rawGraphPath</arg><arg>${workingDir}/graph_raw</arg>
            <arg>--claimsGraphPath</arg><arg>${workingDir}/graph_claims</arg>
            <arg>--outputRawGaphPath</arg><arg>${graphOutputPath}</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
        </spark>
        <ok to="wait_merge"/>
        <error to="Kill"/>
    </action>

    <action name="merge_claims_dataset">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>MergeClaims_dataset</name>
            <class>eu.dnetlib.dhp.oa.graph.raw.MergeClaimsApplication</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory ${sparkExecutorMemory}
                --executor-cores ${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=7680
            </spark-opts>
            <arg>--rawGraphPath</arg><arg>${workingDir}/graph_raw</arg>
            <arg>--claimsGraphPath</arg><arg>${workingDir}/graph_claims</arg>
            <arg>--outputRawGaphPath</arg><arg>${graphOutputPath}</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
        </spark>
        <ok to="wait_merge"/>
        <error to="Kill"/>
    </action>

    <action name="merge_claims_relation">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>MergeClaims_relation</name>
            <class>eu.dnetlib.dhp.oa.graph.raw.MergeClaimsApplication</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory ${sparkExecutorMemory}
                --executor-cores ${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=3840
            </spark-opts>
            <arg>--rawGraphPath</arg><arg>${workingDir}/graph_raw</arg>
            <arg>--claimsGraphPath</arg><arg>${workingDir}/graph_claims</arg>
            <arg>--outputRawGaphPath</arg><arg>${graphOutputPath}</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Relation</arg>
        </spark>
        <ok to="wait_merge"/>
        <error to="Kill"/>
    </action>

    <action name="merge_claims_software">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>MergeClaims_software</name>
            <class>eu.dnetlib.dhp.oa.graph.raw.MergeClaimsApplication</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory ${sparkExecutorMemory}
                --executor-cores ${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=1920
            </spark-opts>
            <arg>--rawGraphPath</arg><arg>${workingDir}/graph_raw</arg>
            <arg>--claimsGraphPath</arg><arg>${workingDir}/graph_claims</arg>
            <arg>--outputRawGaphPath</arg><arg>${graphOutputPath}</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
        </spark>
        <ok to="wait_merge"/>
        <error to="Kill"/>
    </action>

    <action name="merge_claims_otherresearchproduct">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>MergeClaims_otherresearchproduct</name>
            <class>eu.dnetlib.dhp.oa.graph.raw.MergeClaimsApplication</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory ${sparkExecutorMemory}
                --executor-cores ${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=1920
            </spark-opts>
            <arg>--rawGraphPath</arg><arg>${workingDir}/graph_raw</arg>
            <arg>--claimsGraphPath</arg><arg>${workingDir}/graph_claims</arg>
            <arg>--outputRawGaphPath</arg><arg>${graphOutputPath}</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
        </spark>
        <ok to="wait_merge"/>
        <error to="Kill"/>
    </action>

    <action name="merge_claims_datasource">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>MergeClaims_datasource</name>
            <class>eu.dnetlib.dhp.oa.graph.raw.MergeClaimsApplication</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory ${sparkExecutorMemory}
                --executor-cores ${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=200
            </spark-opts>
            <arg>--rawGraphPath</arg><arg>${workingDir}/graph_raw</arg>
            <arg>--claimsGraphPath</arg><arg>${workingDir}/graph_claims</arg>
            <arg>--outputRawGaphPath</arg><arg>${graphOutputPath}</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Datasource</arg>
        </spark>
        <ok to="wait_merge"/>
        <error to="Kill"/>
    </action>

    <action name="merge_claims_organization">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>MergeClaims_organization</name>
            <class>eu.dnetlib.dhp.oa.graph.raw.MergeClaimsApplication</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory ${sparkExecutorMemory}
                --executor-cores ${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=200
            </spark-opts>
            <arg>--rawGraphPath</arg><arg>${workingDir}/graph_raw</arg>
            <arg>--claimsGraphPath</arg><arg>${workingDir}/graph_claims</arg>
            <arg>--outputRawGaphPath</arg><arg>${graphOutputPath}</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Organization</arg>
        </spark>
        <ok to="wait_merge"/>
        <error to="Kill"/>
    </action>

    <action name="merge_claims_project">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>MergeClaims_project</name>
            <class>eu.dnetlib.dhp.oa.graph.raw.MergeClaimsApplication</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory ${sparkExecutorMemory}
                --executor-cores ${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=200
            </spark-opts>
            <arg>--rawGraphPath</arg><arg>${workingDir}/graph_raw</arg>
            <arg>--claimsGraphPath</arg><arg>${workingDir}/graph_claims</arg>
            <arg>--outputRawGaphPath</arg><arg>${graphOutputPath}</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Project</arg>
        </spark>
        <ok to="wait_merge"/>
        <error to="Kill"/>
    </action>

    <join name="wait_merge" to="End"/>

    <end name="End"/>

</workflow-app>