<workflow-app name="index_infospace_graph" xmlns="uri:oozie:workflow:0.5">

    <parameters>
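        <!-- Pipeline overview: prepare_relations prunes and partitions the relation set,
             fork_join_related_entities (phase 1) joins relations with each entity type,
             fork_join_all_entities (phase 2) attaches the related entities to each entity,
             convert_to_xml serializes the joined records, and the final actions manage the
             Solr collection (delete-by-query, indexing, commit). -->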
        <property>
            <name>inputGraphRootPath</name>
            <description>root location of the input materialized graph</description>
        </property>
        <property>
            <name>isLookupUrl</name>
            <description>URL for the ISLookup service</description>
        </property>
        <property>
            <name>relPartitions</name>
            <description>number of partitions for the relations Dataset</description>
        </property>
        <property>
            <name>relationFilter</name>
            <description>filter applied when reading relations (by relClass)</description>
        </property>
        <property>
            <name>sourceMaxRelations</name>
            <description>maximum number of relations allowed for each entity, grouping by source</description>
        </property>
        <property>
            <name>targetMaxRelations</name>
            <description>maximum number of relations allowed for each entity, grouping by target</description>
        </property>
        <property>
            <name>format</name>
            <description>metadata format name (DMF|TMF)</description>
        </property>
        <property>
            <name>batchSize</name>
            <description>number of records to be included in each indexing request</description>
        </property>
        <property>
            <name>solrDeletionQuery</name>
            <value>*:*</value>
            <description>query used in the delete-by-query operation</description>
        </property>
        <property>
            <name>sparkDriverMemory</name>
            <description>memory for driver process</description>
        </property>
        <property>
            <name>sparkExecutorMemory</name>
            <description>memory for individual executor</description>
        </property>
        <property>
            <name>sparkExecutorCores</name>
            <description>number of cores used by a single executor</description>
        </property>
        <property>
            <name>sparkDriverMemoryForJoining</name>
            <description>memory for driver process</description>
        </property>
        <property>
            <name>sparkExecutorMemoryForJoining</name>
            <description>memory for individual executor</description>
        </property>
        <property>
            <name>sparkExecutorCoresForJoining</name>
            <description>number of cores used by a single executor</description>
        </property>
        <property>
            <name>sparkDriverMemoryForIndexing</name>
            <description>memory for driver process</description>
        </property>
        <property>
            <name>sparkExecutorMemoryForIndexing</name>
            <description>memory for individual executor</description>
        </property>
        <property>
            <name>sparkExecutorCoresForIndexing</name>
            <description>number of cores used by a single executor</description>
        </property>
        <property>
            <name>oozieActionShareLibForSpark2</name>
            <description>oozie action sharelib for spark 2.*</description>
        </property>
        <property>
            <name>spark2ExtraListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
            <description>spark 2.* extra listeners classname</description>
        </property>
        <property>
            <name>spark2SqlQueryExecutionListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
            <description>spark 2.* sql query execution listeners classname</description>
        </property>
        <property>
            <name>spark2YarnHistoryServerAddress</name>
            <description>spark 2.* yarn history server address</description>
        </property>
        <property>
            <name>spark2EventLogDir</name>
            <description>spark 2.* event log dir location</description>
        </property>
        <property>
            <name>sparkNetworkTimeout</name>
            <description>configures spark.network.timeout</description>
        </property>
    </parameters>
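
    <!-- Common settings inherited by every action below: job tracker, name node
         and the oozie sharelib used to run the Spark 2 actions. -->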
    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>oozie.action.sharelib.for.spark</name>
                <value>${oozieActionShareLibForSpark2}</value>
            </property>
        </configuration>
    </global>
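
    <!-- Resume support: set the resumeFrom property to one of the node names below
         to skip the stages before it; by default execution starts from prepare_relations. -->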
    <start to="resume_from"/>

    <decision name="resume_from">
        <switch>
            <case to="prepare_relations">${wf:conf('resumeFrom') eq 'prepare_relations'}</case>
            <case to="fork_join_related_entities">${wf:conf('resumeFrom') eq 'fork_join_related_entities'}</case>
            <case to="fork_join_all_entities">${wf:conf('resumeFrom') eq 'fork_join_all_entities'}</case>
            <case to="convert_to_xml">${wf:conf('resumeFrom') eq 'convert_to_xml'}</case>
            <case to="drop_solr_collection">${wf:conf('resumeFrom') eq 'drop_solr_collection'}</case>
            <case to="to_solr_index">${wf:conf('resumeFrom') eq 'to_solr_index'}</case>
            <default to="prepare_relations"/>
        </switch>
    </decision>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>
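
    <!-- PrepareRelationsJob prunes the relation set: it applies the relClass filter
         and caps the relations kept per source (sourceMaxRelations) and per target
         (targetMaxRelations) entity. Note that the partition count is hardcoded to
         5000 below; the relPartitions parameter declared above is not referenced here. -->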
    <action name="prepare_relations">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>PrepareRelations</name>
            <class>eu.dnetlib.dhp.oa.provision.PrepareRelationsJob</class>
            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCoresForJoining}
                --executor-memory=${sparkExecutorMemoryForJoining}
                --driver-memory=${sparkDriverMemoryForJoining}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=3840
            </spark-opts>
            <arg>--inputRelationsPath</arg><arg>${inputGraphRootPath}/relation</arg>
            <arg>--outputPath</arg><arg>${workingDir}/relation</arg>
            <arg>--sourceMaxRelations</arg><arg>${sourceMaxRelations}</arg>
            <arg>--targetMaxRelations</arg><arg>${targetMaxRelations}</arg>
            <arg>--relationFilter</arg><arg>${relationFilter}</arg>
            <arg>--relPartitions</arg><arg>5000</arg>
        </spark>
        <ok to="fork_join_related_entities"/>
        <error to="Kill"/>
    </action>
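
    <!-- Phase 1 (CreateRelatedEntitiesJob_phase1): joins the pruned relations with each
         entity type in parallel (relation.target = entity.id); the partial joins land
         under ${workingDir}/join_partial. -->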
    <fork name="fork_join_related_entities">
        <path start="join_relation_publication"/>
        <path start="join_relation_dataset"/>
        <path start="join_relation_otherresearchproduct"/>
        <path start="join_relation_software"/>
        <path start="join_relation_datasource"/>
        <path start="join_relation_organization"/>
        <path start="join_relation_project"/>
    </fork>

    <action name="join_relation_publication">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Join[relation.target = publication.id]</name>
            <class>eu.dnetlib.dhp.oa.provision.CreateRelatedEntitiesJob_phase1</class>
            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCoresForJoining}
                --executor-memory=${sparkExecutorMemoryForJoining}
                --driver-memory=${sparkDriverMemoryForJoining}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=15000
                --conf spark.network.timeout=${sparkNetworkTimeout}
            </spark-opts>
            <arg>--inputRelationsPath</arg><arg>${workingDir}/relation</arg>
            <arg>--inputEntityPath</arg><arg>${inputGraphRootPath}/publication</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
            <arg>--outputPath</arg><arg>${workingDir}/join_partial/publication</arg>
        </spark>
        <ok to="wait_joins"/>
        <error to="Kill"/>
    </action>

    <action name="join_relation_dataset">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Join[relation.target = dataset.id]</name>
            <class>eu.dnetlib.dhp.oa.provision.CreateRelatedEntitiesJob_phase1</class>
            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCoresForJoining}
                --executor-memory=${sparkExecutorMemoryForJoining}
                --driver-memory=${sparkDriverMemoryForJoining}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=15000
                --conf spark.network.timeout=${sparkNetworkTimeout}
            </spark-opts>
            <arg>--inputRelationsPath</arg><arg>${workingDir}/relation</arg>
            <arg>--inputEntityPath</arg><arg>${inputGraphRootPath}/dataset</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
            <arg>--outputPath</arg><arg>${workingDir}/join_partial/dataset</arg>
        </spark>
        <ok to="wait_joins"/>
        <error to="Kill"/>
    </action>

    <action name="join_relation_otherresearchproduct">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Join[relation.target = otherresearchproduct.id]</name>
            <class>eu.dnetlib.dhp.oa.provision.CreateRelatedEntitiesJob_phase1</class>
            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCoresForJoining}
                --executor-memory=${sparkExecutorMemoryForJoining}
                --driver-memory=${sparkDriverMemoryForJoining}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=10000
                --conf spark.network.timeout=${sparkNetworkTimeout}
            </spark-opts>
            <arg>--inputRelationsPath</arg><arg>${workingDir}/relation</arg>
            <arg>--inputEntityPath</arg><arg>${inputGraphRootPath}/otherresearchproduct</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
            <arg>--outputPath</arg><arg>${workingDir}/join_partial/otherresearchproduct</arg>
        </spark>
        <ok to="wait_joins"/>
        <error to="Kill"/>
    </action>

    <action name="join_relation_software">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Join[relation.target = software.id]</name>
            <class>eu.dnetlib.dhp.oa.provision.CreateRelatedEntitiesJob_phase1</class>
            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCoresForJoining}
                --executor-memory=${sparkExecutorMemoryForJoining}
                --driver-memory=${sparkDriverMemoryForJoining}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=5000
                --conf spark.network.timeout=${sparkNetworkTimeout}
            </spark-opts>
            <arg>--inputRelationsPath</arg><arg>${workingDir}/relation</arg>
            <arg>--inputEntityPath</arg><arg>${inputGraphRootPath}/software</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
            <arg>--outputPath</arg><arg>${workingDir}/join_partial/software</arg>
        </spark>
        <ok to="wait_joins"/>
        <error to="Kill"/>
    </action>

    <action name="join_relation_datasource">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Join[relation.target = datasource.id]</name>
            <class>eu.dnetlib.dhp.oa.provision.CreateRelatedEntitiesJob_phase1</class>
            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCoresForJoining}
                --executor-memory=${sparkExecutorMemoryForJoining}
                --driver-memory=${sparkDriverMemoryForJoining}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=5000
                --conf spark.network.timeout=${sparkNetworkTimeout}
            </spark-opts>
            <arg>--inputRelationsPath</arg><arg>${workingDir}/relation</arg>
            <arg>--inputEntityPath</arg><arg>${inputGraphRootPath}/datasource</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Datasource</arg>
            <arg>--outputPath</arg><arg>${workingDir}/join_partial/datasource</arg>
        </spark>
        <ok to="wait_joins"/>
        <error to="Kill"/>
    </action>

    <action name="join_relation_organization">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Join[relation.target = organization.id]</name>
            <class>eu.dnetlib.dhp.oa.provision.CreateRelatedEntitiesJob_phase1</class>
            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCoresForJoining}
                --executor-memory=${sparkExecutorMemoryForJoining}
                --driver-memory=${sparkDriverMemoryForJoining}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=5000
                --conf spark.network.timeout=${sparkNetworkTimeout}
            </spark-opts>
            <arg>--inputRelationsPath</arg><arg>${workingDir}/relation</arg>
            <arg>--inputEntityPath</arg><arg>${inputGraphRootPath}/organization</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Organization</arg>
            <arg>--outputPath</arg><arg>${workingDir}/join_partial/organization</arg>
        </spark>
        <ok to="wait_joins"/>
        <error to="Kill"/>
    </action>

    <action name="join_relation_project">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Join[relation.target = project.id]</name>
            <class>eu.dnetlib.dhp.oa.provision.CreateRelatedEntitiesJob_phase1</class>
            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCoresForJoining}
                --executor-memory=${sparkExecutorMemoryForJoining}
                --driver-memory=${sparkDriverMemoryForJoining}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=5000
                --conf spark.network.timeout=${sparkNetworkTimeout}
            </spark-opts>
            <arg>--inputRelationsPath</arg><arg>${workingDir}/relation</arg>
            <arg>--inputEntityPath</arg><arg>${inputGraphRootPath}/project</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Project</arg>
            <arg>--outputPath</arg><arg>${workingDir}/join_partial/project</arg>
        </spark>
        <ok to="wait_joins"/>
        <error to="Kill"/>
    </action>

    <join name="wait_joins" to="fork_join_all_entities"/>
    <fork name="fork_join_all_entities">
        <path start="join_publication_relations"/>
        <path start="join_dataset_relations"/>
        <path start="join_otherresearchproduct_relations"/>
        <path start="join_software_relations"/>
        <path start="join_datasource_relations"/>
        <path start="join_organization_relations"/>
        <path start="join_project_relations"/>
    </fork>

    <action name="join_publication_relations">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Join[publication.id = relatedEntity.source]</name>
            <class>eu.dnetlib.dhp.oa.provision.CreateRelatedEntitiesJob_phase2</class>
            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCoresForJoining}
                --executor-memory=${sparkExecutorMemoryForJoining}
                --driver-memory=${sparkDriverMemoryForJoining}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=15000
                --conf spark.network.timeout=${sparkNetworkTimeout}
            </spark-opts>
            <arg>--inputEntityPath</arg><arg>${inputGraphRootPath}/publication</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
            <arg>--inputRelatedEntitiesPath</arg><arg>${workingDir}/join_partial</arg>
            <arg>--outputPath</arg><arg>${workingDir}/join_entities/publication</arg>
            <arg>--numPartitions</arg><arg>30000</arg>
        </spark>
        <ok to="wait_join_phase2"/>
        <error to="Kill"/>
    </action>

    <action name="join_dataset_relations">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Join[dataset.id = relatedEntity.source]</name>
            <class>eu.dnetlib.dhp.oa.provision.CreateRelatedEntitiesJob_phase2</class>
            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCoresForJoining}
                --executor-memory=${sparkExecutorMemoryForJoining}
                --driver-memory=${sparkDriverMemoryForJoining}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=10000
                --conf spark.network.timeout=${sparkNetworkTimeout}
            </spark-opts>
            <arg>--inputEntityPath</arg><arg>${inputGraphRootPath}/dataset</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
            <arg>--inputRelatedEntitiesPath</arg><arg>${workingDir}/join_partial</arg>
            <arg>--outputPath</arg><arg>${workingDir}/join_entities/dataset</arg>
            <arg>--numPartitions</arg><arg>20000</arg>
        </spark>
        <ok to="wait_join_phase2"/>
        <error to="Kill"/>
    </action>

    <action name="join_otherresearchproduct_relations">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Join[otherresearchproduct.id = relatedEntity.source]</name>
            <class>eu.dnetlib.dhp.oa.provision.CreateRelatedEntitiesJob_phase2</class>
            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCoresForJoining}
                --executor-memory=${sparkExecutorMemoryForJoining}
                --driver-memory=${sparkDriverMemoryForJoining}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=10000
                --conf spark.network.timeout=${sparkNetworkTimeout}
            </spark-opts>
            <arg>--inputEntityPath</arg><arg>${inputGraphRootPath}/otherresearchproduct</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
            <arg>--inputRelatedEntitiesPath</arg><arg>${workingDir}/join_partial</arg>
            <arg>--outputPath</arg><arg>${workingDir}/join_entities/otherresearchproduct</arg>
            <arg>--numPartitions</arg><arg>10000</arg>
        </spark>
        <ok to="wait_join_phase2"/>
        <error to="Kill"/>
    </action>

    <action name="join_software_relations">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Join[software.id = relatedEntity.source]</name>
            <class>eu.dnetlib.dhp.oa.provision.CreateRelatedEntitiesJob_phase2</class>
            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCoresForJoining}
                --executor-memory=${sparkExecutorMemoryForJoining}
                --driver-memory=${sparkDriverMemoryForJoining}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=5000
                --conf spark.network.timeout=${sparkNetworkTimeout}
            </spark-opts>
            <arg>--inputEntityPath</arg><arg>${inputGraphRootPath}/software</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
            <arg>--inputRelatedEntitiesPath</arg><arg>${workingDir}/join_partial</arg>
            <arg>--outputPath</arg><arg>${workingDir}/join_entities/software</arg>
            <arg>--numPartitions</arg><arg>10000</arg>
        </spark>
        <ok to="wait_join_phase2"/>
        <error to="Kill"/>
    </action>

    <action name="join_datasource_relations">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Join[datasource.id = relatedEntity.source]</name>
            <class>eu.dnetlib.dhp.oa.provision.CreateRelatedEntitiesJob_phase2</class>
            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCoresForJoining}
                --executor-memory=${sparkExecutorMemoryForJoining}
                --driver-memory=${sparkDriverMemoryForJoining}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=8000
                --conf spark.network.timeout=${sparkNetworkTimeout}
            </spark-opts>
            <arg>--inputEntityPath</arg><arg>${inputGraphRootPath}/datasource</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Datasource</arg>
            <arg>--inputRelatedEntitiesPath</arg><arg>${workingDir}/join_partial</arg>
            <arg>--outputPath</arg><arg>${workingDir}/join_entities/datasource</arg>
            <arg>--numPartitions</arg><arg>1000</arg>
        </spark>
        <ok to="wait_join_phase2"/>
        <error to="Kill"/>
    </action>

    <action name="join_organization_relations">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Join[organization.id = relatedEntity.source]</name>
            <class>eu.dnetlib.dhp.oa.provision.CreateRelatedEntitiesJob_phase2</class>
            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCoresForJoining}
                --executor-memory=${sparkExecutorMemoryForJoining}
                --driver-memory=${sparkDriverMemoryForJoining}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=10000
                --conf spark.network.timeout=${sparkNetworkTimeout}
            </spark-opts>
            <arg>--inputEntityPath</arg><arg>${inputGraphRootPath}/organization</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Organization</arg>
            <arg>--inputRelatedEntitiesPath</arg><arg>${workingDir}/join_partial</arg>
            <arg>--outputPath</arg><arg>${workingDir}/join_entities/organization</arg>
            <arg>--numPartitions</arg><arg>20000</arg>
        </spark>
        <ok to="wait_join_phase2"/>
        <error to="Kill"/>
    </action>

    <action name="join_project_relations">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Join[project.id = relatedEntity.source]</name>
            <class>eu.dnetlib.dhp.oa.provision.CreateRelatedEntitiesJob_phase2</class>
            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCoresForJoining}
                --executor-memory=${sparkExecutorMemoryForJoining}
                --driver-memory=${sparkDriverMemoryForJoining}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=5000
                --conf spark.network.timeout=${sparkNetworkTimeout}
            </spark-opts>
            <arg>--inputEntityPath</arg><arg>${inputGraphRootPath}/project</arg>
            <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Project</arg>
            <arg>--inputRelatedEntitiesPath</arg><arg>${workingDir}/join_partial</arg>
            <arg>--outputPath</arg><arg>${workingDir}/join_entities/project</arg>
            <arg>--numPartitions</arg><arg>10000</arg>
        </spark>
        <ok to="wait_join_phase2"/>
        <error to="Kill"/>
    </action>

    <join name="wait_join_phase2" to="convert_to_xml"/>
    <action name="convert_to_xml">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>convert_to_xml</name>
            <class>eu.dnetlib.dhp.oa.provision.XmlConverterJob</class>
            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCores}
                --executor-memory=${sparkExecutorMemory}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.shuffle.partitions=3840
                --conf spark.network.timeout=${sparkNetworkTimeout}
            </spark-opts>
            <arg>--inputPath</arg><arg>${workingDir}/join_entities</arg>
            <arg>--outputPath</arg><arg>${workingDir}/xml</arg>
            <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
        </spark>
        <ok to="should_index"/>
        <error to="Kill"/>
    </action>
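
    <!-- The shouldIndex property controls whether the Solr stages run at all;
         when unset, the workflow proceeds to drop_solr_collection by default. -->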
    <decision name="should_index">
        <switch>
            <case to="drop_solr_collection">${wf:conf('shouldIndex') eq 'true'}</case>
            <case to="End">${wf:conf('shouldIndex') eq 'false'}</case>
            <default to="drop_solr_collection"/>
        </switch>
    </decision>
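
    <!-- Empties the target Solr collection via a delete-by-query
         (${solrDeletionQuery}, defaulting to *:*) before re-indexing. -->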
    <action name="drop_solr_collection">
        <java>
            <configuration>
                <property>
                    <name>oozie.launcher.mapreduce.user.classpath.first</name>
                    <value>true</value>
                </property>
            </configuration>
            <main-class>eu.dnetlib.dhp.oa.provision.SolrAdminApplication</main-class>
            <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
            <arg>--format</arg><arg>${format}</arg>
            <arg>--action</arg><arg>DELETE_BY_QUERY</arg>
            <arg>--query</arg><arg>${solrDeletionQuery}</arg>
            <arg>--commit</arg><arg>true</arg>
        </java>
        <ok to="to_solr_index"/>
        <error to="Kill"/>
    </action>
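
    <!-- XmlIndexingJob streams the XML records to Solr in batches of ${batchSize}.
         Dynamic allocation is enabled, with sparkExecutorCoresForIndexing reused as the
         spark.dynamicAllocation.maxExecutors cap; speculative execution is disabled,
         presumably to avoid duplicate updates reaching Solr. -->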
    <action name="to_solr_index">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>to_solr_index</name>
            <class>eu.dnetlib.dhp.oa.provision.XmlIndexingJob</class>
            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemoryForIndexing}
                --driver-memory=${sparkDriverMemoryForIndexing}
                --conf spark.dynamicAllocation.enabled=true
                --conf spark.dynamicAllocation.maxExecutors=${sparkExecutorCoresForIndexing}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.speculation=false
                --conf spark.hadoop.mapreduce.map.speculative=false
                --conf spark.hadoop.mapreduce.reduce.speculative=false
            </spark-opts>
            <arg>--inputPath</arg><arg>${workingDir}/xml</arg>
            <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
            <arg>--format</arg><arg>${format}</arg>
            <arg>--batchSize</arg><arg>${batchSize}</arg>
            <arg>--outputFormat</arg><arg>${outputFormat}</arg>
            <arg>--outputPath</arg><arg>${workingDir}/solr_documents</arg>
        </spark>
        <ok to="commit_solr_collection"/>
        <error to="Kill"/>
    </action>
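
    <!-- Issues a final COMMIT against the Solr collection so the newly indexed
         documents become searchable. -->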
    <action name="commit_solr_collection">
        <java>
            <configuration>
                <property>
                    <name>oozie.launcher.mapreduce.user.classpath.first</name>
                    <value>true</value>
                </property>
            </configuration>
            <main-class>eu.dnetlib.dhp.oa.provision.SolrAdminApplication</main-class>
            <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
            <arg>--format</arg><arg>${format}</arg>
            <arg>--action</arg><arg>COMMIT</arg>
        </java>
        <ok to="End"/>
        <error to="Kill"/>
    </action>

    <end name="End"/>

</workflow-app>