Miriam Baglioni 2023-04-21 11:32:07 +02:00
parent 563c5d8527
commit 1671e78e59
1 changed file with 2 additions and 169 deletions

@@ -297,7 +297,6 @@
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
<arg>--outputPath</arg><arg>${workingDir}/dump/publication</arg>
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
-<arg>--dumpType</arg><arg>community</arg>
</spark>
<ok to="join_dump_comm"/>
<error to="Kill"/>
@@ -497,7 +496,7 @@
<mode>cluster</mode>
<name>Extend dumped software with information about project</name>
<class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
-<jar>dhp-graph-dump-${projectVersion}.jar</jar>
+<jar>dump-${projectVersion}.jar</jar>
<spark-opts>
--executor-memory=${sparkExecutorMemory}
--executor-cores=${sparkExecutorCores}
@@ -518,173 +517,7 @@
<join name="join_extend" to="End"/>
<!-- <action name="prepareResultProject">-->
<!-- <spark xmlns="uri:oozie:spark-action:0.2">-->
<!-- <master>yarn</master>-->
<!-- <mode>cluster</mode>-->
<!-- <name>Prepare association result subset of project info</name>-->
<!-- <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkPrepareResultProject</class>-->
<!-- <jar>dump-${projectVersion}.jar</jar>-->
<!-- <spark-opts>-->
<!-- &#45;&#45;executor-memory=${sparkExecutorMemory}-->
<!-- &#45;&#45;executor-cores=${sparkExecutorCores}-->
<!-- &#45;&#45;driver-memory=${sparkDriverMemory}-->
<!-- &#45;&#45;conf spark.extraListeners=${spark2ExtraListeners}-->
<!-- &#45;&#45;conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}-->
<!-- &#45;&#45;conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}-->
<!-- &#45;&#45;conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}-->
<!-- &#45;&#45;conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}-->
<!-- </spark-opts>-->
<!-- <arg>&#45;&#45;sourcePath</arg><arg>${outputPath}/original</arg>-->
<!-- <arg>&#45;&#45;outputPath</arg><arg>${workingDir}/preparedInfo</arg>-->
<!-- </spark>-->
<!-- <ok to="fork_result_linked_to_projects"/>-->
<!-- <error to="Kill"/>-->
<!-- </action>-->
<!-- <fork name="fork_result_linked_to_projects">-->
<!-- <path start="select_publication_linked_to_projects"/>-->
<!-- <path start="select_dataset_linked_to_projects"/>-->
<!-- <path start="select_orp_linked_to_project"/>-->
<!-- <path start="select_software_linked_to_projects"/>-->
<!-- </fork>-->
<!-- <action name="select_publication_linked_to_projects">-->
<!-- <spark xmlns="uri:oozie:spark-action:0.2">-->
<!-- <master>yarn</master>-->
<!-- <mode>cluster</mode>-->
<!-- <name>Dump funder results </name>-->
<!-- <class>eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkResultLinkedToProject</class>-->
<!-- <jar>dump-${projectVersion}.jar</jar>-->
<!-- <spark-opts>-->
<!-- &#45;&#45;executor-memory=${sparkExecutorMemory}-->
<!-- &#45;&#45;executor-cores=${sparkExecutorCores}-->
<!-- &#45;&#45;driver-memory=${sparkDriverMemory}-->
<!-- &#45;&#45;conf spark.extraListeners=${spark2ExtraListeners}-->
<!-- &#45;&#45;conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}-->
<!-- &#45;&#45;conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}-->
<!-- &#45;&#45;conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}-->
<!-- &#45;&#45;conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}-->
<!-- </spark-opts>-->
<!-- <arg>&#45;&#45;sourcePath</arg><arg>${outputPath}/original/publication</arg>-->
<!-- <arg>&#45;&#45;resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>-->
<!-- <arg>&#45;&#45;outputPath</arg><arg>${workingDir}/result/publication</arg>-->
<!-- <arg>&#45;&#45;graphPath</arg><arg>${workingDir}/preparedInfo</arg>-->
<!-- <arg>&#45;&#45;communityMapPath</arg><arg>${communityMapPath}</arg>-->
<!-- </spark>-->
<!-- <ok to="join_link"/>-->
<!-- <error to="Kill"/>-->
<!-- </action>-->
<!-- <action name="select_dataset_linked_to_projects">-->
<!-- <spark xmlns="uri:oozie:spark-action:0.2">-->
<!-- <master>yarn</master>-->
<!-- <mode>cluster</mode>-->
<!-- <name>Dump funder results </name>-->
<!-- <class>eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkResultLinkedToProject</class>-->
<!-- <jar>dump-${projectVersion}.jar</jar>-->
<!-- <spark-opts>-->
<!-- &#45;&#45;executor-memory=${sparkExecutorMemory}-->
<!-- &#45;&#45;executor-cores=${sparkExecutorCores}-->
<!-- &#45;&#45;driver-memory=${sparkDriverMemory}-->
<!-- &#45;&#45;conf spark.extraListeners=${spark2ExtraListeners}-->
<!-- &#45;&#45;conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}-->
<!-- &#45;&#45;conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}-->
<!-- &#45;&#45;conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}-->
<!-- &#45;&#45;conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}-->
<!-- </spark-opts>-->
<!-- <arg>&#45;&#45;sourcePath</arg><arg>${outputPath}/original/dataset</arg>-->
<!-- <arg>&#45;&#45;resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>-->
<!-- <arg>&#45;&#45;outputPath</arg><arg>${workingDir}/result/dataset</arg>-->
<!-- <arg>&#45;&#45;graphPath</arg><arg>${workingDir}/preparedInfo</arg>-->
<!-- <arg>&#45;&#45;communityMapPath</arg><arg>${communityMapPath}</arg>-->
<!-- </spark>-->
<!-- <ok to="join_link"/>-->
<!-- <error to="Kill"/>-->
<!-- </action>-->
<!-- <action name="select_orp_linked_to_project">-->
<!-- <spark xmlns="uri:oozie:spark-action:0.2">-->
<!-- <master>yarn</master>-->
<!-- <mode>cluster</mode>-->
<!-- <name>Dump funder results </name>-->
<!-- <class>eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkResultLinkedToProject</class>-->
<!-- <jar>dump-${projectVersion}.jar</jar>-->
<!-- <spark-opts>-->
<!-- &#45;&#45;executor-memory=${sparkExecutorMemory}-->
<!-- &#45;&#45;executor-cores=${sparkExecutorCores}-->
<!-- &#45;&#45;driver-memory=${sparkDriverMemory}-->
<!-- &#45;&#45;conf spark.extraListeners=${spark2ExtraListeners}-->
<!-- &#45;&#45;conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}-->
<!-- &#45;&#45;conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}-->
<!-- &#45;&#45;conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}-->
<!-- &#45;&#45;conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}-->
<!-- </spark-opts>-->
<!-- <arg>&#45;&#45;sourcePath</arg><arg>${outputPath}/original/otherresearchproduct</arg>-->
<!-- <arg>&#45;&#45;resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>-->
<!-- <arg>&#45;&#45;outputPath</arg><arg>${workingDir}/result/otherresearchproduct</arg>-->
<!-- <arg>&#45;&#45;graphPath</arg><arg>${workingDir}/preparedInfo</arg>-->
<!-- <arg>&#45;&#45;communityMapPath</arg><arg>${communityMapPath}</arg>-->
<!-- </spark>-->
<!-- <ok to="join_link"/>-->
<!-- <error to="Kill"/>-->
<!-- </action>-->
<!-- <action name="select_software_linked_to_projects">-->
<!-- <spark xmlns="uri:oozie:spark-action:0.2">-->
<!-- <master>yarn</master>-->
<!-- <mode>cluster</mode>-->
<!-- <name>Dump funder results </name>-->
<!-- <class>eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkResultLinkedToProject</class>-->
<!-- <jar>dump-${projectVersion}.jar</jar>-->
<!-- <spark-opts>-->
<!-- &#45;&#45;executor-memory=${sparkExecutorMemory}-->
<!-- &#45;&#45;executor-cores=${sparkExecutorCores}-->
<!-- &#45;&#45;driver-memory=${sparkDriverMemory}-->
<!-- &#45;&#45;conf spark.extraListeners=${spark2ExtraListeners}-->
<!-- &#45;&#45;conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}-->
<!-- &#45;&#45;conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}-->
<!-- &#45;&#45;conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}-->
<!-- &#45;&#45;conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}-->
<!-- </spark-opts>-->
<!-- <arg>&#45;&#45;sourcePath</arg><arg>${outputPath}/original/software</arg>-->
<!-- <arg>&#45;&#45;resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>-->
<!-- <arg>&#45;&#45;outputPath</arg><arg>${workingDir}/result/software</arg>-->
<!-- <arg>&#45;&#45;graphPath</arg><arg>${workingDir}/preparedInfo</arg>-->
<!-- <arg>&#45;&#45;communityMapPath</arg><arg>${communityMapPath}</arg>-->
<!-- </spark>-->
<!-- <ok to="join_link"/>-->
<!-- <error to="Kill"/>-->
<!-- </action>-->
<!-- <join name="join_link" to="dump_funder_results"/>-->
<!-- <action name="dump_funder_results">-->
<!-- <spark xmlns="uri:oozie:spark-action:0.2">-->
<!-- <master>yarn</master>-->
<!-- <mode>cluster</mode>-->
<!-- <name>Dump funder results </name>-->
<!-- <class>eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkDumpFunderResults</class>-->
<!-- <jar>dump-${projectVersion}.jar</jar>-->
<!-- <spark-opts>-->
<!-- &#45;&#45;executor-memory=${sparkExecutorMemory}-->
<!-- &#45;&#45;executor-cores=${sparkExecutorCores}-->
<!-- &#45;&#45;driver-memory=${sparkDriverMemory}-->
<!-- &#45;&#45;conf spark.extraListeners=${spark2ExtraListeners}-->
<!-- &#45;&#45;conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}-->
<!-- &#45;&#45;conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}-->
<!-- &#45;&#45;conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}-->
<!-- &#45;&#45;conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}-->
<!-- </spark-opts>-->
<!-- <arg>&#45;&#45;sourcePath</arg><arg>${workingDir}/result</arg>-->
<!-- <arg>&#45;&#45;outputPath</arg><arg>${outputPath}/dumpSubsetCommunityModel</arg>-->
<!-- </spark>-->
<!-- <ok to="End"/>-->
<!-- <error to="Kill"/>-->
<!-- </action>-->
<fork name="fork_dump_otherentities">
<path start="dump_organization"/>
<path start="dump_project"/>