renamed jar containing the bulktagging and propagation workflows from dhp-[bulktagging|propagation] to dhp-enrichment; adjusted xml formatting

Claudio Atzori 2020-05-12 15:49:28 +02:00
parent 1547ca7e15
commit ec0782e582
7 changed files with 144 additions and 190 deletions
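
The new artifact name implies that the former dhp-bulktag and dhp-propagation Maven modules now build as a single dhp-enrichment module. The module's pom.xml is not part of the hunks below, so what follows is only a minimal sketch of the merged module declaration; the groupId and parent coordinates are assumptions, while the artifactId is what Maven needs to produce the dhp-enrichment-${projectVersion}.jar referenced by every <jar> element in these workflows:

    <!-- hypothetical pom.xml fragment for the merged module; not shown in this commit -->
    <project xmlns="http://maven.apache.org/POM/4.0.0">
        <parent>
            <groupId>eu.dnetlib.dhp</groupId>        <!-- assumed groupId -->
            <artifactId>dhp-workflows</artifactId>   <!-- assumed parent module -->
            <version>${projectVersion}</version>     <!-- placeholder; the real pom declares a concrete version -->
        </parent>
        <artifactId>dhp-enrichment</artifactId>      <!-- Maven's default jar name: dhp-enrichment-<version>.jar -->
    </project>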

View File

@@ -101,7 +101,7 @@
 <mode>cluster</mode>
 <name>bulkTagging-publication</name>
 <class>eu.dnetlib.dhp.bulktag.SparkBulkTagJob</class>
-<jar>dhp-bulktag-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --num-executors=${sparkExecutorNumber}
 --executor-memory=${sparkExecutorMemory}
@@ -130,7 +130,7 @@
 <mode>cluster</mode>
 <name>bulkTagging-dataset</name>
 <class>eu.dnetlib.dhp.bulktag.SparkBulkTagJob</class>
-<jar>dhp-bulktag-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --num-executors=${sparkExecutorNumber}
 --executor-memory=${sparkExecutorMemory}
@@ -159,7 +159,7 @@
 <mode>cluster</mode>
 <name>bulkTagging-orp</name>
 <class>eu.dnetlib.dhp.bulktag.SparkBulkTagJob</class>
-<jar>dhp-bulktag-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --num-executors=${sparkExecutorNumber}
 --executor-memory=${sparkExecutorMemory}
@@ -188,7 +188,7 @@
 <mode>cluster</mode>
 <name>bulkTagging-software</name>
 <class>eu.dnetlib.dhp.bulktag.SparkBulkTagJob</class>
-<jar>dhp-bulktag-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --num-executors=${sparkExecutorNumber}
 --executor-memory=${sparkExecutorMemory}

View File

@@ -92,7 +92,7 @@
 <mode>cluster</mode>
 <name>PrepareDatasourceCountryAssociation</name>
 <class>eu.dnetlib.dhp.countrypropagation.PrepareDatasourceCountryAssociation</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -126,7 +126,7 @@
 <mode>cluster</mode>
 <name>prepareResultCountry-Publication</name>
 <class>eu.dnetlib.dhp.countrypropagation.PrepareResultCountrySet</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -156,7 +156,7 @@
 <mode>cluster</mode>
 <name>prepareResultCountry-Dataset</name>
 <class>eu.dnetlib.dhp.countrypropagation.PrepareResultCountrySet</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -186,7 +186,7 @@
 <mode>cluster</mode>
 <name>prepareResultCountry-ORP</name>
 <class>eu.dnetlib.dhp.countrypropagation.PrepareResultCountrySet</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -216,7 +216,7 @@
 <mode>cluster</mode>
 <name>prepareResultCountry-Software</name>
 <class>eu.dnetlib.dhp.countrypropagation.PrepareResultCountrySet</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -255,7 +255,7 @@
 <mode>cluster</mode>
 <name>countryPropagationForPublications</name>
 <class>eu.dnetlib.dhp.countrypropagation.SparkCountryPropagationJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -285,7 +285,7 @@
 <mode>cluster</mode>
 <name>countryPropagationForDataset</name>
 <class>eu.dnetlib.dhp.countrypropagation.SparkCountryPropagationJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -315,7 +315,7 @@
 <mode>cluster</mode>
 <name>countryPropagationForORP</name>
 <class>eu.dnetlib.dhp.countrypropagation.SparkCountryPropagationJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -345,7 +345,7 @@
 <mode>cluster</mode>
 <name>countryPropagationForSoftware</name>
 <class>eu.dnetlib.dhp.countrypropagation.SparkCountryPropagationJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}

View File

@@ -95,7 +95,7 @@
 <mode>cluster</mode>
 <name>ORCIDPropagation-PreparePhase1-Publications</name>
 <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep1</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -111,16 +111,11 @@
 --conf spark.hadoop.mapreduce.map.speculative=false
 --conf spark.hadoop.mapreduce.reduce.speculative=false
 </spark-opts>
-<arg>--sourcePath</arg>
-<arg>${sourcePath}</arg>
-<arg>--hive_metastore_uris</arg>
-<arg>${hive_metastore_uris}</arg>
-<arg>--resultTableName</arg>
-<arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
-<arg>--outputPath</arg>
-<arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-<arg>--allowedsemrels</arg>
-<arg>${allowedsemrels}</arg>
+<arg>--sourcePath</arg><arg>${sourcePath}</arg>
+<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
+<arg>--outputPath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+<arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
 </spark>
 <ok to="wait"/>
 <error to="Kill"/>
@@ -132,7 +127,7 @@
 <mode>cluster</mode>
 <name>ORCIDPropagation-PreparePhase1-Dataset</name>
 <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep1</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -144,16 +139,11 @@
 --conf spark.dynamicAllocation.enabled=true
 --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
 </spark-opts>
-<arg>--sourcePath</arg>
-<arg>${sourcePath}</arg>
-<arg>--hive_metastore_uris</arg>
-<arg>${hive_metastore_uris}</arg>
-<arg>--resultTableName</arg>
-<arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
-<arg>--outputPath</arg>
-<arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-<arg>--allowedsemrels</arg>
-<arg>${allowedsemrels}</arg>
+<arg>--sourcePath</arg><arg>${sourcePath}</arg>
+<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
+<arg>--outputPath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+<arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
 </spark>
 <ok to="wait"/>
 <error to="Kill"/>
@@ -165,7 +155,7 @@
 <mode>cluster</mode>
 <name>ORCIDPropagation-PreparePhase1-ORP</name>
 <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep1</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -177,16 +167,11 @@
 --conf spark.dynamicAllocation.enabled=true
 --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
 </spark-opts>
-<arg>--sourcePath</arg>
-<arg>${sourcePath}</arg>
-<arg>--hive_metastore_uris</arg>
-<arg>${hive_metastore_uris}</arg>
-<arg>--resultTableName</arg>
-<arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
-<arg>--outputPath</arg>
-<arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-<arg>--allowedsemrels</arg>
-<arg>${allowedsemrels}</arg>
+<arg>--sourcePath</arg><arg>${sourcePath}</arg>
+<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
+<arg>--outputPath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+<arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
 </spark>
 <ok to="wait"/>
 <error to="Kill"/>
@@ -198,7 +183,7 @@
 <mode>cluster</mode>
 <name>ORCIDPropagation-PreparePhase1-Software</name>
 <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep1</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -210,16 +195,11 @@
 --conf spark.dynamicAllocation.enabled=true
 --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
 </spark-opts>
-<arg>--sourcePath</arg>
-<arg>${sourcePath}</arg>
-<arg>--hive_metastore_uris</arg>
-<arg>${hive_metastore_uris}</arg>
-<arg>--resultTableName</arg>
-<arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
-<arg>--outputPath</arg>
-<arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-<arg>--allowedsemrels</arg>
-<arg>${allowedsemrels}</arg>
+<arg>--sourcePath</arg><arg>${sourcePath}</arg>
+<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
+<arg>--outputPath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+<arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
 </spark>
 <ok to="wait"/>
 <error to="Kill"/>
@@ -233,7 +213,7 @@
 <mode>cluster</mode>
 <name>ORCIDPropagation-PreparePhase2</name>
 <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep2</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -245,13 +225,10 @@
 --conf spark.dynamicAllocation.enabled=true
 --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
 </spark-opts>
-<arg>--sourcePath</arg>
-<arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-<arg>--outputPath</arg>
-<arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+<arg>--sourcePath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+<arg>--outputPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
 </spark>
 <ok to="fork-join-exec-propagation"/>
-<!-- <ok to="End"/>-->
 <error to="Kill"/>
 </action>
@@ -268,7 +245,7 @@
 <mode>cluster</mode>
 <name>ORCIDPropagation-Publication</name>
 <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.SparkOrcidToResultFromSemRelJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -284,18 +261,12 @@
 --conf spark.hadoop.mapreduce.reduce.speculative=false
 --conf spark.sql.shuffle.partitions=3840
 </spark-opts>
-<arg>--possibleUpdatesPath</arg>
-<arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
-<arg>--sourcePath</arg>
-<arg>${sourcePath}/publication</arg>
-<arg>--hive_metastore_uris</arg>
-<arg>${hive_metastore_uris}</arg>
-<arg>--resultTableName</arg>
-<arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
-<arg>--outputPath</arg>
-<arg>${outputPath}/publication</arg>
-<arg>--saveGraph</arg>
-<arg>${saveGraph}</arg>
+<arg>--possibleUpdatesPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+<arg>--sourcePath</arg><arg>${sourcePath}/publication</arg>
+<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
+<arg>--outputPath</arg><arg>${outputPath}/publication</arg>
+<arg>--saveGraph</arg><arg>${saveGraph}</arg>
 </spark>
 <ok to="wait2"/>
 <error to="Kill"/>
@@ -306,7 +277,7 @@
 <mode>cluster</mode>
 <name>ORCIDPropagation-Dataset</name>
 <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.SparkOrcidToResultFromSemRelJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -321,18 +292,12 @@
 --conf spark.hadoop.mapreduce.map.speculative=false
 --conf spark.hadoop.mapreduce.reduce.speculative=false
 </spark-opts>
-<arg>--possibleUpdatesPath</arg>
-<arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
-<arg>--sourcePath</arg>
-<arg>${sourcePath}/dataset</arg>
-<arg>--hive_metastore_uris</arg>
-<arg>${hive_metastore_uris}</arg>
-<arg>--resultTableName</arg>
-<arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
-<arg>--outputPath</arg>
-<arg>${outputPath}/dataset</arg>
-<arg>--saveGraph</arg>
-<arg>${saveGraph}</arg>
+<arg>--possibleUpdatesPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+<arg>--sourcePath</arg><arg>${sourcePath}/dataset</arg>
+<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
+<arg>--outputPath</arg><arg>${outputPath}/dataset</arg>
+<arg>--saveGraph</arg><arg>${saveGraph}</arg>
 </spark>
 <ok to="wait2"/>
 <error to="Kill"/>
@@ -343,7 +308,7 @@
 <mode>cluster</mode>
 <name>ORCIDPropagation-ORP</name>
 <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.SparkOrcidToResultFromSemRelJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -358,18 +323,12 @@
 --conf spark.hadoop.mapreduce.map.speculative=false
 --conf spark.hadoop.mapreduce.reduce.speculative=false
 </spark-opts>
-<arg>--possibleUpdatesPath</arg>
-<arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
-<arg>--sourcePath</arg>
-<arg>${sourcePath}/otherresearchproduct</arg>
-<arg>--hive_metastore_uris</arg>
-<arg>${hive_metastore_uris}</arg>
-<arg>--resultTableName</arg>
-<arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
-<arg>--outputPath</arg>
-<arg>${outputPath}/otherresearchproduct</arg>
-<arg>--saveGraph</arg>
-<arg>${saveGraph}</arg>
+<arg>--possibleUpdatesPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+<arg>--sourcePath</arg><arg>${sourcePath}/otherresearchproduct</arg>
+<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
+<arg>--outputPath</arg><arg>${outputPath}/otherresearchproduct</arg>
+<arg>--saveGraph</arg><arg>${saveGraph}</arg>
 </spark>
 <ok to="wait2"/>
 <error to="Kill"/>
@@ -380,7 +339,7 @@
 <mode>cluster</mode>
 <name>ORCIDPropagation-Software</name>
 <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.SparkOrcidToResultFromSemRelJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -395,22 +354,19 @@
 --conf spark.hadoop.mapreduce.map.speculative=false
 --conf spark.hadoop.mapreduce.reduce.speculative=false
 </spark-opts>
-<arg>--possibleUpdatesPath</arg>
-<arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
-<arg>--sourcePath</arg>
-<arg>${sourcePath}/software</arg>
-<arg>--hive_metastore_uris</arg>
-<arg>${hive_metastore_uris}</arg>
-<arg>--resultTableName</arg>
-<arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
-<arg>--outputPath</arg>
-<arg>${outputPath}/software</arg>
-<arg>--saveGraph</arg>
-<arg>${saveGraph}</arg>
+<arg>--possibleUpdatesPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+<arg>--sourcePath</arg><arg>${sourcePath}/software</arg>
+<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
+<arg>--outputPath</arg><arg>${outputPath}/software</arg>
+<arg>--saveGraph</arg><arg>${saveGraph}</arg>
 </spark>
 <ok to="wait2"/>
 <error to="Kill"/>
 </action>
+
 <join name="wait2" to="End"/>
+
 <end name="End"/>
+
 </workflow-app>

View File

@@ -128,60 +128,59 @@
 <join name="wait" to="prepare_project_results_association"/>
+<action name="prepare_project_results_association">
+<spark xmlns="uri:oozie:spark-action:0.2">
+<master>yarn</master>
+<mode>cluster</mode>
+<name>PrepareProjectResultsAssociation</name>
+<class>eu.dnetlib.dhp.projecttoresult.PrepareProjectResultsAssociation</class>
+<jar>dhp-propagation-${projectVersion}.jar</jar>
+<spark-opts>
+--executor-cores=${sparkExecutorCores}
+--executor-memory=${sparkExecutorMemory}
+--driver-memory=${sparkDriverMemory}
+--conf spark.extraListeners=${spark2ExtraListeners}
+--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+</spark-opts>
+<arg>--sourcePath</arg><arg>${sourcePath}/relation</arg>
+<arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
+<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+<arg>--potentialUpdatePath</arg><arg>${workingDir}/preparedInfo/potentialUpdates</arg>
+<arg>--alreadyLinkedPath</arg><arg>${workingDir}/preparedInfo/alreadyLinked</arg>
+</spark>
+<ok to="apply_propagation"/>
+<error to="Kill"/>
+</action>
-<action name="prepare_project_results_association">
-<spark xmlns="uri:oozie:spark-action:0.2">
-<master>yarn</master>
-<mode>cluster</mode>
-<name>PrepareProjectResultsAssociation</name>
-<class>eu.dnetlib.dhp.projecttoresult.PrepareProjectResultsAssociation</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
-<spark-opts>
---executor-cores=${sparkExecutorCores}
---executor-memory=${sparkExecutorMemory}
---driver-memory=${sparkDriverMemory}
---conf spark.extraListeners=${spark2ExtraListeners}
---conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
---conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
---conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
-</spark-opts>
-<arg>--sourcePath</arg><arg>${sourcePath}/relation</arg>
-<arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
-<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
-<arg>--potentialUpdatePath</arg><arg>${workingDir}/preparedInfo/potentialUpdates</arg>
-<arg>--alreadyLinkedPath</arg><arg>${workingDir}/preparedInfo/alreadyLinked</arg>
-</spark>
-<ok to="apply_propagation"/>
-<error to="Kill"/>
-</action>
-<action name="apply_propagation">
-<spark xmlns="uri:oozie:spark-action:0.2">
-<master>yarn</master>
-<mode>cluster</mode>
-<name>ProjectToResultPropagation</name>
-<class>eu.dnetlib.dhp.projecttoresult.SparkResultToProjectThroughSemRelJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
-<spark-opts>
---executor-cores=${sparkExecutorCores}
---executor-memory=${sparkExecutorMemory}
---driver-memory=${sparkDriverMemory}
---conf spark.extraListeners=${spark2ExtraListeners}
---conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
---conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
---conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
---conf spark.dynamicAllocation.enabled=true
---conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
-</spark-opts>
-<arg>--saveGraph</arg><arg>${saveGraph}</arg>
-<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
-<arg>--outputPath</arg><arg>${outputPath}/relation</arg>
-<arg>--potentialUpdatePath</arg><arg>${workingDir}/preparedInfo/potentialUpdates</arg>
-<arg>--alreadyLinkedPath</arg><arg>${workingDir}/preparedInfo/alreadyLinked</arg>
-</spark>
-<ok to="End"/>
-<error to="Kill"/>
-</action>
+<action name="apply_propagation">
+<spark xmlns="uri:oozie:spark-action:0.2">
+<master>yarn</master>
+<mode>cluster</mode>
+<name>ProjectToResultPropagation</name>
+<class>eu.dnetlib.dhp.projecttoresult.SparkResultToProjectThroughSemRelJob</class>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
+<spark-opts>
+--executor-cores=${sparkExecutorCores}
+--executor-memory=${sparkExecutorMemory}
+--driver-memory=${sparkDriverMemory}
+--conf spark.extraListeners=${spark2ExtraListeners}
+--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+--conf spark.dynamicAllocation.enabled=true
+--conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
+</spark-opts>
+<arg>--saveGraph</arg><arg>${saveGraph}</arg>
+<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+<arg>--outputPath</arg><arg>${outputPath}/relation</arg>
+<arg>--potentialUpdatePath</arg><arg>${workingDir}/preparedInfo/potentialUpdates</arg>
+<arg>--alreadyLinkedPath</arg><arg>${workingDir}/preparedInfo/alreadyLinked</arg>
+</spark>
+<ok to="End"/>
+<error to="Kill"/>
+</action>
 <end name="End"/>

View File

@@ -88,7 +88,7 @@
 <mode>cluster</mode>
 <name>Prepare-Community-Result-Organization</name>
 <class>eu.dnetlib.dhp.resulttocommunityfromorganization.PrepareResultCommunitySet</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -122,7 +122,7 @@
 <mode>cluster</mode>
 <name>community2resultfromorganization-Publication</name>
 <class>eu.dnetlib.dhp.resulttocommunityfromorganization.SparkResultToCommunityFromOrganizationJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -151,7 +151,7 @@
 <mode>cluster</mode>
 <name>community2resultfromorganization-Dataset</name>
 <class>eu.dnetlib.dhp.resulttocommunityfromorganization.SparkResultToCommunityFromOrganizationJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -180,7 +180,7 @@
 <mode>cluster</mode>
 <name>community2resultfromorganization-ORP</name>
 <class>eu.dnetlib.dhp.resulttocommunityfromorganization.SparkResultToCommunityFromOrganizationJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -209,7 +209,7 @@
 <mode>cluster</mode>
 <name>community2resultfromorganization-Software</name>
 <class>eu.dnetlib.dhp.resulttocommunityfromorganization.SparkResultToCommunityFromOrganizationJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}

View File

@@ -99,7 +99,7 @@
 <mode>cluster</mode>
 <name>ResultToCommunitySemRel-PreparePhase1-Publications</name>
 <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.PrepareResultCommunitySetStep1</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -128,7 +128,7 @@
 <mode>cluster</mode>
 <name>ResultToCommunitySemRel-PreparePhase1-Dataset</name>
 <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.PrepareResultCommunitySetStep1</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -150,13 +150,14 @@
 <ok to="wait"/>
 <error to="Kill"/>
 </action>
+
 <action name="join_prepare_otherresearchproduct">
 <spark xmlns="uri:oozie:spark-action:0.2">
 <master>yarn</master>
 <mode>cluster</mode>
 <name>ResultToCommunitySemRel-PreparePhase1-ORP</name>
 <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.PrepareResultCommunitySetStep1</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -185,7 +186,7 @@
 <mode>cluster</mode>
 <name>ResultToCommunitySemRel-PreparePhase1-Software</name>
 <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.PrepareResultCommunitySetStep1</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -216,7 +217,7 @@
 <mode>cluster</mode>
 <name>ResultToCommunityEmRelPropagation-PreparePhase2</name>
 <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.PrepareResultCommunitySetStep2</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -232,9 +233,7 @@
 <arg>--outputPath</arg><arg>${workingDir}/preparedInfo/mergedCommunityAssoc</arg>
 </spark>
 <ok to="fork-join-exec-propagation"/>
-
-<!-- <ok to="End"/>-->
 <error to="Kill"/>
 </action>
 <fork name="fork-join-exec-propagation">
@@ -250,7 +249,7 @@
 <mode>cluster</mode>
 <name>Result2CommunitySemRelPropagation-Publication</name>
 <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.SparkResultToCommunityThroughSemRelJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -279,7 +278,7 @@
 <mode>cluster</mode>
 <name>Result2CommunitySemRelPropagation-Dataset</name>
 <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.SparkResultToCommunityThroughSemRelJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -308,7 +307,7 @@
 <mode>cluster</mode>
 <name>Result2CommunitySemRelPropagation-ORP</name>
 <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.SparkResultToCommunityThroughSemRelJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -337,7 +336,7 @@
 <mode>cluster</mode>
 <name>Result2CommunitySemRelPropagation-Software</name>
 <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.SparkResultToCommunityThroughSemRelJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}

View File

@@ -131,7 +131,7 @@
 <mode>cluster</mode>
 <name>PrepareResultOrganizationAssociation</name>
 <class>eu.dnetlib.dhp.resulttoorganizationfrominstrepo.PrepareResultInstRepoAssociation</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -163,7 +163,7 @@
 <mode>cluster</mode>
 <name>resultToOrganizationFromInstRepoPropagationForPublications</name>
 <class>eu.dnetlib.dhp.resulttoorganizationfrominstrepo.SparkResultToOrganizationFromIstRepoJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -193,7 +193,7 @@
 <mode>cluster</mode>
 <name>resultToOrganizationFromInstRepoPropagationForDataset</name>
 <class>eu.dnetlib.dhp.resulttoorganizationfrominstrepo.SparkResultToOrganizationFromIstRepoJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -223,7 +223,7 @@
 <mode>cluster</mode>
 <name>resultToOrganizationFromInstRepoPropagationForORP</name>
 <class>eu.dnetlib.dhp.resulttoorganizationfrominstrepo.SparkResultToOrganizationFromIstRepoJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}
@@ -253,7 +253,7 @@
 <mode>cluster</mode>
 <name>resultToOrganizationFromInstRepoPropagationForSoftware</name>
 <class>eu.dnetlib.dhp.resulttoorganizationfrominstrepo.SparkResultToOrganizationFromIstRepoJob</class>
-<jar>dhp-propagation-${projectVersion}.jar</jar>
+<jar>dhp-enrichment-${projectVersion}.jar</jar>
 <spark-opts>
 --executor-cores=${sparkExecutorCores}
 --executor-memory=${sparkExecutorMemory}