diff --git a/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/bulktag/oozie_app/workflow.xml b/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/bulktag/oozie_app/workflow.xml
index 524281bc9..754aba4f2 100644
--- a/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/bulktag/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/bulktag/oozie_app/workflow.xml
@@ -101,7 +101,7 @@
cluster
bulkTagging-publication
eu.dnetlib.dhp.bulktag.SparkBulkTagJob
- dhp-bulktag-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--num-executors=${sparkExecutorNumber}
--executor-memory=${sparkExecutorMemory}
@@ -130,7 +130,7 @@
cluster
bulkTagging-dataset
eu.dnetlib.dhp.bulktag.SparkBulkTagJob
- dhp-bulktag-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--num-executors=${sparkExecutorNumber}
--executor-memory=${sparkExecutorMemory}
@@ -159,7 +159,7 @@
cluster
bulkTagging-orp
eu.dnetlib.dhp.bulktag.SparkBulkTagJob
- dhp-bulktag-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--num-executors=${sparkExecutorNumber}
--executor-memory=${sparkExecutorMemory}
@@ -188,7 +188,7 @@
cluster
bulkTagging-software
eu.dnetlib.dhp.bulktag.SparkBulkTagJob
- dhp-bulktag-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--num-executors=${sparkExecutorNumber}
--executor-memory=${sparkExecutorMemory}
diff --git a/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/countrypropagation/oozie_app/workflow.xml b/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/countrypropagation/oozie_app/workflow.xml
index f269c5442..fc877071d 100644
--- a/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/countrypropagation/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/countrypropagation/oozie_app/workflow.xml
@@ -92,7 +92,7 @@
cluster
PrepareDatasourceCountryAssociation
eu.dnetlib.dhp.countrypropagation.PrepareDatasourceCountryAssociation
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -126,7 +126,7 @@
cluster
prepareResultCountry-Publication
eu.dnetlib.dhp.countrypropagation.PrepareResultCountrySet
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -156,7 +156,7 @@
cluster
prepareResultCountry-Dataset
eu.dnetlib.dhp.countrypropagation.PrepareResultCountrySet
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -186,7 +186,7 @@
cluster
prepareResultCountry-ORP
eu.dnetlib.dhp.countrypropagation.PrepareResultCountrySet
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -216,7 +216,7 @@
cluster
prepareResultCountry-Software
eu.dnetlib.dhp.countrypropagation.PrepareResultCountrySet
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -255,7 +255,7 @@
cluster
countryPropagationForPublications
eu.dnetlib.dhp.countrypropagation.SparkCountryPropagationJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -285,7 +285,7 @@
cluster
countryPropagationForDataset
eu.dnetlib.dhp.countrypropagation.SparkCountryPropagationJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -315,7 +315,7 @@
cluster
countryPropagationForORP
eu.dnetlib.dhp.countrypropagation.SparkCountryPropagationJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -345,7 +345,7 @@
cluster
countryPropagationForSoftware
eu.dnetlib.dhp.countrypropagation.SparkCountryPropagationJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
diff --git a/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/orcidtoresultfromsemrel/oozie_app/workflow.xml b/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/orcidtoresultfromsemrel/oozie_app/workflow.xml
index 7b06b6504..e4429b710 100644
--- a/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/orcidtoresultfromsemrel/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/orcidtoresultfromsemrel/oozie_app/workflow.xml
@@ -95,7 +95,7 @@
cluster
ORCIDPropagation-PreparePhase1-Publications
eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep1
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -111,16 +111,11 @@
--conf spark.hadoop.mapreduce.map.speculative=false
--conf spark.hadoop.mapreduce.reduce.speculative=false
- --sourcePath
- ${sourcePath}
- --hive_metastore_uris
- ${hive_metastore_uris}
- --resultTableName
- eu.dnetlib.dhp.schema.oaf.Publication
- --outputPath
- ${workingDir}/preparedInfo/targetOrcidAssoc
- --allowedsemrels
- ${allowedsemrels}
+ --sourcePath${sourcePath}
+ --hive_metastore_uris${hive_metastore_uris}
+ --resultTableNameeu.dnetlib.dhp.schema.oaf.Publication
+ --outputPath${workingDir}/preparedInfo/targetOrcidAssoc
+ --allowedsemrels${allowedsemrels}
@@ -132,7 +127,7 @@
cluster
ORCIDPropagation-PreparePhase1-Dataset
eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep1
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -144,16 +139,11 @@
--conf spark.dynamicAllocation.enabled=true
--conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
- --sourcePath
- ${sourcePath}
- --hive_metastore_uris
- ${hive_metastore_uris}
- --resultTableName
- eu.dnetlib.dhp.schema.oaf.Dataset
- --outputPath
- ${workingDir}/preparedInfo/targetOrcidAssoc
- --allowedsemrels
- ${allowedsemrels}
+ --sourcePath${sourcePath}
+ --hive_metastore_uris${hive_metastore_uris}
+ --resultTableNameeu.dnetlib.dhp.schema.oaf.Dataset
+ --outputPath${workingDir}/preparedInfo/targetOrcidAssoc
+ --allowedsemrels${allowedsemrels}
@@ -165,7 +155,7 @@
cluster
ORCIDPropagation-PreparePhase1-ORP
eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep1
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -177,16 +167,11 @@
--conf spark.dynamicAllocation.enabled=true
--conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
- --sourcePath
- ${sourcePath}
- --hive_metastore_uris
- ${hive_metastore_uris}
- --resultTableName
- eu.dnetlib.dhp.schema.oaf.OtherResearchProduct
- --outputPath
- ${workingDir}/preparedInfo/targetOrcidAssoc
- --allowedsemrels
- ${allowedsemrels}
+ --sourcePath${sourcePath}
+ --hive_metastore_uris${hive_metastore_uris}
+ --resultTableNameeu.dnetlib.dhp.schema.oaf.OtherResearchProduct
+ --outputPath${workingDir}/preparedInfo/targetOrcidAssoc
+ --allowedsemrels${allowedsemrels}
@@ -198,7 +183,7 @@
cluster
ORCIDPropagation-PreparePhase1-Software
eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep1
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -210,16 +195,11 @@
--conf spark.dynamicAllocation.enabled=true
--conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
- --sourcePath
- ${sourcePath}
- --hive_metastore_uris
- ${hive_metastore_uris}
- --resultTableName
- eu.dnetlib.dhp.schema.oaf.Software
- --outputPath
- ${workingDir}/preparedInfo/targetOrcidAssoc
- --allowedsemrels
- ${allowedsemrels}
+ --sourcePath${sourcePath}
+ --hive_metastore_uris${hive_metastore_uris}
+ --resultTableNameeu.dnetlib.dhp.schema.oaf.Software
+ --outputPath${workingDir}/preparedInfo/targetOrcidAssoc
+ --allowedsemrels${allowedsemrels}
@@ -233,7 +213,7 @@
cluster
ORCIDPropagation-PreparePhase2
eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep2
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -245,13 +225,10 @@
--conf spark.dynamicAllocation.enabled=true
--conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
- --sourcePath
- ${workingDir}/preparedInfo/targetOrcidAssoc
- --outputPath
- ${workingDir}/preparedInfo/mergedOrcidAssoc
+ --sourcePath${workingDir}/preparedInfo/targetOrcidAssoc
+ --outputPath${workingDir}/preparedInfo/mergedOrcidAssoc
-
@@ -268,7 +245,7 @@
cluster
ORCIDPropagation-Publication
eu.dnetlib.dhp.orcidtoresultfromsemrel.SparkOrcidToResultFromSemRelJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -284,18 +261,12 @@
--conf spark.hadoop.mapreduce.reduce.speculative=false
--conf spark.sql.shuffle.partitions=3840
- --possibleUpdatesPath
- ${workingDir}/preparedInfo/mergedOrcidAssoc
- --sourcePath
- ${sourcePath}/publication
- --hive_metastore_uris
- ${hive_metastore_uris}
- --resultTableName
- eu.dnetlib.dhp.schema.oaf.Publication
- --outputPath
- ${outputPath}/publication
- --saveGraph
- ${saveGraph}
+ --possibleUpdatesPath${workingDir}/preparedInfo/mergedOrcidAssoc
+ --sourcePath${sourcePath}/publication
+ --hive_metastore_uris${hive_metastore_uris}
+ --resultTableNameeu.dnetlib.dhp.schema.oaf.Publication
+ --outputPath${outputPath}/publication
+ --saveGraph${saveGraph}
@@ -306,7 +277,7 @@
cluster
ORCIDPropagation-Dataset
eu.dnetlib.dhp.orcidtoresultfromsemrel.SparkOrcidToResultFromSemRelJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -321,18 +292,12 @@
--conf spark.hadoop.mapreduce.map.speculative=false
--conf spark.hadoop.mapreduce.reduce.speculative=false
- --possibleUpdatesPath
- ${workingDir}/preparedInfo/mergedOrcidAssoc
- --sourcePath
- ${sourcePath}/dataset
- --hive_metastore_uris
- ${hive_metastore_uris}
- --resultTableName
- eu.dnetlib.dhp.schema.oaf.Dataset
- --outputPath
- ${outputPath}/dataset
- --saveGraph
- ${saveGraph}
+ --possibleUpdatesPath${workingDir}/preparedInfo/mergedOrcidAssoc
+ --sourcePath${sourcePath}/dataset
+ --hive_metastore_uris${hive_metastore_uris}
+ --resultTableNameeu.dnetlib.dhp.schema.oaf.Dataset
+ --outputPath${outputPath}/dataset
+ --saveGraph${saveGraph}
@@ -343,7 +308,7 @@
cluster
ORCIDPropagation-ORP
eu.dnetlib.dhp.orcidtoresultfromsemrel.SparkOrcidToResultFromSemRelJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -358,18 +323,12 @@
--conf spark.hadoop.mapreduce.map.speculative=false
--conf spark.hadoop.mapreduce.reduce.speculative=false
- --possibleUpdatesPath
- ${workingDir}/preparedInfo/mergedOrcidAssoc
- --sourcePath
- ${sourcePath}/otherresearchproduct
- --hive_metastore_uris
- ${hive_metastore_uris}
- --resultTableName
- eu.dnetlib.dhp.schema.oaf.OtherResearchProduct
- --outputPath
- ${outputPath}/otherresearchproduct
- --saveGraph
- ${saveGraph}
+ --possibleUpdatesPath${workingDir}/preparedInfo/mergedOrcidAssoc
+ --sourcePath${sourcePath}/otherresearchproduct
+ --hive_metastore_uris${hive_metastore_uris}
+ --resultTableNameeu.dnetlib.dhp.schema.oaf.OtherResearchProduct
+ --outputPath${outputPath}/otherresearchproduct
+ --saveGraph${saveGraph}
@@ -380,7 +339,7 @@
cluster
ORCIDPropagation-Software
eu.dnetlib.dhp.orcidtoresultfromsemrel.SparkOrcidToResultFromSemRelJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -395,22 +354,19 @@
--conf spark.hadoop.mapreduce.map.speculative=false
--conf spark.hadoop.mapreduce.reduce.speculative=false
- --possibleUpdatesPath
- ${workingDir}/preparedInfo/mergedOrcidAssoc
- --sourcePath
- ${sourcePath}/software
- --hive_metastore_uris
- ${hive_metastore_uris}
- --resultTableName
- eu.dnetlib.dhp.schema.oaf.Software
- --outputPath
- ${outputPath}/software
- --saveGraph
- ${saveGraph}
+ --possibleUpdatesPath${workingDir}/preparedInfo/mergedOrcidAssoc
+ --sourcePath${sourcePath}/software
+ --hive_metastore_uris${hive_metastore_uris}
+ --resultTableNameeu.dnetlib.dhp.schema.oaf.Software
+ --outputPath${outputPath}/software
+ --saveGraph${saveGraph}
+
+
+
\ No newline at end of file
diff --git a/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/projecttoresult/oozie_app/workflow.xml b/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/projecttoresult/oozie_app/workflow.xml
index dd7f25846..687d66869 100644
--- a/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/projecttoresult/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/projecttoresult/oozie_app/workflow.xml
@@ -127,61 +127,60 @@
+
+
+
+ yarn
+ cluster
+ PrepareProjectResultsAssociation
+ eu.dnetlib.dhp.projecttoresult.PrepareProjectResultsAssociation
+ dhp-enrichment-${projectVersion}.jar
+
+ --executor-cores=${sparkExecutorCores}
+ --executor-memory=${sparkExecutorMemory}
+ --driver-memory=${sparkDriverMemory}
+ --conf spark.extraListeners=${spark2ExtraListeners}
+ --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+ --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+ --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+
+ --sourcePath${sourcePath}/relation
+ --allowedsemrels${allowedsemrels}
+ --hive_metastore_uris${hive_metastore_uris}
+ --potentialUpdatePath${workingDir}/preparedInfo/potentialUpdates
+ --alreadyLinkedPath${workingDir}/preparedInfo/alreadyLinked
+
+
+
+
-
-
-
- yarn
- cluster
- PrepareProjectResultsAssociation
- eu.dnetlib.dhp.projecttoresult.PrepareProjectResultsAssociation
- dhp-propagation-${projectVersion}.jar
-
- --executor-cores=${sparkExecutorCores}
- --executor-memory=${sparkExecutorMemory}
- --driver-memory=${sparkDriverMemory}
- --conf spark.extraListeners=${spark2ExtraListeners}
- --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
- --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
- --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
-
- --sourcePath${sourcePath}/relation
- --allowedsemrels${allowedsemrels}
- --hive_metastore_uris${hive_metastore_uris}
- --potentialUpdatePath${workingDir}/preparedInfo/potentialUpdates
- --alreadyLinkedPath${workingDir}/preparedInfo/alreadyLinked
-
-
-
-
-
-
-
- yarn
- cluster
- ProjectToResultPropagation
- eu.dnetlib.dhp.projecttoresult.SparkResultToProjectThroughSemRelJob
- dhp-propagation-${projectVersion}.jar
-
- --executor-cores=${sparkExecutorCores}
- --executor-memory=${sparkExecutorMemory}
- --driver-memory=${sparkDriverMemory}
- --conf spark.extraListeners=${spark2ExtraListeners}
- --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
- --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
- --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
- --conf spark.dynamicAllocation.enabled=true
- --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
-
- --saveGraph${saveGraph}
- --hive_metastore_uris${hive_metastore_uris}
- --outputPath${outputPath}/relation
- --potentialUpdatePath${workingDir}/preparedInfo/potentialUpdates
- --alreadyLinkedPath${workingDir}/preparedInfo/alreadyLinked
-
-
-
-
+
+
+ yarn
+ cluster
+ ProjectToResultPropagation
+ eu.dnetlib.dhp.projecttoresult.SparkResultToProjectThroughSemRelJob
+ dhp-enrichment-${projectVersion}.jar
+
+ --executor-cores=${sparkExecutorCores}
+ --executor-memory=${sparkExecutorMemory}
+ --driver-memory=${sparkDriverMemory}
+ --conf spark.extraListeners=${spark2ExtraListeners}
+ --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+ --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+ --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+ --conf spark.dynamicAllocation.enabled=true
+ --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
+
+ --saveGraph${saveGraph}
+ --hive_metastore_uris${hive_metastore_uris}
+ --outputPath${outputPath}/relation
+ --potentialUpdatePath${workingDir}/preparedInfo/potentialUpdates
+ --alreadyLinkedPath${workingDir}/preparedInfo/alreadyLinked
+
+
+
+
diff --git a/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttocommunityfromorganization/oozie_app/workflow.xml b/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttocommunityfromorganization/oozie_app/workflow.xml
index 3be69bde6..d481cad05 100644
--- a/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttocommunityfromorganization/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttocommunityfromorganization/oozie_app/workflow.xml
@@ -88,7 +88,7 @@
cluster
Prepare-Community-Result-Organization
eu.dnetlib.dhp.resulttocommunityfromorganization.PrepareResultCommunitySet
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -122,7 +122,7 @@
cluster
community2resultfromorganization-Publication
eu.dnetlib.dhp.resulttocommunityfromorganization.SparkResultToCommunityFromOrganizationJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -151,7 +151,7 @@
cluster
community2resultfromorganization-Dataset
eu.dnetlib.dhp.resulttocommunityfromorganization.SparkResultToCommunityFromOrganizationJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -180,7 +180,7 @@
cluster
community2resultfromorganization-ORP
eu.dnetlib.dhp.resulttocommunityfromorganization.SparkResultToCommunityFromOrganizationJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -209,7 +209,7 @@
cluster
community2resultfromorganization-Software
eu.dnetlib.dhp.resulttocommunityfromorganization.SparkResultToCommunityFromOrganizationJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
diff --git a/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttocommunityfromsemrel/oozie_app/workflow.xml b/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttocommunityfromsemrel/oozie_app/workflow.xml
index b75b2d31e..81b51443c 100644
--- a/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttocommunityfromsemrel/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttocommunityfromsemrel/oozie_app/workflow.xml
@@ -99,7 +99,7 @@
cluster
ResultToCommunitySemRel-PreparePhase1-Publications
eu.dnetlib.dhp.resulttocommunityfromsemrel.PrepareResultCommunitySetStep1
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -128,7 +128,7 @@
cluster
ResultToCommunitySemRel-PreparePhase1-Dataset
eu.dnetlib.dhp.resulttocommunityfromsemrel.PrepareResultCommunitySetStep1
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -150,13 +150,14 @@
+
yarn
cluster
ResultToCommunitySemRel-PreparePhase1-ORP
eu.dnetlib.dhp.resulttocommunityfromsemrel.PrepareResultCommunitySetStep1
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -185,7 +186,7 @@
cluster
ResultToCommunitySemRel-PreparePhase1-Software
eu.dnetlib.dhp.resulttocommunityfromsemrel.PrepareResultCommunitySetStep1
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -216,7 +217,7 @@
cluster
ResultToCommunityEmRelPropagation-PreparePhase2
eu.dnetlib.dhp.resulttocommunityfromsemrel.PrepareResultCommunitySetStep2
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -232,9 +233,7 @@
--outputPath${workingDir}/preparedInfo/mergedCommunityAssoc
-
-
@@ -250,7 +249,7 @@
cluster
Result2CommunitySemRelPropagation-Publication
eu.dnetlib.dhp.resulttocommunityfromsemrel.SparkResultToCommunityThroughSemRelJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -279,7 +278,7 @@
cluster
Result2CommunitySemRelPropagation-Dataset
eu.dnetlib.dhp.resulttocommunityfromsemrel.SparkResultToCommunityThroughSemRelJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -308,7 +307,7 @@
cluster
Result2CommunitySemRelPropagation-ORP
eu.dnetlib.dhp.resulttocommunityfromsemrel.SparkResultToCommunityThroughSemRelJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -337,7 +336,7 @@
cluster
Result2CommunitySemRelPropagation-Software
eu.dnetlib.dhp.resulttocommunityfromsemrel.SparkResultToCommunityThroughSemRelJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
diff --git a/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/oozie_app/workflow.xml b/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/oozie_app/workflow.xml
index 73268fcc7..a1b7f4ad7 100644
--- a/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-enrichment/src/main/resources/eu/dnetlib/dhp/resulttoorganizationfrominstrepo/oozie_app/workflow.xml
@@ -131,7 +131,7 @@
cluster
PrepareResultOrganizationAssociation
eu.dnetlib.dhp.resulttoorganizationfrominstrepo.PrepareResultInstRepoAssociation
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -163,7 +163,7 @@
cluster
resultToOrganizationFromInstRepoPropagationForPublications
eu.dnetlib.dhp.resulttoorganizationfrominstrepo.SparkResultToOrganizationFromIstRepoJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -193,7 +193,7 @@
cluster
resultToOrganizationFromInstRepoPropagationForDataset
eu.dnetlib.dhp.resulttoorganizationfrominstrepo.SparkResultToOrganizationFromIstRepoJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -223,7 +223,7 @@
cluster
resultToOrganizationFromInstRepoPropagationForORP
eu.dnetlib.dhp.resulttoorganizationfrominstrepo.SparkResultToOrganizationFromIstRepoJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
@@ -253,7 +253,7 @@
cluster
resultToOrganizationFromInstRepoPropagationForSoftware
eu.dnetlib.dhp.resulttoorganizationfrominstrepo.SparkResultToOrganizationFromIstRepoJob
- dhp-propagation-${projectVersion}.jar
+ dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}