From 4a905932a3db36c61570c24b9aa54283cd30abba Mon Sep 17 00:00:00 2001
From: ikanellos
Date: Mon, 15 May 2023 15:24:22 +0300
Subject: [PATCH] Spark properties from job.properties

---
 .../impact_indicators/oozie_app/workflow.xml  | 22 +++++++++++-----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git a/dhp-workflows/dhp-impact-indicators/src/main/resources/eu/dnetlib/dhp/oa/graph/impact_indicators/oozie_app/workflow.xml b/dhp-workflows/dhp-impact-indicators/src/main/resources/eu/dnetlib/dhp/oa/graph/impact_indicators/oozie_app/workflow.xml
index f07a27244..ec2bb140f 100644
--- a/dhp-workflows/dhp-impact-indicators/src/main/resources/eu/dnetlib/dhp/oa/graph/impact_indicators/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-impact-indicators/src/main/resources/eu/dnetlib/dhp/oa/graph/impact_indicators/oozie_app/workflow.xml
@@ -46,7 +46,7 @@ create_openaire_ranking_graph.py
-    --executor-memory 20G --executor-cores 4 --driver-memory 20G
+    --executor-memory ${sparkHighExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory ${sparkHighDriverMemory}
     --master yarn
     --deploy-mode cluster
     --conf spark.sql.shuffle.partitions=7680
@@ -100,7 +100,7 @@ CC.py
-    --executor-memory 18G --executor-cores 4 --driver-memory 10G
+    --executor-memory ${sparkHighExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory ${sparkNormalDriverMemory}
     --master yarn
     --deploy-mode cluster
     --conf spark.sql.shuffle.partitions=7680
@@ -141,7 +141,7 @@ TAR.py
-    --executor-memory 18G --executor-cores 4 --driver-memory 10G
+    --executor-memory ${sparkHighExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory ${sparkNormalDriverMemory}
     --master yarn
     --deploy-mode cluster
     --conf spark.sql.shuffle.partitions=7680
@@ -189,7 +189,7 @@ CC.py
-    --executor-memory 18G --executor-cores 4 --driver-memory 10G
+    --executor-memory ${sparkHighExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory ${sparkNormalDriverMemory}
     --master yarn
     --deploy-mode cluster
     --conf spark.sql.shuffle.partitions=7680
@@ -244,7 +244,7 @@ PageRank.py
-    --executor-memory 18G --executor-cores 4 --driver-memory 10G
+    --executor-memory ${sparkHighExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory ${sparkNormalDriverMemory}
     --master yarn
     --deploy-mode cluster
     --conf spark.sql.shuffle.partitions=7680
@@ -289,7 +289,7 @@ AttRank.py
-    --executor-memory 18G --executor-cores 4 --driver-memory 10G
+    --executor-memory ${sparkHighExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory ${sparkNormalDriverMemory}
     --master yarn
     --deploy-mode cluster
     --conf spark.sql.shuffle.partitions=7680
@@ -381,7 +381,7 @@ format_ranking_results.py
-    --executor-memory 10G --executor-cores 4 --driver-memory 10G
+    --executor-memory ${sparkNormalExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory ${sparkNormalDriverMemory}
     --master yarn
     --deploy-mode cluster
     --conf spark.sql.shuffle.partitions=7680
@@ -429,7 +429,7 @@ format_ranking_results.py
-    --executor-memory 10G --executor-cores 4 --driver-memory 10G
+    --executor-memory ${sparkNormalExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory ${sparkNormalDriverMemory}
     --master yarn
     --deploy-mode cluster
     --conf spark.sql.shuffle.partitions=7680
@@ -484,7 +484,7 @@ map_openaire_ids_to_dois.py
-    --executor-memory 18G --executor-cores 4 --driver-memory 15G
+    --executor-memory ${sparkHighExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory ${sparkHighDriverMemory}
     --master yarn
     --deploy-mode cluster
     --conf spark.sql.shuffle.partitions=7680
@@ -526,7 +526,7 @@ map_scores_to_dois.py
-    --executor-memory 18G --executor-cores 4 --driver-memory 15G
+    --executor-memory ${sparkHighExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory ${sparkHighDriverMemory}
     --master yarn
     --deploy-mode cluster
     --conf spark.sql.shuffle.partitions=7680
@@ -609,7 +609,7 @@ projects_impact.py
-    --executor-memory 18G --executor-cores 4 --driver-memory 10G
+    --executor-memory ${sparkHighExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory ${sparkNormalDriverMemory}
     --master yarn
     --deploy-mode cluster
     --conf spark.sql.shuffle.partitions=7680
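Note: after this patch the workflow expects five Spark sizing properties to be supplied at submission time. A minimal job.properties sketch follows; the property names are taken from the diff above, while the values are illustrative assumptions chosen to mirror the hard-coded settings being replaced. The patch also consolidates previously divergent values (18G vs. 20G executor memory, 15G vs. 20G driver memory) under the single "high" properties, so pick one value per property.

# Illustrative job.properties entries -- names from this patch, values assumed
sparkExecutorCores=4
sparkNormalExecutorMemory=10G
sparkHighExecutorMemory=20G
sparkNormalDriverMemory=10G
sparkHighDriverMemory=20G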