Use the "HADOOP_USER_NAME" value from the "workflow-property", in "copyDataToImpalaCluster.sh", in "stats-monitor-updates".

Lampros Smyrnaios 2024-04-11 17:46:33 +03:00
parent abf0b69f29
commit 22745027c8
1 changed file with 1 addition and 6 deletions


@@ -6,7 +6,7 @@ then
 ln -sfn ${PYTHON_EGG_CACHE}${link_folder} ${link_folder}
 fi
-#export HADOOP_USER_NAME=$2
+export HADOOP_USER_NAME=$2
 # Set the active HDFS node of OCEAN and IMPALA cluster.
 OCEAN_HDFS_NODE='hdfs://nameservice1'
@@ -56,10 +56,6 @@ LOCATION_SED_ARG_2='s/\.location,/\.\`location\`,/g'
 LOCATION_SED_ARG_3='s/\.location[[:space:]]/\.\`location\` /g'
-export HADOOP_USER="dimitris.pierrakos"
-export HADOOP_USER_NAME='dimitris.pierrakos'
 function copydb() {
 db=$1
@@ -204,7 +200,6 @@ function copydb() {
 MONITOR_DB=$1
-#HADOOP_USER_NAME=$2
 copydb $MONITOR_DB'_institutions'
 copydb $MONITOR_DB
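
After this change the Hadoop user is no longer hardcoded in the script: copyDataToImpalaCluster.sh exports HADOOP_USER_NAME from its second positional argument, which the workflow supplies through its HADOOP_USER_NAME property. A minimal invocation sketch, assuming the first argument is the monitor database and the second is the Hadoop user (the values below are placeholders, not taken from the actual workflow):

    # Hypothetical call from the workflow action; argument values are illustrative only.
    # $1 -> monitor database to copy, $2 -> user exported as HADOOP_USER_NAME
    ./copyDataToImpalaCluster.sh openaire_prod_stats_monitor some.hadoop.user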