From b3f9633205ae5bbb7dcb1917dfb68dbc54ed71d7 Mon Sep 17 00:00:00 2001
From: dimitrispie
Date: Mon, 15 May 2023 12:51:44 +0300
Subject: [PATCH] Update copyDataToImpalaCluster.sh

Added option --user to impala-shell command

---
 .../oozie_app/copyDataToImpalaCluster.sh      | 24 +++++++++----------
 1 file changed, 12 insertions(+), 12 deletions(-)

diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/copyDataToImpalaCluster.sh b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/copyDataToImpalaCluster.sh
index a99a78965..4ff236d07 100644
--- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/copyDataToImpalaCluster.sh
+++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/copyDataToImpalaCluster.sh
@@ -22,28 +22,28 @@ function copydb() {
   hdfs dfs -conf /etc/impala_cluster/hdfs-site.xml -chmod -R 777 /tmp/$FILE/${db}.db
 
   # create the databases
-  impala-shell -i impala-cluster-dn1.openaire.eu -q "drop database if exists ${db} cascade";
-  impala-shell -i impala-cluster-dn1.openaire.eu -q "create database ${db}";
+  impala-shell --user $HADOOP_USER_NAME -i impala-cluster-dn1.openaire.eu -q "drop database if exists ${db} cascade";
+  impala-shell --user $HADOOP_USER_NAME -i impala-cluster-dn1.openaire.eu -q "create database ${db}";
 
-  impala-shell -q "INVALIDATE METADATA"
+  impala-shell --user $HADOOP_USER_NAME -q "INVALIDATE METADATA"
   echo "creating schema for ${db}"
 
-  for i in `impala-shell -d ${db} --delimited -q "show tables"`;
+  for i in `impala-shell --user $HADOOP_USER_NAME -d ${db} --delimited -q "show tables"`;
     do
-        impala-shell -d ${db} --delimited -q "show create table $i";
-    done | sed 's/"$/;/' | sed 's/^"//' | sed 's/[[:space:]]\date[[:space:]]/`date`/g' | impala-shell -i impala-cluster-dn1.openaire.eu -c -f -
+        impala-shell --user $HADOOP_USER_NAME -d ${db} --delimited -q "show create table $i";
+    done | sed 's/"$/;/' | sed 's/^"//' | sed 's/[[:space:]]\date[[:space:]]/`date`/g' | impala-shell --user $HADOOP_USER_NAME -i impala-cluster-dn1.openaire.eu -c -f -
 
   # run the same command twice because we may have failures in the first run (due to views pointing to the same db)
-  for i in `impala-shell -d ${db} --delimited -q "show tables"`;
+  for i in `impala-shell --user $HADOOP_USER_NAME -d ${db} --delimited -q "show tables"`;
    do
-        impala-shell -d ${db} --delimited -q "show create table $i";
-    done | sed 's/"$/;/' | sed 's/^"//' | sed 's/[[:space:]]\date[[:space:]]/`date`/g' | impala-shell -i impala-cluster-dn1.openaire.eu -c -f -
+        impala-shell --user $HADOOP_USER_NAME -d ${db} --delimited -q "show create table $i";
+    done | sed 's/"$/;/' | sed 's/^"//' | sed 's/[[:space:]]\date[[:space:]]/`date`/g' | impala-shell --user $HADOOP_USER_NAME -i impala-cluster-dn1.openaire.eu -c -f -
 
   # load the data from /tmp in the respective tables
   echo "copying data in tables and computing stats"
-  for i in `impala-shell -i impala-cluster-dn1.openaire.eu -d ${db} --delimited -q "show tables"`;
+  for i in `impala-shell --user $HADOOP_USER_NAME -i impala-cluster-dn1.openaire.eu -d ${db} --delimited -q "show tables"`;
    do
-        impala-shell -i impala-cluster-dn1.openaire.eu -d ${db} -q "load data inpath '/tmp/$FILE/${db}.db/$i' into table $i";
-        impala-shell -i impala-cluster-dn1.openaire.eu -d ${db} -q "compute stats $i";
+        impala-shell --user $HADOOP_USER_NAME -i impala-cluster-dn1.openaire.eu -d ${db} -q "load data inpath '/tmp/$FILE/${db}.db/$i' into table $i";
+        impala-shell --user $HADOOP_USER_NAME -i impala-cluster-dn1.openaire.eu -d ${db} -q "compute stats $i";
    done
 
   # deleting the remaining directory from hdfs