master #59

@@ -0,0 +1,19 @@
+export PYTHON_EGG_CACHE=/home/$(whoami)/.python-eggs
+export link_folder=/tmp/impala-shell-python-egg-cache-$(whoami)
+if ! [ -L $link_folder ]
+then
+rm -Rf "$link_folder"
+ln -sfn ${PYTHON_EGG_CACHE}${link_folder} ${link_folder}
+fi
+
+export SOURCE=$1
+export SHADOW=$2
+export HIVE_OPTS="-hiveconf mapred.job.queue.name=analytics -hiveconf hive.spark.client.connect.timeout=120000ms -hiveconf hive.spark.client.server.connect.timeout=300000ms -hiveconf spark.executor.memory=19166291558 -hiveconf spark.yarn.executor.memoryOverhead=3225 -hiveconf spark.driver.memory=11596411699 -hiveconf spark.yarn.driver.memoryOverhead=1228"
+export HADOOP_USER_NAME="oozie"
+
+echo "Updating shadow database"
+hive -e "drop database if exists ${SHADOW} cascade"
+hive -e "create database if not exists ${SHADOW}"
+hive $HIVE_OPTS --database ${SOURCE} -e "show tables" | grep -v WARN | sed "s/\(.*\)/create view ${SHADOW}.\1 as select * from ${SOURCE}.\1;/" > foo
+hive -f foo
+echo "Updated shadow database"
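
The show-tables/sed pipeline near the end of this new script rewrites every table name reported by ${SOURCE} into a CREATE VIEW statement targeting ${SHADOW}, and "hive -f foo" then replays those statements. A standalone sketch of that transformation, using made-up database and table names (none of them taken from this patch):

SOURCE=stats_db          # illustrative only
SHADOW=stats_db_shadow   # illustrative only
printf 'publication\ndataset\n' \
  | sed "s/\(.*\)/create view ${SHADOW}.\1 as select * from ${SOURCE}.\1;/"
# prints:
#   create view stats_db_shadow.publication as select * from stats_db.publication;
#   create view stats_db_shadow.dataset as select * from stats_db.dataset;
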
@@ -22,3 +22,4 @@ echo "Creating monitor database"
 cat step20-createMonitorDB.sql | sed "s/TARGET/${TARGET}/g" | sed "s/SOURCE/${SOURCE}/g1" > foo
 hive $HIVE_OPTS -f foo
 echo "Hive shell finished"
+

@@ -7,12 +7,13 @@ then
 fi
 
 export SOURCE=$1
-export TARGET=$2
-export SHADOW=$3
+export SHADOW=$2
 
 export HIVE_OPTS="-hiveconf mapred.job.queue.name=analytics -hiveconf hive.spark.client.connect.timeout=120000ms -hiveconf hive.spark.client.server.connect.timeout=300000ms -hiveconf spark.executor.memory=19166291558 -hiveconf spark.yarn.executor.memoryOverhead=3225 -hiveconf spark.driver.memory=11596411699 -hiveconf spark.yarn.driver.memoryOverhead=1228"
 export HADOOP_USER_NAME="oozie"
 
-hive $HIVE_OPTS --database ${TARGET} -e "show tables" | grep -v WARN | sed "s/\(.*\)/analyze table ${TARGET}.\1 compute statistics;/" > foo
-hive $HIVE_OPTS -f foo
-echo "Hive shell finished"
+echo "Updating shadow database"
+hive -e "drop database if exists ${SHADOW} cascade"
+hive -e "create database if not exists ${SHADOW}"
+hive $HIVE_OPTS --database ${SOURCE} -e "show tables" | grep -v WARN | sed "s/\(.*\)/create view ${SHADOW}.\1 as select * from ${SOURCE}.\1;/" > foo
+hive -f foo
+echo "Updated shadow database"

@@ -366,6 +366,20 @@
 <argument>${wf:appPath()}/scripts/step20-createMonitorDB.sql</argument>
 <file>monitor.sh</file>
 </shell>
+<ok to="step20-createMonitorDB-post"/>
+<error to="Kill"/>
+</action>
+
+<action name="step20-createMonitorDB-post">
+<shell xmlns="uri:oozie:shell-action:0.1">
+<job-tracker>${jobTracker}</job-tracker>
+<name-node>${nameNode}</name-node>
+<exec>monitor-post.sh</exec>
+<argument>${monitor_db_name}</argument>
+<argument>${monitor_db_shadow_name}</argument>
+<argument>${wf:appPath()}/scripts/step20-createMonitorDB.sql</argument>
+<file>monitor.sh</file>
+</shell>
 <ok to="step21-createObservatoryDB-pre"/>
 <error to="Kill"/>
 </action>
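
For orientation: the new step20-createMonitorDB-post action hands ${monitor_db_name} and ${monitor_db_shadow_name} to monitor-post.sh as its first and second positional parameters; the SQL-file argument is passed along as well but, assuming monitor-post.sh is the new script added at the top of this diff, it is never read. Outside Oozie the equivalent call would be roughly the following, with illustrative database names:

# $1 -> SOURCE, $2 -> SHADOW inside the script; the third argument is carried but unused
bash monitor-post.sh monitor_db monitor_db_shadow scripts/step20-createMonitorDB.sql
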
@@ -400,7 +414,6 @@
 <job-tracker>${jobTracker}</job-tracker>
 <name-node>${nameNode}</name-node>
 <exec>observatory-post.sh</exec>
-<argument>${stats_db_name}</argument>
 <argument>${observatory_db_name}</argument>
 <argument>${observatory_db_shadow_name}</argument>
 <file>observatory-post.sh</file>