forked from D-Net/dnet-hadoop

Monitor DB workflow

This commit is contained in:
parent dcb958e146
commit 43f6d4f296

@@ -0,0 +1,34 @@
<configuration>
    <property>
        <name>jobTracker</name>
        <value>${jobTracker}</value>
    </property>
    <property>
        <name>nameNode</name>
        <value>${nameNode}</value>
    </property>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
    <property>
        <name>oozie.action.sharelib.for.spark</name>
        <value>spark2</value>
    </property>
    <property>
        <name>hive_metastore_uris</name>
        <value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
    </property>
    <property>
        <name>hive_jdbc_url</name>
        <value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000/;UseNativeQuery=1;?spark.executor.memory=19166291558;spark.yarn.executor.memoryOverhead=3225;spark.driver.memory=11596411699;spark.yarn.driver.memoryOverhead=1228</value>
    </property>
    <property>
        <name>oozie.wf.workflow.notification.url</name>
        <value>{serviceUrl}/v1/oozieNotification/jobUpdate?jobId=$jobId%26status=$status</value>
    </property>
    <property>
        <name>stats_tool_api_url</name>
        <value>${stats_tool_api_url}</value>
    </property>
</configuration>
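These defaults are merged with the submitter's properties file at run time. A minimal submission sketch (the Oozie endpoint and properties file name below are assumptions, not part of this commit):

# hypothetical submission: Oozie overlays job.properties on top of the
# config-default.xml found in the workflow application directory
oozie job -oozie http://oozie-host:11000/oozie -config job.properties -run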
@@ -0,0 +1,21 @@
# point the impala-shell python egg cache under /tmp into the user's home
export PYTHON_EGG_CACHE=/home/$(whoami)/.python-eggs
export link_folder=/tmp/impala-shell-python-egg-cache-$(whoami)
if ! [ -L "${link_folder}" ]
then
    rm -Rf "${link_folder}"
    ln -sfn ${PYTHON_EGG_CACHE}${link_folder} ${link_folder}
fi

export SOURCE=$1
export TARGET=$2
export SHADOW=$3

# refresh Impala's view of the Hive metastore, then compute stats on every table of the monitor db
impala-shell -q "invalidate metadata;"
impala-shell -d ${TARGET} -q "show tables" --delimited | sed "s/\(.*\)/compute stats ${TARGET}.\1;/" | impala-shell -f -
echo "Impala shell finished"

# rebuild the shadow db as plain views over the freshly populated monitor db
echo "Updating shadow monitor database"
impala-shell -q "create database if not exists ${SHADOW}"
impala-shell -d ${SHADOW} -q "show tables" --delimited | sed "s/^/drop view if exists ${SHADOW}./" | sed "s/$/;/" | impala-shell -f -
impala-shell -d ${TARGET} -q "show tables" --delimited | sed "s/\(.*\)/create view ${SHADOW}.\1 as select * from ${TARGET}.\1;/" | impala-shell -f -
echo "Shadow db ready!"
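To make the generated SQL concrete: assuming TARGET=openaire_monitor and SHADOW=openaire_monitor_shadow (hypothetical names) and a table named datasource, the three pipelines above feed statements like these into impala-shell -f -:

compute stats openaire_monitor.datasource;
drop view if exists openaire_monitor_shadow.datasource;
create view openaire_monitor_shadow.datasource as select * from openaire_monitor.datasource;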
@@ -0,0 +1,24 @@
# point the impala-shell python egg cache under /tmp into the user's home
export PYTHON_EGG_CACHE=/home/$(whoami)/.python-eggs
export link_folder=/tmp/impala-shell-python-egg-cache-$(whoami)
if ! [ -L "${link_folder}" ]
then
    rm -Rf "${link_folder}"
    ln -sfn ${PYTHON_EGG_CACHE}${link_folder} ${link_folder}
fi

export SOURCE=$1
export TARGET=$2
export SHADOW=$3
export SCRIPT_PATH=$4

export HIVE_OPTS="-hiveconf mapred.job.queue.name=analytics -hiveconf hive.spark.client.connect.timeout=120000ms -hiveconf hive.spark.client.server.connect.timeout=300000ms -hiveconf spark.executor.memory=19166291558 -hiveconf spark.yarn.executor.memoryOverhead=3225 -hiveconf spark.driver.memory=11596411699 -hiveconf spark.yarn.driver.memoryOverhead=1228"
export HADOOP_USER_NAME="oozie"

# fetch the SQL template shipped with the workflow from HDFS
echo "Getting file from ${SCRIPT_PATH}"
hdfs dfs -copyToLocal ${SCRIPT_PATH}

# substitute the TARGET/SOURCE placeholders in the template, then run it with Hive
echo "Creating monitor database"
cat createMonitorDB.sql | sed "s/TARGET/${TARGET}/g" | sed "s/SOURCE/${SOURCE}/g" > foo
hive $HIVE_OPTS -f foo
echo "Hive shell finished"
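A sketch of the templating step, assuming SOURCE=openaire_stats and TARGET=openaire_monitor (hypothetical names): a line in createMonitorDB.sql such as

create table TARGET.result as select * from SOURCE.result;

is rewritten into foo, and then executed by Hive, as

create table openaire_monitor.result as select * from openaire_stats.result;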
@@ -0,0 +1,105 @@
<workflow-app name="Monitor DB" xmlns="uri:oozie:workflow:0.5">
    <parameters>
        <property>
            <name>stats_db_name</name>
            <description>the target stats database name</description>
        </property>
        <property>
            <name>stats_db_shadow_name</name>
            <description>the name of the shadow schema</description>
        </property>
        <property>
            <name>monitor_db_name</name>
            <description>the target monitor db name</description>
        </property>
        <property>
            <name>monitor_db_shadow_name</name>
            <description>the name of the shadow monitor db</description>
        </property>
        <property>
            <name>stats_tool_api_url</name>
            <description>the URL of the stats tool API; it is used to trigger the cache update</description>
        </property>
        <property>
            <name>hive_metastore_uris</name>
            <description>hive server metastore URIs</description>
        </property>
        <property>
            <name>hive_jdbc_url</name>
            <description>hive server jdbc url</description>
        </property>
        <property>
            <name>hive_timeout</name>
            <description>the time period, in seconds, after which Hive fails a transaction if a Hive client has not sent a heartbeat; the default value is 300 seconds</description>
        </property>
        <property>
            <name>context_api_url</name>
            <description>the base url of the context api (https://services.openaire.eu/openaire)</description>
        </property>
    </parameters>

    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>hive.metastore.uris</name>
                <value>${hive_metastore_uris}</value>
            </property>
            <property>
                <name>hive.txn.timeout</name>
                <value>${hive_timeout}</value>
            </property>
        </configuration>
    </global>

    <start to="Step1-createMonitorDB"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <action name="Step1-createMonitorDB">
        <shell xmlns="uri:oozie:shell-action:0.1">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <exec>monitor.sh</exec>
            <argument>${stats_db_name}</argument>
            <argument>${monitor_db_name}</argument>
            <argument>${monitor_db_shadow_name}</argument>
            <argument>${wf:appPath()}/scripts/createMonitorDB.sql</argument>
            <file>monitor.sh</file>
        </shell>
        <ok to="Step2-createMonitorDB-post"/>
        <error to="Kill"/>
    </action>
<action name="Step2-createMonitorDB-post">
|
||||||
|
<shell xmlns="uri:oozie:shell-action:0.1">
|
||||||
|
<job-tracker>${jobTracker}</job-tracker>
|
||||||
|
<name-node>${nameNode}</name-node>
|
||||||
|
<exec>monitor-post.sh</exec>
|
||||||
|
<argument>${stats_db_name}</argument>
|
||||||
|
<argument>${monitor_db_name}</argument>
|
||||||
|
<argument>${monitor_db_shadow_name}</argument>
|
||||||
|
<file>monitor-post.sh</file>
|
||||||
|
</shell>
|
||||||
|
<ok to="End"/>
|
||||||
|
<error to="Kill"/>
|
||||||
|
</action>
|
||||||
|
|
||||||
|
<action name="Step3-updateCache">
|
||||||
|
<shell xmlns="uri:oozie:shell-action:0.1">
|
||||||
|
<job-tracker>${jobTracker}</job-tracker>
|
||||||
|
<name-node>${nameNode}</name-node>
|
||||||
|
<exec>updateCache.sh</exec>
|
||||||
|
<argument>${stats_tool_api_url}</argument>
|
||||||
|
<file>updateCache.sh</file>
|
||||||
|
</shell>
|
||||||
|
<ok to="End"/>
|
||||||
|
<error to="Kill"/>
|
||||||
|
</action>
|
||||||
|
|
||||||
|
<end name="End"/>
|
||||||
|
</workflow-app>
|
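A minimal job.properties sketch covering the parameters this workflow declares (every value below is a placeholder for illustration, not taken from this commit; hive_metastore_uris and hive_jdbc_url are supplied by config-default.xml):

# hypothetical values for illustration only
jobTracker=yarnRM
nameNode=hdfs://nameservice1
oozie.wf.application.path=${nameNode}/user/oozie/monitor-db
stats_db_name=openaire_stats
stats_db_shadow_name=openaire_stats_shadow
monitor_db_name=openaire_monitor
monitor_db_shadow_name=openaire_monitor_shadow
stats_tool_api_url=http://stats-tool.example.org/api
hive_timeout=3000
context_api_url=https://services.openaire.eu/openaire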