# 1. Custom helper functions defined in the util.sh script
#!/usr/bin/env bash
# 检查环境变量是否存在
#######################################
# Assert that the environment variable named by $1 is set and non-empty.
# Arguments: $1 - the NAME of the environment variable to check
# Outputs:   on failure, logs an error via the log helper (from util.sh)
# Exits:     1 if the named variable is unset or empty
#######################################
function assert_env_var_exist {
  local var_name=$1
  # BUG FIX: the original tested `-z $var_name`, i.e. whether the NAME string
  # is empty (always false when an argument is given). Use indirect expansion
  # ${!var_name} to test the VALUE of the variable named by $1.
  if [[ -z "${!var_name:-}" ]]; then
    log "ERROR: " "${BASH_SOURCE[*]}" "${LINENO}" "Failed: Check env var: $var_name"
    # exit codes must be 0-255; `exit -1` is invalid (wraps to 255)
    exit 1
  fi
}
# 在hdfs上创建目录
#######################################
# Create a directory (and any missing parents) on HDFS.
# Globals:   HADOOP_HOME (read) - Hadoop installation root
# Arguments: $1 - HDFS path to create
# Exits:     via if_error_exit (from util.sh) when the hdfs command fails
#######################################
function mkdir_hdfs {
  local path=$1
  # Quote all expansions so paths containing spaces or glob chars are safe.
  "${HADOOP_HOME}/bin/hdfs" dfs -mkdir -p "${path}"
  if_error_exit "make hdfs dir ${path}"
}
# 2. Caller script
#!/bin/bash
source /etc/profile
source util.sh
# hive表join
#######################################
# Run the Hive join for one partition date and mark the output as done.
# Globals:   HADOOP_HOME, mp_queue, runNextDt, runPrevDt (read; presumably
#            exported upstream — TODO confirm)
# Arguments: $1 - run date (partition value for pt=/dt=)
# Exits:     via if_error_exit (from util.sh) when the hive job fails
#######################################
function join_table(){
  local runDt=$1
  local t1="/user/hive/test.db/table1/pt=${runDt}*/_SUCCESS"
  local t2="/user/hive/test.db/table2/pt=${runDt}*/_SUCCESS"
  # BUG FIX: the original called `check_hdfs_exist ${v1} ${v2}` with undefined
  # v1/v2 (so nothing was actually checked). Verify the upstream _SUCCESS
  # markers that were just built into t1/t2.
  check_hdfs_exist "${t1}"
  check_hdfs_exist "${t2}"
  hive -hiveconf mp_queue="${mp_queue}" \
    -hiveconf runDt="${runDt}" \
    -hiveconf runNextDt="${runNextDt}" \
    -hiveconf runPrevDt="${runPrevDt}" \
    -f join.hql
  if_error_exit "run table_new: ${runDt}"
  # NOTE(review): inputs use partition key `pt=` but the output uses `dt=` —
  # looks intentional per table schema, but worth confirming.
  "${HADOOP_HOME}/bin/hdfs" dfs -touchz "/user/hive/test.db/table_new/dt=${runDt}/_SUCCESS"
}
# Entry point: run the join for the date in ${dt} (set upstream; quoted so an
# empty or space-containing value is passed through as a single argument).
join_table "${dt}"