# distcp大数据传输(可传参库,多表,多分区)
# NOTE(review): the shebang below must be the FIRST line of the file for
# direct execution (./script.sh); move it above this header when deploying.
#!/bin/bash

#date=$(date +%Y-%m-%d)

# Check the argument count to pick the copy mode.
if [ $# -eq 2 ]; then
    # Two arguments: copy whole tables.
    # $1 = database name, $2 = comma-separated list of table names.
    db_name="$1"
    tbs_name="$2"
    # Create a snapshot of the source database first (disabled: permission issues).
    #hadoop fs -fs hdfs://10.48.0.101 -createSnapshot /ucd-prod-vdp-usdp/user/hive/warehouse/${db_name}.db temp-${date}
    # Split the table list on commas into an array.
    IFS=',' read -ra tb_array <<< "$tbs_name"
    echo "遍历执行表"
    # BUGFIX: the loop variable must be tb_name — the original iterated as
    # pt_name while the body referenced $tb_name, so the path was built with
    # an empty table name.  Using tb_name also leaves it set for the
    # metadata-repair step at the bottom of the script.
    for tb_name in "${tb_array[@]}"; do
        echo "$tb_name"
        echo "执行表复制操作"
        # BUGFIX: the distcp command was corrupted by the blog scrape (split
        # across blank lines mid-${...}, a shell syntax error); rebuilt as one
        # command with line continuations and quoted paths.
        hadoop distcp -update -m 30 -pb -strategy dynamic \
            "hdfs://10.48.0.101/ucd-prod-vdp-usdp/user/hive/warehouse/${db_name}.db/${tb_name}/" \
            "/ucd-test-vdp-usdp/user/hive/warehouse/${db_name}.db/${tb_name}/"
    done
    # Drop the source snapshot (disabled together with its creation above).
    #hdfs dfs -deleteSnapshot /ucd-prod-vdp-usdp/user/hive/warehouse/${db_name}.db temp-${date}

elif [ $# -eq 3 ]; then
    # Three arguments: copy selected partitions of a single table.
    # $1 = database, $2 = table, $3 = comma-separated list of dt= partition values.
    db_name="$1"
    tb_name="$2"
    pts_name="$3"
    # Create a snapshot of the source database first (disabled: permission issues).
    #hadoop fs -fs hdfs://10.48.0.101 -createSnapshot /ucd-prod-vdp-usdp/user/hive/warehouse/${db_name}.db temp-${date}
    # Split the partition list on commas into an array.
    IFS=',' read -ra pts_array <<< "$pts_name"
    echo "遍历打印分区:"
    for pt_name in "${pts_array[@]}"; do
        echo "$pt_name"
        echo "执行分区复制操作"
        # BUGFIX: same scrape corruption as above, plus a stray '{' before the
        # destination pt_name; rebuilt as one valid, quoted command.
        hadoop distcp -update -m 10 -pb -strategy dynamic \
            "hdfs://10.48.0.101/ucd-prod-vdp-usdp/user/hive/warehouse/${db_name}.db/${tb_name}/dt=${pt_name}" \
            "/ucd-test-vdp-usdp/user/hive/warehouse/${db_name}.db/${tb_name}/dt=${pt_name}"
    done
    # Drop the source snapshot (disabled together with its creation above).
    #hdfs dfs -deleteSnapshot /ucd-prod-vdp-usdp/user/hive/warehouse/${db_name}.db temp-${date}

else
    # Wrong argument count: print usage and exit non-zero.
    echo "用法: $0 <库> <表> [分区1,分区2,...]"
    exit 1
fi


# Check whether the source table is partitioned (disabled).
# NOTE(review): the if/else below is commented out, so BOTH repair commands
# run unconditionally, using whatever db_name/tb_name are set after the copy.
# NOTE(review): the principal in the next (disabled) line was mangled by the
# blog scraper to "[email protected]" — it should read hadoop/_HOST@UCDIPA.VIATRIS.CC
# like the active beeline commands below; restore it before re-enabling.
#分区数量=$(beeline  -u 'jdbc:hive2://ucd-prod-vdp-usdp-101.ucdipa.viatris.cc:10009/;principal=hadoop/[email protected];' -e "SHOW PARTITIONS ${db_name}.${tb_name}" | wc -l)
#if [ ${分区数量} -eq 0 ]; then
    # Source table has no partitions: recompute table-level statistics on the
    # test cluster so Hive metadata matches the freshly copied data.
    echo "修复非分区表元数据"
	beeline  -u 'jdbc:hive2://ucd-test-vdp-usdp-101.ucdipa.viatris.cc:10009/;principal=hadoop/_HOST@UCDIPA.VIATRIS.CC;' -e "analyze table ${db_name}.${tb_name} compute statistics"
    #hive -S -e "analyze table ${db_name}.${tb_name} compute statistics"
#else
    # Source table is partitioned: sync partition metadata with the HDFS
    # directory layout created by distcp.
	echo "修复分区表元数据"
	beeline  -u 'jdbc:hive2://ucd-test-vdp-usdp-101.ucdipa.viatris.cc:10009/;principal=hadoop/_HOST@UCDIPA.VIATRIS.CC;' -e "msck REPAIR TABLE ${db_name}.${tb_name}"
    #hive -S -e "msck REPAIR TABLE ${db_name}.${tb_name}"
#fi

# 转载自 (reposted from): blog.csdn.net/qq_43688472/article/details/131945388