hadoop 集群间数据迁移

将HDFS配置项hadoop.tmp.dir的路径修改为/home/tmp，如下所示：

<property>
	 <name>hadoop.tmp.dir</name>
	 <value>/home/tmp</value>
</property>
# Number of slave nodes and their hostname prefix: hosts are
# lenovo1 .. lenovo8. Adjust num/slave to match the cluster.
num=8
slave=lenovo
#for i in {1..10};do
for ((i = 1; i <= num; i++)); do
  host="${slave}${i}"
  echo "开始安装${host}..."

  echo "拷贝几个配置文件"
  #scp  /etc/hosts  root@${host}:/etc

  # Push the updated core-site.xml (new hadoop.tmp.dir) to the slave.
  # Abort on failure: continuing would leave this node misconfigured.
  scp /usr/local/hadoop/conf/core-site.xml "hadoop@${host}:/usr/local/hadoop/conf" \
    || { echo "scp core-site.xml -> ${host} failed" >&2; exit 1; }

  # Copy the per-node setup script over and execute it remotely as root.
  scp 4.sh "root@${host}:/tmp/4.sh" \
    || { echo "scp 4.sh -> ${host} failed" >&2; exit 1; }
  ssh "root@${host}" sh /tmp/4.sh \
    || { echo "remote setup on ${host} failed" >&2; exit 1; }

  echo "安装${host}完毕"

done

echo "创建HDFS的目录"

# Relocate the existing hadoop tmp dir to /home/tmp (the new
# hadoop.tmp.dir). Fail loudly: the symlink and chown below are
# meaningless if this move did not happen.
mv -- /home/hadoop/tmp /home \
  || { echo "mv /home/hadoop/tmp -> /home failed" >&2; exit 1; }

# Critical: keep the old path working via a symlink so anything still
# referring to /home/hadoop/tmp resolves to the new location.
ln -s /home/tmp /home/hadoop/tmp


echo "将hadoop相关目录权限下放给用户hadoop"
chown -R hadoop:hadoop /home/tmp
chown -R hadoop:hadoop /home/hadoop/tmp
# On slaves the permissions must be 755, while on the master they must be
# 777 so that Hive can write its metadata. This script applies the slave
# setting (755); set 777 manually on the master.
chmod -R 755 /home/tmp
rm -rf /usr/local/hadoop/logs/*

参考文档

hadoop 集群间数据迁移

hive:从derby数据库到mysql的迁移

猜你喜欢

转载自yeelor.iteye.com/blog/2029898