Hadoop cluster migration and merge

1. Install Maven, Protobuf, and Java, and configure the environment variables

export JAVA_HOME=/usr/local/java
export PATH=$JAVA_HOME/bin:$PATH
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar

export M2_HOME=/usr/local/maven
export M2=$M2_HOME/bin
export PATH=$M2:$PATH

export HADOOP_HOME=/usr/local/hadoop/
export PATH=$PATH:$HADOOP_HOME/bin
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib:$HADOOP_COMMON_LIB_NATIVE_DIR"


export HBASE_HOME=/usr/local/hbase
export PATH=$PATH:$HBASE_HOME/bin


export PROTOBUF_HOME=/usr/local/protobuf
export PATH=$PATH:$PROTOBUF_HOME/bin
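
Once these variables are in place (for example in /etc/profile), it is worth confirming that each tool resolves correctly. A quick check, assuming the exports above have been sourced into the current shell:

source /etc/profile
java -version
mvn -version
protoc --version    # Hadoop 2.x native builds expect libprotoc 2.5.0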


Install the build dependencies with yum:

yum -y install svn ncurses-devel gcc*
yum -y install lzo-devel zlib-devel autoconf automake libtool cmake openssl-devel
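
If Protobuf is installed from source rather than from a package, the usual autoconf sequence applies. A minimal sketch, assuming the protobuf-2.5.0 source tarball has already been downloaded and that the --prefix matches the PROTOBUF_HOME exported above:

tar -xzf protobuf-2.5.0.tar.gz
cd protobuf-2.5.0
./configure --prefix=/usr/local/protobuf
make && make install
protoc --version    # should report libprotoc 2.5.0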


2. Compile Hadoop


mvn clean package -Pdist,native -DskipTests -Dtar
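
Run this from the root of the Hadoop source tree. As a rough guide (the exact path depends on the Hadoop version being built), the packaged distribution ends up under hadoop-dist/target/; after it is unpacked to $HADOOP_HOME, the native libraries can be checked with:

hadoop checknative -a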



3. Replace the configuration files and update the IP addresses
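
A minimal sketch of what that typically involves; the hostnames old-nn and new-nn and the node list are placeholders, and the property names are the standard Hadoop 2.x keys (fs.defaultFS in core-site.xml, dfs.namenode.name.dir / dfs.datanode.data.dir in hdfs-site.xml, yarn.nodemanager.local-dirs / yarn.nodemanager.log-dirs in yarn-site.xml):

cd $HADOOP_HOME/etc/hadoop
# point fs.defaultFS and the slaves file at the new NameNode / DataNodes
sed -i 's/old-nn/new-nn/g' core-site.xml hdfs-site.xml yarn-site.xml slaves
# push the same configuration to every node in the merged cluster
for node in node1 node2 node3; do
    scp core-site.xml hdfs-site.xml yarn-site.xml mapred-site.xml slaves $node:$HADOOP_HOME/etc/hadoop/
done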


4. Set up passwordless SSH login between the cluster nodes
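
A minimal sketch for one node, assuming a hadoop user and that node1/node2 stand in for the real hostnames; repeat (or loop) for every node that needs to reach every other node:

ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
ssh-copy-id hadoop@node1
ssh-copy-id hadoop@node2
ssh hadoop@node1 hostname    # should log in without a password prompt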


5. Create the data directories

mkdir -p /data/hdfs/tmp
mkdir -p /data/hdfs/dfs/name
mkdir -p /data/hdfs/dfs/data
mkdir -p /data/yarn/local
mkdir -p /data/yarn/logs
chown -R hadoop:hadoop /data

The existing data directories can be left alone; do not delete them.


Copy the namesecondary and name directories from the old cluster's NameNode.
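
A rough sketch of that copy, assuming old-nn is the old NameNode host and that HDFS has been stopped on it first so the metadata is consistent; the source paths below are placeholders for wherever dfs.namenode.name.dir and the checkpoint directory pointed on the old cluster:

# run on the new NameNode host, with HDFS stopped on the old cluster
scp -r hadoop@old-nn:/data/hdfs/dfs/name /data/hdfs/dfs/
scp -r hadoop@old-nn:/data/hdfs/dfs/namesecondary /data/hdfs/dfs/
chown -R hadoop:hadoop /data/hdfs/dfs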




Reposted from blog.csdn.net/qq_25954159/article/details/72845232