hadoop伪分布配置文件记录
core-site.xml
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://192.168.1.8:8020</value>
</property>
<!-- 用来指定Hadoop运行时产生文件的存放目录 -->
<property>
<name>hadoop.tmp.dir</name>
<value>/home/fafa/hadoop_tmp</value>
</property>
</configuration>
hdfs-site.xml
<configuration>
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
<property>
<name>dfs.namenode.name.dir</name>
<value>/home/fafa/hadoop/hadoop/dfs/name</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>/home/fafa/hadoop/hadoop/dfs/data</value>
</property>
</configuration>
mapred-site.xml
<configuration>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
</configuration>
yarn-site.xml
<configuration>
<!-- Site specific YARN configuration properties -->
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
</configuration>
hadoop-env.sh
export JAVA_HOME=/home/fafa/java/jdk
export HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_PREFIX}/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_PREFIX/lib"
yarn-env.sh
export HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_PREFIX}/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_PREFIX/lib"
Starting secondary namenodes [0.0.0.0]