Hadoop review 1: build

Refer to http://hadoop.apache.org/docs/r1.0.4/cn/quickstart.html

Environment variables
hadoop-env.sh
export JAVA_HOME=/usr/local/java/jdk1.6.0_45/
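
A quick sanity check that the JDK path is valid (assuming the same location as above):
/usr/local/java/jdk1.6.0_45/bin/java -version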

Modify two configuration files
[root@centos54 conf]# cat hdfs-site.xml
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
    <property>
        <name>fs.default.name</name>
        <value>localhost:9000</value>
    </property>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
</configuration>
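
Note that the quickstart referenced above puts fs.default.name in conf/core-site.xml with an explicit hdfs:// scheme rather than in hdfs-site.xml; a minimal core-site.xml along those lines would be:

<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
    <property>
        <name>fs.default.name</name>
        <value>hdfs://localhost:9000</value>
    </property>
</configuration>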



[root@centos54 conf]# cat mapred-site.xml

<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
    <property>
        <name>mapred.job.tracker</name>
        <value>localhost:9001</value>
    </property>
</configuration>

Set up passwordless ssh to the local machine (a minimal sketch follows the /etc/hosts entry below).
For ssh, make sure localhost resolves via /etc/hosts:
vim /etc/hosts
::1     localhost6.localdomain localhost
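
The passwordless ssh setup itself is the standard quickstart recipe; a minimal sketch:
ssh-keygen -t dsa -P '' -f ~/.ssh/id_dsa
cat ~/.ssh/id_dsa.pub >> ~/.ssh/authorized_keys
ssh localhost    # should now log in without a password prompt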


export HADOOP_HOME=/data/hadoop/hadoop-1.2.1
export HADOOP_CONF_DIR=$HADOOP_HOME/conf
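
These exports only apply to the current shell; appending them to ~/.bashrc makes them permanent, and adding bin to PATH is an optional convenience (the commands below use relative paths anyway):
export PATH=$PATH:$HADOOP_HOME/bin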


Initialize HDFS and start all the daemons:
bin/hadoop namenode -format
bin/start-all.sh

Several processes should be running after startup:
[root@centos54 bin]# jps
23275 NameNode
23396 DataNode
23543 SecondaryNameNode

23641 JobTracker
23768 TaskTracker
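
If any of these are missing, check the logs under $HADOOP_HOME/logs. The counterpart script for shutting everything down later is:
bin/stop-all.sh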

Example of running a job (the grep example; a wordcount variant is sketched below)
mkdir haoning
cd haoning
../bin/hadoop fs -put ../conf input  
../bin/hadoop jar ../hadoop-examples-1.2.1.jar grep input output 'dfs[a-z.]+'
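
The same jar also contains a wordcount example; a sketch of running it on the same input (output-wordcount is an arbitrary directory name of my choosing, and must not exist beforehand):
../bin/hadoop jar ../hadoop-examples-1.2.1.jar wordcount input output-wordcount
../bin/hadoop fs -cat output-wordcount/*    # output-wordcount: arbitrary name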



[root@centos54 haoning]# tree
.
|-- input
|   |-- capacity-scheduler.xml
|   |-- configuration.xsl
|   |-- core-site.xml
|   |-- fair-scheduler.xml
|   |-- hadoop-env.sh
|   |-- hadoop-metrics2.properties
|   |-- hadoop-policy.xml
|   |-- hdfs-site.xml
|   |-- log4j.properties
|   |-- mapred-queue-acls.xml
|   |-- mapred-site.xml
|   |-- masters
|   |-- slaves
|   |-- ssl-client.xml.example
|   |-- ssl-server.xml.example
|   |-- task-log4j.properties
|   `-- taskcontroller.cfg
`-- output
    |-- _SUCCESS
    `-- part-00000


The results obtained:
[root@centos54 haoning]# cat output/*
1       dfs.replication
1       dfs.server.namenode.
1       dfsadmin


Validate the results against the input:

cd input
[root@centos54 input]# grep dfs *
hadoop-env.sh:# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
hadoop-policy.xml:    dfsadmin and mradmin commands to refresh the security policy in-effect.
hdfs-site.xml:        <name>dfs.replication</name>
log4j.properties:log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=WARN
[root@centos54 input]#


NameNode - http://localhost:50070/
JobTracker - http://localhost:50030/
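
HDFS status can also be checked from the command line (run from $HADOOP_HOME):
bin/hadoop dfsadmin -report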
