export PATH=${HADOOP_HOME}/bin:${PATH}
c201-106$ cd hadoop-0.20.2-new
c201-106$ ls
bin CHANGES.txt docs hadoop-0.20.2-examples.jar ivy librecordio README.txt
build.xml conf hadoop-0.20.2-ant.jar hadoop-0.20.2-test.jar ivy.xml LICENSE.txt src
c++ contrib hadoop-0.20.2-core.jar hadoop-0.20.2-tools.jar lib NOTICE.txt webapps
c201-106$ hadoop fs -mkdir input
c201-106$ hadoop fs -put conf/* input/
c201-106$ hadoop fs -ls
Found 1 items
drwxr-xr-x - hr4757 supergroup 0 2010-11-19 18:48 /user/hr4757/input
c201-106$ hadoop fs -ls input
Found 17 items
-rw-r--r-- 3 hr4757 supergroup 3936 2010-11-19 18:48 /user/hr4757/input/capacity-scheduler.xml
-rw-r--r-- 3 hr4757 supergroup 535 2010-11-19 18:48 /user/hr4757/input/configuration.xsl
-rw-r--r-- 3 hr4757 supergroup 178 2010-11-19 18:48 /user/hr4757/input/core-site.xml
-rw-r--r-- 3 hr4757 supergroup 311 2010-11-19 18:48 /user/hr4757/input/core-site.xml.default
-rw-r--r-- 3 hr4757 supergroup 2502 2010-11-19 18:48 /user/hr4757/input/hadoop-env.sh
-rw-r--r-- 3 hr4757 supergroup 1245 2010-11-19 18:48 /user/hr4757/input/hadoop-metrics.properties
-rw-r--r-- 3 hr4757 supergroup 4190 2010-11-19 18:48 /user/hr4757/input/hadoop-policy.xml
-rw-r--r-- 3 hr4757 supergroup 178 2010-11-19 18:48 /user/hr4757/input/hdfs-site.xml
-rw-r--r-- 3 hr4757 supergroup 483 2010-11-19 18:48 /user/hr4757/input/hdfs-site.xml.default
-rw-r--r-- 3 hr4757 supergroup 2815 2010-11-19 18:48 /user/hr4757/input/log4j.properties
-rw-r--r-- 3 hr4757 supergroup 178 2010-11-19 18:48 /user/hr4757/input/mapred-site.xml
-rw-r--r-- 3 hr4757 supergroup 728 2010-11-19 18:48 /user/hr4757/input/mapred-site.xml.default
-rw-r--r-- 3 hr4757 supergroup 10 2010-11-19 18:48 /user/hr4757/input/masters
-rw-r--r-- 3 hr4757 supergroup 10 2010-11-19 18:48 /user/hr4757/input/slaves
-rw-r--r-- 3 hr4757 supergroup 10 2010-11-19 18:48 /user/hr4757/input/slaves.default
-rw-r--r-- 3 hr4757 supergroup 1243 2010-11-19 18:48 /user/hr4757/input/ssl-client.xml.example
-rw-r--r-- 3 hr4757 supergroup 1195 2010-11-19 18:48 /user/hr4757/input/ssl-server.xml.example
c201-106$ hadoop jar hadoop-0.20.2-examples.jar grep input output '[helowrd]+'
...
10/11/17 20:25:09 INFO mapred.FileInputFormat: Total input paths to process : 1
10/11/17 20:25:09 INFO mapred.JobClient: Running job: job_local_0002
10/11/17 20:25:09 INFO mapred.FileInputFormat: Total input paths to process : 1
10/11/17 20:25:09 INFO mapred.MapTask: numReduceTasks: 1
10/11/17 20:25:09 INFO mapred.MapTask: io.sort.mb = 100
10/11/17 20:25:09 INFO mapred.MapTask: data buffer = 79691776/99614720
10/11/17 20:25:09 INFO mapred.MapTask: record buffer = 262144/327680
10/11/17 20:25:09 INFO mapred.MapTask: Starting flush of map output
10/11/17 20:25:09 INFO mapred.MapTask: Finished spill 0
10/11/17 20:25:09 INFO mapred.TaskRunner: Task:attempt_local_0002_m_000000_0 is done. And is in the process of commiting
10/11/17 20:25:09 INFO mapred.LocalJobRunner: file:/home/01603/hr4757/hadoop-0.20.2-new/grep-temp-2082616452/part-00000:0+946
10/11/17 20:25:09 INFO mapred.TaskRunner: Task 'attempt_local_0002_m_000000_0' done.
10/11/17 20:25:09 INFO mapred.LocalJobRunner:
10/11/17 20:25:09 INFO mapred.Merger: Merging 1 sorted segments
10/11/17 20:25:09 INFO mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 598 bytes
10/11/17 20:25:09 INFO mapred.LocalJobRunner:
10/11/17 20:25:09 INFO mapred.TaskRunner: Task:attempt_local_0002_r_000000_0 is done. And is in the process of commiting
10/11/17 20:25:09 INFO mapred.LocalJobRunner:
10/11/17 20:25:09 INFO mapred.TaskRunner: Task attempt_local_0002_r_000000_0 is allowed to commit now
10/11/17 20:25:09 INFO mapred.FileOutputCommitter: Saved output of task 'attempt_local_0002_r_000000_0' to file:/home/01603/hr4757/hadoop-0.20.2-new/output
10/11/17 20:25:09 INFO mapred.LocalJobRunner: reduce > reduce
10/11/17 20:25:09 INFO mapred.TaskRunner: Task 'attempt_local_0002_r_000000_0' done.
10/11/17 20:25:10 INFO mapred.JobClient: map 100% reduce 100%
10/11/17 20:25:10 INFO mapred.JobClient: Job complete: job_local_0002
10/11/17 20:25:10 INFO mapred.JobClient: Counters: 13
10/11/17 20:25:10 INFO mapred.JobClient: FileSystemCounters
10/11/17 20:25:10 INFO mapred.JobClient: FILE_BYTES_READ=645226
10/11/17 20:25:10 INFO mapred.JobClient: FILE_BYTES_WRITTEN=689887
10/11/17 20:25:10 INFO mapred.JobClient: Map-Reduce Framework
10/11/17 20:25:10 INFO mapred.JobClient: Reduce input groups=23
10/11/17 20:25:10 INFO mapred.JobClient: Combine output records=0
10/11/17 20:25:10 INFO mapred.JobClient: Map input records=44
10/11/17 20:25:10 INFO mapred.JobClient: Reduce shuffle bytes=0
10/11/17 20:25:10 INFO mapred.JobClient: Reduce output records=44
10/11/17 20:25:10 INFO mapred.JobClient: Spilled Records=88
10/11/17 20:25:10 INFO mapred.JobClient: Map output bytes=508
10/11/17 20:25:10 INFO mapred.JobClient: Map input bytes=860
10/11/17 20:25:10 INFO mapred.JobClient: Combine input records=0
10/11/17 20:25:10 INFO mapred.JobClient: Map output records=44
10/11/17 20:25:10 INFO mapred.JobClient: Reduce input records=44
c201-106$ mkdir output
c201-106$ hadoop fs -get output/* output/
c201-106$ cat output/*
320 e
255 o
173 l
147 er
111 r
86 ro
65 he
54 de
41 or
41 ed
41 d
33 h
28 re
24 ol
16 le
15 ode
15 hed
12 ll
12 ler
11 red
9 llowed
9 wheel
9 hre
7 w
6 wo
5 ld
5 wh
4 hee
4 ore
4 err
2 lo
2 oll
2 wor
1 ollow
1 oller
1 rel
1 llo
1 hor
1 wee
1 here
1 doo
1 who
1 do
1 rd
c201-106$