diff --git a/hadoop.log b/hadoop.log
index 72b1af430bc6984fad7aa45962cff662b4f9b908..f7ca6177641d3adeacb1c4535e3bf95c001790fb 100644
--- a/hadoop.log
+++ b/hadoop.log
@@ -1,102 +1,1794 @@
-2017-03-10 12:58:10,580 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 12:58:11,075 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 12:58:11,087 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 12:58:11,561 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 12:58:11,594 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 12:58:11,707 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 12:58:11,996 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1458741767_0001
-2017-03-10 12:58:12,393 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 12:58:12,394 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1458741767_0001
-2017-03-10 12:58:12,399 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 12:58:12,419 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 12:58:12,422 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 12:58:12,540 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 12:58:12,543 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1458741767_0001_m_000000_0
-2017-03-10 12:58:12,607 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 12:58:12,632 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 12:58:12,640 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 12:58:12,811 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 12:58:12,823 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 12:58:12,824 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 12:58:12,824 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 12:58:12,824 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 12:58:12,832 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 12:58:13,397 INFO org.apache.hadoop.mapreduce.Job: Job job_local1458741767_0001 running in uber mode : false
-2017-03-10 12:58:13,399 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 12:58:14,983 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 12:58:14,985 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 12:58:14,985 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 12:58:14,985 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
-2017-03-10 12:58:14,985 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
-2017-03-10 12:58:16,014 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 12:58:16,798 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 12:58:16,800 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1458741767_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 12:58:16,806 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 12:58:16,806 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1458741767_0001_m_000000_0' done.
-2017-03-10 12:58:16,806 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1458741767_0001_m_000000_0
-2017-03-10 12:58:16,806 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 12:58:16,814 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 12:58:16,814 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1458741767_0001_r_000000_0
-2017-03-10 12:58:16,819 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 12:58:16,819 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 12:58:16,823 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
-2017-03-10 12:58:16,833 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 12:58:16,839 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1458741767_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 12:58:16,862 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 12:58:16,862 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1458741767_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
-2017-03-10 12:58:16,869 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1458741767_0001_m_000000_0
-2017-03-10 12:58:16,869 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
-2017-03-10 12:58:16,870 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 12:58:16,871 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 12:58:16,871 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 12:58:16,876 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 12:58:16,876 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 12:58:17,106 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
-2017-03-10 12:58:17,107 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
-2017-03-10 12:58:17,107 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 12:58:17,107 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 12:58:17,108 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 12:58:17,108 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 12:58:17,113 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 12:58:17,416 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 12:58:17,746 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1458741767_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 12:58:17,747 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 12:58:17,751 INFO org.apache.hadoop.mapred.Task: Task attempt_local1458741767_0001_r_000000_0 is allowed to commit now
-2017-03-10 12:58:17,752 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1458741767_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1458741767_0001_r_000000
-2017-03-10 12:58:17,756 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 12:58:17,756 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1458741767_0001_r_000000_0' done.
-2017-03-10 12:58:17,757 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1458741767_0001_r_000000_0
-2017-03-10 12:58:17,758 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 12:58:18,417 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 12:58:18,418 INFO org.apache.hadoop.mapreduce.Job: Job job_local1458741767_0001 completed successfully
-2017-03-10 12:58:18,427 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+2017-03-11 03:48:40,291 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 03:48:40,772 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 03:48:40,787 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 03:48:41,151 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 03:48:41,158 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 03:48:41,192 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 03:48:41,424 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1883045034_0001
+2017-03-11 03:48:41,880 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 03:48:41,882 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1883045034_0001
+2017-03-11 03:48:41,881 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 03:48:41,902 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 03:48:41,904 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 03:48:42,042 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 03:48:42,043 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1883045034_0001_m_000000_0
+2017-03-11 03:48:42,081 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 03:48:42,091 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 03:48:42,096 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-11 03:48:42,172 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 03:48:42,172 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 03:48:42,173 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 03:48:42,173 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 03:48:42,173 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 03:48:42,173 WARN org.apache.hadoop.mapred.MapTask: Unable to initialize MapOutputCollector org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+java.lang.ClassCastException: class similarity.LongPair
+	at java.lang.Class.asSubclass(Class.java:3165)
+	at org.apache.hadoop.mapred.JobConf.getOutputKeyComparator(JobConf.java:892)
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.init(MapTask.java:1005)
+	at org.apache.hadoop.mapred.MapTask.createSortingCollector(MapTask.java:402)
+	at org.apache.hadoop.mapred.MapTask.access$100(MapTask.java:81)
+	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.<init>(MapTask.java:698)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:770)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-11 03:48:42,177 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 03:48:42,178 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1883045034_0001
+java.lang.Exception: java.io.IOException: Initialization of all the collectors failed. Error in last collector was :class similarity.LongPair
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.io.IOException: Initialization of all the collectors failed. Error in last collector was :class similarity.LongPair
+	at org.apache.hadoop.mapred.MapTask.createSortingCollector(MapTask.java:414)
+	at org.apache.hadoop.mapred.MapTask.access$100(MapTask.java:81)
+	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.<init>(MapTask.java:698)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:770)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+Caused by: java.lang.ClassCastException: class similarity.LongPair
+	at java.lang.Class.asSubclass(Class.java:3165)
+	at org.apache.hadoop.mapred.JobConf.getOutputKeyComparator(JobConf.java:892)
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.init(MapTask.java:1005)
+	at org.apache.hadoop.mapred.MapTask.createSortingCollector(MapTask.java:402)
+	... 10 more
+2017-03-11 03:48:42,901 INFO org.apache.hadoop.mapreduce.Job: Job job_local1883045034_0001 running in uber mode : false
+2017-03-11 03:48:42,902 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 03:48:42,904 INFO org.apache.hadoop.mapreduce.Job: Job job_local1883045034_0001 failed with state FAILED due to: NA
+2017-03-11 03:48:42,908 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
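The ClassCastException in this first failed run comes from JobConf.getOutputKeyComparator: when no sort comparator is registered explicitly, Hadoop calls asSubclass(WritableComparable.class) on the map output key class, so the cast fails if similarity.LongPair does not implement WritableComparable. The LongPair source is not part of this log; the class below is only a minimal sketch of a version that would satisfy that cast (field names and layout are assumptions).

    package similarity;

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;
    import org.apache.hadoop.io.WritableComparable;

    // Hypothetical minimal LongPair: serializable by Hadoop and usable as a sort key.
    public class LongPair implements WritableComparable<LongPair> {
        private long first;
        private long second;

        public LongPair() {}                       // no-arg constructor required by Hadoop reflection

        public LongPair(long first, long second) {
            this.first = first;
            this.second = second;
        }

        @Override
        public void write(DataOutput out) throws IOException {
            out.writeLong(first);
            out.writeLong(second);
        }

        @Override
        public void readFields(DataInput in) throws IOException {
            first = in.readLong();
            second = in.readLong();
        }

        @Override
        public int compareTo(LongPair o) {         // defines the map-output sort order
            int cmp = Long.compare(first, o.first);
            return cmp != 0 ? cmp : Long.compare(second, o.second);
        }

        @Override
        public int hashCode() {                    // used by the default HashPartitioner
            return (int) (31 * first + second);
        }

        @Override
        public boolean equals(Object o) {
            if (!(o instanceof LongPair)) return false;
            LongPair p = (LongPair) o;
            return first == p.first && second == p.second;
        }
    }

Alternatively, registering a RawComparator via job.setSortComparatorClass(...) would avoid the asSubclass fallback entirely.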
+2017-03-11 04:03:00,838 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:03:01,367 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:03:01,370 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:03:01,795 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:03:01,802 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:03:01,938 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:03:02,191 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local590950587_0001
+2017-03-11 04:03:02,704 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:03:02,704 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local590950587_0001
+2017-03-11 04:03:02,710 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:03:02,733 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:03:02,735 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:03:02,865 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:03:02,869 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local590950587_0001_m_000000_0
+2017-03-11 04:03:02,928 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:03:02,957 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:03:02,962 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-11 04:03:03,048 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:03:03,049 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:03:03,049 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:03:03,049 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:03:03,049 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:03:03,055 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:03:03,124 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:03:03,131 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:03:03,140 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:03:03,141 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local590950587_0001
+java.lang.Exception: java.lang.NumberFormatException: For input string: ""
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.lang.NumberFormatException: For input string: ""
+	at java.lang.NumberFormatException.forInputString(NumberFormatException.java:65)
+	at java.lang.Long.parseLong(Long.java:453)
+	at java.lang.Long.parseLong(Long.java:483)
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:119)
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-11 04:03:03,731 INFO org.apache.hadoop.mapreduce.Job: Job job_local590950587_0001 running in uber mode : false
+2017-03-11 04:03:03,732 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 04:03:03,735 INFO org.apache.hadoop.mapreduce.Job: Job job_local590950587_0001 failed with state FAILED due to: NA
+2017-03-11 04:03:03,759 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
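Long.parseLong("") produces exactly this NumberFormatException, so NaiveApproach$Map.map (NaiveApproach.java:119) is parsing a field that is empty for at least one input line, for example the blank lines in pg100.txt. The mapper source is not shown here; the class below is only a hypothetical sketch of the guard, and the id<TAB>text layout and all names are assumptions.

    import java.io.IOException;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;

    // Hypothetical guard around the parse step that was crashing in the log.
    public class SafeParseMapper extends Mapper<LongWritable, Text, LongWritable, Text> {
        @Override
        protected void map(LongWritable offset, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString().trim();
            if (line.isEmpty()) {
                return;                              // blank line: nothing to parse
            }
            String[] fields = line.split("\t", 2);   // assumed "id<TAB>text" layout
            if (fields[0].isEmpty()) {
                return;                              // first field empty: skip instead of throwing
            }
            long lineId = Long.parseLong(fields[0]); // safe now: fields[0] is non-empty
            context.write(new LongWritable(lineId),
                          new Text(fields.length > 1 ? fields[1] : ""));
        }
    }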
+2017-03-11 04:03:50,097 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:03:50,594 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:03:50,595 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:03:51,047 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:03:51,057 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:03:51,171 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:03:51,405 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1119859446_0001
+2017-03-11 04:03:51,850 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:03:51,850 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1119859446_0001
+2017-03-11 04:03:51,860 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:03:51,867 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:03:51,873 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:03:51,989 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:03:51,989 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1119859446_0001_m_000000_0
+2017-03-11 04:03:52,051 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:03:52,059 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:03:52,067 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline:0+5323735
+2017-03-11 04:03:52,149 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:03:52,149 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:03:52,149 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:03:52,150 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:03:52,150 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:03:52,154 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:03:52,186 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:03:52,195 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:03:52,205 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:03:52,206 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1119859446_0001
+java.lang.Exception: java.io.IOException: Type mismatch in value from map: expected org.apache.hadoop.io.DoubleWritable, received org.apache.hadoop.io.Text
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.io.IOException: Type mismatch in value from map: expected org.apache.hadoop.io.DoubleWritable, received org.apache.hadoop.io.Text
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.collect(MapTask.java:1078)
+	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.write(MapTask.java:715)
+	at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)
+	at org.apache.hadoop.mapreduce.lib.map.WrappedMapper$Context.write(WrappedMapper.java:112)
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:125)
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-11 04:03:52,852 INFO org.apache.hadoop.mapreduce.Job: Job job_local1119859446_0001 running in uber mode : false
+2017-03-11 04:03:52,853 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 04:03:52,855 INFO org.apache.hadoop.mapreduce.Job: Job job_local1119859446_0001 failed with state FAILED due to: NA
+2017-03-11 04:03:52,859 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
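This IOException is raised by MapTask$MapOutputBuffer.collect when a value written by the mapper does not match the configured map output value class: the job is configured for DoubleWritable values while Map.map writes a Text. When the map output types differ from the final output types, both have to be declared separately in the driver. The real NaiveApproach driver is not in this log, so the following is only a sketch under that assumption; the Reduce class name and the final output types are hypothetical, while similarity.NaiveApproach.Map and similarity.LongPair are taken from the stack traces.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.DoubleWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    import similarity.LongPair;
    import similarity.NaiveApproach;

    public class NaiveApproachDriver {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            Job job = Job.getInstance(conf, "NaiveApproach");
            job.setJarByClass(NaiveApproachDriver.class);    // also relevant to the "No job jar file set" warning when run from a jar

            job.setMapperClass(NaiveApproach.Map.class);      // class seen in the stack traces
            job.setReducerClass(NaiveApproach.Reduce.class);  // "Reduce" is an assumed name

            // Map-side output types: what Map.map actually writes.
            job.setMapOutputKeyClass(LongPair.class);
            job.setMapOutputValueClass(Text.class);

            // Final (reduce-side) output types; assumed here.
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(DoubleWritable.class);

            FileInputFormat.addInputPath(job, new Path(args[0]));
            FileOutputFormat.setOutputPath(job, new Path(args[1]));
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }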
+2017-03-11 04:06:23,828 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:06:24,303 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:06:24,329 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:06:24,793 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:06:24,801 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:06:24,917 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:06:25,235 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1773920421_0001
+2017-03-11 04:06:25,643 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:06:25,644 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1773920421_0001
+2017-03-11 04:06:25,646 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:06:25,660 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:06:25,664 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:06:25,776 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:06:25,777 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1773920421_0001_m_000000_0
+2017-03-11 04:06:25,849 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:06:25,875 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:06:25,880 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline:0+5323735
+2017-03-11 04:06:25,973 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:06:25,973 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:06:25,973 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:06:25,973 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:06:25,973 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:06:25,975 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:06:26,015 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:06:26,027 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:06:26,041 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:06:26,042 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1773920421_0001
+java.lang.Exception: java.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.LongWritable, received similarity.LongPair
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.LongWritable, received similarity.LongPair
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.collect(MapTask.java:1073)
+	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.write(MapTask.java:715)
+	at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)
+	at org.apache.hadoop.mapreduce.lib.map.WrappedMapper$Context.write(WrappedMapper.java:112)
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:126)
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-11 04:06:26,650 INFO org.apache.hadoop.mapreduce.Job: Job job_local1773920421_0001 running in uber mode : false
+2017-03-11 04:06:26,651 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 04:06:26,653 INFO org.apache.hadoop.mapreduce.Job: Job job_local1773920421_0001 failed with state FAILED due to: NA
+2017-03-11 04:06:26,658 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
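This run (and the identical one that follows) is the key-side counterpart of the previous failure: MapOutputBuffer.collect rejects the similarity.LongPair key because the configured map output key class is still LongWritable. Under the same assumptions as the driver sketch above, the missing declaration would be the single line:

    job.setMapOutputKeyClass(similarity.LongPair.class);  // keys Map.map actually emits; without it Hadoop falls back to the output key class, here LongWritable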
+2017-03-11 04:07:51,343 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:07:51,835 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:07:51,844 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:07:52,378 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:07:52,387 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:07:52,426 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:07:52,660 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local526022282_0001
+2017-03-11 04:07:53,134 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:07:53,135 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local526022282_0001
+2017-03-11 04:07:53,140 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:07:53,143 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:07:53,150 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:07:53,280 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:07:53,281 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local526022282_0001_m_000000_0
+2017-03-11 04:07:53,398 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:07:53,439 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:07:53,455 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline:0+5323735
+2017-03-11 04:07:53,776 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:07:53,785 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:07:53,785 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:07:53,792 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:07:53,793 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:07:53,806 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:07:53,942 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:07:53,973 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:07:54,024 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:07:54,027 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local526022282_0001
+java.lang.Exception: java.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.LongWritable, received similarity.LongPair
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.LongWritable, received similarity.LongPair
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.collect(MapTask.java:1073)
+	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.write(MapTask.java:715)
+	at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)
+	at org.apache.hadoop.mapreduce.lib.map.WrappedMapper$Context.write(WrappedMapper.java:112)
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:126)
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-11 04:07:54,137 INFO org.apache.hadoop.mapreduce.Job: Job job_local526022282_0001 running in uber mode : false
+2017-03-11 04:07:54,138 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 04:07:54,141 INFO org.apache.hadoop.mapreduce.Job: Job job_local526022282_0001 failed with state FAILED due to: NA
+2017-03-11 04:07:54,166 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
+2017-03-11 04:11:51,758 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:11:52,358 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:11:52,361 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:11:52,853 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:11:52,874 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:11:53,013 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:11:53,344 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1568897348_0001
+2017-03-11 04:11:54,306 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:11:54,311 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1568897348_0001
+2017-03-11 04:11:54,308 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:11:54,344 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:11:54,350 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:11:54,539 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:11:54,540 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1568897348_0001_m_000000_0
+2017-03-11 04:11:54,571 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:11:54,584 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:11:54,587 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline:0+5323735
+2017-03-11 04:11:54,661 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:11:54,661 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:11:54,661 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:11:54,662 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:11:54,662 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:11:54,666 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:11:55,052 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:11:55,052 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 68406923; bufvoid = 104857600
+2017-03-11 04:11:55,053 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22344612(89378448); length = 3869785/6553600
+2017-03-11 04:11:55,053 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 72308587 kvi 18077140(72308560)
+2017-03-11 04:11:55,330 INFO org.apache.hadoop.mapreduce.Job: Job job_local1568897348_0001 running in uber mode : false
+2017-03-11 04:11:55,331 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 04:11:57,435 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:11:59,147 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 04:11:59,147 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 72308587 kv 18077140(72308560) kvi 17101736(68406944)
+2017-03-11 04:11:59,263 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:11:59,263 INFO org.apache.hadoop.mapred.MapTask: bufstart = 72308587; bufend = 33904999; bufvoid = 104857586
+2017-03-11 04:11:59,263 INFO org.apache.hadoop.mapred.MapTask: kvstart = 18077140(72308560); kvend = 13719124(54876496); length = 4358017/6553600
+2017-03-11 04:11:59,263 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 38047511 kvi 9511872(38047488)
+2017-03-11 04:12:00,610 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:03,620 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:03,852 INFO org.apache.hadoop.mapred.MapTask: Finished spill 1
+2017-03-11 04:12:03,853 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 38047511 kv 9511872(38047488) kvi 8656512(34626048)
+2017-03-11 04:12:03,995 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:12:04,000 INFO org.apache.hadoop.mapred.MapTask: bufstart = 38047511; bufend = 1671792; bufvoid = 104857600
+2017-03-11 04:12:04,000 INFO org.apache.hadoop.mapred.MapTask: kvstart = 9511872(38047488); kvend = 5660820(22643280); length = 3851053/6553600
+2017-03-11 04:12:04,000 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 5714480 kvi 1428616(5714464)
+2017-03-11 04:12:06,623 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:07,887 INFO org.apache.hadoop.mapred.MapTask: Finished spill 2
+2017-03-11 04:12:07,887 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 5714480 kv 1428616(5714464) kvi 548152(2192608)
+2017-03-11 04:12:07,981 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:12:07,981 INFO org.apache.hadoop.mapred.MapTask: bufstart = 5714480; bufend = 74891682; bufvoid = 104857600
+2017-03-11 04:12:07,981 INFO org.apache.hadoop.mapred.MapTask: kvstart = 1428616(5714464); kvend = 23965796(95863184); length = 3677221/6553600
+2017-03-11 04:12:07,982 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 78839250 kvi 19709808(78839232)
+2017-03-11 04:12:09,631 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:11,605 INFO org.apache.hadoop.mapred.MapTask: Finished spill 3
+2017-03-11 04:12:11,605 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 78839250 kv 19709808(78839232) kvi 18809832(75239328)
+2017-03-11 04:12:11,705 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:12:11,705 INFO org.apache.hadoop.mapred.MapTask: bufstart = 78839250; bufend = 42355081; bufvoid = 104857600
+2017-03-11 04:12:11,705 INFO org.apache.hadoop.mapred.MapTask: kvstart = 19709808(78839232); kvend = 15831652(63326608); length = 3878157/6553600
+2017-03-11 04:12:11,705 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 46302649 kvi 11575656(46302624)
+2017-03-11 04:12:12,632 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:15,474 INFO org.apache.hadoop.mapred.MapTask: Finished spill 4
+2017-03-11 04:12:15,474 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 46302649 kv 11575656(46302624) kvi 10588776(42355104)
+2017-03-11 04:12:15,575 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:12:15,575 INFO org.apache.hadoop.mapred.MapTask: bufstart = 46302649; bufend = 8741998; bufvoid = 104857588
+2017-03-11 04:12:15,575 INFO org.apache.hadoop.mapred.MapTask: kvstart = 11575656(46302624); kvend = 7428380(29713520); length = 4147277/6553600
+2017-03-11 04:12:15,575 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 12736558 kvi 3184132(12736528)
+2017-03-11 04:12:15,634 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:18,636 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:19,604 INFO org.apache.hadoop.mapred.MapTask: Finished spill 5
+2017-03-11 04:12:19,604 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 12736558 kv 3184132(12736528) kvi 2209768(8839072)
+2017-03-11 04:12:19,700 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:12:19,701 INFO org.apache.hadoop.mapred.MapTask: bufstart = 12736558; bufend = 80806124; bufvoid = 104857600
+2017-03-11 04:12:19,701 INFO org.apache.hadoop.mapred.MapTask: kvstart = 3184132(12736528); kvend = 25444400(101777600); length = 3954133/6553600
+2017-03-11 04:12:19,701 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 84800684 kvi 21200164(84800656)
+2017-03-11 04:12:21,645 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:23,545 INFO org.apache.hadoop.mapred.MapTask: Finished spill 6
+2017-03-11 04:12:23,546 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 84800684 kv 21200164(84800656) kvi 20269056(81076224)
+2017-03-11 04:12:23,648 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:12:23,649 INFO org.apache.hadoop.mapred.MapTask: bufstart = 84800684; bufend = 47222339; bufvoid = 104857600
+2017-03-11 04:12:23,649 INFO org.apache.hadoop.mapred.MapTask: kvstart = 21200164(84800656); kvend = 17048460(68193840); length = 4151705/6553600
+2017-03-11 04:12:23,649 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 51216899 kvi 12804220(51216880)
+2017-03-11 04:12:24,647 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:27,634 INFO org.apache.hadoop.mapred.MapTask: Finished spill 7
+2017-03-11 04:12:27,634 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 51216899 kv 12804220(51216880) kvi 11805592(47222368)
+2017-03-11 04:12:27,656 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:27,743 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:12:27,743 INFO org.apache.hadoop.mapred.MapTask: bufstart = 51216899; bufend = 13351553; bufvoid = 104857594
+2017-03-11 04:12:27,744 INFO org.apache.hadoop.mapred.MapTask: kvstart = 12804220(51216880); kvend = 8580768(34323072); length = 4223453/6553600
+2017-03-11 04:12:27,744 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 17394241 kvi 4348556(17394224)
+2017-03-11 04:12:30,658 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:31,844 INFO org.apache.hadoop.mapred.MapTask: Finished spill 8
+2017-03-11 04:12:31,845 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 17394241 kv 4348556(17394224) kvi 3337896(13351584)
+2017-03-11 04:12:31,945 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:12:31,945 INFO org.apache.hadoop.mapred.MapTask: bufstart = 17394241; bufend = 85442671; bufvoid = 104857600
+2017-03-11 04:12:31,945 INFO org.apache.hadoop.mapred.MapTask: kvstart = 4348556(17394224); kvend = 389144(1556576); length = 3959413/6553600
+2017-03-11 04:12:31,945 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 89485359 kvi 22371332(89485328)
+2017-03-11 04:12:33,663 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:35,791 INFO org.apache.hadoop.mapred.MapTask: Finished spill 9
+2017-03-11 04:12:35,791 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 89485359 kv 22371332(89485328) kvi 21471172(85884688)
+2017-03-11 04:12:35,883 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:12:35,883 INFO org.apache.hadoop.mapred.MapTask: bufstart = 89485359; bufend = 53575344; bufvoid = 104857593
+2017-03-11 04:12:35,884 INFO org.apache.hadoop.mapred.MapTask: kvstart = 22371332(89485328); kvend = 18636712(74546848); length = 3734621/6553600
+2017-03-11 04:12:35,884 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 57569904 kvi 14392472(57569888)
+2017-03-11 04:12:36,674 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:14:59,711 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:15:00,316 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:15:00,320 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:15:00,373 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
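FileOutputFormat refuses to submit a job whose output directory already exists, which is what this PriviledgedActionException wraps. A common workaround in the driver is to delete the stale directory before submission; whether discarding the old output is acceptable here is an assumption, and the helper below is only a sketch with hypothetical names.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Hypothetical helper: remove an existing output directory (local or HDFS) before job submission.
    public final class OutputDirs {
        public static void deleteIfExists(Configuration conf, String dir) throws IOException {
            Path outputDir = new Path(dir);
            FileSystem fs = outputDir.getFileSystem(conf);  // resolves file:/ or hdfs:/ from the path
            if (fs.exists(outputDir)) {
                fs.delete(outputDir, true);                 // recursive delete of the stale output
            }
        }
    }

    // Usage in the driver, before FileOutputFormat.setOutputPath(job, ...):
    //   OutputDirs.deleteIfExists(conf, "file:/home/cloudera/workspace/WordCount/output");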
+2017-03-11 04:15:38,000 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:15:38,582 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:15:38,588 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:15:38,943 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:15:38,952 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:15:38,984 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:15:39,209 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local398888533_0001
+2017-03-11 04:15:39,676 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:15:39,678 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local398888533_0001
+2017-03-11 04:15:39,678 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:15:39,698 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:39,700 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:15:39,814 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:15:39,818 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:39,870 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:39,884 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:39,887 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 04:15:39,971 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:15:39,971 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:15:39,971 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:15:39,971 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:15:39,971 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:15:39,974 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:15:40,007 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 04:15:40,007 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:15:40,007 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:15:40,007 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4977; bufvoid = 104857600
+2017-03-11 04:15:40,007 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214148(104856592); length = 249/6553600
+2017-03-11 04:15:40,016 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:15:40,038 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 04:15:40,046 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 04:15:40,052 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-11 04:15:40,052 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_m_000000_0' done.
+2017-03-11 04:15:40,052 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,052 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:15:40,105 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 04:15:40,105 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000000_0
+2017-03-11 04:15:40,116 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,117 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,118 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6a59828c
+2017-03-11 04:15:40,137 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,142 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,167 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 04:15:40,167 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,171 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,171 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,172 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,173 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,173 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,180 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,180 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,181 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,181 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,182 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,182 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,183 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,195 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,198 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 04:15:40,201 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 04:15:40,204 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,205 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000000_0 is allowed to commit now
+2017-03-11 04:15:40,205 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000000
+2017-03-11 04:15:40,206 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,206 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000000_0' done.
+2017-03-11 04:15:40,206 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000000_0
+2017-03-11 04:15:40,206 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000001_0
+2017-03-11 04:15:40,207 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,208 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,208 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4bc7ca6e
+2017-03-11 04:15:40,209 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,210 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,211 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,211 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,211 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,212 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,219 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,220 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,221 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,221 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,222 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,223 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,223 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,223 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,224 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,224 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,229 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000001_0 is done. And is in the process of committing
+2017-03-11 04:15:40,230 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,230 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000001_0 is allowed to commit now
+2017-03-11 04:15:40,231 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000001_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000001
+2017-03-11 04:15:40,232 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,232 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000001_0' done.
+2017-03-11 04:15:40,233 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000001_0
+2017-03-11 04:15:40,233 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000002_0
+2017-03-11 04:15:40,234 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,234 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,234 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@18e73d0
+2017-03-11 04:15:40,235 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,236 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,237 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,237 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,237 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,238 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,238 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,238 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,239 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,239 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,240 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,240 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,240 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,240 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,250 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,250 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,259 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000002_0 is done. And is in the process of committing
+2017-03-11 04:15:40,260 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,261 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000002_0 is allowed to commit now
+2017-03-11 04:15:40,262 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000002_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000002
+2017-03-11 04:15:40,264 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,264 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000002_0' done.
+2017-03-11 04:15:40,268 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000002_0
+2017-03-11 04:15:40,268 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000003_0
+2017-03-11 04:15:40,271 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,272 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,272 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5f0a2a42
+2017-03-11 04:15:40,273 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,277 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,279 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,280 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,280 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,280 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,281 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,281 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,282 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,282 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,283 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,283 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,283 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,283 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,284 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,285 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,287 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000003_0 is done. And is in the process of committing
+2017-03-11 04:15:40,288 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,288 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000003_0 is allowed to commit now
+2017-03-11 04:15:40,289 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000003_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000003
+2017-03-11 04:15:40,289 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,289 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000003_0' done.
+2017-03-11 04:15:40,290 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000003_0
+2017-03-11 04:15:40,290 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000004_0
+2017-03-11 04:15:40,291 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,291 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,291 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5f3ef269
+2017-03-11 04:15:40,292 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,293 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,294 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,295 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,295 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,295 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,296 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,296 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,297 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,297 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,297 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,297 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,297 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,297 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,298 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,299 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,301 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000004_0 is done. And is in the process of committing
+2017-03-11 04:15:40,302 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,303 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000004_0 is allowed to commit now
+2017-03-11 04:15:40,303 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000004_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000004
+2017-03-11 04:15:40,305 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,305 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000004_0' done.
+2017-03-11 04:15:40,305 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000004_0
+2017-03-11 04:15:40,305 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000005_0
+2017-03-11 04:15:40,306 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,307 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,307 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1c58f805
+2017-03-11 04:15:40,307 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,308 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,310 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,318 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,318 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,319 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,319 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,319 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,320 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,320 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,321 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,321 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,321 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,321 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,321 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,322 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,325 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000005_0 is done. And is in the process of committing
+2017-03-11 04:15:40,326 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,326 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000005_0 is allowed to commit now
+2017-03-11 04:15:40,328 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000005_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000005
+2017-03-11 04:15:40,329 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,329 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000005_0' done.
+2017-03-11 04:15:40,329 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000005_0
+2017-03-11 04:15:40,329 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000006_0
+2017-03-11 04:15:40,331 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,331 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,332 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6ed27650
+2017-03-11 04:15:40,332 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,333 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,335 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,336 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,336 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,336 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,337 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,337 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,339 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,339 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,340 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,340 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,340 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,341 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,341 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,342 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,347 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000006_0 is done. And is in the process of committing
+2017-03-11 04:15:40,349 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,349 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000006_0 is allowed to commit now
+2017-03-11 04:15:40,350 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000006_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000006
+2017-03-11 04:15:40,350 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,351 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000006_0' done.
+2017-03-11 04:15:40,351 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000006_0
+2017-03-11 04:15:40,351 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000007_0
+2017-03-11 04:15:40,352 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,352 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,353 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1e059ae6
+2017-03-11 04:15:40,354 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,355 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,356 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,356 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,357 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,357 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,357 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,357 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,358 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,358 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,359 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,359 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,359 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,359 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,359 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,360 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,368 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000007_0 is done. And is in the process of committing
+2017-03-11 04:15:40,369 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,369 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000007_0 is allowed to commit now
+2017-03-11 04:15:40,370 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000007_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000007
+2017-03-11 04:15:40,370 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,370 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000007_0' done.
+2017-03-11 04:15:40,370 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000007_0
+2017-03-11 04:15:40,370 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000008_0
+2017-03-11 04:15:40,376 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,376 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,376 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@439e91fe
+2017-03-11 04:15:40,377 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,378 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,379 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,380 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,380 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,380 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,381 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,381 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,383 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,383 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,384 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,384 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,384 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,384 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,385 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,386 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,389 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000008_0 is done. And is in the process of committing
+2017-03-11 04:15:40,392 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,392 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000008_0 is allowed to commit now
+2017-03-11 04:15:40,393 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000008_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000008
+2017-03-11 04:15:40,393 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,393 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000008_0' done.
+2017-03-11 04:15:40,394 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000008_0
+2017-03-11 04:15:40,394 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000009_0
+2017-03-11 04:15:40,395 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,395 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,395 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6556d991
+2017-03-11 04:15:40,396 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,397 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,401 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,402 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,402 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,402 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,402 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,403 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,403 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,404 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,405 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,407 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,408 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,409 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,411 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,411 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,415 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000009_0 is done. And is in the process of committing
+2017-03-11 04:15:40,416 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,416 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000009_0 is allowed to commit now
+2017-03-11 04:15:40,417 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000009_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000009
+2017-03-11 04:15:40,417 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,418 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000009_0' done.
+2017-03-11 04:15:40,418 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000009_0
+2017-03-11 04:15:40,418 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000010_0
+2017-03-11 04:15:40,419 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,419 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,419 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7d224d90
+2017-03-11 04:15:40,420 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,421 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000010_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,422 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#11 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,422 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,422 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,423 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,423 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,423 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,429 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,429 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,436 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,437 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,437 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,437 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,437 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,438 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,440 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000010_0 is done. And is in the process of committing
+2017-03-11 04:15:40,441 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,441 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000010_0 is allowed to commit now
+2017-03-11 04:15:40,443 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000010_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000010
+2017-03-11 04:15:40,444 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,444 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000010_0' done.
+2017-03-11 04:15:40,444 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000010_0
+2017-03-11 04:15:40,444 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000011_0
+2017-03-11 04:15:40,447 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,448 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,450 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@14c8c644
+2017-03-11 04:15:40,454 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,455 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000011_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,457 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#12 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,457 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,457 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,458 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,459 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,460 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,463 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,463 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,464 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,464 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,464 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,464 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,464 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,465 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,468 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000011_0 is done. And is in the process of committing
+2017-03-11 04:15:40,469 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,469 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000011_0 is allowed to commit now
+2017-03-11 04:15:40,470 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000011_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000011
+2017-03-11 04:15:40,475 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,475 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000011_0' done.
+2017-03-11 04:15:40,475 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000011_0
+2017-03-11 04:15:40,475 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000012_0
+2017-03-11 04:15:40,477 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,478 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,479 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@22be9f8f
+2017-03-11 04:15:40,480 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,481 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000012_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,482 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#13 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,483 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,483 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,483 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,484 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,484 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,485 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,485 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,485 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,486 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,486 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,486 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,486 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,486 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,491 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000012_0 is done. And is in the process of committing
+2017-03-11 04:15:40,493 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,493 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000012_0 is allowed to commit now
+2017-03-11 04:15:40,494 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000012_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000012
+2017-03-11 04:15:40,495 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,495 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000012_0' done.
+2017-03-11 04:15:40,495 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000012_0
+2017-03-11 04:15:40,495 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000013_0
+2017-03-11 04:15:40,497 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,498 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,498 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2d861958
+2017-03-11 04:15:40,501 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,503 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000013_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,510 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#14 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,511 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,511 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,511 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,511 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,512 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,514 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,514 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,514 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,515 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,515 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,515 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,516 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,516 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,518 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000013_0 is done. And is in the process of committing
+2017-03-11 04:15:40,519 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,519 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000013_0 is allowed to commit now
+2017-03-11 04:15:40,520 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000013_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000013
+2017-03-11 04:15:40,521 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,521 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000013_0' done.
+2017-03-11 04:15:40,521 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000013_0
+2017-03-11 04:15:40,521 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000014_0
+2017-03-11 04:15:40,524 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,525 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,525 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3db0ab45
+2017-03-11 04:15:40,526 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,528 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000014_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,530 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#15 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,531 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,532 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,532 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,533 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,533 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,534 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,534 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,534 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,534 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,534 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,535 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,535 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,535 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,539 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000014_0 is done. And is in the process of committing
+2017-03-11 04:15:40,543 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,544 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000014_0 is allowed to commit now
+2017-03-11 04:15:40,546 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000014_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000014
+2017-03-11 04:15:40,547 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,547 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000014_0' done.
+2017-03-11 04:15:40,547 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000014_0
+2017-03-11 04:15:40,547 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000015_0
+2017-03-11 04:15:40,548 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,548 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,549 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@b2d2e71
+2017-03-11 04:15:40,550 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,551 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000015_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,553 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#16 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,553 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,553 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,553 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,554 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,554 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,555 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,555 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,556 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,556 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,556 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,556 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,556 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,563 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,588 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000015_0 is done. And is in the process of committing
+2017-03-11 04:15:40,590 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,590 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000015_0 is allowed to commit now
+2017-03-11 04:15:40,593 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000015_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000015
+2017-03-11 04:15:40,599 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,601 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000015_0' done.
+2017-03-11 04:15:40,601 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000015_0
+2017-03-11 04:15:40,601 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000016_0
+2017-03-11 04:15:40,603 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,603 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,603 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@736d0c80
+2017-03-11 04:15:40,604 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,605 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000016_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,607 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#17 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 5105 len: 460 to MEMORY
+2017-03-11 04:15:40,608 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5105 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,608 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5105, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5105
+2017-03-11 04:15:40,609 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,611 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,611 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,612 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,614 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:15:40,618 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5105 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,620 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 468 bytes from disk
+2017-03-11 04:15:40,620 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,620 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,621 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:15:40,621 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,629 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000016_0 is done. And is in the process of committing
+2017-03-11 04:15:40,630 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,631 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000016_0 is allowed to commit now
+2017-03-11 04:15:40,631 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000016_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000016
+2017-03-11 04:15:40,632 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,632 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000016_0' done.
+2017-03-11 04:15:40,632 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000016_0
+2017-03-11 04:15:40,632 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000017_0
+2017-03-11 04:15:40,633 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,633 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,633 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@66be558a
+2017-03-11 04:15:40,634 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,635 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000017_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,636 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#18 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,636 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,637 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,637 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,637 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,637 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,638 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,638 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,639 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,639 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,639 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,639 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,639 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,640 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,643 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000017_0 is done. And is in the process of committing
+2017-03-11 04:15:40,646 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,647 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000017_0 is allowed to commit now
+2017-03-11 04:15:40,650 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000017_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000017
+2017-03-11 04:15:40,651 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,651 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000017_0' done.
+2017-03-11 04:15:40,651 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000017_0
+2017-03-11 04:15:40,651 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000018_0
+2017-03-11 04:15:40,652 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,652 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,652 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@294dcddd
+2017-03-11 04:15:40,656 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,657 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000018_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,659 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#19 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,659 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,659 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,659 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,660 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,660 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,661 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,661 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,661 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,661 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,662 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,662 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,662 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,662 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,665 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000018_0 is done. And is in the process of committing
+2017-03-11 04:15:40,666 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,666 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000018_0 is allowed to commit now
+2017-03-11 04:15:40,667 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000018_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000018
+2017-03-11 04:15:40,667 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,668 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000018_0' done.
+2017-03-11 04:15:40,668 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000018_0
+2017-03-11 04:15:40,668 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000019_0
+2017-03-11 04:15:40,668 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,669 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,669 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5afe331c
+2017-03-11 04:15:40,670 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,671 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000019_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,678 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#20 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,680 INFO org.apache.hadoop.mapreduce.Job: Job job_local398888533_0001 running in uber mode : false
+2017-03-11 04:15:40,681 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 04:15:40,682 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,682 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,683 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,683 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,683 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,685 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,685 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,685 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,686 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,686 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,686 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,686 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,686 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,694 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000019_0 is done. And is in the process of committing
+2017-03-11 04:15:40,700 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,700 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000019_0 is allowed to commit now
+2017-03-11 04:15:40,701 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000019_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000019
+2017-03-11 04:15:40,702 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,703 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000019_0' done.
+2017-03-11 04:15:40,703 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000019_0
+2017-03-11 04:15:40,703 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000020_0
+2017-03-11 04:15:40,707 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,707 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,707 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@33193e3b
+2017-03-11 04:15:40,708 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,709 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000020_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,710 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#21 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,710 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,710 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,710 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,711 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,712 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,712 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,712 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,713 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,713 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,714 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,714 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,714 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,715 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,719 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000020_0 is done. And is in the process of committing
+2017-03-11 04:15:40,721 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,721 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000020_0 is allowed to commit now
+2017-03-11 04:15:40,722 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000020_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000020
+2017-03-11 04:15:40,723 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,723 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000020_0' done.
+2017-03-11 04:15:40,723 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000020_0
+2017-03-11 04:15:40,723 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000021_0
+2017-03-11 04:15:40,724 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,724 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,724 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@8bf290b
+2017-03-11 04:15:40,726 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,729 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000021_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,731 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#22 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,731 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,731 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,731 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,732 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,732 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,733 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,733 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,734 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,734 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,734 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,734 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,735 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,735 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,740 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000021_0 is done. And is in the process of committing
+2017-03-11 04:15:40,740 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,741 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000021_0 is allowed to commit now
+2017-03-11 04:15:40,741 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000021_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000021
+2017-03-11 04:15:40,742 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,743 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000021_0' done.
+2017-03-11 04:15:40,743 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000021_0
+2017-03-11 04:15:40,743 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000022_0
+2017-03-11 04:15:40,745 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,746 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,746 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@ff2d097
+2017-03-11 04:15:40,747 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,748 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000022_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,750 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#23 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,751 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,751 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,752 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,752 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,752 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,754 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,754 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,754 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,755 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,755 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,755 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,755 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,756 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,768 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000022_0 is done. And is in the process of committing
+2017-03-11 04:15:40,769 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,769 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000022_0 is allowed to commit now
+2017-03-11 04:15:40,770 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000022_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000022
+2017-03-11 04:15:40,770 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,771 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000022_0' done.
+2017-03-11 04:15:40,771 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000022_0
+2017-03-11 04:15:40,771 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000023_0
+2017-03-11 04:15:40,772 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,772 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,772 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@502c186
+2017-03-11 04:15:40,773 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,774 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000023_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,778 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#24 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,781 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,784 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,784 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,785 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,785 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,786 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,786 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,786 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,786 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,786 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,786 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,787 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,787 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,794 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000023_0 is done. And is in the process of committing
+2017-03-11 04:15:40,797 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,801 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000023_0 is allowed to commit now
+2017-03-11 04:15:40,801 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000023_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000023
+2017-03-11 04:15:40,803 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,803 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000023_0' done.
+2017-03-11 04:15:40,803 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000023_0
+2017-03-11 04:15:40,803 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000024_0
+2017-03-11 04:15:40,807 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,808 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,808 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@ad99f2d
+2017-03-11 04:15:40,808 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,814 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000024_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,815 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#25 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,825 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,825 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,825 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,826 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,826 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,827 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,827 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,827 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,828 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,828 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,828 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,828 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,828 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,844 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000024_0 is done. And is in the process of committing
+2017-03-11 04:15:40,845 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,845 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000024_0 is allowed to commit now
+2017-03-11 04:15:40,846 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000024_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000024
+2017-03-11 04:15:40,846 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,847 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000024_0' done.
+2017-03-11 04:15:40,847 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000024_0
+2017-03-11 04:15:40,847 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000025_0
+2017-03-11 04:15:40,850 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,852 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,853 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7561f05
+2017-03-11 04:15:40,857 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,863 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000025_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,869 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#26 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,870 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,870 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,870 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,871 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,871 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,872 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,872 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,873 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,873 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,873 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,873 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,878 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,879 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,897 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000025_0 is done. And is in the process of committing
+2017-03-11 04:15:40,902 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,902 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000025_0 is allowed to commit now
+2017-03-11 04:15:40,903 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000025_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000025
+2017-03-11 04:15:40,904 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,904 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000025_0' done.
+2017-03-11 04:15:40,904 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000025_0
+2017-03-11 04:15:40,904 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000026_0
+2017-03-11 04:15:40,905 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,906 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,906 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@545e2452
+2017-03-11 04:15:40,908 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,915 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000026_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,920 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#27 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,920 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,920 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,921 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,921 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,921 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,922 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,924 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,925 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,925 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,925 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,925 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,930 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,930 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,945 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000026_0 is done. And is in the process of committing
+2017-03-11 04:15:40,946 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,947 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000026_0 is allowed to commit now
+2017-03-11 04:15:40,950 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000026_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000026
+2017-03-11 04:15:40,957 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,958 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000026_0' done.
+2017-03-11 04:15:40,961 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000026_0
+2017-03-11 04:15:40,962 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000027_0
+2017-03-11 04:15:40,964 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,965 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,965 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3e6abe5a
+2017-03-11 04:15:40,967 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,971 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000027_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,982 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#28 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,982 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,982 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,983 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,983 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,984 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,984 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,985 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,985 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,986 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,986 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,986 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,986 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,986 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,994 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000027_0 is done. And is in the process of committing
+2017-03-11 04:15:41,005 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,005 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000027_0 is allowed to commit now
+2017-03-11 04:15:41,006 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000027_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000027
+2017-03-11 04:15:41,012 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,012 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000027_0' done.
+2017-03-11 04:15:41,012 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000027_0
+2017-03-11 04:15:41,012 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000028_0
+2017-03-11 04:15:41,013 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,014 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,015 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@55662d9a
+2017-03-11 04:15:41,015 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,016 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000028_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,018 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#29 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,019 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,019 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,020 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,021 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,021 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,023 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,024 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,024 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,032 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,032 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,033 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,033 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,034 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,041 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000028_0 is done. And is in the process of committing
+2017-03-11 04:15:41,042 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,045 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000028_0 is allowed to commit now
+2017-03-11 04:15:41,046 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000028_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000028
+2017-03-11 04:15:41,047 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,047 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000028_0' done.
+2017-03-11 04:15:41,047 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000028_0
+2017-03-11 04:15:41,047 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000029_0
+2017-03-11 04:15:41,048 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,048 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,048 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2fb3260
+2017-03-11 04:15:41,049 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,049 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000029_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,061 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#30 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,061 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,062 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,062 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,064 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,064 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,065 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,065 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,065 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,066 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,066 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,066 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,066 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,066 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,071 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000029_0 is done. And is in the process of committing
+2017-03-11 04:15:41,072 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,072 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000029_0 is allowed to commit now
+2017-03-11 04:15:41,072 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000029_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000029
+2017-03-11 04:15:41,074 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,074 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000029_0' done.
+2017-03-11 04:15:41,074 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000029_0
+2017-03-11 04:15:41,074 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000030_0
+2017-03-11 04:15:41,075 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,075 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,075 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1a6a1f5b
+2017-03-11 04:15:41,076 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,076 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000030_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,077 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#31 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,078 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,078 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,078 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,079 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,079 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,080 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,080 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,081 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,081 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,081 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,081 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,081 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,082 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,086 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000030_0 is done. And is in the process of committing
+2017-03-11 04:15:41,087 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,087 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000030_0 is allowed to commit now
+2017-03-11 04:15:41,088 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000030_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000030
+2017-03-11 04:15:41,088 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,089 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000030_0' done.
+2017-03-11 04:15:41,089 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000030_0
+2017-03-11 04:15:41,089 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000031_0
+2017-03-11 04:15:41,089 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,090 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,090 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4f53314
+2017-03-11 04:15:41,091 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,093 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000031_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,099 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#32 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,099 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,099 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,102 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,103 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,103 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,110 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,110 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,111 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,111 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,111 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,111 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,112 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,112 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,118 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000031_0 is done. And is in the process of committing
+2017-03-11 04:15:41,119 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,119 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000031_0 is allowed to commit now
+2017-03-11 04:15:41,120 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000031_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000031
+2017-03-11 04:15:41,124 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,125 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000031_0' done.
+2017-03-11 04:15:41,125 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000031_0
+2017-03-11 04:15:41,125 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000032_0
+2017-03-11 04:15:41,126 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,127 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,127 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@57e13166
+2017-03-11 04:15:41,127 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,129 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000032_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,130 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#33 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,131 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,131 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,131 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,132 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,132 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,132 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,133 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,133 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,134 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,134 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,134 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,134 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,135 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,139 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000032_0 is done. And is in the process of committing
+2017-03-11 04:15:41,140 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,141 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000032_0 is allowed to commit now
+2017-03-11 04:15:41,143 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000032_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000032
+2017-03-11 04:15:41,145 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,145 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000032_0' done.
+2017-03-11 04:15:41,145 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000032_0
+2017-03-11 04:15:41,145 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000033_0
+2017-03-11 04:15:41,147 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,147 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,148 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@66a62053
+2017-03-11 04:15:41,149 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,150 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000033_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,151 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#34 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,151 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,151 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,151 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,152 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,152 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,154 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,155 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,155 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,156 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,156 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,156 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,156 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,157 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,162 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000033_0 is done. And is in the process of committing
+2017-03-11 04:15:41,163 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,163 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000033_0 is allowed to commit now
+2017-03-11 04:15:41,164 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000033_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000033
+2017-03-11 04:15:41,165 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,165 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000033_0' done.
+2017-03-11 04:15:41,165 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000033_0
+2017-03-11 04:15:41,166 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000034_0
+2017-03-11 04:15:41,166 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,167 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,167 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6dc6dc2b
+2017-03-11 04:15:41,167 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,168 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000034_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,171 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#35 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,171 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,171 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,172 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,172 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,172 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,173 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,179 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,180 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,180 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,180 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,180 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,180 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,182 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,191 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000034_0 is done. And is in the process of committing
+2017-03-11 04:15:41,192 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,192 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000034_0 is allowed to commit now
+2017-03-11 04:15:41,193 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000034_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000034
+2017-03-11 04:15:41,194 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,194 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000034_0' done.
+2017-03-11 04:15:41,194 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000034_0
+2017-03-11 04:15:41,194 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000035_0
+2017-03-11 04:15:41,195 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,196 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,196 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@347e12a
+2017-03-11 04:15:41,196 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,197 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000035_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,198 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#36 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,199 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,199 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,199 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,200 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,200 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,201 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,201 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,201 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,201 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,201 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,202 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,202 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,203 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,209 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000035_0 is done. And is in the process of committing
+2017-03-11 04:15:41,210 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,210 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000035_0 is allowed to commit now
+2017-03-11 04:15:41,211 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000035_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000035
+2017-03-11 04:15:41,219 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,220 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000035_0' done.
+2017-03-11 04:15:41,220 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000035_0
+2017-03-11 04:15:41,220 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000036_0
+2017-03-11 04:15:41,228 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,228 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,228 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4edb440a
+2017-03-11 04:15:41,240 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,242 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000036_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,247 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#37 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,252 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,252 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,252 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,253 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,253 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,254 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,254 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,255 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,256 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,256 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,256 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,256 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,257 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,272 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000036_0 is done. And is in the process of committing
+2017-03-11 04:15:41,273 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,273 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000036_0 is allowed to commit now
+2017-03-11 04:15:41,276 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000036_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000036
+2017-03-11 04:15:41,277 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,278 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000036_0' done.
+2017-03-11 04:15:41,278 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000036_0
+2017-03-11 04:15:41,278 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000037_0
+2017-03-11 04:15:41,282 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,285 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,286 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6349766d
+2017-03-11 04:15:41,290 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,294 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000037_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,300 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#38 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,300 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,300 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,301 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,301 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,301 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,302 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,302 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,303 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,303 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,303 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,303 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,303 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,304 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,314 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000037_0 is done. And is in the process of committing
+2017-03-11 04:15:41,315 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,315 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000037_0 is allowed to commit now
+2017-03-11 04:15:41,316 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000037_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000037
+2017-03-11 04:15:41,322 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,322 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000037_0' done.
+2017-03-11 04:15:41,322 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000037_0
+2017-03-11 04:15:41,323 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000038_0
+2017-03-11 04:15:41,326 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,327 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,327 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@d62d2df
+2017-03-11 04:15:41,334 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,335 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000038_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,337 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#39 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,337 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,337 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,337 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,338 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,338 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,340 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,340 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,341 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,341 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,341 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,342 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,342 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,343 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,348 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000038_0 is done. And is in the process of committing
+2017-03-11 04:15:41,348 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,349 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000038_0 is allowed to commit now
+2017-03-11 04:15:41,349 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000038_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000038
+2017-03-11 04:15:41,350 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,350 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000038_0' done.
+2017-03-11 04:15:41,350 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000038_0
+2017-03-11 04:15:41,350 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000039_0
+2017-03-11 04:15:41,351 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,352 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,352 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@eb9012c
+2017-03-11 04:15:41,353 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,354 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000039_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,355 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#40 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,356 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,356 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,359 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,360 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,360 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,362 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,363 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,364 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,365 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,365 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,365 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,366 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,366 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,370 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000039_0 is done. And is in the process of committing
+2017-03-11 04:15:41,371 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,371 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000039_0 is allowed to commit now
+2017-03-11 04:15:41,371 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000039_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000039
+2017-03-11 04:15:41,372 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,372 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000039_0' done.
+2017-03-11 04:15:41,372 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000039_0
+2017-03-11 04:15:41,372 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000040_0
+2017-03-11 04:15:41,373 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,376 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,377 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1b0243a2
+2017-03-11 04:15:41,379 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,380 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000040_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,381 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#41 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,381 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,381 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,382 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,382 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,382 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,383 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,384 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,384 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,384 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,385 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,385 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,385 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,385 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,391 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000040_0 is done. And is in the process of committing
+2017-03-11 04:15:41,396 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,397 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000040_0 is allowed to commit now
+2017-03-11 04:15:41,397 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000040_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000040
+2017-03-11 04:15:41,398 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,399 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000040_0' done.
+2017-03-11 04:15:41,399 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000040_0
+2017-03-11 04:15:41,399 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000041_0
+2017-03-11 04:15:41,399 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,400 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,400 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1590cd
+2017-03-11 04:15:41,400 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,405 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000041_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,411 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#42 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,412 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,412 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,413 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,413 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,413 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,414 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,414 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,415 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,415 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,415 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,415 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,416 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,416 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,427 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000041_0 is done. And is in the process of committing
+2017-03-11 04:15:41,429 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,429 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000041_0 is allowed to commit now
+2017-03-11 04:15:41,436 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000041_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000041
+2017-03-11 04:15:41,437 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,437 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000041_0' done.
+2017-03-11 04:15:41,437 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000041_0
+2017-03-11 04:15:41,437 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000042_0
+2017-03-11 04:15:41,438 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,438 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,441 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4fed51d
+2017-03-11 04:15:41,449 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,459 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000042_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,463 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#43 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,463 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,467 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,468 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,468 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,468 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,469 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,469 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,471 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,471 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,471 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,471 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,472 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,472 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,485 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000042_0 is done. And is in the process of committing
+2017-03-11 04:15:41,486 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,486 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000042_0 is allowed to commit now
+2017-03-11 04:15:41,490 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000042_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000042
+2017-03-11 04:15:41,490 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,490 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000042_0' done.
+2017-03-11 04:15:41,490 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000042_0
+2017-03-11 04:15:41,491 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000043_0
+2017-03-11 04:15:41,492 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,493 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,493 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@76fae43c
+2017-03-11 04:15:41,508 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,509 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000043_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,510 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#44 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,510 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,511 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,511 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,511 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,511 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,512 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,518 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,519 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,519 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,519 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,519 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,519 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,520 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,544 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000043_0 is done. And is in the process of committing
+2017-03-11 04:15:41,546 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,550 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000043_0 is allowed to commit now
+2017-03-11 04:15:41,551 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000043_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000043
+2017-03-11 04:15:41,551 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,553 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000043_0' done.
+2017-03-11 04:15:41,554 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000043_0
+2017-03-11 04:15:41,554 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000044_0
+2017-03-11 04:15:41,560 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,560 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,561 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1f9654c3
+2017-03-11 04:15:41,563 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,570 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000044_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,573 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#45 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,573 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,573 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,574 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,575 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,575 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,575 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,576 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,576 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,576 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,576 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,576 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,577 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,577 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,582 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000044_0 is done. And is in the process of committing
+2017-03-11 04:15:41,583 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,583 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000044_0 is allowed to commit now
+2017-03-11 04:15:41,584 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000044_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000044
+2017-03-11 04:15:41,587 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,587 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000044_0' done.
+2017-03-11 04:15:41,587 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000044_0
+2017-03-11 04:15:41,587 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000045_0
+2017-03-11 04:15:41,588 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,588 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,588 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4a1daaa
+2017-03-11 04:15:41,589 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,590 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000045_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,591 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#46 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,591 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,591 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,591 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,592 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,592 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,593 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,593 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,594 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,594 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,594 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,594 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,594 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,595 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,598 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000045_0 is done. And is in the process of committing
+2017-03-11 04:15:41,598 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,599 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000045_0 is allowed to commit now
+2017-03-11 04:15:41,599 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000045_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000045
+2017-03-11 04:15:41,600 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,600 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000045_0' done.
+2017-03-11 04:15:41,600 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000045_0
+2017-03-11 04:15:41,600 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000046_0
+2017-03-11 04:15:41,600 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,601 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,601 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@674612ad
+2017-03-11 04:15:41,602 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,603 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000046_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,604 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#47 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,604 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,605 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,605 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,605 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,605 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,606 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,606 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,607 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,607 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,607 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,607 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,607 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,608 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,610 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000046_0 is done. And is in the process of committing
+2017-03-11 04:15:41,611 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,611 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000046_0 is allowed to commit now
+2017-03-11 04:15:41,612 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000046_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000046
+2017-03-11 04:15:41,613 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,613 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000046_0' done.
+2017-03-11 04:15:41,613 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000046_0
+2017-03-11 04:15:41,613 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000047_0
+2017-03-11 04:15:41,620 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,620 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,621 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@46bf27
+2017-03-11 04:15:41,621 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,622 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000047_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,623 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#48 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,624 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,624 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,624 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,625 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,625 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,626 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,626 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,627 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,627 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,628 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,628 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,628 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,628 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,633 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000047_0 is done. And is in the process of committing
+2017-03-11 04:15:41,633 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,633 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000047_0 is allowed to commit now
+2017-03-11 04:15:41,635 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000047_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000047
+2017-03-11 04:15:41,635 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,635 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000047_0' done.
+2017-03-11 04:15:41,635 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000047_0
+2017-03-11 04:15:41,635 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000048_0
+2017-03-11 04:15:41,636 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,636 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,636 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3b88bc85
+2017-03-11 04:15:41,637 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,638 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000048_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,639 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#49 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,639 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,639 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,639 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,640 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,640 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,643 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,644 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,645 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,646 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,646 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,646 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,646 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,647 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,649 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000048_0 is done. And is in the process of committing
+2017-03-11 04:15:41,650 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,650 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000048_0 is allowed to commit now
+2017-03-11 04:15:41,650 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000048_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000048
+2017-03-11 04:15:41,653 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,653 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000048_0' done.
+2017-03-11 04:15:41,653 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000048_0
+2017-03-11 04:15:41,653 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000049_0
+2017-03-11 04:15:41,654 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,654 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,654 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4876e144
+2017-03-11 04:15:41,655 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,655 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000049_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,656 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#50 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,657 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,657 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,657 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,658 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,658 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,663 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,665 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,666 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,666 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,666 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,666 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,666 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,667 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,675 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000049_0 is done. And is in the process of committing
+2017-03-11 04:15:41,676 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,676 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000049_0 is allowed to commit now
+2017-03-11 04:15:41,677 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000049_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000049
+2017-03-11 04:15:41,678 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,678 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000049_0' done.
+2017-03-11 04:15:41,678 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000049_0
+2017-03-11 04:15:41,678 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 04:15:42,684 INFO org.apache.hadoop.mapreduce.Job: Job job_local398888533_0001 completed successfully
+2017-03-11 04:15:42,802 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
 	File System Counters
-		FILE: Number of bytes read=11514350
-		FILE: Number of bytes written=1395729
+		FILE: Number of bytes read=2535091
+		FILE: Number of bytes written=13621454
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=1023494
-		Map output bytes=8925696
-		Map output materialized bytes=167092
-		Input split bytes=120
-		Combine input records=1023494
-		Combine output records=34513
-		Reduce input groups=34513
-		Reduce shuffle bytes=167092
-		Reduce input records=34513
-		Reduce output records=34513
-		Spilled Records=69026
-		Shuffled Maps =1
+		Map input records=7
+		Map output records=63
+		Map output bytes=4977
+		Map output materialized bytes=1146
+		Input split bytes=122
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=63
+		Reduce shuffle bytes=1146
+		Reduce input records=63
+		Reduce output records=0
+		Spilled Records=126
+		Shuffled Maps =50
 		Failed Shuffles=0
-		Merged Map outputs=1
-		GC time elapsed (ms)=109
+		Merged Map outputs=50
+		GC time elapsed (ms)=44
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
-		Total committed heap usage (bytes)=331227136
+		Total committed heap usage (bytes)=8446291968
 	Shuffle Errors
 		BAD_ID=0
 		CONNECTION=0
@@ -105,104 +1797,102 @@
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=454
 	File Output Format Counters 
-		Bytes Written=363879
-2017-03-10 14:05:48,287 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:05:48,833 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:05:48,841 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:05:49,279 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:05:49,288 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:05:49,331 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:05:49,610 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1590990832_0001
-2017-03-10 14:05:50,040 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:05:50,042 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1590990832_0001
-2017-03-10 14:05:50,046 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:05:50,059 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:05:50,067 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:05:50,190 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:05:50,192 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1590990832_0001_m_000000_0
-2017-03-10 14:05:50,229 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:05:50,244 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:05:50,247 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:05:50,344 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:05:51,047 INFO org.apache.hadoop.mapreduce.Job: Job job_local1590990832_0001 running in uber mode : false
-2017-03-10 14:05:51,050 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
-2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
-2017-03-10 14:05:53,471 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:05:54,161 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:05:54,164 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1590990832_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:05:54,170 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:05:54,170 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1590990832_0001_m_000000_0' done.
-2017-03-10 14:05:54,170 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1590990832_0001_m_000000_0
-2017-03-10 14:05:54,170 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:05:54,178 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:05:54,178 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1590990832_0001_r_000000_0
-2017-03-10 14:05:54,183 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:05:54,183 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:05:54,185 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
-2017-03-10 14:05:54,198 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:05:54,206 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1590990832_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:05:54,277 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:05:54,277 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1590990832_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
-2017-03-10 14:05:54,297 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1590990832_0001_m_000000_0
-2017-03-10 14:05:54,298 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
-2017-03-10 14:05:54,302 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:05:54,305 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:05:54,305 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:05:54,315 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:05:54,319 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:05:54,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:05:54,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
-2017-03-10 14:05:54,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:05:54,703 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:05:54,704 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:05:54,704 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:05:54,714 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:05:55,077 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:05:55,853 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1590990832_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:05:55,858 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:05:55,859 INFO org.apache.hadoop.mapred.Task: Task attempt_local1590990832_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:05:55,859 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1590990832_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1590990832_0001_r_000000
-2017-03-10 14:05:55,861 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:05:55,861 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1590990832_0001_r_000000_0' done.
-2017-03-10 14:05:55,861 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1590990832_0001_r_000000_0
-2017-03-10 14:05:55,861 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:05:56,079 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:05:56,080 INFO org.apache.hadoop.mapreduce.Job: Job job_local1590990832_0001 completed successfully
-2017-03-10 14:05:56,090 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+		Bytes Written=400
+2017-03-11 04:16:54,623 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:16:55,099 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:16:55,113 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:16:55,444 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:16:55,452 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:16:55,485 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:16:55,706 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1882890440_0001
+2017-03-11 04:16:56,148 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:16:56,149 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1882890440_0001
+2017-03-11 04:16:56,162 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:16:56,170 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:16:56,172 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:16:56,300 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:16:56,300 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1882890440_0001_m_000000_0
+2017-03-11 04:16:56,341 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:16:56,354 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:16:56,358 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 04:16:56,445 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:16:56,445 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:16:56,445 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:16:56,445 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:16:56,445 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:16:56,448 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:16:56,479 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 04:16:56,479 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:16:56,479 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:16:56,479 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4977; bufvoid = 104857600
+2017-03-11 04:16:56,479 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214148(104856592); length = 249/6553600
+2017-03-11 04:16:56,490 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:16:56,496 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 04:16:56,501 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1882890440_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 04:16:56,510 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-11 04:16:56,511 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1882890440_0001_m_000000_0' done.
+2017-03-11 04:16:56,511 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1882890440_0001_m_000000_0
+2017-03-11 04:16:56,511 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:16:56,514 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 04:16:56,514 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1882890440_0001_r_000000_0
+2017-03-11 04:16:56,523 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:16:56,523 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:16:56,525 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@644fdf4b
+2017-03-11 04:16:56,536 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:16:56,542 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1882890440_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:16:56,566 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 04:16:56,570 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1882890440_0001_m_000000_0 decomp: 5105 len: 460 to MEMORY
+2017-03-11 04:16:56,575 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5105 bytes from map-output for attempt_local1882890440_0001_m_000000_0
+2017-03-11 04:16:56,576 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5105, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5105
+2017-03-11 04:16:56,577 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:16:56,578 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:16:56,578 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:16:56,587 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:16:56,588 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:16:56,590 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5105 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:16:56,591 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 468 bytes from disk
+2017-03-11 04:16:56,591 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:16:56,591 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:16:56,592 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:16:56,597 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:16:56,609 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 04:16:56,615 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1882890440_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 04:16:56,617 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:16:56,617 INFO org.apache.hadoop.mapred.Task: Task attempt_local1882890440_0001_r_000000_0 is allowed to commit now
+2017-03-11 04:16:56,618 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1882890440_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1882890440_0001_r_000000
+2017-03-11 04:16:56,618 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:16:56,618 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1882890440_0001_r_000000_0' done.
+2017-03-11 04:16:56,619 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1882890440_0001_r_000000_0
+2017-03-11 04:16:56,619 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 04:16:57,150 INFO org.apache.hadoop.mapreduce.Job: Job job_local1882890440_0001 running in uber mode : false
+2017-03-11 04:16:57,151 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 04:16:57,152 INFO org.apache.hadoop.mapreduce.Job: Job job_local1882890440_0001 completed successfully
+2017-03-11 04:16:57,163 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
 	File System Counters
-		FILE: Number of bytes read=11514350
-		FILE: Number of bytes written=1395729
+		FILE: Number of bytes read=2226
+		FILE: Number of bytes written=531632
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=1023494
-		Map output bytes=8925696
-		Map output materialized bytes=167092
-		Input split bytes=120
-		Combine input records=1023494
-		Combine output records=34513
-		Reduce input groups=34513
-		Reduce shuffle bytes=167092
-		Reduce input records=34513
-		Reduce output records=34513
-		Spilled Records=69026
+		Map input records=7
+		Map output records=63
+		Map output bytes=4977
+		Map output materialized bytes=460
+		Input split bytes=122
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=63
+		Reduce shuffle bytes=460
+		Reduce input records=63
+		Reduce output records=0
+		Spilled Records=126
 		Shuffled Maps =1
 		Failed Shuffles=0
 		Merged Map outputs=1
-		GC time elapsed (ms)=80
+		GC time elapsed (ms)=28
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
@@ -215,104 +1905,106 @@
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=454
 	File Output Format Counters 
-		Bytes Written=363879
-2017-03-10 14:07:44,622 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:07:45,122 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:07:45,129 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:07:45,628 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:07:45,645 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:07:45,678 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:07:45,909 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1520504035_0001
-2017-03-10 14:07:46,336 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:07:46,338 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1520504035_0001
-2017-03-10 14:07:46,337 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:07:46,344 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:07:46,346 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:07:46,457 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:07:46,460 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1520504035_0001_m_000000_0
-2017-03-10 14:07:46,523 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:07:46,542 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:07:46,545 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:07:46,634 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:07:46,634 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:07:46,634 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:07:46,634 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:07:46,635 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:07:46,639 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:07:47,343 INFO org.apache.hadoop.mapreduce.Job: Job job_local1520504035_0001 running in uber mode : false
-2017-03-10 14:07:47,344 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
-2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
-2017-03-10 14:07:49,778 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:07:50,507 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:07:50,510 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1520504035_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:07:50,516 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:07:50,516 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1520504035_0001_m_000000_0' done.
-2017-03-10 14:07:50,516 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1520504035_0001_m_000000_0
-2017-03-10 14:07:50,516 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:07:50,523 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:07:50,523 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1520504035_0001_r_000000_0
-2017-03-10 14:07:50,529 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:07:50,529 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:07:50,531 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
-2017-03-10 14:07:50,541 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:07:50,548 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1520504035_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:07:50,573 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:07:50,574 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1520504035_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
-2017-03-10 14:07:50,580 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1520504035_0001_m_000000_0
-2017-03-10 14:07:50,581 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
-2017-03-10 14:07:50,582 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:07:50,582 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:07:50,582 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:07:50,587 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:07:50,588 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:07:50,811 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:07:50,811 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
-2017-03-10 14:07:50,812 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:07:50,812 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:07:50,813 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:07:50,813 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:07:50,818 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:07:51,355 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:07:51,511 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1520504035_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:07:51,512 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:07:51,515 INFO org.apache.hadoop.mapred.Task: Task attempt_local1520504035_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:07:51,516 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1520504035_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1520504035_0001_r_000000
-2017-03-10 14:07:51,516 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:07:51,518 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1520504035_0001_r_000000_0' done.
-2017-03-10 14:07:51,518 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1520504035_0001_r_000000_0
-2017-03-10 14:07:51,518 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:07:52,356 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:07:52,356 INFO org.apache.hadoop.mapreduce.Job: Job job_local1520504035_0001 completed successfully
-2017-03-10 14:07:52,367 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+		Bytes Written=8
+2017-03-11 04:18:08,388 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:18:08,880 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:18:08,895 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:18:08,921 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
+2017-03-11 04:18:26,782 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:18:27,289 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:18:27,292 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:18:27,758 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:18:27,784 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:18:27,882 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:18:28,141 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local596953480_0001
+2017-03-11 04:18:28,593 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:18:28,594 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local596953480_0001
+2017-03-11 04:18:28,598 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:18:28,602 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:18:28,606 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:18:28,736 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:18:28,737 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local596953480_0001_m_000000_0
+2017-03-11 04:18:28,769 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:18:28,778 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:18:28,781 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 04:18:28,861 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:18:28,861 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:18:28,861 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:18:28,861 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:18:28,861 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:18:28,866 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:18:28,901 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 04:18:28,902 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:18:28,902 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:18:28,902 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4977; bufvoid = 104857600
+2017-03-11 04:18:28,902 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214148(104856592); length = 249/6553600
+2017-03-11 04:18:28,912 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:18:28,918 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 04:18:28,920 INFO org.apache.hadoop.mapred.Task: Task:attempt_local596953480_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 04:18:28,926 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-11 04:18:28,926 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local596953480_0001_m_000000_0' done.
+2017-03-11 04:18:28,926 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local596953480_0001_m_000000_0
+2017-03-11 04:18:28,926 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:18:28,929 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 04:18:28,930 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local596953480_0001_r_000000_0
+2017-03-11 04:18:28,934 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:18:28,934 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:18:28,936 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@644fdf4b
+2017-03-11 04:18:28,946 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:18:28,952 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local596953480_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:18:28,979 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 04:18:28,980 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local596953480_0001_m_000000_0 decomp: 5105 len: 460 to MEMORY
+2017-03-11 04:18:28,985 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5105 bytes from map-output for attempt_local596953480_0001_m_000000_0
+2017-03-11 04:18:28,985 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5105, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5105
+2017-03-11 04:18:28,986 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:18:28,988 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:18:28,988 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:18:28,997 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:18:28,997 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:18:28,999 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5105 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:18:28,999 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 468 bytes from disk
+2017-03-11 04:18:29,000 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:18:29,000 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:18:29,000 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:18:29,007 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:18:29,015 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 04:18:29,029 INFO org.apache.hadoop.mapred.Task: Task:attempt_local596953480_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 04:18:29,033 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:18:29,034 INFO org.apache.hadoop.mapred.Task: Task attempt_local596953480_0001_r_000000_0 is allowed to commit now
+2017-03-11 04:18:29,034 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local596953480_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local596953480_0001_r_000000
+2017-03-11 04:18:29,035 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:18:29,035 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local596953480_0001_r_000000_0' done.
+2017-03-11 04:18:29,035 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local596953480_0001_r_000000_0
+2017-03-11 04:18:29,035 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 04:18:29,595 INFO org.apache.hadoop.mapreduce.Job: Job job_local596953480_0001 running in uber mode : false
+2017-03-11 04:18:29,597 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 04:18:29,599 INFO org.apache.hadoop.mapreduce.Job: Job job_local596953480_0001 completed successfully
+2017-03-11 04:18:29,610 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
 	File System Counters
-		FILE: Number of bytes read=11514350
-		FILE: Number of bytes written=1397073
+		FILE: Number of bytes read=2226
+		FILE: Number of bytes written=528828
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=1023494
-		Map output bytes=8925696
-		Map output materialized bytes=167092
-		Input split bytes=120
-		Combine input records=1023494
-		Combine output records=34513
-		Reduce input groups=34513
-		Reduce shuffle bytes=167092
-		Reduce input records=34513
-		Reduce output records=34513
-		Spilled Records=69026
+		Map input records=7
+		Map output records=63
+		Map output bytes=4977
+		Map output materialized bytes=460
+		Input split bytes=122
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=63
+		Reduce shuffle bytes=460
+		Reduce input records=63
+		Reduce output records=0
+		Spilled Records=126
 		Shuffled Maps =1
 		Failed Shuffles=0
 		Merged Map outputs=1
-		GC time elapsed (ms)=86
+		GC time elapsed (ms)=26
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
@@ -325,108 +2017,102 @@
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=454
 	File Output Format Counters 
-		Bytes Written=363879
-2017-03-10 14:08:46,208 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:08:46,725 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:08:46,726 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:08:46,791 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
-2017-03-10 14:09:00,496 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:09:00,991 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:09:00,992 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:09:01,486 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:09:01,504 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:09:01,622 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:09:01,930 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1768387477_0001
-2017-03-10 14:09:02,340 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:09:02,341 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1768387477_0001
-2017-03-10 14:09:02,345 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:09:02,348 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:09:02,366 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:09:02,467 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:09:02,468 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1768387477_0001_m_000000_0
-2017-03-10 14:09:02,532 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:09:02,561 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:09:02,564 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:09:02,644 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:09:02,645 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:09:02,645 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:09:02,645 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:09:02,645 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:09:02,652 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:09:03,343 INFO org.apache.hadoop.mapreduce.Job: Job job_local1768387477_0001 running in uber mode : false
-2017-03-10 14:09:03,344 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:09:04,790 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:09:04,792 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:09:04,792 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:09:04,792 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
-2017-03-10 14:09:04,792 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
-2017-03-10 14:09:05,819 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:09:06,544 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:09:06,546 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1768387477_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:09:06,552 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:09:06,552 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1768387477_0001_m_000000_0' done.
-2017-03-10 14:09:06,552 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1768387477_0001_m_000000_0
-2017-03-10 14:09:06,552 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:09:06,560 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:09:06,561 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1768387477_0001_r_000000_0
-2017-03-10 14:09:06,569 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:09:06,570 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:09:06,572 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
-2017-03-10 14:09:06,582 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:09:06,588 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1768387477_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:09:06,616 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:09:06,617 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1768387477_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
-2017-03-10 14:09:06,626 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1768387477_0001_m_000000_0
-2017-03-10 14:09:06,627 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
-2017-03-10 14:09:06,628 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:09:06,629 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:09:06,629 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:09:06,636 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:09:06,636 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:09:06,896 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:09:06,896 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
-2017-03-10 14:09:06,897 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:09:06,897 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:09:06,898 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:09:06,898 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:09:06,903 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:09:07,352 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:09:07,776 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1768387477_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:09:07,792 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:09:07,792 INFO org.apache.hadoop.mapred.Task: Task attempt_local1768387477_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:09:07,792 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1768387477_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1768387477_0001_r_000000
-2017-03-10 14:09:07,793 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:09:07,793 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1768387477_0001_r_000000_0' done.
-2017-03-10 14:09:07,793 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1768387477_0001_r_000000_0
-2017-03-10 14:09:07,793 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:09:08,353 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:09:08,354 INFO org.apache.hadoop.mapreduce.Job: Job job_local1768387477_0001 completed successfully
-2017-03-10 14:09:08,363 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+		Bytes Written=8
+2017-03-11 04:19:23,256 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:19:23,762 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:19:23,769 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:19:24,095 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:19:24,103 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:19:24,140 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:19:24,352 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local469373854_0001
+2017-03-11 04:19:24,810 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:19:24,811 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local469373854_0001
+2017-03-11 04:19:24,826 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:19:24,830 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:19:24,849 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:19:24,958 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:19:24,961 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local469373854_0001_m_000000_0
+2017-03-11 04:19:24,995 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:19:25,004 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:19:25,009 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 04:19:25,100 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:19:25,100 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:19:25,101 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:19:25,101 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:19:25,101 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:19:25,104 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:19:25,137 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 04:19:25,137 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:19:25,137 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:19:25,137 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4977; bufvoid = 104857600
+2017-03-11 04:19:25,137 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214148(104856592); length = 249/6553600
+2017-03-11 04:19:25,149 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:19:25,159 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 04:19:25,163 INFO org.apache.hadoop.mapred.Task: Task:attempt_local469373854_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 04:19:25,170 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-11 04:19:25,170 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local469373854_0001_m_000000_0' done.
+2017-03-11 04:19:25,170 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local469373854_0001_m_000000_0
+2017-03-11 04:19:25,171 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:19:25,174 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 04:19:25,174 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local469373854_0001_r_000000_0
+2017-03-11 04:19:25,184 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:19:25,184 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:19:25,186 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@644fdf4b
+2017-03-11 04:19:25,196 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:19:25,202 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local469373854_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:19:25,230 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 04:19:25,230 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local469373854_0001_m_000000_0 decomp: 5105 len: 460 to MEMORY
+2017-03-11 04:19:25,234 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5105 bytes from map-output for attempt_local469373854_0001_m_000000_0
+2017-03-11 04:19:25,234 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5105, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5105
+2017-03-11 04:19:25,235 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:19:25,235 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:19:25,236 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:19:25,241 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:19:25,243 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:19:25,247 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5105 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:19:25,247 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 468 bytes from disk
+2017-03-11 04:19:25,248 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:19:25,248 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:19:25,248 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:19:25,259 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:19:25,263 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 04:19:25,276 INFO org.apache.hadoop.mapred.Task: Task:attempt_local469373854_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 04:19:25,278 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:19:25,279 INFO org.apache.hadoop.mapred.Task: Task attempt_local469373854_0001_r_000000_0 is allowed to commit now
+2017-03-11 04:19:25,279 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local469373854_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local469373854_0001_r_000000
+2017-03-11 04:19:25,280 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:19:25,280 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local469373854_0001_r_000000_0' done.
+2017-03-11 04:19:25,285 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local469373854_0001_r_000000_0
+2017-03-11 04:19:25,285 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 04:19:25,813 INFO org.apache.hadoop.mapreduce.Job: Job job_local469373854_0001 running in uber mode : false
+2017-03-11 04:19:25,814 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 04:19:25,815 INFO org.apache.hadoop.mapreduce.Job: Job job_local469373854_0001 completed successfully
+2017-03-11 04:19:25,828 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
 	File System Counters
-		FILE: Number of bytes read=11514350
-		FILE: Number of bytes written=1397073
+		FILE: Number of bytes read=2226
+		FILE: Number of bytes written=528828
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=1023494
-		Map output bytes=8925696
-		Map output materialized bytes=167092
-		Input split bytes=120
-		Combine input records=1023494
-		Combine output records=34513
-		Reduce input groups=34513
-		Reduce shuffle bytes=167092
-		Reduce input records=34513
-		Reduce output records=34513
-		Spilled Records=69026
+		Map input records=7
+		Map output records=63
+		Map output bytes=4977
+		Map output materialized bytes=460
+		Input split bytes=122
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=63
+		Reduce shuffle bytes=460
+		Reduce input records=63
+		Reduce output records=0
+		Spilled Records=126
 		Shuffled Maps =1
 		Failed Shuffles=0
 		Merged Map outputs=1
-		GC time elapsed (ms)=76
+		GC time elapsed (ms)=23
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
@@ -439,108 +2125,115 @@
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=454
 	File Output Format Counters 
-		Bytes Written=363879
-2017-03-10 14:10:49,958 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:10:50,420 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:10:50,423 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:10:50,893 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:10:50,902 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:10:50,935 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:10:51,165 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local321623198_0001
-2017-03-10 14:10:51,576 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:10:51,577 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local321623198_0001
-2017-03-10 14:10:51,586 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:10:51,596 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:10:51,607 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:10:51,726 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:10:51,728 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local321623198_0001_m_000000_0
-2017-03-10 14:10:51,786 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:10:51,801 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:10:51,805 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:10:51,887 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:10:51,887 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:10:51,887 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:10:51,888 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:10:51,888 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:10:51,891 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:10:52,584 INFO org.apache.hadoop.mapreduce.Job: Job job_local321623198_0001 running in uber mode : false
-2017-03-10 14:10:52,587 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:10:54,371 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:10:54,373 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:10:54,373 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:10:54,374 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
-2017-03-10 14:10:54,374 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
-2017-03-10 14:10:55,678 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:10:56,656 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:10:56,659 INFO org.apache.hadoop.mapred.Task: Task:attempt_local321623198_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:10:56,666 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:10:56,666 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local321623198_0001_m_000000_0' done.
-2017-03-10 14:10:56,666 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local321623198_0001_m_000000_0
-2017-03-10 14:10:56,667 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:10:56,676 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:10:56,676 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local321623198_0001_r_000000_0
-2017-03-10 14:10:56,684 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:10:56,685 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:10:56,689 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@405d65c3
-2017-03-10 14:10:56,704 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:10:56,710 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local321623198_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:10:56,742 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:10:56,742 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local321623198_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
-2017-03-10 14:10:56,749 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local321623198_0001_m_000000_0
-2017-03-10 14:10:56,749 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
-2017-03-10 14:10:56,750 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:10:56,752 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:10:56,752 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:10:56,756 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:10:56,757 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:10:57,102 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:10:57,102 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
-2017-03-10 14:10:57,103 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:10:57,103 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:10:57,104 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:10:57,105 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:10:57,109 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:10:57,598 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:10:57,797 INFO org.apache.hadoop.mapred.Task: Task:attempt_local321623198_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:10:57,801 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:10:57,802 INFO org.apache.hadoop.mapred.Task: Task attempt_local321623198_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:10:57,802 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local321623198_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local321623198_0001_r_000000
-2017-03-10 14:10:57,803 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:10:57,803 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local321623198_0001_r_000000_0' done.
-2017-03-10 14:10:57,803 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local321623198_0001_r_000000_0
-2017-03-10 14:10:57,803 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:10:58,598 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:10:58,599 INFO org.apache.hadoop.mapreduce.Job: Job job_local321623198_0001 completed successfully
-2017-03-10 14:10:58,612 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+		Bytes Written=8
+2017-03-11 04:31:48,025 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:31:48,620 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:31:48,621 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:31:49,123 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:31:49,135 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:31:49,274 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:31:49,726 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local372885088_0001
+2017-03-11 04:31:50,486 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:31:50,487 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local372885088_0001
+2017-03-11 04:31:50,494 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:31:50,506 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:31:50,523 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:31:50,653 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:31:50,654 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local372885088_0001_m_000000_0
+2017-03-11 04:31:50,721 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:31:50,734 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:31:50,738 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 04:31:50,816 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:31:50,816 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:31:50,817 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:31:50,817 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:31:50,817 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:31:50,820 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:31:50,857 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 04:31:50,857 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:31:50,857 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:31:50,857 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4977; bufvoid = 104857600
+2017-03-11 04:31:50,857 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214148(104856592); length = 249/6553600
+2017-03-11 04:31:50,871 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:31:50,877 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 04:31:50,880 INFO org.apache.hadoop.mapred.Task: Task:attempt_local372885088_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 04:31:50,888 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-11 04:31:50,888 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local372885088_0001_m_000000_0' done.
+2017-03-11 04:31:50,888 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local372885088_0001_m_000000_0
+2017-03-11 04:31:50,889 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:31:50,892 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 04:31:50,892 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local372885088_0001_r_000000_0
+2017-03-11 04:31:50,897 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:31:50,901 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:31:50,904 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@684528a3
+2017-03-11 04:31:50,916 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:31:50,922 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local372885088_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:31:50,954 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 04:31:50,955 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local372885088_0001_m_000000_0 decomp: 5105 len: 525 to MEMORY
+2017-03-11 04:31:50,958 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5105 bytes from map-output for attempt_local372885088_0001_m_000000_0
+2017-03-11 04:31:50,958 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5105, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5105
+2017-03-11 04:31:50,959 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:31:50,961 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:31:50,961 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:31:50,970 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:31:50,970 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:31:50,972 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5105 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:31:50,972 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 533 bytes from disk
+2017-03-11 04:31:50,973 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:31:50,973 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:31:50,973 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:31:50,982 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:31:50,991 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 04:31:50,995 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 04:31:50,997 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local372885088_0001
+java.lang.Exception: java.lang.ArrayIndexOutOfBoundsException: 2
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:529)
+Caused by: java.lang.ArrayIndexOutOfBoundsException: 2
+	at similarity.NaiveApproach$Reduce.reduce(NaiveApproach.java:163)
+	at similarity.NaiveApproach$Reduce.reduce(NaiveApproach.java:1)
+	at org.apache.hadoop.mapreduce.Reducer.run(Reducer.java:171)
+	at org.apache.hadoop.mapred.ReduceTask.runNewReducer(ReduceTask.java:627)
+	at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:389)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$ReduceTaskRunnable.run(LocalJobRunner.java:319)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-11 04:31:51,488 INFO org.apache.hadoop.mapreduce.Job: Job job_local372885088_0001 running in uber mode : false
+2017-03-11 04:31:51,490 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-11 04:31:51,491 INFO org.apache.hadoop.mapreduce.Job: Job job_local372885088_0001 failed with state FAILED due to: NA
+2017-03-11 04:31:51,503 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
 	File System Counters
-		FILE: Number of bytes read=11514350
-		FILE: Number of bytes written=1394261
+		FILE: Number of bytes read=633
+		FILE: Number of bytes written=264241
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=1023494
-		Map output bytes=8925696
-		Map output materialized bytes=167092
-		Input split bytes=120
-		Combine input records=1023494
-		Combine output records=34513
-		Reduce input groups=34513
-		Reduce shuffle bytes=167092
-		Reduce input records=34513
-		Reduce output records=34513
-		Spilled Records=69026
+		Map input records=7
+		Map output records=63
+		Map output bytes=4977
+		Map output materialized bytes=525
+		Input split bytes=122
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=0
+		Reduce shuffle bytes=525
+		Reduce input records=0
+		Reduce output records=0
+		Spilled Records=63
 		Shuffled Maps =1
 		Failed Shuffles=0
 		Merged Map outputs=1
-		GC time elapsed (ms)=82
+		GC time elapsed (ms)=26
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
-		Total committed heap usage (bytes)=331227136
+		Total committed heap usage (bytes)=165613568
 	Shuffle Errors
 		BAD_ID=0
 		CONNECTION=0
@@ -549,108 +2242,115 @@
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=454
 	File Output Format Counters 
-		Bytes Written=363879
-2017-03-10 14:11:49,324 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:11:49,809 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:11:49,819 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:11:50,294 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:11:50,309 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:11:50,418 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:11:50,734 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1130190814_0001
-2017-03-10 14:11:51,124 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:11:51,125 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1130190814_0001
-2017-03-10 14:11:51,125 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:11:51,137 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:11:51,145 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:11:51,262 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:11:51,262 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1130190814_0001_m_000000_0
-2017-03-10 14:11:51,319 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:11:51,345 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:11:51,348 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:11:51,451 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:11:52,138 INFO org.apache.hadoop.mapreduce.Job: Job job_local1130190814_0001 running in uber mode : false
-2017-03-10 14:11:52,139 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:11:53,548 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:11:53,549 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:11:53,549 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:11:53,549 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
-2017-03-10 14:11:53,549 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
-2017-03-10 14:11:54,505 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:11:55,315 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:11:55,318 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1130190814_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:11:55,323 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:11:55,324 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1130190814_0001_m_000000_0' done.
-2017-03-10 14:11:55,324 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1130190814_0001_m_000000_0
-2017-03-10 14:11:55,324 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:11:55,331 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:11:55,332 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1130190814_0001_r_000000_0
-2017-03-10 14:11:55,337 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:11:55,337 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:11:55,339 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@33c40638
-2017-03-10 14:11:55,352 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:11:55,358 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1130190814_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:11:55,386 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:11:55,387 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1130190814_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
-2017-03-10 14:11:55,397 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1130190814_0001_m_000000_0
-2017-03-10 14:11:55,397 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
-2017-03-10 14:11:55,398 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:11:55,399 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:11:55,399 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:11:55,403 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:11:55,404 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:11:55,647 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:11:55,647 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
-2017-03-10 14:11:55,648 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:11:55,648 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:11:55,649 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:11:55,649 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:11:55,654 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:11:56,162 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:11:56,278 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1130190814_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:11:56,289 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:11:56,292 INFO org.apache.hadoop.mapred.Task: Task attempt_local1130190814_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:11:56,294 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1130190814_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1130190814_0001_r_000000
-2017-03-10 14:11:56,294 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:11:56,295 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1130190814_0001_r_000000_0' done.
-2017-03-10 14:11:56,295 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1130190814_0001_r_000000_0
-2017-03-10 14:11:56,295 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:11:57,163 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:11:57,163 INFO org.apache.hadoop.mapreduce.Job: Job job_local1130190814_0001 completed successfully
-2017-03-10 14:11:57,176 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+		Bytes Written=0
+2017-03-11 04:35:38,749 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:35:39,238 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:35:39,245 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:35:39,678 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:35:39,699 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:35:39,807 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:35:40,047 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local95551546_0001
+2017-03-11 04:35:40,533 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:35:40,534 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local95551546_0001
+2017-03-11 04:35:40,538 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:35:40,551 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:35:40,562 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:35:40,672 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:35:40,674 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local95551546_0001_m_000000_0
+2017-03-11 04:35:40,720 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:35:40,735 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:35:40,742 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 04:35:40,819 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:35:40,824 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:35:40,824 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:35:40,824 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:35:40,824 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:35:40,827 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:35:40,867 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 04:35:40,867 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:35:40,867 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:35:40,867 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4424; bufvoid = 104857600
+2017-03-11 04:35:40,867 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214176(104856704); length = 221/6553600
+2017-03-11 04:35:40,881 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:35:40,889 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 04:35:40,892 INFO org.apache.hadoop.mapred.Task: Task:attempt_local95551546_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 04:35:40,901 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-11 04:35:40,901 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local95551546_0001_m_000000_0' done.
+2017-03-11 04:35:40,901 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local95551546_0001_m_000000_0
+2017-03-11 04:35:40,902 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:35:40,905 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 04:35:40,905 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local95551546_0001_r_000000_0
+2017-03-11 04:35:40,910 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:35:40,910 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:35:40,913 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6dac133e
+2017-03-11 04:35:40,931 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:35:40,937 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local95551546_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:35:40,957 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 04:35:40,958 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local95551546_0001_m_000000_0 decomp: 4538 len: 489 to MEMORY
+2017-03-11 04:35:40,961 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 4538 bytes from map-output for attempt_local95551546_0001_m_000000_0
+2017-03-11 04:35:40,961 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 4538, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->4538
+2017-03-11 04:35:40,962 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:35:40,963 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:35:40,963 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:35:40,969 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:35:40,969 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4520 bytes
+2017-03-11 04:35:40,971 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 4538 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:35:40,971 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 497 bytes from disk
+2017-03-11 04:35:40,972 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:35:40,972 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:35:40,972 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4520 bytes
+2017-03-11 04:35:40,978 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:35:40,988 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 04:35:40,994 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 04:35:40,999 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local95551546_0001
+java.lang.Exception: java.lang.ArrayIndexOutOfBoundsException: 2
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:529)
+Caused by: java.lang.ArrayIndexOutOfBoundsException: 2
+	at similarity.NaiveApproach$Reduce.reduce(NaiveApproach.java:167)
+	at similarity.NaiveApproach$Reduce.reduce(NaiveApproach.java:1)
+	at org.apache.hadoop.mapreduce.Reducer.run(Reducer.java:171)
+	at org.apache.hadoop.mapred.ReduceTask.runNewReducer(ReduceTask.java:627)
+	at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:389)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$ReduceTaskRunnable.run(LocalJobRunner.java:319)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-11 04:35:41,536 INFO org.apache.hadoop.mapreduce.Job: Job job_local95551546_0001 running in uber mode : false
+2017-03-11 04:35:41,537 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-11 04:35:41,539 INFO org.apache.hadoop.mapreduce.Job: Job job_local95551546_0001 failed with state FAILED due to: NA
+2017-03-11 04:35:41,550 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
 	File System Counters
-		FILE: Number of bytes read=11514350
-		FILE: Number of bytes written=1397073
+		FILE: Number of bytes read=633
+		FILE: Number of bytes written=262799
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=1023494
-		Map output bytes=8925696
-		Map output materialized bytes=167092
-		Input split bytes=120
-		Combine input records=1023494
-		Combine output records=34513
-		Reduce input groups=34513
-		Reduce shuffle bytes=167092
-		Reduce input records=34513
-		Reduce output records=34513
-		Spilled Records=69026
+		Map input records=7
+		Map output records=56
+		Map output bytes=4424
+		Map output materialized bytes=489
+		Input split bytes=122
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=0
+		Reduce shuffle bytes=489
+		Reduce input records=0
+		Reduce output records=0
+		Spilled Records=56
 		Shuffled Maps =1
 		Failed Shuffles=0
 		Merged Map outputs=1
-		GC time elapsed (ms)=84
+		GC time elapsed (ms)=26
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
-		Total committed heap usage (bytes)=331227136
+		Total committed heap usage (bytes)=165613568
 	Shuffle Errors
 		BAD_ID=0
 		CONNECTION=0
@@ -659,108 +2359,106 @@
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=454
 	File Output Format Counters 
-		Bytes Written=363879
-2017-03-10 14:12:54,192 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:12:54,666 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:12:54,677 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:12:54,726 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
-2017-03-10 14:13:16,264 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:13:16,742 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:13:16,746 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:13:17,210 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:13:17,218 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:13:17,249 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:13:17,469 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1259538348_0001
-2017-03-10 14:13:17,909 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:13:17,911 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1259538348_0001
-2017-03-10 14:13:17,915 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:13:17,923 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:13:17,937 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:13:18,056 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:13:18,059 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1259538348_0001_m_000000_0
-2017-03-10 14:13:18,117 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:13:18,128 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:13:18,132 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:13:18,215 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:13:18,216 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:13:18,216 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:13:18,216 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:13:18,216 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:13:18,226 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:13:18,912 INFO org.apache.hadoop.mapreduce.Job: Job job_local1259538348_0001 running in uber mode : false
-2017-03-10 14:13:18,913 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:13:20,442 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:13:20,443 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:13:20,443 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:13:20,443 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
-2017-03-10 14:13:20,443 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
-2017-03-10 14:13:21,376 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:13:22,115 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:13:22,117 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1259538348_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:13:22,124 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:13:22,124 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1259538348_0001_m_000000_0' done.
-2017-03-10 14:13:22,124 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1259538348_0001_m_000000_0
-2017-03-10 14:13:22,124 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:13:22,132 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:13:22,132 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1259538348_0001_r_000000_0
-2017-03-10 14:13:22,137 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:13:22,137 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:13:22,140 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
-2017-03-10 14:13:22,151 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:13:22,156 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1259538348_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:13:22,209 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:13:22,209 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1259538348_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
-2017-03-10 14:13:22,215 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1259538348_0001_m_000000_0
-2017-03-10 14:13:22,217 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
-2017-03-10 14:13:22,219 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:13:22,220 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:13:22,224 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:13:22,229 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:13:22,230 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:13:22,458 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:13:22,459 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
-2017-03-10 14:13:22,459 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:13:22,459 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:13:22,460 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:13:22,460 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:13:22,465 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:13:22,936 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:13:23,199 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1259538348_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:13:23,200 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:13:23,201 INFO org.apache.hadoop.mapred.Task: Task attempt_local1259538348_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:13:23,201 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1259538348_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1259538348_0001_r_000000
-2017-03-10 14:13:23,202 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:13:23,202 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1259538348_0001_r_000000_0' done.
-2017-03-10 14:13:23,202 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1259538348_0001_r_000000_0
-2017-03-10 14:13:23,202 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:13:23,936 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:13:23,937 INFO org.apache.hadoop.mapreduce.Job: Job job_local1259538348_0001 completed successfully
-2017-03-10 14:13:23,948 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+		Bytes Written=0
+2017-03-11 04:37:40,474 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:37:41,035 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:37:41,040 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:37:41,085 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
+2017-03-11 04:37:59,919 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:38:00,447 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:38:00,448 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:38:00,886 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:38:00,894 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:38:01,020 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:38:01,264 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local2110978304_0001
+2017-03-11 04:38:01,718 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:38:01,719 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local2110978304_0001
+2017-03-11 04:38:01,720 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:38:01,734 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:38:01,743 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:38:01,854 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:38:01,855 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2110978304_0001_m_000000_0
+2017-03-11 04:38:01,905 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:38:01,916 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:38:01,920 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 04:38:01,995 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:38:01,995 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:38:01,995 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:38:01,995 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:38:01,996 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:38:02,005 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:38:02,048 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 04:38:02,048 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:38:02,048 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:38:02,048 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4424; bufvoid = 104857600
+2017-03-11 04:38:02,048 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214176(104856704); length = 221/6553600
+2017-03-11 04:38:02,057 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:38:02,063 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 04:38:02,066 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2110978304_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 04:38:02,072 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-11 04:38:02,072 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2110978304_0001_m_000000_0' done.
+2017-03-11 04:38:02,072 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2110978304_0001_m_000000_0
+2017-03-11 04:38:02,072 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:38:02,075 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 04:38:02,076 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2110978304_0001_r_000000_0
+2017-03-11 04:38:02,081 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:38:02,081 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:38:02,085 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6dac133e
+2017-03-11 04:38:02,100 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:38:02,104 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2110978304_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:38:02,123 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 04:38:02,123 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2110978304_0001_m_000000_0 decomp: 4538 len: 501 to MEMORY
+2017-03-11 04:38:02,128 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 4538 bytes from map-output for attempt_local2110978304_0001_m_000000_0
+2017-03-11 04:38:02,128 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 4538, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->4538
+2017-03-11 04:38:02,130 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:38:02,131 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:38:02,131 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:38:02,138 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:38:02,138 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4520 bytes
+2017-03-11 04:38:02,139 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 4538 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:38:02,140 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 509 bytes from disk
+2017-03-11 04:38:02,140 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:38:02,140 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:38:02,141 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4520 bytes
+2017-03-11 04:38:02,145 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:38:02,159 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 04:38:02,169 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2110978304_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 04:38:02,171 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:38:02,171 INFO org.apache.hadoop.mapred.Task: Task attempt_local2110978304_0001_r_000000_0 is allowed to commit now
+2017-03-11 04:38:02,172 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2110978304_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local2110978304_0001_r_000000
+2017-03-11 04:38:02,172 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:38:02,172 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2110978304_0001_r_000000_0' done.
+2017-03-11 04:38:02,173 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2110978304_0001_r_000000_0
+2017-03-11 04:38:02,173 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 04:38:02,725 INFO org.apache.hadoop.mapreduce.Job: Job job_local2110978304_0001 running in uber mode : false
+2017-03-11 04:38:02,726 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 04:38:02,728 INFO org.apache.hadoop.mapreduce.Job: Job job_local2110978304_0001 completed successfully
+2017-03-11 04:38:02,739 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
 	File System Counters
-		FILE: Number of bytes read=11514350
-		FILE: Number of bytes written=1397073
+		FILE: Number of bytes read=2308
+		FILE: Number of bytes written=531767
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=1023494
-		Map output bytes=8925696
-		Map output materialized bytes=167092
-		Input split bytes=120
-		Combine input records=1023494
-		Combine output records=34513
-		Reduce input groups=34513
-		Reduce shuffle bytes=167092
-		Reduce input records=34513
-		Reduce output records=34513
-		Spilled Records=69026
+		Map input records=7
+		Map output records=56
+		Map output bytes=4424
+		Map output materialized bytes=501
+		Input split bytes=122
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=35
+		Reduce shuffle bytes=501
+		Reduce input records=56
+		Reduce output records=1
+		Spilled Records=112
 		Shuffled Maps =1
 		Failed Shuffles=0
 		Merged Map outputs=1
-		GC time elapsed (ms)=79
+		GC time elapsed (ms)=28
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
@@ -773,104 +2471,102 @@
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=454
 	File Output Format Counters 
-		Bytes Written=363879
-2017-03-10 14:15:07,671 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:15:08,143 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:15:08,146 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:15:08,597 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:15:08,610 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:15:08,649 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:15:08,885 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local744282859_0001
-2017-03-10 14:15:09,357 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:15:09,358 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local744282859_0001
-2017-03-10 14:15:09,361 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:15:09,374 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:15:09,376 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:15:09,515 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:15:09,516 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local744282859_0001_m_000000_0
-2017-03-10 14:15:09,574 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:15:09,584 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:15:09,587 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:15:09,679 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:15:10,364 INFO org.apache.hadoop.mapreduce.Job: Job job_local744282859_0001 running in uber mode : false
-2017-03-10 14:15:10,366 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:15:11,981 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:15:11,982 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:15:11,982 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:15:11,982 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
-2017-03-10 14:15:11,982 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
-2017-03-10 14:15:13,023 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:15:13,726 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:15:13,728 INFO org.apache.hadoop.mapred.Task: Task:attempt_local744282859_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:15:13,739 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:15:13,739 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local744282859_0001_m_000000_0' done.
-2017-03-10 14:15:13,739 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local744282859_0001_m_000000_0
-2017-03-10 14:15:13,739 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:15:13,748 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:15:13,749 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local744282859_0001_r_000000_0
-2017-03-10 14:15:13,753 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:15:13,754 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:15:13,756 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@534d4113
-2017-03-10 14:15:13,766 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:15:13,772 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local744282859_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:15:13,800 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:15:13,800 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local744282859_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
-2017-03-10 14:15:13,807 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local744282859_0001_m_000000_0
-2017-03-10 14:15:13,807 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
-2017-03-10 14:15:13,809 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:15:13,813 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:15:13,814 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:15:13,819 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:15:13,819 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:15:14,078 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:15:14,079 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
-2017-03-10 14:15:14,079 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:15:14,079 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:15:14,080 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:15:14,080 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:15:14,085 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:15:14,378 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:15:14,760 INFO org.apache.hadoop.mapred.Task: Task:attempt_local744282859_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:15:14,761 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:15:14,761 INFO org.apache.hadoop.mapred.Task: Task attempt_local744282859_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local744282859_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local744282859_0001_r_000000
-2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local744282859_0001_r_000000_0' done.
-2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local744282859_0001_r_000000_0
-2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:15:15,379 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:15:15,380 INFO org.apache.hadoop.mapreduce.Job: Job job_local744282859_0001 completed successfully
-2017-03-10 14:15:15,390 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+		Bytes Written=20
+2017-03-11 04:42:06,971 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:42:08,074 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:42:08,087 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:42:09,013 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:42:09,048 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:42:09,250 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:42:09,647 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1746989117_0001
+2017-03-11 04:42:10,180 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:42:10,191 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:42:10,192 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1746989117_0001
+2017-03-11 04:42:10,213 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:42:10,222 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:42:10,451 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:42:10,452 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1746989117_0001_m_000000_0
+2017-03-11 04:42:10,541 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:42:10,562 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:42:10,572 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 04:42:10,712 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:42:10,712 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:42:10,712 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:42:10,713 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:42:10,713 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:42:10,718 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:42:10,770 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 04:42:10,770 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:42:10,770 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:42:10,771 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4424; bufvoid = 104857600
+2017-03-11 04:42:10,771 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214176(104856704); length = 221/6553600
+2017-03-11 04:42:10,789 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:42:10,804 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 04:42:10,808 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1746989117_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 04:42:10,826 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-11 04:42:10,832 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1746989117_0001_m_000000_0' done.
+2017-03-11 04:42:10,836 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1746989117_0001_m_000000_0
+2017-03-11 04:42:10,838 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:42:10,843 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 04:42:10,844 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1746989117_0001_r_000000_0
+2017-03-11 04:42:10,855 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:42:10,855 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:42:10,866 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4d83f728
+2017-03-11 04:42:10,891 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:42:10,899 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1746989117_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:42:10,943 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 04:42:10,943 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1746989117_0001_m_000000_0 decomp: 4538 len: 501 to MEMORY
+2017-03-11 04:42:10,952 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 4538 bytes from map-output for attempt_local1746989117_0001_m_000000_0
+2017-03-11 04:42:10,952 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 4538, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->4538
+2017-03-11 04:42:10,954 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:42:10,955 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:42:10,956 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:42:10,967 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:42:10,967 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4520 bytes
+2017-03-11 04:42:10,970 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 4538 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:42:10,970 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 509 bytes from disk
+2017-03-11 04:42:10,971 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:42:10,971 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:42:10,972 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4520 bytes
+2017-03-11 04:42:10,979 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:42:11,006 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 04:42:11,026 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1746989117_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 04:42:11,029 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:42:11,030 INFO org.apache.hadoop.mapred.Task: Task attempt_local1746989117_0001_r_000000_0 is allowed to commit now
+2017-03-11 04:42:11,032 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1746989117_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1746989117_0001_r_000000
+2017-03-11 04:42:11,032 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:42:11,033 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1746989117_0001_r_000000_0' done.
+2017-03-11 04:42:11,033 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1746989117_0001_r_000000_0
+2017-03-11 04:42:11,033 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 04:42:11,212 INFO org.apache.hadoop.mapreduce.Job: Job job_local1746989117_0001 running in uber mode : false
+2017-03-11 04:42:11,213 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 04:42:11,215 INFO org.apache.hadoop.mapreduce.Job: Job job_local1746989117_0001 completed successfully
+2017-03-11 04:42:11,233 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
 	File System Counters
-		FILE: Number of bytes read=11514350
-		FILE: Number of bytes written=1394261
+		FILE: Number of bytes read=2308
+		FILE: Number of bytes written=531767
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=1023494
-		Map output bytes=8925696
-		Map output materialized bytes=167092
-		Input split bytes=120
-		Combine input records=1023494
-		Combine output records=34513
-		Reduce input groups=34513
-		Reduce shuffle bytes=167092
-		Reduce input records=34513
-		Reduce output records=34513
-		Spilled Records=69026
+		Map input records=7
+		Map output records=56
+		Map output bytes=4424
+		Map output materialized bytes=501
+		Input split bytes=122
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=35
+		Reduce shuffle bytes=501
+		Reduce input records=56
+		Reduce output records=1
+		Spilled Records=112
 		Shuffled Maps =1
 		Failed Shuffles=0
 		Merged Map outputs=1
-		GC time elapsed (ms)=90
+		GC time elapsed (ms)=49
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
@@ -883,269 +2579,43 @@
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=454
 	File Output Format Counters 
-		Bytes Written=363879
-2017-03-10 14:16:55,128 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:16:55,605 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:16:55,614 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:16:55,649 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
-2017-03-10 14:17:14,700 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:17:15,157 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:17:15,169 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:17:15,624 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:17:15,635 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:17:15,668 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:17:15,901 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1420822781_0001
-2017-03-10 14:17:16,319 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:17:16,321 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1420822781_0001
-2017-03-10 14:17:16,323 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:17:16,335 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:17:16,340 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:17:16,441 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:17:16,442 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1420822781_0001_m_000000_0
-2017-03-10 14:17:16,518 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:17:16,531 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:17:16,534 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:17:16,616 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:17:16,617 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:17:16,617 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:17:16,617 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:17:16,617 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:17:16,621 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:17:16,682 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:17:16,697 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:17:16,704 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:17:16,705 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1420822781_0001
-java.lang.Exception: java.lang.ArrayIndexOutOfBoundsException: 1
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
-Caused by: java.lang.ArrayIndexOutOfBoundsException: 1
-	at similarity.WordSort$Map.loadWordFreq(WordSort.java:87)
-	at similarity.WordSort$Map.setup(WordSort.java:118)
-	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:142)
-	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
-	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
-	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
-	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
-	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
-	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
-	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
-	at java.lang.Thread.run(Thread.java:745)
-2017-03-10 14:17:17,328 INFO org.apache.hadoop.mapreduce.Job: Job job_local1420822781_0001 running in uber mode : false
-2017-03-10 14:17:17,329 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:17:17,331 INFO org.apache.hadoop.mapreduce.Job: Job job_local1420822781_0001 failed with state FAILED due to: NA
-2017-03-10 14:17:17,336 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
-2017-03-10 14:26:12,465 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:26:12,973 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:26:12,986 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:26:13,468 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:26:13,490 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:26:13,599 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:26:13,979 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1562971559_0001
-2017-03-10 14:26:14,383 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:26:14,385 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1562971559_0001
-2017-03-10 14:26:14,384 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:26:14,391 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:26:14,399 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:26:14,512 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:26:14,513 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1562971559_0001_m_000000_0
-2017-03-10 14:26:14,576 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:26:14,613 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:26:14,617 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:26:14,762 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:26:14,762 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:26:14,763 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:26:14,763 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:26:14,763 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:26:14,766 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:26:15,398 INFO org.apache.hadoop.mapreduce.Job: Job job_local1562971559_0001 running in uber mode : false
-2017-03-10 14:26:15,398 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:26:15,473 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:26:15,481 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:26:15,488 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:26:15,490 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1562971559_0001
-java.lang.Exception: java.lang.NullPointerException
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
-Caused by: java.lang.NullPointerException
-	at similarity.WordSort$Map$1.compare(WordSort.java:135)
-	at similarity.WordSort$Map$1.compare(WordSort.java:1)
-	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
-	at java.util.TimSort.sort(TimSort.java:189)
-	at java.util.TimSort.sort(TimSort.java:173)
-	at java.util.Arrays.sort(Arrays.java:659)
-	at java.util.Collections.sort(Collections.java:217)
-	at similarity.WordSort$Map.map(WordSort.java:131)
-	at similarity.WordSort$Map.map(WordSort.java:1)
-	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
-	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
-	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
-	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
-	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
-	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
-	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
-	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
-	at java.lang.Thread.run(Thread.java:745)
-2017-03-10 14:26:16,400 INFO org.apache.hadoop.mapreduce.Job: Job job_local1562971559_0001 failed with state FAILED due to: NA
-2017-03-10 14:26:16,402 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
-2017-03-10 14:29:37,807 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:29:38,356 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:29:38,361 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:29:38,865 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:29:38,874 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:29:38,909 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:29:39,202 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local705841975_0001
-2017-03-10 14:29:39,678 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:29:39,680 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local705841975_0001
-2017-03-10 14:29:39,685 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:29:39,703 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:29:39,705 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:29:39,828 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:29:39,829 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local705841975_0001_m_000000_0
-2017-03-10 14:29:39,879 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:29:39,891 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:29:39,896 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:29:39,983 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:29:39,984 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:29:39,984 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:29:39,984 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:29:39,984 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:29:39,988 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:29:40,687 INFO org.apache.hadoop.mapreduce.Job: Job job_local705841975_0001 running in uber mode : false
-2017-03-10 14:29:40,690 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:29:40,701 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:29:40,708 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:29:40,717 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:29:40,718 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local705841975_0001
-java.lang.Exception: java.lang.NullPointerException
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
-Caused by: java.lang.NullPointerException
-	at similarity.WordSort$Map$1.compare(WordSort.java:138)
-	at similarity.WordSort$Map$1.compare(WordSort.java:1)
-	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
-	at java.util.TimSort.sort(TimSort.java:189)
-	at java.util.TimSort.sort(TimSort.java:173)
-	at java.util.Arrays.sort(Arrays.java:659)
-	at java.util.Collections.sort(Collections.java:217)
-	at similarity.WordSort$Map.map(WordSort.java:134)
-	at similarity.WordSort$Map.map(WordSort.java:1)
-	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
-	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
-	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
-	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
-	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
-	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
-	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
-	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
-	at java.lang.Thread.run(Thread.java:745)
-2017-03-10 14:29:41,693 INFO org.apache.hadoop.mapreduce.Job: Job job_local705841975_0001 failed with state FAILED due to: NA
-2017-03-10 14:29:41,695 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
-2017-03-10 14:34:26,674 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:34:27,174 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:34:27,179 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:34:27,223 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount already exists
-2017-03-10 14:34:55,125 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:34:55,598 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:34:55,612 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:34:56,114 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:34:56,121 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:34:56,259 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:34:56,554 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1013376941_0001
-2017-03-10 14:34:56,980 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:34:56,982 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:34:56,983 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1013376941_0001
-2017-03-10 14:34:56,990 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:34:56,995 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:34:57,121 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:34:57,124 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1013376941_0001_m_000000_0
-2017-03-10 14:34:57,181 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:34:57,198 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:34:57,202 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:34:57,295 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:34:57,298 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:34:57,298 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:34:57,298 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:34:57,298 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:34:57,300 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:34:57,967 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:34:57,974 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:34:57,988 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:34:57,990 INFO org.apache.hadoop.mapreduce.Job: Job job_local1013376941_0001 running in uber mode : false
-2017-03-10 14:34:57,991 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:34:57,993 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1013376941_0001
-java.lang.Exception: java.lang.NullPointerException
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
-Caused by: java.lang.NullPointerException
-	at similarity.WordSort$Map$1.compare(WordSort.java:146)
-	at similarity.WordSort$Map$1.compare(WordSort.java:1)
-	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
-	at java.util.TimSort.sort(TimSort.java:189)
-	at java.util.TimSort.sort(TimSort.java:173)
-	at java.util.Arrays.sort(Arrays.java:659)
-	at java.util.Collections.sort(Collections.java:217)
-	at similarity.WordSort$Map.map(WordSort.java:142)
-	at similarity.WordSort$Map.map(WordSort.java:1)
-	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
-	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
-	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
-	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
-	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
-	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
-	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
-	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
-	at java.lang.Thread.run(Thread.java:745)
-2017-03-10 14:34:57,995 INFO org.apache.hadoop.mapreduce.Job: Job job_local1013376941_0001 failed with state FAILED due to: NA
-2017-03-10 14:34:57,998 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
-2017-03-10 14:38:51,972 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:38:52,515 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:38:52,527 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:38:53,098 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:38:53,128 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:38:53,247 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:38:53,578 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1410438889_0001
-2017-03-10 14:38:54,016 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:38:54,017 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1410438889_0001
-2017-03-10 14:38:54,024 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:38:54,064 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:38:54,065 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:38:54,219 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:38:54,220 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1410438889_0001_m_000000_0
-2017-03-10 14:38:54,279 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:38:54,293 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:38:54,296 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:38:54,396 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:38:54,396 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:38:54,396 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:38:54,397 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:38:54,397 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:38:54,400 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:38:55,024 INFO org.apache.hadoop.mapreduce.Job: Job job_local1410438889_0001 running in uber mode : false
-2017-03-10 14:38:55,027 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:38:55,119 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:38:55,119 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:38:55,119 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 325; bufvoid = 104857600
-2017-03-10 14:38:55,119 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214376(104857504); length = 21/6553600
-2017-03-10 14:38:55,128 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:38:55,137 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:38:55,141 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:38:55,141 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1410438889_0001
-java.lang.Exception: java.lang.NullPointerException
+		Bytes Written=20
+	similarity.NaiveApproach$CompCounter
+		NUM=21
+2017-03-11 04:59:27,630 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:59:28,293 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:59:28,317 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:59:28,995 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:59:29,010 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:59:29,098 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:59:29,457 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1838228410_0001
+2017-03-11 04:59:30,030 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:59:30,031 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1838228410_0001
+2017-03-11 04:59:30,045 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:59:30,062 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:59:30,068 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:59:30,263 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:59:30,263 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1838228410_0001_m_000000_0
+2017-03-11 04:59:30,337 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:59:30,378 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:59:30,387 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 04:59:30,595 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:59:30,595 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:59:30,595 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:59:30,597 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:59:30,605 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:59:30,607 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:59:30,649 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:59:30,669 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:59:30,721 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:59:30,722 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1838228410_0001
+java.lang.Exception: java.lang.ClassCastException: org.apache.hadoop.io.Text cannot be cast to org.apache.hadoop.io.LongWritable
 	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
 	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
-Caused by: java.lang.NullPointerException
-	at java.lang.Integer.compareTo(Integer.java:1003)
-	at similarity.WordSort$Map$1.compare(WordSort.java:144)
-	at similarity.WordSort$Map$1.compare(WordSort.java:1)
-	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
-	at java.util.TimSort.sort(TimSort.java:189)
-	at java.util.TimSort.sort(TimSort.java:173)
-	at java.util.Arrays.sort(Arrays.java:659)
-	at java.util.Collections.sort(Collections.java:217)
-	at similarity.WordSort$Map.map(WordSort.java:140)
-	at similarity.WordSort$Map.map(WordSort.java:1)
+Caused by: java.lang.ClassCastException: org.apache.hadoop.io.Text cannot be cast to org.apache.hadoop.io.LongWritable
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:1)
 	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
 	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
 	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
@@ -1155,55 +2625,42 @@ Caused by: java.lang.NullPointerException
 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
 	at java.lang.Thread.run(Thread.java:745)
-2017-03-10 14:38:56,030 INFO org.apache.hadoop.mapreduce.Job: Job job_local1410438889_0001 failed with state FAILED due to: NA
-2017-03-10 14:38:56,035 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
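[editor's note] The three failed WordSort runs above all die with the same NullPointerException inside the anonymous Comparator used by Collections.sort in WordSort$Map.map; in the later runs the trace reaches java.lang.Integer.compareTo, which means compareTo was invoked on a null boxed Integer, typically because a lookup performed inside compare returned null. WordSort.java is not part of this log, so the following is only a minimal, self-contained sketch of a null-safe comparator under that assumption; the class name, the counts map, and the sample words are hypothetical.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.Comparator;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class NullSafeWordSortSketch {
        public static void main(String[] args) {
            // Hypothetical stand-in for whatever WordSort$Map looks words up in.
            final Map<String, Integer> counts = new HashMap<String, Integer>();
            counts.put("the", 3);
            counts.put("of", 2);
            // "and" is deliberately missing, so counts.get("and") returns null.

            List<String> words = new ArrayList<String>(Arrays.asList("of", "and", "the"));

            // The traces above end in Integer.compareTo, i.e. compareTo called on a
            // null Integer. Guarding the lookups keeps the comparator null-safe.
            Collections.sort(words, new Comparator<String>() {
                @Override
                public int compare(String a, String b) {
                    Integer ca = counts.get(a);
                    Integer cb = counts.get(b);
                    int va = (ca == null) ? 0 : ca;
                    int vb = (cb == null) ? 0 : cb;
                    return Integer.compare(va, vb);
                }
            });

            System.out.println(words); // prints [and, of, the]
        }
    }

Treating a missing word as count 0 is just one possible policy; filtering such words out before sorting would work equally well. Either way the job would get past TimSort instead of failing the map task.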
-2017-03-10 14:40:16,992 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:40:17,522 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:40:17,536 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:40:18,047 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:40:18,055 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:40:18,089 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:40:18,383 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local483843492_0001
-2017-03-10 14:40:18,855 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:40:18,856 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local483843492_0001
-2017-03-10 14:40:18,860 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:40:18,887 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:40:18,889 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:40:19,021 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:40:19,023 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local483843492_0001_m_000000_0
-2017-03-10 14:40:19,078 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:40:19,106 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:40:19,109 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:40:19,243 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:40:19,243 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:40:19,243 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:40:19,244 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:40:19,244 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:40:19,249 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:40:19,852 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:40:19,852 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:40:19,852 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 325; bufvoid = 104857600
-2017-03-10 14:40:19,852 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214376(104857504); length = 21/6553600
-2017-03-10 14:40:19,858 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:40:19,860 INFO org.apache.hadoop.mapreduce.Job: Job job_local483843492_0001 running in uber mode : false
-2017-03-10 14:40:19,861 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:40:19,866 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:40:19,869 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:40:19,870 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local483843492_0001
-java.lang.Exception: java.lang.NullPointerException
+2017-03-11 04:59:31,033 INFO org.apache.hadoop.mapreduce.Job: Job job_local1838228410_0001 running in uber mode : false
+2017-03-11 04:59:31,034 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 04:59:31,036 INFO org.apache.hadoop.mapreduce.Job: Job job_local1838228410_0001 failed with state FAILED due to: NA
+2017-03-11 04:59:31,049 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
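[editor's note] This NaiveApproach run (and the identical one below at 05:00) fails with "Text cannot be cast to LongWritable" inside NaiveApproach$Map.map, which is the usual sign that the Mapper's declared input key type does not match the key type the InputFormat actually delivers: the framework is handing in Text keys while the map method expects LongWritable. NaiveApproach.java is not included in this log, so the following is only a sketch under the assumption that the job reads its sortedline_sample input with a key/value text format; the class name is hypothetical.

    import java.io.IOException;

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;

    // Declaring the input key as Text makes the map(K, V, Context) signature match
    // what a Text-keyed InputFormat (e.g. KeyValueTextInputFormat) passes in, so the
    // implicit cast to LongWritable seen in the stack trace never happens.
    public class TextKeyMapperSketch extends Mapper<Text, Text, Text, Text> {
        @Override
        protected void map(Text key, Text value, Context context)
                throws IOException, InterruptedException {
            // Echo the pair through unchanged; real logic would go here.
            context.write(key, value);
        }
    }

If the input really is tab-separated key/value text, the driver would also need job.setInputFormatClass(org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat.class) to match. Conversely, if the job is meant to use the default TextInputFormat (whose keys are byte offsets), the LongWritable key declaration is correct and the mismatch lies in whatever input format the driver actually sets; the log alone does not say which.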
+2017-03-11 05:00:57,011 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 05:00:57,702 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 05:00:57,707 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 05:00:58,350 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 05:00:58,358 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 05:00:58,452 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 05:00:58,816 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1553338044_0001
+2017-03-11 05:00:59,400 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 05:00:59,401 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1553338044_0001
+2017-03-11 05:00:59,410 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 05:00:59,417 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:00:59,445 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 05:00:59,609 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 05:00:59,609 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1553338044_0001_m_000000_0
+2017-03-11 05:00:59,680 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:00:59,731 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:00:59,738 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 05:00:59,903 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 05:00:59,904 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 05:00:59,904 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 05:00:59,904 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 05:00:59,904 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 05:00:59,914 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 05:00:59,968 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 05:00:59,983 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 05:01:00,044 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 05:01:00,045 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1553338044_0001
+java.lang.Exception: java.lang.ClassCastException: org.apache.hadoop.io.Text cannot be cast to org.apache.hadoop.io.LongWritable
 	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
 	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
-Caused by: java.lang.NullPointerException
-	at java.lang.Integer.compareTo(Integer.java:1003)
-	at similarity.WordSort$Map$1.compare(WordSort.java:144)
-	at similarity.WordSort$Map$1.compare(WordSort.java:1)
-	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
-	at java.util.TimSort.sort(TimSort.java:189)
-	at java.util.TimSort.sort(TimSort.java:173)
-	at java.util.Arrays.sort(Arrays.java:659)
-	at java.util.Collections.sort(Collections.java:217)
-	at similarity.WordSort$Map.map(WordSort.java:140)
-	at similarity.WordSort$Map.map(WordSort.java:1)
+Caused by: java.lang.ClassCastException: org.apache.hadoop.io.Text cannot be cast to org.apache.hadoop.io.LongWritable
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:1)
 	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
 	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
 	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
@@ -1213,107 +2670,1438 @@ Caused by: java.lang.NullPointerException
 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
 	at java.lang.Thread.run(Thread.java:745)
-2017-03-10 14:40:20,864 INFO org.apache.hadoop.mapreduce.Job: Job job_local483843492_0001 failed with state FAILED due to: NA
-2017-03-10 14:40:20,871 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
-2017-03-10 14:41:43,006 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:41:43,480 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:41:43,485 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:41:44,008 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:41:44,042 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:41:44,079 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:41:44,328 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1726509137_0001
-2017-03-10 14:41:44,799 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:41:44,800 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1726509137_0001
-2017-03-10 14:41:44,806 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:41:44,835 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:41:44,838 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:41:44,977 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:41:44,978 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1726509137_0001_m_000000_0
-2017-03-10 14:41:45,030 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:41:45,051 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:41:45,058 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:41:45,138 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:41:45,142 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:41:45,143 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:41:45,143 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:41:45,143 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:41:45,149 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:41:45,805 INFO org.apache.hadoop.mapreduce.Job: Job job_local1726509137_0001 running in uber mode : false
-2017-03-10 14:41:45,806 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4776007; bufvoid = 104857600
-2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25753980(103015920); length = 460417/6553600
-2017-03-10 14:41:48,780 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:41:49,395 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:41:49,398 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1726509137_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:41:49,404 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:41:49,408 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1726509137_0001_m_000000_0' done.
-2017-03-10 14:41:49,409 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1726509137_0001_m_000000_0
-2017-03-10 14:41:49,409 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:41:49,411 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:41:49,411 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1726509137_0001_r_000000_0
-2017-03-10 14:41:49,416 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:41:49,417 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:41:49,419 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@70d586bc
-2017-03-10 14:41:49,429 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:41:49,435 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1726509137_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:41:49,461 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:41:49,461 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1726509137_0001_m_000000_0 decomp: 5006219 len: 2376884 to MEMORY
-2017-03-10 14:41:49,517 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5006219 bytes from map-output for attempt_local1726509137_0001_m_000000_0
-2017-03-10 14:41:49,521 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5006219, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5006219
-2017-03-10 14:41:49,523 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:41:49,524 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:41:49,524 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:41:49,529 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:41:49,529 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5006209 bytes
-2017-03-10 14:41:49,818 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:41:50,226 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5006219 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:41:50,227 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2376892 bytes from disk
-2017-03-10 14:41:50,227 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:41:50,228 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:41:50,228 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5006209 bytes
-2017-03-10 14:41:50,230 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:41:50,235 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:41:50,888 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1726509137_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:41:50,898 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:41:50,899 INFO org.apache.hadoop.mapred.Task: Task attempt_local1726509137_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:41:50,900 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1726509137_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1726509137_0001_r_000000
-2017-03-10 14:41:50,901 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:41:50,901 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1726509137_0001_r_000000_0' done.
-2017-03-10 14:41:50,901 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1726509137_0001_r_000000_0
-2017-03-10 14:41:50,901 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:41:51,822 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:41:51,822 INFO org.apache.hadoop.mapreduce.Job: Job job_local1726509137_0001 completed successfully
-2017-03-10 14:41:51,834 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+2017-03-11 05:01:00,417 INFO org.apache.hadoop.mapreduce.Job: Job job_local1553338044_0001 running in uber mode : false
+2017-03-11 05:01:00,418 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 05:01:00,420 INFO org.apache.hadoop.mapreduce.Job: Job job_local1553338044_0001 failed with state FAILED due to: NA
+2017-03-11 05:01:00,435 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
+2017-03-11 05:04:44,882 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 05:04:45,593 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 05:04:45,593 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 05:04:46,184 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 05:04:46,199 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 05:04:46,277 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 05:04:46,636 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1689988440_0001
+2017-03-11 05:04:47,203 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 05:04:47,204 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1689988440_0001
+2017-03-11 05:04:47,207 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 05:04:47,224 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:47,237 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 05:04:47,394 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 05:04:47,396 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:47,476 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:47,511 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:47,515 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 05:04:47,713 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 05:04:47,713 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 05:04:47,713 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 05:04:47,713 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 05:04:47,713 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 05:04:47,718 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 05:04:48,208 INFO org.apache.hadoop.mapreduce.Job: Job job_local1689988440_0001 running in uber mode : false
+2017-03-11 05:04:48,209 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 05:04:48,259 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 05:04:48,266 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 05:04:48,266 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 05:04:48,266 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 69006658; bufvoid = 104857600
+2017-03-11 05:04:48,266 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22720392(90881568); length = 3494005/6553600
+2017-03-11 05:04:53,498 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454 > sort
+2017-03-11 05:04:54,259 INFO org.apache.hadoop.mapreduce.Job:  map 67% reduce 0%
+2017-03-11 05:04:54,793 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 05:04:56,504 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454 > sort
+2017-03-11 05:04:58,795 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 05:04:58,801 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 05:04:58,806 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 05:04:58,806 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_m_000000_0' done.
+2017-03-11 05:04:58,806 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:58,807 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 05:04:58,882 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 05:04:58,882 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000000_0
+2017-03-11 05:04:58,904 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:58,904 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:58,908 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@86b66eb
+2017-03-11 05:04:58,948 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:58,959 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,027 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 05:04:59,028 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,034 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,038 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,044 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,045 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,045 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,051 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,057 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,057 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,058 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,058 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,059 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,060 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,066 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,082 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 05:04:59,085 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 05:04:59,097 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,097 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000000_0 is allowed to commit now
+2017-03-11 05:04:59,099 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000000
+2017-03-11 05:04:59,100 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,100 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000000_0' done.
+2017-03-11 05:04:59,100 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000000_0
+2017-03-11 05:04:59,100 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000001_0
+2017-03-11 05:04:59,101 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,101 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,102 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@17649784
+2017-03-11 05:04:59,112 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,117 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,121 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,121 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,127 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,127 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,128 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,128 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,131 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,131 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,132 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,133 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,133 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,133 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,134 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,134 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,144 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000001_0 is done. And is in the process of committing
+2017-03-11 05:04:59,145 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,145 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000001_0 is allowed to commit now
+2017-03-11 05:04:59,146 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000001_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000001
+2017-03-11 05:04:59,146 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,147 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000001_0' done.
+2017-03-11 05:04:59,147 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000001_0
+2017-03-11 05:04:59,147 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000002_0
+2017-03-11 05:04:59,152 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,153 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,157 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3e1f95a9
+2017-03-11 05:04:59,160 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,173 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,181 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,187 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,188 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,188 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,189 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,189 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,193 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,194 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,194 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,194 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,194 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,194 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,195 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,195 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,204 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000002_0 is done. And is in the process of committing
+2017-03-11 05:04:59,205 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,205 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000002_0 is allowed to commit now
+2017-03-11 05:04:59,206 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000002_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000002
+2017-03-11 05:04:59,210 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,210 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000002_0' done.
+2017-03-11 05:04:59,210 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000002_0
+2017-03-11 05:04:59,210 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000003_0
+2017-03-11 05:04:59,221 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,221 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,222 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@75302bb6
+2017-03-11 05:04:59,222 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,223 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,227 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,228 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,238 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,238 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,239 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,240 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,242 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,242 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,243 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,244 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,244 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,244 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,245 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,245 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,261 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000003_0 is done. And is in the process of committing
+2017-03-11 05:04:59,264 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,264 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000003_0 is allowed to commit now
+2017-03-11 05:04:59,265 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000003_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000003
+2017-03-11 05:04:59,267 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,268 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000003_0' done.
+2017-03-11 05:04:59,272 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000003_0
+2017-03-11 05:04:59,273 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000004_0
+2017-03-11 05:04:59,279 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 05:04:59,280 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,281 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,282 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@596bb944
+2017-03-11 05:04:59,283 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,287 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,295 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,296 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,296 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,296 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,298 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,298 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,299 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,299 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,302 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,303 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,303 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,304 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,308 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,308 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,318 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000004_0 is done. And is in the process of committing
+2017-03-11 05:04:59,324 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,324 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000004_0 is allowed to commit now
+2017-03-11 05:04:59,325 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000004_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000004
+2017-03-11 05:04:59,326 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,326 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000004_0' done.
+2017-03-11 05:04:59,326 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000004_0
+2017-03-11 05:04:59,326 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000005_0
+2017-03-11 05:04:59,333 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,334 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,334 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1c7419bd
+2017-03-11 05:04:59,335 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,340 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,346 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,352 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,352 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,353 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,354 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,355 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,356 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,356 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,356 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,357 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,357 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,357 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,357 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,362 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,371 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000005_0 is done. And is in the process of committing
+2017-03-11 05:04:59,373 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,373 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000005_0 is allowed to commit now
+2017-03-11 05:04:59,376 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000005_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000005
+2017-03-11 05:04:59,380 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,380 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000005_0' done.
+2017-03-11 05:04:59,380 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000005_0
+2017-03-11 05:04:59,380 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000006_0
+2017-03-11 05:04:59,387 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,388 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,388 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@222c39b9
+2017-03-11 05:04:59,390 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,394 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,398 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,402 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,407 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,407 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,408 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,408 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,417 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,418 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,418 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,418 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,419 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,419 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,420 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,424 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,439 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000006_0 is done. And is in the process of committing
+2017-03-11 05:04:59,440 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,440 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000006_0 is allowed to commit now
+2017-03-11 05:04:59,440 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000006_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000006
+2017-03-11 05:04:59,441 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,442 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000006_0' done.
+2017-03-11 05:04:59,442 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000006_0
+2017-03-11 05:04:59,443 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000007_0
+2017-03-11 05:04:59,448 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,449 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,449 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5b7b6f08
+2017-03-11 05:04:59,451 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,458 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,463 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,464 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,464 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,464 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,465 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,465 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,466 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,466 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,467 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,470 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,471 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,471 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,472 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,473 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,492 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000007_0 is done. And is in the process of committing
+2017-03-11 05:04:59,498 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,498 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000007_0 is allowed to commit now
+2017-03-11 05:04:59,500 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000007_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000007
+2017-03-11 05:04:59,500 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,501 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000007_0' done.
+2017-03-11 05:04:59,501 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000007_0
+2017-03-11 05:04:59,501 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000008_0
+2017-03-11 05:04:59,502 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,502 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,502 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@36f7b10b
+2017-03-11 05:04:59,509 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,510 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,516 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,516 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,517 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,517 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,517 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,518 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,519 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,519 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,520 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,520 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,520 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,520 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,520 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,521 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,532 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000008_0 is done. And is in the process of committing
+2017-03-11 05:04:59,535 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,536 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000008_0 is allowed to commit now
+2017-03-11 05:04:59,538 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000008_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000008
+2017-03-11 05:04:59,553 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,553 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000008_0' done.
+2017-03-11 05:04:59,555 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000008_0
+2017-03-11 05:04:59,555 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000009_0
+2017-03-11 05:04:59,558 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,559 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,559 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@22d0ce11
+2017-03-11 05:04:59,560 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,561 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,573 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,573 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,574 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,574 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,575 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,575 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,578 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,578 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,579 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,579 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,579 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,579 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,580 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,582 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,598 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000009_0 is done. And is in the process of committing
+2017-03-11 05:04:59,599 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,599 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000009_0 is allowed to commit now
+2017-03-11 05:04:59,600 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000009_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000009
+2017-03-11 05:04:59,600 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,601 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000009_0' done.
+2017-03-11 05:04:59,601 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000009_0
+2017-03-11 05:04:59,601 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000010_0
+2017-03-11 05:04:59,602 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,602 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,602 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2455dbb6
+2017-03-11 05:04:59,611 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,618 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000010_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,621 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#11 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,625 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,629 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,630 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,630 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,630 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,633 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,633 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,633 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,634 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,635 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,635 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,635 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,636 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,654 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000010_0 is done. And is in the process of committing
+2017-03-11 05:04:59,655 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,656 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000010_0 is allowed to commit now
+2017-03-11 05:04:59,657 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000010_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000010
+2017-03-11 05:04:59,657 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,657 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000010_0' done.
+2017-03-11 05:04:59,661 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000010_0
+2017-03-11 05:04:59,662 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000011_0
+2017-03-11 05:04:59,665 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,666 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,667 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@657f6c43
+2017-03-11 05:04:59,673 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,676 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000011_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,688 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#12 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,688 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,688 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,689 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,689 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,690 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,691 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,692 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,692 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,692 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,692 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,693 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,693 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,694 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,703 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000011_0 is done. And is in the process of committing
+2017-03-11 05:04:59,704 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,704 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000011_0 is allowed to commit now
+2017-03-11 05:04:59,718 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000011_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000011
+2017-03-11 05:04:59,719 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,720 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000011_0' done.
+2017-03-11 05:04:59,720 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000011_0
+2017-03-11 05:04:59,720 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000012_0
+2017-03-11 05:04:59,725 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,726 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,726 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6e88a13e
+2017-03-11 05:04:59,730 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,738 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000012_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,742 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#13 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,743 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,743 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,743 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,743 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,743 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,745 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,745 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,746 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,746 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,746 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,746 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,747 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,749 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,761 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000012_0 is done. And is in the process of committing
+2017-03-11 05:04:59,762 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,762 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000012_0 is allowed to commit now
+2017-03-11 05:04:59,763 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000012_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000012
+2017-03-11 05:04:59,764 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,764 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000012_0' done.
+2017-03-11 05:04:59,764 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000012_0
+2017-03-11 05:04:59,764 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000013_0
+2017-03-11 05:04:59,766 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,766 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,766 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@33ed55b9
+2017-03-11 05:04:59,772 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,780 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000013_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,787 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#14 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,788 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,789 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,789 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,790 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,790 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,791 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,791 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,792 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,792 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,792 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,792 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,792 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,792 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,806 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000013_0 is done. And is in the process of committing
+2017-03-11 05:04:59,807 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,807 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000013_0 is allowed to commit now
+2017-03-11 05:04:59,808 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000013_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000013
+2017-03-11 05:04:59,809 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,809 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000013_0' done.
+2017-03-11 05:04:59,809 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000013_0
+2017-03-11 05:04:59,809 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000014_0
+2017-03-11 05:04:59,815 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,815 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,816 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1197c2dd
+2017-03-11 05:04:59,816 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,824 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000014_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,838 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#15 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,838 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,839 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,839 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,839 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,839 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,841 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,842 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,843 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,844 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,844 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,844 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,844 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,844 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,856 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000014_0 is done. And is in the process of committing
+2017-03-11 05:04:59,857 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,858 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000014_0 is allowed to commit now
+2017-03-11 05:04:59,858 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000014_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000014
+2017-03-11 05:04:59,859 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,859 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000014_0' done.
+2017-03-11 05:04:59,867 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000014_0
+2017-03-11 05:04:59,867 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000015_0
+2017-03-11 05:04:59,868 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,868 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,869 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@682c1f1b
+2017-03-11 05:04:59,869 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,885 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000015_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,887 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#16 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,888 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,888 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,888 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,889 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,889 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,894 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,894 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,896 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,896 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,896 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,896 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,897 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,897 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,928 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000015_0 is done. And is in the process of committing
+2017-03-11 05:04:59,929 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,929 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000015_0 is allowed to commit now
+2017-03-11 05:04:59,930 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000015_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000015
+2017-03-11 05:04:59,930 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,930 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000015_0' done.
+2017-03-11 05:04:59,931 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000015_0
+2017-03-11 05:04:59,931 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000016_0
+2017-03-11 05:04:59,940 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,941 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,941 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@792a89f4
+2017-03-11 05:04:59,945 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,951 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000016_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,960 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#17 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,963 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,965 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,965 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,966 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,966 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,968 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,969 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,969 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,970 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,970 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,970 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,971 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,972 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,992 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000016_0 is done. And is in the process of committing
+2017-03-11 05:04:59,993 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,993 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000016_0 is allowed to commit now
+2017-03-11 05:04:59,994 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000016_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000016
+2017-03-11 05:04:59,994 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,995 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000016_0' done.
+2017-03-11 05:04:59,995 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000016_0
+2017-03-11 05:04:59,995 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000017_0
+2017-03-11 05:05:00,000 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:00,001 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:00,001 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@10bd314a
+2017-03-11 05:05:00,006 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:00,016 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000017_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:00,025 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#18 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:00,026 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:00,027 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:00,027 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:00,027 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:00,027 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:00,029 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:00,029 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:00,030 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:00,030 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:00,030 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:00,030 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:00,030 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:00,031 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:00,039 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000017_0 is done. And is in the process of committing
+2017-03-11 05:05:00,040 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:00,040 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000017_0 is allowed to commit now
+2017-03-11 05:05:00,041 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000017_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000017
+2017-03-11 05:05:00,047 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:00,047 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000017_0' done.
+2017-03-11 05:05:00,047 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000017_0
+2017-03-11 05:05:00,050 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000018_0
+2017-03-11 05:05:00,058 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:00,058 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:00,059 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6944e8c1
+2017-03-11 05:05:00,060 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:00,067 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000018_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:00,181 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#19 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 70753664 len: 2672270 to MEMORY
+2017-03-11 05:05:00,479 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 70753664 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:00,479 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 70753664, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->70753664
+2017-03-11 05:05:00,480 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:00,480 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:00,480 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:00,481 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:00,481 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 70753646 bytes
+2017-03-11 05:05:01,283 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 36%
+2017-03-11 05:05:04,836 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 70753664 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:04,837 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2672278 bytes from disk
+2017-03-11 05:05:04,837 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:04,837 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:04,840 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 70753646 bytes
+2017-03-11 05:05:04,840 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:06,069 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:06,302 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 38%
+2017-03-11 05:05:08,242 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000018_0 is done. And is in the process of committing
+2017-03-11 05:05:08,243 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,243 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000018_0 is allowed to commit now
+2017-03-11 05:05:08,244 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000018_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000018
+2017-03-11 05:05:08,250 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,250 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000018_0' done.
+2017-03-11 05:05:08,250 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000018_0
+2017-03-11 05:05:08,250 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000019_0
+2017-03-11 05:05:08,259 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,260 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,260 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4c9534e6
+2017-03-11 05:05:08,276 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,277 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000019_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,286 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#20 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,289 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,289 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,300 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,300 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,301 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,301 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,302 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,304 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,304 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,306 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,307 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,307 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,308 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,316 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000019_0 is done. And is in the process of committing
+2017-03-11 05:05:08,317 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,317 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000019_0 is allowed to commit now
+2017-03-11 05:05:08,318 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000019_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000019
+2017-03-11 05:05:08,324 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,327 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000019_0' done.
+2017-03-11 05:05:08,327 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000019_0
+2017-03-11 05:05:08,327 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000020_0
+2017-03-11 05:05:08,328 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,329 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,329 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@417ef051
+2017-03-11 05:05:08,332 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,363 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000020_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,376 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#21 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,381 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,381 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,382 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,382 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,382 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,383 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,384 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,385 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,385 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,385 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,385 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,385 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,386 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,394 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000020_0 is done. And is in the process of committing
+2017-03-11 05:05:08,395 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,397 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000020_0 is allowed to commit now
+2017-03-11 05:05:08,399 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000020_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000020
+2017-03-11 05:05:08,405 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,405 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000020_0' done.
+2017-03-11 05:05:08,405 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000020_0
+2017-03-11 05:05:08,405 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000021_0
+2017-03-11 05:05:08,406 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,406 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,406 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@284c7ff4
+2017-03-11 05:05:08,414 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,415 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000021_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,417 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#22 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,424 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,424 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,424 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,425 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,425 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,426 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,426 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,427 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,427 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,427 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,427 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,427 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,429 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,431 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000021_0 is done. And is in the process of committing
+2017-03-11 05:05:08,432 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,433 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000021_0 is allowed to commit now
+2017-03-11 05:05:08,433 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000021_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000021
+2017-03-11 05:05:08,434 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,434 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000021_0' done.
+2017-03-11 05:05:08,435 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000021_0
+2017-03-11 05:05:08,435 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000022_0
+2017-03-11 05:05:08,440 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,440 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,440 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@37871111
+2017-03-11 05:05:08,445 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,450 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000022_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,452 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#23 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,459 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,460 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,462 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,463 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,463 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,464 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,464 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,464 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,465 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,465 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,465 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,465 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,465 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,474 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000022_0 is done. And is in the process of committing
+2017-03-11 05:05:08,476 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,476 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000022_0 is allowed to commit now
+2017-03-11 05:05:08,477 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000022_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000022
+2017-03-11 05:05:08,478 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,478 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000022_0' done.
+2017-03-11 05:05:08,478 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000022_0
+2017-03-11 05:05:08,478 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000023_0
+2017-03-11 05:05:08,483 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,483 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,483 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6bd1f8d3
+2017-03-11 05:05:08,484 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,494 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000023_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,506 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#24 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,507 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,507 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,508 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,508 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,508 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,509 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,509 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,510 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,510 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,510 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,510 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,511 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,511 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,528 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000023_0 is done. And is in the process of committing
+2017-03-11 05:05:08,529 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,529 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000023_0 is allowed to commit now
+2017-03-11 05:05:08,534 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000023_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000023
+2017-03-11 05:05:08,535 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,536 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000023_0' done.
+2017-03-11 05:05:08,536 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000023_0
+2017-03-11 05:05:08,536 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000024_0
+2017-03-11 05:05:08,550 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,551 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,551 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@36d85d0d
+2017-03-11 05:05:08,552 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,564 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000024_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,568 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#25 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,568 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,568 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,576 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,577 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,577 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,578 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,578 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,579 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,579 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,579 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,579 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,580 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,580 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,593 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000024_0 is done. And is in the process of committing
+2017-03-11 05:05:08,593 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,594 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000024_0 is allowed to commit now
+2017-03-11 05:05:08,594 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000024_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000024
+2017-03-11 05:05:08,595 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,595 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000024_0' done.
+2017-03-11 05:05:08,595 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000024_0
+2017-03-11 05:05:08,595 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000025_0
+2017-03-11 05:05:08,600 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,600 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,601 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7d317302
+2017-03-11 05:05:08,607 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,616 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000025_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,624 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#26 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,625 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,625 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,625 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,626 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,626 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,627 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,628 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,628 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,628 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,628 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,628 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,629 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,629 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,644 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000025_0 is done. And is in the process of committing
+2017-03-11 05:05:08,648 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,648 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000025_0 is allowed to commit now
+2017-03-11 05:05:08,649 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000025_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000025
+2017-03-11 05:05:08,658 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,658 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000025_0' done.
+2017-03-11 05:05:08,658 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000025_0
+2017-03-11 05:05:08,661 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000026_0
+2017-03-11 05:05:08,670 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,670 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,670 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3956ed04
+2017-03-11 05:05:08,671 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,678 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000026_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,687 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#27 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,688 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,692 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,693 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,693 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,693 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,694 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,694 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,695 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,695 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,695 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,695 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,707 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,707 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,715 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000026_0 is done. And is in the process of committing
+2017-03-11 05:05:08,716 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,716 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000026_0 is allowed to commit now
+2017-03-11 05:05:08,721 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000026_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000026
+2017-03-11 05:05:08,721 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,730 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000026_0' done.
+2017-03-11 05:05:08,730 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000026_0
+2017-03-11 05:05:08,730 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000027_0
+2017-03-11 05:05:08,735 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,736 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,737 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3d7bcc32
+2017-03-11 05:05:08,744 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,748 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000027_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,765 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#28 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,765 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,767 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,767 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,769 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,770 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,770 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,771 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,771 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,771 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,771 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,772 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,772 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,772 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,785 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000027_0 is done. And is in the process of committing
+2017-03-11 05:05:08,786 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,787 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000027_0 is allowed to commit now
+2017-03-11 05:05:08,788 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000027_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000027
+2017-03-11 05:05:08,790 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,791 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000027_0' done.
+2017-03-11 05:05:08,791 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000027_0
+2017-03-11 05:05:08,791 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000028_0
+2017-03-11 05:05:08,799 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,800 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,800 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@60cfbf92
+2017-03-11 05:05:08,806 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,816 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000028_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,822 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#29 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,825 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,825 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,826 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,826 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,826 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,827 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,831 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,832 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,835 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,835 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,835 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,836 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,839 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,845 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000028_0 is done. And is in the process of committing
+2017-03-11 05:05:08,846 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,846 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000028_0 is allowed to commit now
+2017-03-11 05:05:08,847 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000028_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000028
+2017-03-11 05:05:08,848 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,848 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000028_0' done.
+2017-03-11 05:05:08,848 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000028_0
+2017-03-11 05:05:08,848 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000029_0
+2017-03-11 05:05:08,853 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,853 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,854 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4cf8744c
+2017-03-11 05:05:08,854 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,869 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000029_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,875 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#30 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,877 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,878 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,878 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,879 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,879 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,880 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,880 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,881 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,881 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,881 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,881 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,882 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,882 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,894 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000029_0 is done. And is in the process of committing
+2017-03-11 05:05:08,899 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,901 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000029_0 is allowed to commit now
+2017-03-11 05:05:08,905 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000029_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000029
+2017-03-11 05:05:08,906 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,906 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000029_0' done.
+2017-03-11 05:05:08,906 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000029_0
+2017-03-11 05:05:08,909 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000030_0
+2017-03-11 05:05:08,910 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,911 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,911 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6786091b
+2017-03-11 05:05:08,911 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,935 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000030_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,937 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#31 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,945 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,952 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,956 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,956 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,957 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,957 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,958 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,959 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,959 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,959 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,959 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,959 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,960 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,967 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000030_0 is done. And is in the process of committing
+2017-03-11 05:05:08,967 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,968 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000030_0 is allowed to commit now
+2017-03-11 05:05:08,968 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000030_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000030
+2017-03-11 05:05:08,969 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,969 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000030_0' done.
+2017-03-11 05:05:08,971 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000030_0
+2017-03-11 05:05:08,971 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000031_0
+2017-03-11 05:05:08,977 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,978 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,978 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@149675a1
+2017-03-11 05:05:08,989 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,998 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000031_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,000 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#32 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,001 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,001 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,001 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,002 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,003 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,005 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,006 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,007 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,009 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,009 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,009 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,009 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,009 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,018 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000031_0 is done. And is in the process of committing
+2017-03-11 05:05:09,019 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,019 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000031_0 is allowed to commit now
+2017-03-11 05:05:09,020 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000031_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000031
+2017-03-11 05:05:09,020 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,020 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000031_0' done.
+2017-03-11 05:05:09,020 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000031_0
+2017-03-11 05:05:09,021 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000032_0
+2017-03-11 05:05:09,027 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,027 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,028 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@68d4fa15
+2017-03-11 05:05:09,028 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,033 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000032_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,035 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#33 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,040 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,043 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,043 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,043 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,044 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,044 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,046 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,046 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,046 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,046 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,046 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,047 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,048 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,055 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000032_0 is done. And is in the process of committing
+2017-03-11 05:05:09,057 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,057 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000032_0 is allowed to commit now
+2017-03-11 05:05:09,057 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000032_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000032
+2017-03-11 05:05:09,058 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,059 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000032_0' done.
+2017-03-11 05:05:09,059 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000032_0
+2017-03-11 05:05:09,059 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000033_0
+2017-03-11 05:05:09,063 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,065 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,065 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@37420057
+2017-03-11 05:05:09,066 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,072 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000033_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,073 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#34 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,073 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,076 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,081 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,082 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,083 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,085 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,085 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,086 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,086 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,086 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,086 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,086 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,087 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,111 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000033_0 is done. And is in the process of committing
+2017-03-11 05:05:09,114 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,115 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000033_0 is allowed to commit now
+2017-03-11 05:05:09,116 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000033_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000033
+2017-03-11 05:05:09,124 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,124 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000033_0' done.
+2017-03-11 05:05:09,124 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000033_0
+2017-03-11 05:05:09,124 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000034_0
+2017-03-11 05:05:09,129 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,130 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,131 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2950ece0
+2017-03-11 05:05:09,133 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,147 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000034_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,155 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#35 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,157 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,157 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,157 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,158 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,158 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,158 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,159 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,159 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,159 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,159 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,159 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,160 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,160 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,172 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000034_0 is done. And is in the process of committing
+2017-03-11 05:05:09,174 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,174 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000034_0 is allowed to commit now
+2017-03-11 05:05:09,183 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000034_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000034
+2017-03-11 05:05:09,187 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,187 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000034_0' done.
+2017-03-11 05:05:09,187 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000034_0
+2017-03-11 05:05:09,187 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000035_0
+2017-03-11 05:05:09,191 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,191 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,191 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@24026868
+2017-03-11 05:05:09,195 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,204 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000035_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,208 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#36 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,208 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,208 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,209 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,209 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,209 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,210 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,219 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,219 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,219 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,220 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,220 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,220 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,220 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,235 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000035_0 is done. And is in the process of committing
+2017-03-11 05:05:09,239 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,240 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000035_0 is allowed to commit now
+2017-03-11 05:05:09,240 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000035_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000035
+2017-03-11 05:05:09,241 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,246 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000035_0' done.
+2017-03-11 05:05:09,246 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000035_0
+2017-03-11 05:05:09,246 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000036_0
+2017-03-11 05:05:09,255 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,256 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,256 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@68fd6f7
+2017-03-11 05:05:09,257 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,260 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000036_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,266 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#37 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,274 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,274 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,274 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,275 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,275 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,275 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,276 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,276 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,276 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,276 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,276 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,277 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,282 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,287 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000036_0 is done. And is in the process of committing
+2017-03-11 05:05:09,288 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,288 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000036_0 is allowed to commit now
+2017-03-11 05:05:09,288 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000036_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000036
+2017-03-11 05:05:09,293 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,293 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000036_0' done.
+2017-03-11 05:05:09,293 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000036_0
+2017-03-11 05:05:09,293 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000037_0
+2017-03-11 05:05:09,304 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,306 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 05:05:09,308 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,308 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@644f2668
+2017-03-11 05:05:09,309 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,309 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000037_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,320 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#38 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,322 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,322 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,322 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,322 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,322 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,323 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,324 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,324 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,324 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,324 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,324 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,325 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,325 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,336 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000037_0 is done. And is in the process of committing
+2017-03-11 05:05:09,337 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,338 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000037_0 is allowed to commit now
+2017-03-11 05:05:09,338 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000037_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000037
+2017-03-11 05:05:09,344 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,344 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000037_0' done.
+2017-03-11 05:05:09,344 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000037_0
+2017-03-11 05:05:09,344 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000038_0
+2017-03-11 05:05:09,350 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,351 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,351 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@69348fa3
+2017-03-11 05:05:09,355 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,360 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000038_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,367 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#39 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,367 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,375 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,375 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,376 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,376 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,377 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,377 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,378 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,378 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,378 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,378 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,379 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,379 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,381 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000038_0 is done. And is in the process of committing
+2017-03-11 05:05:09,382 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,382 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000038_0 is allowed to commit now
+2017-03-11 05:05:09,382 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000038_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000038
+2017-03-11 05:05:09,383 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,383 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000038_0' done.
+2017-03-11 05:05:09,383 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000038_0
+2017-03-11 05:05:09,383 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000039_0
+2017-03-11 05:05:09,384 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,384 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,385 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3e7b0986
+2017-03-11 05:05:09,385 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,394 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000039_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,397 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#40 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,397 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,398 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,398 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,398 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,399 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,399 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,400 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,400 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,400 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,400 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,400 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,401 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,401 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,407 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000039_0 is done. And is in the process of committing
+2017-03-11 05:05:09,408 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,408 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000039_0 is allowed to commit now
+2017-03-11 05:05:09,409 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000039_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000039
+2017-03-11 05:05:09,409 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,421 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000039_0' done.
+2017-03-11 05:05:09,421 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000039_0
+2017-03-11 05:05:09,422 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000040_0
+2017-03-11 05:05:09,428 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,428 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,428 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5e71a70b
+2017-03-11 05:05:09,431 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,437 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000040_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,442 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#41 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,448 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,449 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,449 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,450 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,450 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,451 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,452 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,452 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,452 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,452 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,452 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,453 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,453 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,466 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000040_0 is done. And is in the process of committing
+2017-03-11 05:05:09,467 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,468 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000040_0 is allowed to commit now
+2017-03-11 05:05:09,469 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000040_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000040
+2017-03-11 05:05:09,469 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,469 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000040_0' done.
+2017-03-11 05:05:09,469 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000040_0
+2017-03-11 05:05:09,469 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000041_0
+2017-03-11 05:05:09,470 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,471 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,471 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@170287bf
+2017-03-11 05:05:09,471 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,486 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000041_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,499 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#42 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,499 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,504 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,504 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,505 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,505 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,505 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,506 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,506 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,507 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,508 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,508 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,508 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,509 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,536 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000041_0 is done. And is in the process of committing
+2017-03-11 05:05:09,537 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,537 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000041_0 is allowed to commit now
+2017-03-11 05:05:09,540 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000041_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000041
+2017-03-11 05:05:09,540 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,540 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000041_0' done.
+2017-03-11 05:05:09,540 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000041_0
+2017-03-11 05:05:09,540 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000042_0
+2017-03-11 05:05:09,545 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,547 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,547 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4abfa6aa
+2017-03-11 05:05:09,550 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,561 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000042_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,563 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#43 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,571 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,572 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,575 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,575 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,576 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,576 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,576 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,577 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,577 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,577 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,577 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,577 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,578 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,582 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000042_0 is done. And is in the process of committing
+2017-03-11 05:05:09,583 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,583 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000042_0 is allowed to commit now
+2017-03-11 05:05:09,584 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000042_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000042
+2017-03-11 05:05:09,584 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,584 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000042_0' done.
+2017-03-11 05:05:09,584 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000042_0
+2017-03-11 05:05:09,584 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000043_0
+2017-03-11 05:05:09,595 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,595 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,595 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@41e69be8
+2017-03-11 05:05:09,605 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,611 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000043_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,618 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#44 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,618 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,624 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,625 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,625 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,625 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,627 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,627 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,629 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,629 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,629 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,629 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,629 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,633 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,646 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000043_0 is done. And is in the process of committing
+2017-03-11 05:05:09,646 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,647 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000043_0 is allowed to commit now
+2017-03-11 05:05:09,650 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000043_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000043
+2017-03-11 05:05:09,655 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,656 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000043_0' done.
+2017-03-11 05:05:09,656 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000043_0
+2017-03-11 05:05:09,656 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000044_0
+2017-03-11 05:05:09,657 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,657 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,657 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2ff68a99
+2017-03-11 05:05:09,665 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,667 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000044_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,668 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#45 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,668 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,668 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,673 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,674 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,674 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,675 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,675 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,675 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,675 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,675 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,675 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,676 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,676 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,685 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000044_0 is done. And is in the process of committing
+2017-03-11 05:05:09,685 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,685 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000044_0 is allowed to commit now
+2017-03-11 05:05:09,692 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000044_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000044
+2017-03-11 05:05:09,693 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,693 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000044_0' done.
+2017-03-11 05:05:09,693 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000044_0
+2017-03-11 05:05:09,693 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000045_0
+2017-03-11 05:05:09,694 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,697 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,697 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5d2abe5d
+2017-03-11 05:05:09,698 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,706 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000045_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,708 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#46 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,709 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,709 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,709 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,710 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,710 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,712 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,713 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,713 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,713 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,713 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,713 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,714 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,717 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,722 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000045_0 is done. And is in the process of committing
+2017-03-11 05:05:09,723 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,723 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000045_0 is allowed to commit now
+2017-03-11 05:05:09,723 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000045_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000045
+2017-03-11 05:05:09,724 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,724 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000045_0' done.
+2017-03-11 05:05:09,724 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000045_0
+2017-03-11 05:05:09,724 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000046_0
+2017-03-11 05:05:09,725 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,725 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,725 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6ab39ec9
+2017-03-11 05:05:09,731 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,735 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000046_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,738 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#47 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,751 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,751 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,752 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,752 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,752 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,753 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,753 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,753 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,754 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,754 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,754 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,754 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,755 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,760 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000046_0 is done. And is in the process of committing
+2017-03-11 05:05:09,761 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,761 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000046_0 is allowed to commit now
+2017-03-11 05:05:09,762 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000046_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000046
+2017-03-11 05:05:09,762 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,762 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000046_0' done.
+2017-03-11 05:05:09,762 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000046_0
+2017-03-11 05:05:09,763 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000047_0
+2017-03-11 05:05:09,767 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,768 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,768 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6e6fdcaf
+2017-03-11 05:05:09,769 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,773 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000047_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,783 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#48 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,784 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,786 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,786 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,787 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,787 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,787 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,787 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,788 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,788 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,788 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,788 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,788 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,789 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,795 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000047_0 is done. And is in the process of committing
+2017-03-11 05:05:09,795 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,795 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000047_0 is allowed to commit now
+2017-03-11 05:05:09,796 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000047_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000047
+2017-03-11 05:05:09,797 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,797 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000047_0' done.
+2017-03-11 05:05:09,797 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000047_0
+2017-03-11 05:05:09,797 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000048_0
+2017-03-11 05:05:09,803 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,804 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,804 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7fecdc36
+2017-03-11 05:05:09,804 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,805 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000048_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,808 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#49 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,809 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,809 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,813 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,814 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,814 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,815 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,815 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,815 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,815 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,815 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,815 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,816 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,816 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,819 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000048_0 is done. And is in the process of committing
+2017-03-11 05:05:09,819 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,820 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000048_0 is allowed to commit now
+2017-03-11 05:05:09,820 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000048_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000048
+2017-03-11 05:05:09,821 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,821 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000048_0' done.
+2017-03-11 05:05:09,821 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000048_0
+2017-03-11 05:05:09,822 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000049_0
+2017-03-11 05:05:09,827 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,827 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,827 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7993bafc
+2017-03-11 05:05:09,828 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,834 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000049_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,836 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#50 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,837 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,837 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,837 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,838 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,838 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,839 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,839 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,839 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,847 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,847 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,848 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,848 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,848 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,868 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000049_0 is done. And is in the process of committing
+2017-03-11 05:05:09,869 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,869 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000049_0 is allowed to commit now
+2017-03-11 05:05:09,870 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000049_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000049
+2017-03-11 05:05:09,871 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,871 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000049_0' done.
+2017-03-11 05:05:09,871 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000049_0
+2017-03-11 05:05:09,871 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 05:05:10,307 INFO org.apache.hadoop.mapreduce.Job: Job job_local1689988440_0001 completed successfully
+2017-03-11 05:05:10,494 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
 	File System Counters
-		FILE: Number of bytes read=16657772
-		FILE: Number of bytes written=12451545
+		FILE: Number of bytes read=175845044
+		FILE: Number of bytes written=235476648
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=115105
-		Map output bytes=4776007
-		Map output materialized bytes=2376884
-		Input split bytes=120
+		Map input records=7
+		Map output records=873502
+		Map output bytes=69006658
+		Map output materialized bytes=2672956
+		Input split bytes=122
 		Combine input records=0
 		Combine output records=0
-		Reduce input groups=115105
-		Reduce shuffle bytes=2376884
-		Reduce input records=115105
-		Reduce output records=115105
-		Spilled Records=230210
-		Shuffled Maps =1
+		Reduce input groups=873481
+		Reduce shuffle bytes=2672956
+		Reduce input records=873502
+		Reduce output records=1
+		Spilled Records=1747004
+		Shuffled Maps =50
 		Failed Shuffles=0
-		Merged Map outputs=1
-		GC time elapsed (ms)=140
+		Merged Map outputs=50
+		GC time elapsed (ms)=169
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
-		Total committed heap usage (bytes)=331227136
+		Total committed heap usage (bytes)=9956241408
 	Shuffle Errors
 		BAD_ID=0
 		CONNECTION=0
@@ -1322,108 +4110,122 @@ Caused by: java.lang.NullPointerException
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=454
 	File Output Format Counters 
-		Bytes Written=4789771
-2017-03-10 14:54:30,023 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:54:30,544 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:54:30,565 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:54:31,208 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:54:31,222 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:54:31,311 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:54:31,680 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local520002813_0001
-2017-03-10 14:54:32,173 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:54:32,174 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local520002813_0001
-2017-03-10 14:54:32,184 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:54:32,195 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:54:32,197 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:54:32,352 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:54:32,354 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local520002813_0001_m_000000_0
-2017-03-10 14:54:32,427 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:54:32,457 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:54:32,463 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:54:32,548 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:54:32,549 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:54:32,550 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:54:32,550 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:54:32,550 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:54:32,559 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:54:33,178 INFO org.apache.hadoop.mapreduce.Job: Job job_local520002813_0001 running in uber mode : false
-2017-03-10 14:54:33,179 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 5541084; bufvoid = 104857600
-2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25753980(103015920); length = 460417/6553600
-2017-03-10 14:54:36,206 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:54:37,261 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:54:37,264 INFO org.apache.hadoop.mapred.Task: Task:attempt_local520002813_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:54:37,275 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:54:37,280 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local520002813_0001_m_000000_0' done.
-2017-03-10 14:54:37,280 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local520002813_0001_m_000000_0
-2017-03-10 14:54:37,280 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:54:37,284 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:54:37,284 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local520002813_0001_r_000000_0
-2017-03-10 14:54:37,289 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:54:37,289 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:54:37,293 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@22c4d1da
-2017-03-10 14:54:37,303 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:54:37,312 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local520002813_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:54:37,342 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:54:37,342 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local520002813_0001_m_000000_0 decomp: 5771296 len: 2500118 to MEMORY
-2017-03-10 14:54:37,405 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5771296 bytes from map-output for attempt_local520002813_0001_m_000000_0
-2017-03-10 14:54:37,408 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5771296, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5771296
-2017-03-10 14:54:37,410 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:54:37,410 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:54:37,411 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:54:37,420 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:54:37,420 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
-2017-03-10 14:54:38,188 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:54:38,209 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5771296 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:54:38,209 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2500126 bytes from disk
-2017-03-10 14:54:38,210 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:54:38,210 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:54:38,210 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
-2017-03-10 14:54:38,211 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:54:38,219 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:54:39,174 INFO org.apache.hadoop.mapred.Task: Task:attempt_local520002813_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:54:39,181 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:54:39,181 INFO org.apache.hadoop.mapred.Task: Task attempt_local520002813_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:54:39,187 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local520002813_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local520002813_0001_r_000000
-2017-03-10 14:54:39,188 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:54:39,188 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local520002813_0001_r_000000_0' done.
-2017-03-10 14:54:39,188 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local520002813_0001_r_000000_0
-2017-03-10 14:54:39,188 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:54:39,190 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:54:40,191 INFO org.apache.hadoop.mapreduce.Job: Job job_local520002813_0001 completed successfully
-2017-03-10 14:54:40,203 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
+		Bytes Written=412
+	similarity.NaiveApproach$CompCounter
+		NUM=21
+2017-03-11 05:15:56,663 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 05:15:59,556 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 05:15:59,585 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 05:16:01,136 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 05:16:01,188 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 05:16:01,385 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 05:16:02,358 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1188539971_0001
+2017-03-11 05:16:03,356 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 05:16:03,357 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1188539971_0001
+2017-03-11 05:16:03,363 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 05:16:03,396 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:16:03,405 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 05:16:03,718 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 05:16:03,719 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1188539971_0001_m_000000_0
+2017-03-11 05:16:03,880 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:16:03,951 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:16:03,956 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447
+2017-03-11 05:16:04,364 INFO org.apache.hadoop.mapreduce.Job: Job job_local1188539971_0001 running in uber mode : false
+2017-03-11 05:16:04,377 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 05:16:04,622 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 05:16:04,622 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 05:16:04,622 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 05:16:04,622 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 05:16:04,622 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 05:16:04,651 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 05:16:05,498 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 05:16:05,501 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 05:16:05,501 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 05:16:05,501 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 68133156; bufvoid = 104857600
+2017-03-11 05:16:05,501 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22720392(90881568); length = 3494005/6553600
+2017-03-11 05:16:10,017 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 05:16:10,425 INFO org.apache.hadoop.mapreduce.Job:  map 67% reduce 0%
+2017-03-11 05:16:13,020 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 05:16:16,024 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 05:16:18,089 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 05:16:19,040 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 05:16:22,042 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 05:16:25,467 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 05:16:25,495 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1188539971_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 05:16:25,505 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447
+2017-03-11 05:16:25,505 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1188539971_0001_m_000000_0' done.
+2017-03-11 05:16:25,506 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1188539971_0001_m_000000_0
+2017-03-11 05:16:25,509 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 05:16:25,529 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 05:16:25,529 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1188539971_0001_r_000000_0
+2017-03-11 05:16:25,562 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:16:25,563 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:16:25,583 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7bfaa588
+2017-03-11 05:16:25,662 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:16:25,688 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1188539971_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:16:25,901 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 05:16:25,902 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1188539971_0001_m_000000_0 decomp: 69880162 len: 2722285 to MEMORY
+2017-03-11 05:16:26,317 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 69880162 bytes from map-output for attempt_local1188539971_0001_m_000000_0
+2017-03-11 05:16:26,340 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 69880162, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->69880162
+2017-03-11 05:16:26,345 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:16:26,347 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:16:26,347 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:16:26,375 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:16:26,376 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 69880144 bytes
+2017-03-11 05:16:26,461 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-11 05:16:31,564 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-03-11 05:16:32,471 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 56%
+2017-03-11 05:16:33,512 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 69880162 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:16:33,514 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2722293 bytes from disk
+2017-03-11 05:16:33,528 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:16:33,528 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:16:33,529 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 69880144 bytes
+2017-03-11 05:16:33,529 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-03-11 05:16:33,574 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 05:16:34,582 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:16:35,475 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 68%
+2017-03-11 05:16:37,584 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:16:38,482 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 91%
+2017-03-11 05:16:38,777 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1188539971_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 05:16:38,778 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:16:38,778 INFO org.apache.hadoop.mapred.Task: Task attempt_local1188539971_0001_r_000000_0 is allowed to commit now
+2017-03-11 05:16:38,789 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1188539971_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1188539971_0001_r_000000
+2017-03-11 05:16:38,792 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:16:38,793 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1188539971_0001_r_000000_0' done.
+2017-03-11 05:16:38,793 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1188539971_0001_r_000000_0
+2017-03-11 05:16:38,793 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 05:16:39,492 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 05:16:39,492 INFO org.apache.hadoop.mapreduce.Job: Job job_local1188539971_0001 completed successfully
+2017-03-11 05:16:39,548 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
 	File System Counters
-		FILE: Number of bytes read=16904240
-		FILE: Number of bytes written=13589484
+		FILE: Number of bytes read=5445872
+		FILE: Number of bytes written=8698043
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=115105
-		Map output bytes=5541084
-		Map output materialized bytes=2500118
-		Input split bytes=120
+		Map input records=7
+		Map output records=873502
+		Map output bytes=68133156
+		Map output materialized bytes=2722285
+		Input split bytes=122
 		Combine input records=0
 		Combine output records=0
-		Reduce input groups=115105
-		Reduce shuffle bytes=2500118
-		Reduce input records=115105
-		Reduce output records=115105
-		Spilled Records=230210
+		Reduce input groups=873481
+		Reduce shuffle bytes=2722285
+		Reduce input records=873502
+		Reduce output records=1
+		Spilled Records=1747004
 		Shuffled Maps =1
 		Failed Shuffles=0
 		Merged Map outputs=1
-		GC time elapsed (ms)=124
+		GC time elapsed (ms)=250
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
-		Total committed heap usage (bytes)=331227136
+		Total committed heap usage (bytes)=378413056
 	Shuffle Errors
 		BAD_ID=0
 		CONNECTION=0
@@ -1432,110 +4234,869 @@ Caused by: java.lang.NullPointerException
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=447
 	File Output Format Counters 
-		Bytes Written=5560824
-	similarity.WordSort$DocLineCounter
-		NUM=124787
-2017-03-10 14:58:53,626 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:58:54,092 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:58:54,097 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:58:54,564 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:58:54,572 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:58:54,619 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:58:54,850 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local544686432_0001
-2017-03-10 14:58:55,274 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:58:55,275 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local544686432_0001
-2017-03-10 14:58:55,284 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:58:55,298 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:58:55,301 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:58:55,409 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:58:55,411 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local544686432_0001_m_000000_0
-2017-03-10 14:58:55,473 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:58:55,485 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:58:55,487 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:58:55,566 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:58:55,567 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:58:55,567 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:58:55,567 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:58:55,567 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:58:55,569 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:58:56,279 INFO org.apache.hadoop.mapreduce.Job: Job job_local544686432_0001 running in uber mode : false
-2017-03-10 14:58:56,280 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:58:58,471 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:58:58,472 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:58:58,472 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:58:58,472 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 5541084; bufvoid = 104857600
-2017-03-10 14:58:58,472 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25753980(103015920); length = 460417/6553600
-2017-03-10 14:58:58,671 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:58:59,336 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:58:59,339 INFO org.apache.hadoop.mapred.Task: Task:attempt_local544686432_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:58:59,344 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:58:59,344 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local544686432_0001_m_000000_0' done.
-2017-03-10 14:58:59,344 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local544686432_0001_m_000000_0
-2017-03-10 14:58:59,345 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:58:59,347 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:58:59,348 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local544686432_0001_r_000000_0
-2017-03-10 14:58:59,353 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:58:59,353 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:58:59,355 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@8a93430
-2017-03-10 14:58:59,364 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:58:59,370 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local544686432_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:58:59,395 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:58:59,395 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local544686432_0001_m_000000_0 decomp: 5771296 len: 2405517 to MEMORY
-2017-03-10 14:58:59,447 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5771296 bytes from map-output for attempt_local544686432_0001_m_000000_0
-2017-03-10 14:58:59,447 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5771296, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5771296
-2017-03-10 14:58:59,448 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:58:59,450 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:58:59,450 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:58:59,454 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:58:59,455 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
-2017-03-10 14:59:00,203 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5771296 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:59:00,203 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2405525 bytes from disk
-2017-03-10 14:59:00,204 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:59:00,204 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:59:00,205 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
-2017-03-10 14:59:00,205 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:59:00,210 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:59:00,290 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:59:00,890 INFO org.apache.hadoop.mapred.Task: Task:attempt_local544686432_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:59:00,895 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:59:00,895 INFO org.apache.hadoop.mapred.Task: Task attempt_local544686432_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:59:00,896 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local544686432_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local544686432_0001_r_000000
-2017-03-10 14:59:00,897 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:59:00,897 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local544686432_0001_r_000000_0' done.
-2017-03-10 14:59:00,901 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local544686432_0001_r_000000_0
-2017-03-10 14:59:00,901 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:59:01,290 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:59:01,291 INFO org.apache.hadoop.mapreduce.Job: Job job_local544686432_0001 completed successfully
-2017-03-10 14:59:01,312 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
+		Bytes Written=20
+	similarity.NaiveApproach$CompCounter
+		NUM=21
+2017-03-11 06:34:28,878 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 06:34:31,481 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 06:34:31,482 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 06:34:32,242 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 06:34:32,254 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 06:34:32,409 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 06:34:33,014 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local123106002_0001
+2017-03-11 06:34:33,807 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 06:34:33,808 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local123106002_0001
+2017-03-11 06:34:33,812 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 06:34:33,833 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:34:33,850 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 06:34:34,074 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 06:34:34,075 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local123106002_0001_m_000000_0
+2017-03-11 06:34:34,230 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:34:34,304 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:34:34,307 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447
+2017-03-11 06:34:34,676 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:34:34,676 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:34:34,676 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:34:34,677 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:34:34,678 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:34:34,704 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:34:34,825 INFO org.apache.hadoop.mapreduce.Job: Job job_local123106002_0001 running in uber mode : false
+2017-03-11 06:34:34,830 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 06:34:35,810 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:34:35,810 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:34:35,810 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 06:34:35,810 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 68133156; bufvoid = 104857600
+2017-03-11 06:34:35,810 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22720392(90881568); length = 3494005/6553600
+2017-03-11 06:45:21,439 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 06:45:23,459 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 06:45:23,466 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 06:45:24,206 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 06:45:24,225 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 06:45:24,358 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 06:45:24,923 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local271795742_0001
+2017-03-11 06:45:25,694 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 06:45:25,695 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local271795742_0001
+2017-03-11 06:45:25,702 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 06:45:25,715 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:45:25,734 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 06:45:25,966 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 06:45:25,971 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271795742_0001_m_000000_0
+2017-03-11 06:45:26,116 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:45:26,170 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:45:26,173 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447
+2017-03-11 06:45:26,540 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:45:26,540 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:45:26,540 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:45:26,541 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:45:26,541 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:45:26,563 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:45:26,712 INFO org.apache.hadoop.mapreduce.Job: Job job_local271795742_0001 running in uber mode : false
+2017-03-11 06:45:26,715 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 06:45:27,336 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:45:27,336 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:45:27,336 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 06:45:27,336 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 68133156; bufvoid = 104857600
+2017-03-11 06:45:27,337 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22720392(90881568); length = 3494005/6553600
+2017-03-11 06:45:32,174 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 06:45:32,731 INFO org.apache.hadoop.mapreduce.Job:  map 67% reduce 0%
+2017-03-11 06:45:35,198 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 06:48:34,358 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 06:48:36,420 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 06:48:36,423 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 06:48:36,486 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
+2017-03-11 06:48:47,258 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 06:48:49,178 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 06:48:49,194 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 06:48:49,953 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 06:48:49,982 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 06:48:50,130 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:23
+2017-03-11 06:48:50,734 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local768169073_0001
+2017-03-11 06:48:51,469 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 06:48:51,470 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local768169073_0001
+2017-03-11 06:48:51,483 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 06:48:51,514 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:48:51,539 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 06:48:51,866 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 06:48:51,868 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local768169073_0001_m_000000_0
+2017-03-11 06:48:51,980 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:48:52,036 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:48:52,044 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+20
+2017-03-11 06:48:52,442 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:48:52,443 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:48:52,443 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:48:52,443 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:48:52,443 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:48:52,462 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:48:52,498 INFO org.apache.hadoop.mapreduce.Job: Job job_local768169073_0001 running in uber mode : false
+2017-03-11 06:48:52,501 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 06:48:52,897 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:48:52,902 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:48:52,902 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 06:48:52,902 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 10606810; bufvoid = 104857600
+2017-03-11 06:48:52,902 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 06:48:54,008 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 06:48:54,942 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 06:48:54,955 INFO org.apache.hadoop.mapred.Task: Task:attempt_local768169073_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 06:48:54,980 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+20
+2017-03-11 06:48:54,980 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local768169073_0001_m_000000_0' done.
+2017-03-11 06:48:54,980 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local768169073_0001_m_000000_0
+2017-03-11 06:48:54,981 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local768169073_0001_m_000001_0
+2017-03-11 06:48:54,986 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:48:54,987 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:48:54,988 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:20+20
+2017-03-11 06:48:55,124 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:48:55,130 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:48:55,130 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:48:55,130 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:48:55,130 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:48:55,138 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:48:55,143 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:48:55,149 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:48:55,160 INFO org.apache.hadoop.mapred.Task: Task:attempt_local768169073_0001_m_000001_0 is done. And is in the process of committing
+2017-03-11 06:48:55,162 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:20+20
+2017-03-11 06:48:55,163 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local768169073_0001_m_000001_0' done.
+2017-03-11 06:48:55,163 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local768169073_0001_m_000001_0
+2017-03-11 06:48:55,163 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local768169073_0001_m_000002_0
+2017-03-11 06:48:55,166 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:48:55,166 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:48:55,168 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:40+20
+2017-03-11 06:48:55,290 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:48:55,290 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:48:55,290 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:48:55,291 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:48:55,291 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:48:55,299 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:48:55,301 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:48:55,301 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:48:55,313 INFO org.apache.hadoop.mapred.Task: Task:attempt_local768169073_0001_m_000002_0 is done. And is in the process of committing
+2017-03-11 06:48:55,331 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:40+20
+2017-03-11 06:48:55,331 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local768169073_0001_m_000002_0' done.
+2017-03-11 06:48:55,331 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local768169073_0001_m_000002_0
+2017-03-11 06:48:55,331 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local768169073_0001_m_000003_0
+2017-03-11 06:48:55,336 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:48:55,338 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:48:55,339 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:60+20
+2017-03-11 06:48:55,511 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-11 06:48:55,545 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:48:55,545 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:48:55,545 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:48:55,545 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:48:55,546 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:48:55,552 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:48:55,625 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:48:55,632 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:48:55,633 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 06:48:55,636 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 10606810; bufvoid = 104857600
+2017-03-11 06:48:55,639 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 06:55:36,696 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 06:55:39,364 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 06:55:39,372 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 06:55:40,232 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 06:55:40,270 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 06:55:40,471 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:447
+2017-03-11 06:55:41,164 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1197297442_0001
+2017-03-11 06:55:41,900 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 06:55:41,901 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1197297442_0001
+2017-03-11 06:55:41,908 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 06:55:41,938 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:41,960 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 06:55:42,690 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 06:55:42,691 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000000_0
+2017-03-11 06:55:42,840 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:42,904 INFO org.apache.hadoop.mapreduce.Job: Job job_local1197297442_0001 running in uber mode : false
+2017-03-11 06:55:42,906 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 06:55:42,924 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:42,931 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+1
+2017-03-11 06:55:43,638 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:43,638 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:43,638 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:43,638 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:43,638 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:43,667 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:44,372 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:44,372 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:44,373 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 06:55:44,373 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 10606810; bufvoid = 104857600
+2017-03-11 06:55:44,373 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 06:55:45,670 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 06:55:46,599 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 06:55:46,622 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 06:55:46,650 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+1
+2017-03-11 06:55:46,650 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000000_0' done.
+2017-03-11 06:55:46,650 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000000_0
+2017-03-11 06:55:46,650 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000001_0
+2017-03-11 06:55:46,657 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:46,658 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:46,668 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:1+1
+2017-03-11 06:55:46,961 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-11 06:55:46,996 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:46,999 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:47,000 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:47,003 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:47,004 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:47,010 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:47,023 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:47,023 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:47,031 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000001_0 is done. And is in the process of committing
+2017-03-11 06:55:47,038 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:1+1
+2017-03-11 06:55:47,039 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000001_0' done.
+2017-03-11 06:55:47,039 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000001_0
+2017-03-11 06:55:47,040 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000002_0
+2017-03-11 06:55:47,046 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:47,047 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:47,049 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:2+1
+2017-03-11 06:55:47,324 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:47,330 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:47,331 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:47,338 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:47,338 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:47,344 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:47,345 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:47,346 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:47,354 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000002_0 is done. And is in the process of committing
+2017-03-11 06:55:47,356 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:2+1
+2017-03-11 06:55:47,360 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000002_0' done.
+2017-03-11 06:55:47,360 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000002_0
+2017-03-11 06:55:47,360 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000003_0
+2017-03-11 06:55:47,364 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:47,370 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:47,371 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:3+1
+2017-03-11 06:55:47,679 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:47,687 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:47,688 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:47,688 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:47,688 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:47,688 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:47,690 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:47,690 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:47,692 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000003_0 is done. And is in the process of committing
+2017-03-11 06:55:47,699 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:3+1
+2017-03-11 06:55:47,708 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000003_0' done.
+2017-03-11 06:55:47,709 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000003_0
+2017-03-11 06:55:47,709 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000004_0
+2017-03-11 06:55:47,714 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:47,714 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:47,715 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:4+1
+2017-03-11 06:55:48,021 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:48,021 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:48,021 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:48,021 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:48,021 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:48,026 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:48,027 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:48,030 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:48,054 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000004_0 is done. And is in the process of committing
+2017-03-11 06:55:48,057 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:4+1
+2017-03-11 06:55:48,057 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000004_0' done.
+2017-03-11 06:55:48,059 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000004_0
+2017-03-11 06:55:48,060 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000005_0
+2017-03-11 06:55:48,068 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:48,069 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:48,077 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:5+1
+2017-03-11 06:55:48,380 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:48,381 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:48,381 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:48,381 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:48,381 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:48,381 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:48,383 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:48,383 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:48,385 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000005_0 is done. And is in the process of committing
+2017-03-11 06:55:48,396 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:5+1
+2017-03-11 06:55:48,396 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000005_0' done.
+2017-03-11 06:55:48,396 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000005_0
+2017-03-11 06:55:48,396 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000006_0
+2017-03-11 06:55:48,403 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:48,404 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:48,406 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:6+1
+2017-03-11 06:55:48,764 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:48,780 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:48,780 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:48,780 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:48,780 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:48,787 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:48,790 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:48,795 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:48,797 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000006_0 is done. And is in the process of committing
+2017-03-11 06:55:48,801 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:6+1
+2017-03-11 06:55:48,801 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000006_0' done.
+2017-03-11 06:55:48,801 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000006_0
+2017-03-11 06:55:48,801 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000007_0
+2017-03-11 06:55:48,810 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:48,811 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:48,812 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:7+1
+2017-03-11 06:55:49,155 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:49,160 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:49,160 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:49,161 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:49,161 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:49,170 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:49,173 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:49,173 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:49,175 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000007_0 is done. And is in the process of committing
+2017-03-11 06:55:49,192 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:7+1
+2017-03-11 06:55:49,192 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000007_0' done.
+2017-03-11 06:55:49,193 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000007_0
+2017-03-11 06:55:49,195 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000008_0
+2017-03-11 06:55:49,198 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:49,199 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:49,199 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:8+1
+2017-03-11 06:55:49,595 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:49,599 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:49,600 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:49,600 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:49,600 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:49,606 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:49,609 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:49,609 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:49,618 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000008_0 is done. And is in the process of committing
+2017-03-11 06:55:49,621 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:8+1
+2017-03-11 06:55:49,623 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000008_0' done.
+2017-03-11 06:55:49,624 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000008_0
+2017-03-11 06:55:49,630 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000009_0
+2017-03-11 06:55:49,635 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:49,635 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:49,636 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:9+1
+2017-03-11 06:55:50,136 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:50,137 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:50,141 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:50,141 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:50,141 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:50,151 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:50,152 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:50,153 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:50,170 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000009_0 is done. And is in the process of committing
+2017-03-11 06:55:50,193 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:9+1
+2017-03-11 06:55:50,201 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000009_0' done.
+2017-03-11 06:55:50,202 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000009_0
+2017-03-11 06:55:50,202 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000010_0
+2017-03-11 06:55:50,214 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:50,215 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:50,238 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:10+1
+2017-03-11 06:55:50,823 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:50,825 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:50,836 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:50,836 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:50,837 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:50,850 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:50,873 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:50,873 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:50,910 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000010_0 is done. And is in the process of committing
+2017-03-11 06:55:50,912 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:10+1
+2017-03-11 06:55:50,920 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000010_0' done.
+2017-03-11 06:55:50,921 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000010_0
+2017-03-11 06:55:50,927 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000011_0
+2017-03-11 06:55:50,956 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:50,957 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:50,971 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:11+1
+2017-03-11 06:55:51,538 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:51,539 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:51,539 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:51,539 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:51,539 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:51,544 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:51,545 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:51,545 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:51,554 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000011_0 is done. And is in the process of committing
+2017-03-11 06:55:51,569 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:11+1
+2017-03-11 06:55:51,578 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000011_0' done.
+2017-03-11 06:55:51,578 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000011_0
+2017-03-11 06:55:51,578 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000012_0
+2017-03-11 06:55:51,583 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:51,584 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:51,585 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:12+1
+2017-03-11 06:55:52,243 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:52,245 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:52,247 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:52,255 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:52,255 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:52,275 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:52,278 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:52,292 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:52,315 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000012_0 is done. And is in the process of committing
+2017-03-11 06:55:52,330 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:12+1
+2017-03-11 06:55:52,330 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000012_0' done.
+2017-03-11 06:55:52,331 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000012_0
+2017-03-11 06:55:52,331 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000013_0
+2017-03-11 06:55:52,358 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:52,358 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:52,389 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:13+1
+2017-03-11 06:55:53,147 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:53,162 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:53,162 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:53,163 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:53,163 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:53,171 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:53,215 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:53,233 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:53,244 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000013_0 is done. And is in the process of committing
+2017-03-11 06:55:53,319 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:13+1
+2017-03-11 06:55:53,320 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000013_0' done.
+2017-03-11 06:55:53,320 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000013_0
+2017-03-11 06:55:53,320 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000014_0
+2017-03-11 06:55:53,334 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:53,358 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:53,383 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:14+1
+2017-03-11 06:55:54,012 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:54,012 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:54,014 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:54,014 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:54,014 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:54,023 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:54,024 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:54,033 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:54,035 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000014_0 is done. And is in the process of committing
+2017-03-11 06:55:54,059 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:14+1
+2017-03-11 06:55:54,072 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000014_0' done.
+2017-03-11 06:55:54,072 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000014_0
+2017-03-11 06:55:54,072 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000015_0
+2017-03-11 06:55:54,089 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:54,090 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:54,091 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:15+1
+2017-03-11 06:55:54,583 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:54,583 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:54,583 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:54,584 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:54,584 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:54,592 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:54,593 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:54,593 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:54,608 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000015_0 is done. And is in the process of committing
+2017-03-11 06:55:54,610 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:15+1
+2017-03-11 06:55:54,610 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000015_0' done.
+2017-03-11 06:55:54,610 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000015_0
+2017-03-11 06:55:54,613 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000016_0
+2017-03-11 06:55:54,628 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:54,628 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:54,637 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:16+1
+2017-03-11 06:55:55,032 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:55,047 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:55,048 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:55,048 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:55,048 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:55,053 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:55,054 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:55,055 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:55,058 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000016_0 is done. And is in the process of committing
+2017-03-11 06:55:55,060 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:16+1
+2017-03-11 06:55:55,064 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000016_0' done.
+2017-03-11 06:55:55,064 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000016_0
+2017-03-11 06:55:55,064 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000017_0
+2017-03-11 06:55:55,079 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:55,085 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:55,086 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:17+1
+2017-03-11 06:55:55,500 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:55,500 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:55,500 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:55,500 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:55,500 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:55,510 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:55,511 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:55,511 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:55,513 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000017_0 is done. And is in the process of committing
+2017-03-11 06:55:55,518 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:17+1
+2017-03-11 06:55:55,518 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000017_0' done.
+2017-03-11 06:55:55,518 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000017_0
+2017-03-11 06:55:55,518 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000018_0
+2017-03-11 06:55:55,528 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:55,535 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:55,536 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:18+1
+2017-03-11 06:55:56,018 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:56,019 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:56,019 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:56,019 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:56,019 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:56,030 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:56,031 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:56,031 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:56,036 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000018_0 is done. And is in the process of committing
+2017-03-11 06:55:56,057 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:18+1
+2017-03-11 06:55:56,057 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000018_0' done.
+2017-03-11 06:55:56,066 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000018_0
+2017-03-11 06:55:56,066 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000019_0
+2017-03-11 06:55:56,078 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:56,079 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:56,080 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:19+1
+2017-03-11 06:55:56,547 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:56,547 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:56,547 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:56,547 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:56,547 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:56,557 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:56,558 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:56,558 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:56,579 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000019_0 is done. And is in the process of committing
+2017-03-11 06:55:56,581 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:19+1
+2017-03-11 06:55:56,581 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000019_0' done.
+2017-03-11 06:55:56,581 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000019_0
+2017-03-11 06:55:56,582 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000020_0
+2017-03-11 06:55:56,590 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:56,591 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:56,592 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:20+1
+2017-03-11 06:55:56,943 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:56,949 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:56,950 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:56,950 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:56,951 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:56,960 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:56,961 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:56,961 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:56,973 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000020_0 is done. And is in the process of committing
+2017-03-11 06:55:56,974 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:20+1
+2017-03-11 06:55:56,974 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000020_0' done.
+2017-03-11 06:55:56,974 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000020_0
+2017-03-11 06:55:56,974 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000021_0
+2017-03-11 06:55:56,991 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:56,992 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:56,992 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:21+1
+2017-03-11 06:55:57,401 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:57,401 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:57,405 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:57,405 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:57,405 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:57,419 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:57,420 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:57,420 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:57,431 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000021_0 is done. And is in the process of committing
+2017-03-11 06:55:57,432 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:21+1
+2017-03-11 06:55:57,437 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000021_0' done.
+2017-03-11 06:55:57,437 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000021_0
+2017-03-11 06:55:57,437 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000022_0
+2017-03-11 06:55:57,438 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:57,439 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:57,440 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:22+1
+2017-03-11 06:55:57,896 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:57,896 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:57,896 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:57,896 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:57,896 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:57,919 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:57,921 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:57,921 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:57,930 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000022_0 is done. And is in the process of committing
+2017-03-11 06:55:57,931 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:22+1
+2017-03-11 06:55:57,943 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000022_0' done.
+2017-03-11 06:55:57,943 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000022_0
+2017-03-11 06:55:57,943 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000023_0
+2017-03-11 06:55:57,952 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:57,952 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:57,953 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:23+1
+2017-03-11 06:55:58,488 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:58,488 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:58,489 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:58,489 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:58,489 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:58,497 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:58,498 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:58,499 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:58,520 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000023_0 is done. And is in the process of committing
+2017-03-11 06:55:58,522 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:23+1
+2017-03-11 06:55:58,536 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000023_0' done.
+2017-03-11 06:55:58,538 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000023_0
+2017-03-11 06:55:58,538 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000024_0
+2017-03-11 06:55:58,548 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:58,548 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:58,561 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:24+1
+2017-03-11 07:07:29,361 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 07:07:30,506 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 07:07:30,522 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 07:07:30,899 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 07:07:30,908 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 07:07:30,948 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:10
+2017-03-11 07:07:31,232 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local271824515_0001
+2017-03-11 07:07:31,617 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 07:07:31,622 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 07:07:31,622 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local271824515_0001
+2017-03-11 07:07:31,635 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:31,652 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 07:07:31,780 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 07:07:31,781 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000000_0
+2017-03-11 07:07:31,844 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:31,889 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:31,893 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+49
+2017-03-11 07:07:32,023 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:32,024 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:32,024 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:32,024 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:32,024 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:32,040 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:32,360 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:32,360 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:32,360 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:07:32,360 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 10606810; bufvoid = 104857600
+2017-03-11 07:07:32,360 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:07:32,633 INFO org.apache.hadoop.mapreduce.Job: Job job_local271824515_0001 running in uber mode : false
+2017-03-11 07:07:32,634 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 07:07:32,995 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 07:07:33,436 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:07:33,443 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 07:07:33,470 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+49
+2017-03-11 07:07:33,472 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000000_0' done.
+2017-03-11 07:07:33,472 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000000_0
+2017-03-11 07:07:33,473 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000001_0
+2017-03-11 07:07:33,479 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:33,480 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:33,481 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:49+49
+2017-03-11 07:07:33,531 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:33,532 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:33,532 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:33,532 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:33,532 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:33,532 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:33,561 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:33,561 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:33,562 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:07:33,562 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 10606810; bufvoid = 104857600
+2017-03-11 07:07:33,562 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:07:33,638 INFO org.apache.hadoop.mapreduce.Job:  map 10% reduce 0%
+2017-03-11 07:07:34,100 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:07:34,101 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000001_0 is done. And is in the process of committing
+2017-03-11 07:07:34,102 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:49+49
+2017-03-11 07:07:34,103 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000001_0' done.
+2017-03-11 07:07:34,103 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000001_0
+2017-03-11 07:07:34,103 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000002_0
+2017-03-11 07:07:34,103 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:34,104 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:34,105 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:98+49
+2017-03-11 07:07:34,142 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:34,142 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:34,142 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:34,142 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:34,142 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:34,143 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:34,161 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:34,161 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:34,162 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:07:34,162 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 10107666; bufvoid = 104857600
+2017-03-11 07:07:34,162 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:07:34,640 INFO org.apache.hadoop.mapreduce.Job:  map 20% reduce 0%
+2017-03-11 07:07:34,684 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:07:34,686 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000002_0 is done. And is in the process of committing
+2017-03-11 07:07:34,691 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:98+49
+2017-03-11 07:07:34,691 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000002_0' done.
+2017-03-11 07:07:34,691 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000002_0
+2017-03-11 07:07:34,691 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000003_0
+2017-03-11 07:07:34,697 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:34,697 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:34,699 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:147+49
+2017-03-11 07:07:34,752 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:34,753 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:34,753 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:34,753 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:34,753 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:34,754 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:34,756 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:34,756 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:34,759 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000003_0 is done. And is in the process of committing
+2017-03-11 07:07:34,761 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:147+49
+2017-03-11 07:07:34,762 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000003_0' done.
+2017-03-11 07:07:34,762 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000003_0
+2017-03-11 07:07:34,762 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000004_0
+2017-03-11 07:07:34,763 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:34,763 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:34,764 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:196+49
+2017-03-11 07:07:34,821 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:34,821 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:34,821 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:34,821 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:34,821 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:34,822 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:34,841 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:34,841 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:34,842 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:07:34,842 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9858094; bufvoid = 104857600
+2017-03-11 07:07:34,842 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:07:35,322 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:07:35,324 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000004_0 is done. And is in the process of committing
+2017-03-11 07:07:35,325 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:196+49
+2017-03-11 07:07:35,325 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000004_0' done.
+2017-03-11 07:07:35,326 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000004_0
+2017-03-11 07:07:35,326 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000005_0
+2017-03-11 07:07:35,326 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:35,327 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:35,329 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:245+49
+2017-03-11 07:07:35,370 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:35,370 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:35,370 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:35,370 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:35,370 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:35,371 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:35,388 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:35,388 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:35,388 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:07:35,388 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9358950; bufvoid = 104857600
+2017-03-11 07:07:35,388 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:07:35,641 INFO org.apache.hadoop.mapreduce.Job:  map 50% reduce 0%
+2017-03-11 07:07:35,905 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:07:35,908 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000005_0 is done. And is in the process of committing
+2017-03-11 07:07:35,909 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:245+49
+2017-03-11 07:07:35,909 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000005_0' done.
+2017-03-11 07:07:35,909 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000005_0
+2017-03-11 07:07:35,909 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000006_0
+2017-03-11 07:07:35,911 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:35,911 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:35,912 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:294+49
+2017-03-11 07:07:35,964 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:35,964 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:35,964 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:35,964 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:35,964 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:35,966 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:35,991 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:35,991 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:35,991 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:07:35,991 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8111090; bufvoid = 104857600
+2017-03-11 07:07:35,991 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:07:36,453 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:07:36,454 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000006_0 is done. And is in the process of committing
+2017-03-11 07:07:36,456 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:294+49
+2017-03-11 07:07:36,456 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000006_0' done.
+2017-03-11 07:07:36,456 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000006_0
+2017-03-11 07:07:36,456 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000007_0
+2017-03-11 07:07:36,457 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:36,457 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:36,458 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:343+49
+2017-03-11 07:07:36,502 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:36,502 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:36,502 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:36,502 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:36,502 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:36,503 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:36,523 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:36,523 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:36,523 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:07:36,523 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9483736; bufvoid = 104857600
+2017-03-11 07:07:36,523 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:07:36,644 INFO org.apache.hadoop.mapreduce.Job:  map 70% reduce 0%
+2017-03-11 07:07:37,047 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:07:37,050 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000007_0 is done. And is in the process of committing
+2017-03-11 07:07:37,051 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:343+49
+2017-03-11 07:07:37,051 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000007_0' done.
+2017-03-11 07:07:37,051 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000007_0
+2017-03-11 07:07:37,051 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000008_0
+2017-03-11 07:07:37,053 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:37,053 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:37,054 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:392+49
+2017-03-11 07:07:37,105 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:37,105 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:37,105 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:37,105 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:37,105 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:37,105 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:37,106 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:37,107 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:37,109 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000008_0 is done. And is in the process of committing
+2017-03-11 07:07:37,111 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:392+49
+2017-03-11 07:07:37,111 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000008_0' done.
+2017-03-11 07:07:37,111 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000008_0
+2017-03-11 07:07:37,111 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000009_0
+2017-03-11 07:07:37,113 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:37,114 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:37,115 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:441+6
+2017-03-11 07:07:37,174 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:37,178 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:37,179 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:37,179 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:37,179 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:37,185 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:37,186 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:37,186 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:37,194 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000009_0 is done. And is in the process of committing
+2017-03-11 07:07:37,195 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:441+6
+2017-03-11 07:07:37,195 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000009_0' done.
+2017-03-11 07:07:37,196 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000009_0
+2017-03-11 07:07:37,196 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 07:07:37,210 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 07:07:37,210 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_r_000000_0
+2017-03-11 07:07:37,219 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:37,219 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:37,224 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@20af4ace
+2017-03-11 07:07:37,250 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 07:07:37,259 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local271824515_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 07:07:37,362 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 07:07:37,367 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000001_0 decomp: 10856384 len: 390723 to MEMORY
+2017-03-11 07:07:37,408 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10856384 bytes from map-output for attempt_local271824515_0001_m_000001_0
+2017-03-11 07:07:37,408 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10856384, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->10856384
+2017-03-11 07:07:37,423 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000007_0 decomp: 9733310 len: 390694 to MEMORY
+2017-03-11 07:07:37,462 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 9733310 bytes from map-output for attempt_local271824515_0001_m_000007_0
+2017-03-11 07:07:37,462 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 9733310, inMemoryMapOutputs.size() -> 2, commitMemory -> 10856384, usedMemory ->20589694
+2017-03-11 07:07:37,476 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000004_0 decomp: 10107668 len: 390714 to MEMORY
+2017-03-11 07:07:37,521 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10107668 bytes from map-output for attempt_local271824515_0001_m_000004_0
+2017-03-11 07:07:37,521 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10107668, inMemoryMapOutputs.size() -> 3, commitMemory -> 20589694, usedMemory ->30697362
+2017-03-11 07:07:37,522 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000009_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 07:07:37,528 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local271824515_0001_m_000009_0
+2017-03-11 07:07:37,528 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 4, commitMemory -> 30697362, usedMemory ->30697364
+2017-03-11 07:07:37,538 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000005_0 decomp: 9608524 len: 390704 to MEMORY
+2017-03-11 07:07:37,577 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 9608524 bytes from map-output for attempt_local271824515_0001_m_000005_0
+2017-03-11 07:07:37,582 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 9608524, inMemoryMapOutputs.size() -> 5, commitMemory -> 30697364, usedMemory ->40305888
+2017-03-11 07:07:37,584 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000008_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 07:07:37,584 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local271824515_0001_m_000008_0
+2017-03-11 07:07:37,584 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 6, commitMemory -> 40305888, usedMemory ->40305890
+2017-03-11 07:07:37,645 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000002_0 decomp: 10357240 len: 390732 to MEMORY
+2017-03-11 07:07:37,647 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-11 07:07:37,687 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10357240 bytes from map-output for attempt_local271824515_0001_m_000002_0
+2017-03-11 07:07:37,687 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10357240, inMemoryMapOutputs.size() -> 7, commitMemory -> 40305890, usedMemory ->50663130
+2017-03-11 07:07:37,693 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000000_0 decomp: 10856384 len: 390722 to MEMORY
+2017-03-11 07:07:37,743 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10856384 bytes from map-output for attempt_local271824515_0001_m_000000_0
+2017-03-11 07:07:37,744 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10856384, inMemoryMapOutputs.size() -> 8, commitMemory -> 50663130, usedMemory ->61519514
+2017-03-11 07:07:37,745 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000003_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 07:07:37,746 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local271824515_0001_m_000003_0
+2017-03-11 07:07:37,746 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 9, commitMemory -> 61519514, usedMemory ->61519516
+2017-03-11 07:07:37,748 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000006_0 decomp: 8360664 len: 373255 to MEMORY
+2017-03-11 07:07:37,794 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 8360664 bytes from map-output for attempt_local271824515_0001_m_000006_0
+2017-03-11 07:07:37,795 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 8360664, inMemoryMapOutputs.size() -> 10, commitMemory -> 61519516, usedMemory ->69880180
+2017-03-11 07:07:37,795 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 07:07:37,800 INFO org.apache.hadoop.mapred.LocalJobRunner: 10 / 10 copied.
+2017-03-11 07:07:37,800 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 10 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 07:07:37,810 INFO org.apache.hadoop.mapred.Merger: Merging 10 sorted segments
+2017-03-11 07:07:37,813 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 7 segments left of total size: 69880048 bytes
+2017-03-11 07:07:40,634 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 10 segments, 69880180 bytes to disk to satisfy reduce memory limit
+2017-03-11 07:07:40,635 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2722292 bytes from disk
+2017-03-11 07:07:40,635 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 07:07:40,636 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 07:07:40,636 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 69880144 bytes
+2017-03-11 07:07:40,636 INFO org.apache.hadoop.mapred.LocalJobRunner: 10 / 10 copied.
+2017-03-11 07:07:40,643 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 07:07:42,562 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 07:07:42,565 INFO org.apache.hadoop.mapred.LocalJobRunner: 10 / 10 copied.
+2017-03-11 07:07:42,565 INFO org.apache.hadoop.mapred.Task: Task attempt_local271824515_0001_r_000000_0 is allowed to commit now
+2017-03-11 07:07:42,566 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local271824515_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local271824515_0001_r_000000
+2017-03-11 07:07:42,567 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 07:07:42,567 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_r_000000_0' done.
+2017-03-11 07:07:42,567 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_r_000000_0
+2017-03-11 07:07:42,567 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 07:07:42,659 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 07:07:42,660 INFO org.apache.hadoop.mapreduce.Job: Job job_local271824515_0001 completed successfully
+2017-03-11 07:07:42,691 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
 	File System Counters
-		FILE: Number of bytes read=16715038
-		FILE: Number of bytes written=13110192
+		FILE: Number of bytes read=5530668
+		FILE: Number of bytes written=25877978
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=115105
-		Map output bytes=5541084
-		Map output materialized bytes=2405517
-		Input split bytes=120
+		Map input records=7
+		Map output records=873502
+		Map output bytes=68133156
+		Map output materialized bytes=2717586
+		Input split bytes=1220
 		Combine input records=0
 		Combine output records=0
-		Reduce input groups=115105
-		Reduce shuffle bytes=2405517
-		Reduce input records=115105
-		Reduce output records=115105
-		Spilled Records=230210
-		Shuffled Maps =1
+		Reduce input groups=873481
+		Reduce shuffle bytes=2717586
+		Reduce input records=873502
+		Reduce output records=1
+		Spilled Records=1747004
+		Shuffled Maps =10
 		Failed Shuffles=0
-		Merged Map outputs=1
-		GC time elapsed (ms)=113
+		Merged Map outputs=10
+		GC time elapsed (ms)=378
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
-		Total committed heap usage (bytes)=331227136
+		Total committed heap usage (bytes)=2030252032
 	Shuffle Errors
 		BAD_ID=0
 		CONNECTION=0
@@ -1544,114 +5105,447 @@ Caused by: java.lang.NullPointerException
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=2265
 	File Output Format Counters 
-		Bytes Written=5365335
-	similarity.WordSort$DocLineCounter
-		NUM=124787
-2017-03-10 16:04:16,924 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 16:04:17,595 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 16:04:17,605 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 16:04:17,661 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
-2017-03-10 16:04:38,662 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 16:04:39,236 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 16:04:39,243 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 16:04:39,836 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 16:04:39,846 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 16:04:39,896 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 16:04:40,232 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local2107526736_0001
-2017-03-10 16:04:40,744 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 16:04:40,745 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local2107526736_0001
-2017-03-10 16:04:40,753 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 16:04:40,762 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 16:04:40,772 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 16:04:40,922 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 16:04:40,925 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2107526736_0001_m_000000_0
-2017-03-10 16:04:40,998 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 16:04:41,021 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 16:04:41,024 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 16:04:41,121 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 16:04:41,747 INFO org.apache.hadoop.mapreduce.Job: Job job_local2107526736_0001 running in uber mode : false
-2017-03-10 16:04:41,749 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 16:04:44,514 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 16:04:44,515 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 16:04:44,515 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 16:04:44,515 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 5541084; bufvoid = 104857600
-2017-03-10 16:04:44,515 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25753980(103015920); length = 460417/6553600
-2017-03-10 16:04:44,850 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 16:04:45,645 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 16:04:45,648 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2107526736_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 16:04:45,653 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 16:04:45,654 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2107526736_0001_m_000000_0' done.
-2017-03-10 16:04:45,654 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2107526736_0001_m_000000_0
-2017-03-10 16:04:45,654 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 16:04:45,658 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 16:04:45,659 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2107526736_0001_r_000000_0
-2017-03-10 16:04:45,664 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 16:04:45,664 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 16:04:45,666 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@a9b03af
-2017-03-10 16:04:45,680 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 16:04:45,687 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2107526736_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 16:04:45,730 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 16:04:45,730 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2107526736_0001_m_000000_0 decomp: 5771296 len: 2404026 to MEMORY
-2017-03-10 16:04:45,758 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 16:04:45,813 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5771296 bytes from map-output for attempt_local2107526736_0001_m_000000_0
-2017-03-10 16:04:45,813 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5771296, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5771296
-2017-03-10 16:04:45,817 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 16:04:45,818 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 16:04:45,818 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 16:04:45,825 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 16:04:45,826 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
-2017-03-10 16:04:46,671 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5771296 bytes to disk to satisfy reduce memory limit
-2017-03-10 16:04:46,672 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2404034 bytes from disk
-2017-03-10 16:04:46,672 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 16:04:46,672 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 16:04:46,673 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
-2017-03-10 16:04:46,674 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 16:04:46,680 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 16:04:47,525 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2107526736_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 16:04:47,532 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 16:04:47,532 INFO org.apache.hadoop.mapred.Task: Task attempt_local2107526736_0001_r_000000_0 is allowed to commit now
-2017-03-10 16:04:47,533 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2107526736_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local2107526736_0001_r_000000
-2017-03-10 16:04:47,533 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 16:04:47,534 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2107526736_0001_r_000000_0' done.
-2017-03-10 16:04:47,534 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2107526736_0001_r_000000_0
-2017-03-10 16:04:47,534 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 16:04:47,766 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 16:04:47,767 INFO org.apache.hadoop.mapreduce.Job: Job job_local2107526736_0001 completed successfully
-2017-03-10 16:04:47,785 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
+		Bytes Written=20
+	similarity.NaiveApproach$CompCounter
+		NUM=21
+2017-03-11 07:08:38,936 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 07:08:39,968 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 07:08:39,985 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 07:08:40,408 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 07:08:40,430 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 07:08:40,553 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:7
+2017-03-11 07:08:40,788 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local32959239_0001
+2017-03-11 07:08:41,182 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 07:08:41,184 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local32959239_0001
+2017-03-11 07:08:41,183 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 07:08:41,191 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:08:41,202 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 07:08:41,313 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 07:08:41,314 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local32959239_0001_m_000000_0
+2017-03-11 07:08:41,372 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:08:41,393 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:08:41,406 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:378+69
+2017-03-11 07:08:41,487 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:08:41,487 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:08:41,487 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:08:41,487 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:08:41,487 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:08:41,490 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:08:41,674 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:08:41,674 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:08:41,674 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:08:41,674 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9483736; bufvoid = 104857600
+2017-03-11 07:08:41,674 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:08:42,101 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 07:08:42,191 INFO org.apache.hadoop.mapreduce.Job: Job job_local32959239_0001 running in uber mode : false
+2017-03-11 07:08:42,194 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 07:08:42,440 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:08:42,443 INFO org.apache.hadoop.mapred.Task: Task:attempt_local32959239_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 07:08:42,455 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:378+69
+2017-03-11 07:08:42,455 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local32959239_0001_m_000000_0' done.
+2017-03-11 07:08:42,455 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local32959239_0001_m_000000_0
+2017-03-11 07:08:42,455 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local32959239_0001_m_000001_0
+2017-03-11 07:08:42,459 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:08:42,460 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:08:42,461 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+63
+2017-03-11 07:08:42,502 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:08:42,502 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:08:42,502 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:08:42,502 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:08:42,502 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:08:42,503 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:08:42,531 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:08:42,531 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:08:42,532 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:08:42,532 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 10606810; bufvoid = 104857600
+2017-03-11 07:08:42,532 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:08:43,039 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:08:43,040 INFO org.apache.hadoop.mapred.Task: Task:attempt_local32959239_0001_m_000001_0 is done. And is in the process of committing
+2017-03-11 07:08:43,042 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+63
+2017-03-11 07:08:43,042 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local32959239_0001_m_000001_0' done.
+2017-03-11 07:08:43,043 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local32959239_0001_m_000001_0
+2017-03-11 07:08:43,043 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local32959239_0001_m_000002_0
+2017-03-11 07:08:43,043 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:08:43,044 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:08:43,044 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:63+63
+2017-03-11 07:08:43,081 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:08:43,081 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:08:43,081 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:08:43,081 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:08:43,081 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:08:43,082 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:08:43,100 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:08:43,100 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:08:43,100 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:08:43,100 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 10606810; bufvoid = 104857600
+2017-03-11 07:08:43,100 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:08:43,199 INFO org.apache.hadoop.mapreduce.Job:  map 29% reduce 0%
+2017-03-11 07:08:43,591 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:08:43,593 INFO org.apache.hadoop.mapred.Task: Task:attempt_local32959239_0001_m_000002_0 is done. And is in the process of committing
+2017-03-11 07:08:43,595 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:63+63
+2017-03-11 07:08:43,595 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local32959239_0001_m_000002_0' done.
+2017-03-11 07:08:43,595 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local32959239_0001_m_000002_0
+2017-03-11 07:08:43,596 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local32959239_0001_m_000003_0
+2017-03-11 07:08:43,602 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:08:43,602 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:08:43,603 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:126+63
+2017-03-11 07:08:43,651 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:08:43,652 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:08:43,652 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:08:43,652 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:08:43,652 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:08:43,653 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:08:43,669 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:08:43,670 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:08:43,670 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:08:43,670 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 10107666; bufvoid = 104857600
+2017-03-11 07:08:43,670 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:08:44,147 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:08:44,148 INFO org.apache.hadoop.mapred.Task: Task:attempt_local32959239_0001_m_000003_0 is done. And is in the process of committing
+2017-03-11 07:08:44,150 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:126+63
+2017-03-11 07:08:44,150 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local32959239_0001_m_000003_0' done.
+2017-03-11 07:08:44,150 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local32959239_0001_m_000003_0
+2017-03-11 07:08:44,150 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local32959239_0001_m_000004_0
+2017-03-11 07:08:44,151 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:08:44,151 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:08:44,152 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:189+63
+2017-03-11 07:08:44,195 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:08:44,195 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:08:44,195 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:08:44,195 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:08:44,195 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:08:44,196 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:08:44,201 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-11 07:08:44,218 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:08:44,218 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:08:44,218 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:08:44,218 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9858094; bufvoid = 104857600
+2017-03-11 07:08:44,218 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:08:44,698 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:08:44,700 INFO org.apache.hadoop.mapred.Task: Task:attempt_local32959239_0001_m_000004_0 is done. And is in the process of committing
+2017-03-11 07:08:44,701 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:189+63
+2017-03-11 07:08:44,701 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local32959239_0001_m_000004_0' done.
+2017-03-11 07:08:44,701 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local32959239_0001_m_000004_0
+2017-03-11 07:08:44,701 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local32959239_0001_m_000005_0
+2017-03-11 07:08:44,703 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:08:44,703 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:08:44,705 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:252+63
+2017-03-11 07:08:44,747 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:08:44,747 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:08:44,747 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:08:44,747 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:08:44,747 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:08:44,747 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:08:44,770 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:08:44,772 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:08:44,772 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:08:44,772 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9358950; bufvoid = 104857600
+2017-03-11 07:08:44,772 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:08:45,202 INFO org.apache.hadoop.mapreduce.Job:  map 71% reduce 0%
+2017-03-11 07:08:45,253 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:08:45,255 INFO org.apache.hadoop.mapred.Task: Task:attempt_local32959239_0001_m_000005_0 is done. And is in the process of committing
+2017-03-11 07:08:45,256 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:252+63
+2017-03-11 07:08:45,256 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local32959239_0001_m_000005_0' done.
+2017-03-11 07:08:45,256 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local32959239_0001_m_000005_0
+2017-03-11 07:08:45,256 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local32959239_0001_m_000006_0
+2017-03-11 07:08:45,257 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:08:45,257 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:08:45,261 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:315+63
+2017-03-11 07:08:45,308 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:08:45,316 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:08:45,317 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:08:45,317 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:08:45,317 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:08:45,319 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:08:45,336 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:08:45,336 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:08:45,336 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:08:45,336 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8111090; bufvoid = 104857600
+2017-03-11 07:08:45,336 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:08:45,798 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:08:45,799 INFO org.apache.hadoop.mapred.Task: Task:attempt_local32959239_0001_m_000006_0 is done. And is in the process of committing
+2017-03-11 07:08:45,800 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:315+63
+2017-03-11 07:08:45,800 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local32959239_0001_m_000006_0' done.
+2017-03-11 07:08:45,801 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local32959239_0001_m_000006_0
+2017-03-11 07:08:45,801 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 07:08:45,803 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 07:08:45,803 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local32959239_0001_r_000000_0
+2017-03-11 07:08:45,808 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:08:45,808 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:08:45,810 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5a5fbef7
+2017-03-11 07:08:45,819 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 07:08:45,825 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local32959239_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 07:08:45,848 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 07:08:45,848 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local32959239_0001_m_000000_0 decomp: 9733310 len: 390694 to MEMORY
+2017-03-11 07:08:45,868 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 9733310 bytes from map-output for attempt_local32959239_0001_m_000000_0
+2017-03-11 07:08:45,868 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 9733310, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->9733310
+2017-03-11 07:08:45,877 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local32959239_0001_m_000003_0 decomp: 10357240 len: 390732 to MEMORY
+2017-03-11 07:08:45,900 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10357240 bytes from map-output for attempt_local32959239_0001_m_000003_0
+2017-03-11 07:08:45,900 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10357240, inMemoryMapOutputs.size() -> 2, commitMemory -> 9733310, usedMemory ->20090550
+2017-03-11 07:08:45,912 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local32959239_0001_m_000002_0 decomp: 10856384 len: 390723 to MEMORY
+2017-03-11 07:08:45,940 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10856384 bytes from map-output for attempt_local32959239_0001_m_000002_0
+2017-03-11 07:08:45,941 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10856384, inMemoryMapOutputs.size() -> 3, commitMemory -> 20090550, usedMemory ->30946934
+2017-03-11 07:08:45,952 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local32959239_0001_m_000005_0 decomp: 9608524 len: 390704 to MEMORY
+2017-03-11 07:08:45,970 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 9608524 bytes from map-output for attempt_local32959239_0001_m_000005_0
+2017-03-11 07:08:45,970 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 9608524, inMemoryMapOutputs.size() -> 4, commitMemory -> 30946934, usedMemory ->40555458
+2017-03-11 07:08:46,035 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local32959239_0001_m_000001_0 decomp: 10856384 len: 390722 to MEMORY
+2017-03-11 07:08:46,057 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10856384 bytes from map-output for attempt_local32959239_0001_m_000001_0
+2017-03-11 07:08:46,057 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10856384, inMemoryMapOutputs.size() -> 5, commitMemory -> 40555458, usedMemory ->51411842
+2017-03-11 07:08:46,060 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local32959239_0001_m_000004_0 decomp: 10107668 len: 390714 to MEMORY
+2017-03-11 07:08:46,080 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10107668 bytes from map-output for attempt_local32959239_0001_m_000004_0
+2017-03-11 07:08:46,081 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10107668, inMemoryMapOutputs.size() -> 6, commitMemory -> 51411842, usedMemory ->61519510
+2017-03-11 07:08:46,085 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local32959239_0001_m_000006_0 decomp: 8360664 len: 373255 to MEMORY
+2017-03-11 07:08:46,105 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 8360664 bytes from map-output for attempt_local32959239_0001_m_000006_0
+2017-03-11 07:08:46,105 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 8360664, inMemoryMapOutputs.size() -> 7, commitMemory -> 61519510, usedMemory ->69880174
+2017-03-11 07:08:46,105 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 07:08:46,106 INFO org.apache.hadoop.mapred.LocalJobRunner: 7 / 7 copied.
+2017-03-11 07:08:46,107 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 7 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 07:08:46,116 INFO org.apache.hadoop.mapred.Merger: Merging 7 sorted segments
+2017-03-11 07:08:46,116 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 7 segments left of total size: 69880048 bytes
+2017-03-11 07:08:46,202 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-11 07:16:05,536 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 07:16:06,648 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 07:16:06,649 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 07:16:06,696 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
+2017-03-11 07:16:44,548 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 07:16:45,572 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 07:16:45,573 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 07:16:45,909 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 07:16:45,916 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 07:16:45,964 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:100
+2017-03-11 07:16:46,184 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1601005211_0001
+2017-03-11 07:16:46,588 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 07:16:46,590 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1601005211_0001
+2017-03-11 07:16:46,592 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 07:16:46,598 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:16:46,607 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 07:16:46,834 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 07:16:46,835 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1601005211_0001_m_000000_0
+2017-03-11 07:16:46,893 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:16:46,923 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:16:46,927 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline:5270463+53272
+2017-03-11 07:16:47,103 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:16:47,104 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:16:47,104 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:16:47,104 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:16:47,104 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:16:47,119 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:16:47,595 INFO org.apache.hadoop.mapreduce.Job: Job job_local1601005211_0001 running in uber mode : false
+2017-03-11 07:16:47,596 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 07:16:47,689 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:16:47,689 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 68862070; bufvoid = 104857600
+2017-03-11 07:16:47,689 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22458400(89833600); length = 3755997/6553600
+2017-03-11 07:16:47,689 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 72632214 kvi 18158048(72632192)
+2017-03-11 07:17:11,947 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 07:17:12,980 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 07:17:12,982 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 07:17:13,031 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
+2017-03-11 07:17:29,666 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 07:17:30,685 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 07:17:30,693 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 07:17:31,123 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 07:17:31,129 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 07:17:31,905 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:126756
+2017-03-11 07:17:32,074 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local514307539_0001
+2017-03-11 07:17:32,462 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 07:17:32,463 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local514307539_0001
+2017-03-11 07:17:32,465 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 07:17:32,478 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:17:32,484 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 07:17:33,568 INFO org.apache.hadoop.mapreduce.Job: Job job_local514307539_0001 running in uber mode : false
+2017-03-11 07:17:33,573 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 07:17:55,648 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local514307539_0001
+java.lang.OutOfMemoryError: Java heap space
+	at java.util.Hashtable$Entry.clone(Hashtable.java:1052)
+	at java.util.Hashtable$Entry.clone(Hashtable.java:1052)
+	at java.util.Hashtable.clone(Hashtable.java:613)
+	at org.apache.hadoop.conf.Configuration.<init>(Configuration.java:707)
+	at org.apache.hadoop.mapred.JobConf.<init>(JobConf.java:447)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.<init>(LocalJobRunner.java:217)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.getMapTaskRunnables(LocalJobRunner.java:272)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:517)
+2017-03-11 07:17:55,650 INFO org.apache.hadoop.mapreduce.Job: Job job_local514307539_0001 failed with state FAILED due to: NA
+2017-03-11 07:17:55,725 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
+2017-03-11 07:29:33,617 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 07:29:34,538 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.max.split.size is deprecated. Instead, use mapreduce.input.fileinputformat.split.maxsize
+2017-03-11 07:29:34,669 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 07:29:34,671 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 07:29:34,998 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 07:29:35,009 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 07:29:35,043 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 07:29:35,279 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1380958930_0001
+2017-03-11 07:29:35,673 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 07:29:35,684 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 07:29:35,685 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1380958930_0001
+2017-03-11 07:29:35,690 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:29:35,695 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 07:29:35,790 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 07:29:35,792 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1380958930_0001_m_000000_0
+2017-03-11 07:29:35,851 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:29:35,883 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:29:35,887 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline:0+5323735
+2017-03-11 07:29:35,971 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:29:35,971 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:29:35,971 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:29:35,971 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:29:35,971 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:29:35,975 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:29:36,283 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:29:36,283 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 68406970; bufvoid = 104857600
+2017-03-11 07:29:36,283 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22344608(89378432); length = 3869789/6553600
+2017-03-11 07:29:36,283 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 72308634 kvi 18077152(72308608)
+2017-03-11 07:29:36,693 INFO org.apache.hadoop.mapreduce.Job: Job job_local1380958930_0001 running in uber mode : false
+2017-03-11 07:29:36,694 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 07:30:00,879 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 07:30:01,767 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.max.split.size is deprecated. Instead, use mapreduce.input.fileinputformat.split.maxsize
+2017-03-11 07:30:01,965 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 07:30:01,966 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 07:30:02,539 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 07:30:02,547 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 07:30:03,396 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:126756
+2017-03-11 07:30:03,577 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local651046040_0001
+2017-03-11 07:30:03,964 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 07:30:03,965 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local651046040_0001
+2017-03-11 07:30:03,968 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 07:30:03,973 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:30:03,979 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 07:30:04,973 INFO org.apache.hadoop.mapreduce.Job: Job job_local651046040_0001 running in uber mode : false
+2017-03-11 07:30:04,974 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 07:48:06,292 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 07:48:10,718 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 07:48:10,727 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 07:48:12,423 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 07:48:12,551 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 07:48:13,113 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 07:48:14,255 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local931272447_0001
+2017-03-11 07:48:15,831 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 07:48:15,832 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local931272447_0001
+2017-03-11 07:48:15,853 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 07:48:15,910 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:48:15,928 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 07:48:16,509 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 07:48:16,510 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local931272447_0001_m_000000_0
+2017-03-11 07:48:16,747 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:48:16,843 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:48:16,855 INFO org.apache.hadoop.mapreduce.Job: Job job_local931272447_0001 running in uber mode : false
+2017-03-11 07:48:16,860 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 07:48:16,875 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447
+2017-03-11 07:48:17,752 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:48:17,752 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:48:17,752 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:48:17,752 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:48:17,761 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:48:17,842 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:48:19,674 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:48:19,675 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:48:19,675 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:48:19,675 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 68133156; bufvoid = 104857600
+2017-03-11 07:48:19,675 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22720392(90881568); length = 3494005/6553600
+2017-03-11 07:48:22,914 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:23,893 INFO org.apache.hadoop.mapreduce.Job:  map 67% reduce 0%
+2017-03-11 07:48:25,919 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:28,920 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:31,922 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:34,923 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:37,928 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:40,933 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:43,934 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:46,936 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:47,095 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 07:48:49,941 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:52,942 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:49:03,704 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:49:03,730 INFO org.apache.hadoop.mapred.Task: Task:attempt_local931272447_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 07:49:03,742 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447
+2017-03-11 07:49:03,742 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local931272447_0001_m_000000_0' done.
+2017-03-11 07:49:03,742 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local931272447_0001_m_000000_0
+2017-03-11 07:49:03,749 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 07:49:03,765 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 07:49:03,766 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local931272447_0001_r_000000_0
+2017-03-11 07:49:03,849 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:49:03,850 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:49:03,885 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@8a93430
+2017-03-11 07:49:04,010 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-11 07:49:04,052 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 07:49:04,101 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local931272447_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 07:49:04,675 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 07:49:04,688 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local931272447_0001_m_000000_0 decomp: 69880162 len: 2722285 to MEMORY
+2017-03-11 07:49:05,714 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 69880162 bytes from map-output for attempt_local931272447_0001_m_000000_0
+2017-03-11 07:49:05,714 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 69880162, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->69880162
+2017-03-11 07:49:05,733 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 07:49:05,734 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 07:49:05,736 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 07:49:05,740 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 07:49:05,763 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 69880144 bytes
+2017-03-11 07:49:09,855 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-03-11 07:49:10,019 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 41%
+2017-03-11 07:49:12,856 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-03-11 07:49:13,022 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 46%
+2017-03-11 07:49:15,858 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-03-11 07:49:16,027 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 51%
+2017-03-11 07:49:18,861 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-03-11 07:49:19,030 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 57%
+2017-03-11 07:49:21,862 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-03-11 07:49:22,033 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 62%
+2017-03-11 07:49:24,512 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 69880162 bytes to disk to satisfy reduce memory limit
+2017-03-11 07:49:24,513 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2722293 bytes from disk
+2017-03-11 07:49:24,514 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 07:49:24,514 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 07:49:24,548 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 69880144 bytes
+2017-03-11 07:49:24,548 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-03-11 07:49:24,634 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 07:49:24,871 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 07:49:25,035 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 67%
+2017-03-11 07:49:27,873 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 07:49:28,040 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 70%
+2017-03-11 07:49:30,875 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 07:49:31,046 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 77%
+2017-03-11 07:49:33,881 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 07:49:34,050 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 85%
+2017-03-11 07:49:36,883 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 07:49:37,054 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 92%
+2017-03-11 07:49:39,924 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 07:49:40,057 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 98%
+2017-03-11 07:49:40,708 INFO org.apache.hadoop.mapred.Task: Task:attempt_local931272447_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 07:49:40,710 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 07:49:40,710 INFO org.apache.hadoop.mapred.Task: Task attempt_local931272447_0001_r_000000_0 is allowed to commit now
+2017-03-11 07:49:40,717 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local931272447_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local931272447_0001_r_000000
+2017-03-11 07:49:40,719 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 07:49:40,720 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local931272447_0001_r_000000_0' done.
+2017-03-11 07:49:40,722 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local931272447_0001_r_000000_0
+2017-03-11 07:49:40,723 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 07:49:41,058 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 07:49:41,058 INFO org.apache.hadoop.mapreduce.Job: Job job_local931272447_0001 completed successfully
+2017-03-11 07:49:41,100 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
 	File System Counters
-		FILE: Number of bytes read=16712056
-		FILE: Number of bytes written=13108535
+		FILE: Number of bytes read=5445872
+		FILE: Number of bytes written=8695995
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=115105
-		Map output bytes=5541084
-		Map output materialized bytes=2404026
-		Input split bytes=120
+		Map input records=7
+		Map output records=873502
+		Map output bytes=68133156
+		Map output materialized bytes=2722285
+		Input split bytes=122
 		Combine input records=0
 		Combine output records=0
-		Reduce input groups=115105
-		Reduce shuffle bytes=2404026
-		Reduce input records=115105
-		Reduce output records=115105
-		Spilled Records=230210
+		Reduce input groups=873481
+		Reduce shuffle bytes=2722285
+		Reduce input records=873502
+		Reduce output records=1
+		Spilled Records=1747004
 		Shuffled Maps =1
 		Failed Shuffles=0
 		Merged Map outputs=1
-		GC time elapsed (ms)=142
+		GC time elapsed (ms)=388
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
-		Total committed heap usage (bytes)=331227136
+		Total committed heap usage (bytes)=378413056
 	Shuffle Errors
 		BAD_ID=0
 		CONNECTION=0
@@ -1660,8 +5554,8 @@ Caused by: java.lang.NullPointerException
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=447
 	File Output Format Counters 
-		Bytes Written=5365335
-	similarity.WordSort$DocLineCounter
-		NUM=124787
+		Bytes Written=20
+	similarity.NaiveApproach$CompCounter
+		NUM=21
diff --git a/hadoop.log.2017-03-10 b/hadoop.log.2017-03-10
new file mode 100644
index 0000000000000000000000000000000000000000..72b1af430bc6984fad7aa45962cff662b4f9b908
--- /dev/null
+++ b/hadoop.log.2017-03-10
@@ -0,0 +1,1667 @@
+2017-03-10 12:58:10,580 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 12:58:11,075 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 12:58:11,087 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 12:58:11,561 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 12:58:11,594 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 12:58:11,707 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 12:58:11,996 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1458741767_0001
+2017-03-10 12:58:12,393 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 12:58:12,394 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1458741767_0001
+2017-03-10 12:58:12,399 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 12:58:12,419 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 12:58:12,422 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 12:58:12,540 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 12:58:12,543 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1458741767_0001_m_000000_0
+2017-03-10 12:58:12,607 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 12:58:12,632 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 12:58:12,640 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 12:58:12,811 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 12:58:12,823 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 12:58:12,824 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 12:58:12,824 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 12:58:12,824 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 12:58:12,832 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 12:58:13,397 INFO org.apache.hadoop.mapreduce.Job: Job job_local1458741767_0001 running in uber mode : false
+2017-03-10 12:58:13,399 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 12:58:14,983 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 12:58:14,985 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 12:58:14,985 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 12:58:14,985 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
+2017-03-10 12:58:14,985 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
+2017-03-10 12:58:16,014 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 12:58:16,798 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 12:58:16,800 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1458741767_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 12:58:16,806 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 12:58:16,806 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1458741767_0001_m_000000_0' done.
+2017-03-10 12:58:16,806 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1458741767_0001_m_000000_0
+2017-03-10 12:58:16,806 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 12:58:16,814 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 12:58:16,814 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1458741767_0001_r_000000_0
+2017-03-10 12:58:16,819 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 12:58:16,819 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 12:58:16,823 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
+2017-03-10 12:58:16,833 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 12:58:16,839 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1458741767_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 12:58:16,862 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 12:58:16,862 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1458741767_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
+2017-03-10 12:58:16,869 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1458741767_0001_m_000000_0
+2017-03-10 12:58:16,869 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
+2017-03-10 12:58:16,870 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 12:58:16,871 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 12:58:16,871 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 12:58:16,876 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 12:58:16,876 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 12:58:17,106 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
+2017-03-10 12:58:17,107 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
+2017-03-10 12:58:17,107 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 12:58:17,107 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 12:58:17,108 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 12:58:17,108 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 12:58:17,113 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 12:58:17,416 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 12:58:17,746 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1458741767_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 12:58:17,747 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 12:58:17,751 INFO org.apache.hadoop.mapred.Task: Task attempt_local1458741767_0001_r_000000_0 is allowed to commit now
+2017-03-10 12:58:17,752 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1458741767_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1458741767_0001_r_000000
+2017-03-10 12:58:17,756 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 12:58:17,756 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1458741767_0001_r_000000_0' done.
+2017-03-10 12:58:17,757 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1458741767_0001_r_000000_0
+2017-03-10 12:58:17,758 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 12:58:18,417 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 12:58:18,418 INFO org.apache.hadoop.mapreduce.Job: Job job_local1458741767_0001 completed successfully
+2017-03-10 12:58:18,427 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+	File System Counters
+		FILE: Number of bytes read=11514350
+		FILE: Number of bytes written=1395729
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023494
+		Map output bytes=8925696
+		Map output materialized bytes=167092
+		Input split bytes=120
+		Combine input records=1023494
+		Combine output records=34513
+		Reduce input groups=34513
+		Reduce shuffle bytes=167092
+		Reduce input records=34513
+		Reduce output records=34513
+		Spilled Records=69026
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=109
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=363879
+2017-03-10 14:05:48,287 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:05:48,833 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:05:48,841 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:05:49,279 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:05:49,288 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:05:49,331 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:05:49,610 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1590990832_0001
+2017-03-10 14:05:50,040 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:05:50,042 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1590990832_0001
+2017-03-10 14:05:50,046 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:05:50,059 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:05:50,067 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:05:50,190 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:05:50,192 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1590990832_0001_m_000000_0
+2017-03-10 14:05:50,229 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:05:50,244 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:05:50,247 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:05:50,344 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:05:51,047 INFO org.apache.hadoop.mapreduce.Job: Job job_local1590990832_0001 running in uber mode : false
+2017-03-10 14:05:51,050 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
+2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
+2017-03-10 14:05:53,471 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:05:54,161 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:05:54,164 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1590990832_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:05:54,170 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:05:54,170 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1590990832_0001_m_000000_0' done.
+2017-03-10 14:05:54,170 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1590990832_0001_m_000000_0
+2017-03-10 14:05:54,170 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:05:54,178 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:05:54,178 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1590990832_0001_r_000000_0
+2017-03-10 14:05:54,183 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:05:54,183 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:05:54,185 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
+2017-03-10 14:05:54,198 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:05:54,206 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1590990832_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:05:54,277 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:05:54,277 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1590990832_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
+2017-03-10 14:05:54,297 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1590990832_0001_m_000000_0
+2017-03-10 14:05:54,298 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
+2017-03-10 14:05:54,302 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:05:54,305 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:05:54,305 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:05:54,315 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:05:54,319 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:05:54,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:05:54,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
+2017-03-10 14:05:54,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:05:54,703 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:05:54,704 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:05:54,704 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:05:54,714 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:05:55,077 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:05:55,853 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1590990832_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:05:55,858 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:05:55,859 INFO org.apache.hadoop.mapred.Task: Task attempt_local1590990832_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:05:55,859 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1590990832_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1590990832_0001_r_000000
+2017-03-10 14:05:55,861 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:05:55,861 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1590990832_0001_r_000000_0' done.
+2017-03-10 14:05:55,861 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1590990832_0001_r_000000_0
+2017-03-10 14:05:55,861 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:05:56,079 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:05:56,080 INFO org.apache.hadoop.mapreduce.Job: Job job_local1590990832_0001 completed successfully
+2017-03-10 14:05:56,090 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+	File System Counters
+		FILE: Number of bytes read=11514350
+		FILE: Number of bytes written=1395729
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023494
+		Map output bytes=8925696
+		Map output materialized bytes=167092
+		Input split bytes=120
+		Combine input records=1023494
+		Combine output records=34513
+		Reduce input groups=34513
+		Reduce shuffle bytes=167092
+		Reduce input records=34513
+		Reduce output records=34513
+		Spilled Records=69026
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=80
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=363879
+2017-03-10 14:07:44,622 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:07:45,122 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:07:45,129 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:07:45,628 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:07:45,645 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:07:45,678 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:07:45,909 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1520504035_0001
+2017-03-10 14:07:46,336 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:07:46,338 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1520504035_0001
+2017-03-10 14:07:46,337 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:07:46,344 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:07:46,346 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:07:46,457 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:07:46,460 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1520504035_0001_m_000000_0
+2017-03-10 14:07:46,523 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:07:46,542 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:07:46,545 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:07:46,634 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:07:46,634 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:07:46,634 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:07:46,634 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:07:46,635 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:07:46,639 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:07:47,343 INFO org.apache.hadoop.mapreduce.Job: Job job_local1520504035_0001 running in uber mode : false
+2017-03-10 14:07:47,344 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
+2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
+2017-03-10 14:07:49,778 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:07:50,507 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:07:50,510 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1520504035_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:07:50,516 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:07:50,516 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1520504035_0001_m_000000_0' done.
+2017-03-10 14:07:50,516 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1520504035_0001_m_000000_0
+2017-03-10 14:07:50,516 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:07:50,523 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:07:50,523 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1520504035_0001_r_000000_0
+2017-03-10 14:07:50,529 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:07:50,529 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:07:50,531 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
+2017-03-10 14:07:50,541 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:07:50,548 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1520504035_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:07:50,573 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:07:50,574 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1520504035_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
+2017-03-10 14:07:50,580 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1520504035_0001_m_000000_0
+2017-03-10 14:07:50,581 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
+2017-03-10 14:07:50,582 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:07:50,582 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:07:50,582 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:07:50,587 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:07:50,588 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:07:50,811 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:07:50,811 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
+2017-03-10 14:07:50,812 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:07:50,812 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:07:50,813 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:07:50,813 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:07:50,818 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:07:51,355 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:07:51,511 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1520504035_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:07:51,512 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:07:51,515 INFO org.apache.hadoop.mapred.Task: Task attempt_local1520504035_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:07:51,516 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1520504035_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1520504035_0001_r_000000
+2017-03-10 14:07:51,516 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:07:51,518 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1520504035_0001_r_000000_0' done.
+2017-03-10 14:07:51,518 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1520504035_0001_r_000000_0
+2017-03-10 14:07:51,518 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:07:52,356 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:07:52,356 INFO org.apache.hadoop.mapreduce.Job: Job job_local1520504035_0001 completed successfully
+2017-03-10 14:07:52,367 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+	File System Counters
+		FILE: Number of bytes read=11514350
+		FILE: Number of bytes written=1397073
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023494
+		Map output bytes=8925696
+		Map output materialized bytes=167092
+		Input split bytes=120
+		Combine input records=1023494
+		Combine output records=34513
+		Reduce input groups=34513
+		Reduce shuffle bytes=167092
+		Reduce input records=34513
+		Reduce output records=34513
+		Spilled Records=69026
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=86
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=363879
+2017-03-10 14:08:46,208 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:08:46,725 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:08:46,726 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:08:46,791 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
+2017-03-10 14:09:00,496 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:09:00,991 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:09:00,992 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:09:01,486 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:09:01,504 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:09:01,622 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:09:01,930 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1768387477_0001
+2017-03-10 14:09:02,340 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:09:02,341 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1768387477_0001
+2017-03-10 14:09:02,345 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:09:02,348 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:09:02,366 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:09:02,467 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:09:02,468 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1768387477_0001_m_000000_0
+2017-03-10 14:09:02,532 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:09:02,561 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:09:02,564 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:09:02,644 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:09:02,645 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:09:02,645 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:09:02,645 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:09:02,645 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:09:02,652 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:09:03,343 INFO org.apache.hadoop.mapreduce.Job: Job job_local1768387477_0001 running in uber mode : false
+2017-03-10 14:09:03,344 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:09:04,790 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:09:04,792 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:09:04,792 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:09:04,792 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
+2017-03-10 14:09:04,792 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
+2017-03-10 14:09:05,819 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:09:06,544 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:09:06,546 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1768387477_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:09:06,552 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:09:06,552 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1768387477_0001_m_000000_0' done.
+2017-03-10 14:09:06,552 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1768387477_0001_m_000000_0
+2017-03-10 14:09:06,552 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:09:06,560 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:09:06,561 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1768387477_0001_r_000000_0
+2017-03-10 14:09:06,569 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:09:06,570 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:09:06,572 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
+2017-03-10 14:09:06,582 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:09:06,588 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1768387477_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:09:06,616 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:09:06,617 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1768387477_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
+2017-03-10 14:09:06,626 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1768387477_0001_m_000000_0
+2017-03-10 14:09:06,627 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
+2017-03-10 14:09:06,628 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:09:06,629 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:09:06,629 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:09:06,636 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:09:06,636 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:09:06,896 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:09:06,896 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
+2017-03-10 14:09:06,897 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:09:06,897 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:09:06,898 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:09:06,898 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:09:06,903 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:09:07,352 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:09:07,776 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1768387477_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:09:07,792 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:09:07,792 INFO org.apache.hadoop.mapred.Task: Task attempt_local1768387477_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:09:07,792 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1768387477_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1768387477_0001_r_000000
+2017-03-10 14:09:07,793 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:09:07,793 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1768387477_0001_r_000000_0' done.
+2017-03-10 14:09:07,793 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1768387477_0001_r_000000_0
+2017-03-10 14:09:07,793 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:09:08,353 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:09:08,354 INFO org.apache.hadoop.mapreduce.Job: Job job_local1768387477_0001 completed successfully
+2017-03-10 14:09:08,363 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+	File System Counters
+		FILE: Number of bytes read=11514350
+		FILE: Number of bytes written=1397073
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023494
+		Map output bytes=8925696
+		Map output materialized bytes=167092
+		Input split bytes=120
+		Combine input records=1023494
+		Combine output records=34513
+		Reduce input groups=34513
+		Reduce shuffle bytes=167092
+		Reduce input records=34513
+		Reduce output records=34513
+		Spilled Records=69026
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=76
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=363879
+2017-03-10 14:10:49,958 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:10:50,420 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:10:50,423 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:10:50,893 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:10:50,902 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:10:50,935 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:10:51,165 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local321623198_0001
+2017-03-10 14:10:51,576 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:10:51,577 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local321623198_0001
+2017-03-10 14:10:51,586 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:10:51,596 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:10:51,607 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:10:51,726 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:10:51,728 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local321623198_0001_m_000000_0
+2017-03-10 14:10:51,786 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:10:51,801 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:10:51,805 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:10:51,887 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:10:51,887 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:10:51,887 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:10:51,888 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:10:51,888 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:10:51,891 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:10:52,584 INFO org.apache.hadoop.mapreduce.Job: Job job_local321623198_0001 running in uber mode : false
+2017-03-10 14:10:52,587 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:10:54,371 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:10:54,373 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:10:54,373 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:10:54,374 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
+2017-03-10 14:10:54,374 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
+2017-03-10 14:10:55,678 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:10:56,656 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:10:56,659 INFO org.apache.hadoop.mapred.Task: Task:attempt_local321623198_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:10:56,666 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:10:56,666 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local321623198_0001_m_000000_0' done.
+2017-03-10 14:10:56,666 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local321623198_0001_m_000000_0
+2017-03-10 14:10:56,667 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:10:56,676 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:10:56,676 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local321623198_0001_r_000000_0
+2017-03-10 14:10:56,684 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:10:56,685 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:10:56,689 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@405d65c3
+2017-03-10 14:10:56,704 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:10:56,710 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local321623198_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:10:56,742 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:10:56,742 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local321623198_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
+2017-03-10 14:10:56,749 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local321623198_0001_m_000000_0
+2017-03-10 14:10:56,749 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
+2017-03-10 14:10:56,750 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:10:56,752 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:10:56,752 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:10:56,756 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:10:56,757 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:10:57,102 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:10:57,102 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
+2017-03-10 14:10:57,103 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:10:57,103 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:10:57,104 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:10:57,105 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:10:57,109 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:10:57,598 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:10:57,797 INFO org.apache.hadoop.mapred.Task: Task:attempt_local321623198_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:10:57,801 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:10:57,802 INFO org.apache.hadoop.mapred.Task: Task attempt_local321623198_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:10:57,802 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local321623198_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local321623198_0001_r_000000
+2017-03-10 14:10:57,803 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:10:57,803 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local321623198_0001_r_000000_0' done.
+2017-03-10 14:10:57,803 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local321623198_0001_r_000000_0
+2017-03-10 14:10:57,803 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:10:58,598 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:10:58,599 INFO org.apache.hadoop.mapreduce.Job: Job job_local321623198_0001 completed successfully
+2017-03-10 14:10:58,612 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+	File System Counters
+		FILE: Number of bytes read=11514350
+		FILE: Number of bytes written=1394261
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023494
+		Map output bytes=8925696
+		Map output materialized bytes=167092
+		Input split bytes=120
+		Combine input records=1023494
+		Combine output records=34513
+		Reduce input groups=34513
+		Reduce shuffle bytes=167092
+		Reduce input records=34513
+		Reduce output records=34513
+		Spilled Records=69026
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=82
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=363879
+2017-03-10 14:11:49,324 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:11:49,809 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:11:49,819 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:11:50,294 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:11:50,309 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:11:50,418 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:11:50,734 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1130190814_0001
+2017-03-10 14:11:51,124 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:11:51,125 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1130190814_0001
+2017-03-10 14:11:51,125 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:11:51,137 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:11:51,145 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:11:51,262 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:11:51,262 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1130190814_0001_m_000000_0
+2017-03-10 14:11:51,319 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:11:51,345 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:11:51,348 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:11:51,451 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:11:52,138 INFO org.apache.hadoop.mapreduce.Job: Job job_local1130190814_0001 running in uber mode : false
+2017-03-10 14:11:52,139 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:11:53,548 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:11:53,549 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:11:53,549 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:11:53,549 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
+2017-03-10 14:11:53,549 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
+2017-03-10 14:11:54,505 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:11:55,315 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:11:55,318 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1130190814_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:11:55,323 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:11:55,324 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1130190814_0001_m_000000_0' done.
+2017-03-10 14:11:55,324 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1130190814_0001_m_000000_0
+2017-03-10 14:11:55,324 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:11:55,331 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:11:55,332 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1130190814_0001_r_000000_0
+2017-03-10 14:11:55,337 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:11:55,337 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:11:55,339 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@33c40638
+2017-03-10 14:11:55,352 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:11:55,358 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1130190814_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:11:55,386 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:11:55,387 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1130190814_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
+2017-03-10 14:11:55,397 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1130190814_0001_m_000000_0
+2017-03-10 14:11:55,397 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
+2017-03-10 14:11:55,398 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:11:55,399 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:11:55,399 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:11:55,403 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:11:55,404 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:11:55,647 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:11:55,647 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
+2017-03-10 14:11:55,648 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:11:55,648 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:11:55,649 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:11:55,649 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:11:55,654 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:11:56,162 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:11:56,278 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1130190814_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:11:56,289 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:11:56,292 INFO org.apache.hadoop.mapred.Task: Task attempt_local1130190814_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:11:56,294 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1130190814_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1130190814_0001_r_000000
+2017-03-10 14:11:56,294 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:11:56,295 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1130190814_0001_r_000000_0' done.
+2017-03-10 14:11:56,295 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1130190814_0001_r_000000_0
+2017-03-10 14:11:56,295 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:11:57,163 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:11:57,163 INFO org.apache.hadoop.mapreduce.Job: Job job_local1130190814_0001 completed successfully
+2017-03-10 14:11:57,176 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+	File System Counters
+		FILE: Number of bytes read=11514350
+		FILE: Number of bytes written=1397073
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023494
+		Map output bytes=8925696
+		Map output materialized bytes=167092
+		Input split bytes=120
+		Combine input records=1023494
+		Combine output records=34513
+		Reduce input groups=34513
+		Reduce shuffle bytes=167092
+		Reduce input records=34513
+		Reduce output records=34513
+		Spilled Records=69026
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=84
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=363879
+2017-03-10 14:12:54,192 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:12:54,666 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:12:54,677 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:12:54,726 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
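
The run above was rejected at submission because FileOutputFormat found file:/home/cloudera/workspace/WordCount/output left over from the previous job and refused to overwrite it. A minimal driver-side sketch for clearing a stale local output directory before resubmitting; the OutputDirGuard class and clearIfExists name are illustrative only and are not from the logged WordCount/WordSort sources:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class OutputDirGuard {
    // Remove a stale output directory before job submission so
    // FileOutputFormat's output check does not reject the job again.
    public static void clearIfExists(Configuration conf, String dir) throws java.io.IOException {
        Path out = new Path(dir);                 // e.g. file:/home/cloudera/workspace/WordCount/output
        FileSystem fs = out.getFileSystem(conf);  // local filesystem in this log
        if (fs.exists(out)) {
            fs.delete(out, true);                 // recursive delete of the previous run's output
        }
    }
}
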
+2017-03-10 14:13:16,264 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:13:16,742 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:13:16,746 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:13:17,210 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:13:17,218 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:13:17,249 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:13:17,469 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1259538348_0001
+2017-03-10 14:13:17,909 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:13:17,911 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1259538348_0001
+2017-03-10 14:13:17,915 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:13:17,923 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:13:17,937 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:13:18,056 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:13:18,059 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1259538348_0001_m_000000_0
+2017-03-10 14:13:18,117 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:13:18,128 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:13:18,132 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:13:18,215 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:13:18,216 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:13:18,216 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:13:18,216 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:13:18,216 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:13:18,226 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:13:18,912 INFO org.apache.hadoop.mapreduce.Job: Job job_local1259538348_0001 running in uber mode : false
+2017-03-10 14:13:18,913 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:13:20,442 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:13:20,443 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:13:20,443 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:13:20,443 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
+2017-03-10 14:13:20,443 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
+2017-03-10 14:13:21,376 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:13:22,115 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:13:22,117 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1259538348_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:13:22,124 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:13:22,124 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1259538348_0001_m_000000_0' done.
+2017-03-10 14:13:22,124 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1259538348_0001_m_000000_0
+2017-03-10 14:13:22,124 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:13:22,132 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:13:22,132 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1259538348_0001_r_000000_0
+2017-03-10 14:13:22,137 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:13:22,137 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:13:22,140 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
+2017-03-10 14:13:22,151 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:13:22,156 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1259538348_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:13:22,209 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:13:22,209 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1259538348_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
+2017-03-10 14:13:22,215 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1259538348_0001_m_000000_0
+2017-03-10 14:13:22,217 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
+2017-03-10 14:13:22,219 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:13:22,220 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:13:22,224 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:13:22,229 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:13:22,230 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:13:22,458 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:13:22,459 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
+2017-03-10 14:13:22,459 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:13:22,459 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:13:22,460 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:13:22,460 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:13:22,465 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:13:22,936 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:13:23,199 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1259538348_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:13:23,200 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:13:23,201 INFO org.apache.hadoop.mapred.Task: Task attempt_local1259538348_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:13:23,201 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1259538348_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1259538348_0001_r_000000
+2017-03-10 14:13:23,202 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:13:23,202 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1259538348_0001_r_000000_0' done.
+2017-03-10 14:13:23,202 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1259538348_0001_r_000000_0
+2017-03-10 14:13:23,202 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:13:23,936 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:13:23,937 INFO org.apache.hadoop.mapreduce.Job: Job job_local1259538348_0001 completed successfully
+2017-03-10 14:13:23,948 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+	File System Counters
+		FILE: Number of bytes read=11514350
+		FILE: Number of bytes written=1397073
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023494
+		Map output bytes=8925696
+		Map output materialized bytes=167092
+		Input split bytes=120
+		Combine input records=1023494
+		Combine output records=34513
+		Reduce input groups=34513
+		Reduce shuffle bytes=167092
+		Reduce input records=34513
+		Reduce output records=34513
+		Spilled Records=69026
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=79
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=363879
+2017-03-10 14:15:07,671 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:15:08,143 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:15:08,146 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:15:08,597 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:15:08,610 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:15:08,649 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:15:08,885 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local744282859_0001
+2017-03-10 14:15:09,357 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:15:09,358 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local744282859_0001
+2017-03-10 14:15:09,361 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:15:09,374 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:15:09,376 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:15:09,515 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:15:09,516 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local744282859_0001_m_000000_0
+2017-03-10 14:15:09,574 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:15:09,584 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:15:09,587 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:15:09,679 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:15:10,364 INFO org.apache.hadoop.mapreduce.Job: Job job_local744282859_0001 running in uber mode : false
+2017-03-10 14:15:10,366 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:15:11,981 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:15:11,982 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:15:11,982 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:15:11,982 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
+2017-03-10 14:15:11,982 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
+2017-03-10 14:15:13,023 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:15:13,726 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:15:13,728 INFO org.apache.hadoop.mapred.Task: Task:attempt_local744282859_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:15:13,739 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:15:13,739 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local744282859_0001_m_000000_0' done.
+2017-03-10 14:15:13,739 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local744282859_0001_m_000000_0
+2017-03-10 14:15:13,739 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:15:13,748 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:15:13,749 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local744282859_0001_r_000000_0
+2017-03-10 14:15:13,753 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:15:13,754 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:15:13,756 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@534d4113
+2017-03-10 14:15:13,766 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:15:13,772 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local744282859_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:15:13,800 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:15:13,800 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local744282859_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
+2017-03-10 14:15:13,807 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local744282859_0001_m_000000_0
+2017-03-10 14:15:13,807 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
+2017-03-10 14:15:13,809 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:15:13,813 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:15:13,814 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:15:13,819 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:15:13,819 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:15:14,078 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:15:14,079 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
+2017-03-10 14:15:14,079 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:15:14,079 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:15:14,080 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:15:14,080 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:15:14,085 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:15:14,378 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:15:14,760 INFO org.apache.hadoop.mapred.Task: Task:attempt_local744282859_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:15:14,761 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:15:14,761 INFO org.apache.hadoop.mapred.Task: Task attempt_local744282859_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local744282859_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local744282859_0001_r_000000
+2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local744282859_0001_r_000000_0' done.
+2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local744282859_0001_r_000000_0
+2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:15:15,379 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:15:15,380 INFO org.apache.hadoop.mapreduce.Job: Job job_local744282859_0001 completed successfully
+2017-03-10 14:15:15,390 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+	File System Counters
+		FILE: Number of bytes read=11514350
+		FILE: Number of bytes written=1394261
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023494
+		Map output bytes=8925696
+		Map output materialized bytes=167092
+		Input split bytes=120
+		Combine input records=1023494
+		Combine output records=34513
+		Reduce input groups=34513
+		Reduce shuffle bytes=167092
+		Reduce input records=34513
+		Reduce output records=34513
+		Spilled Records=69026
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=90
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=363879
+2017-03-10 14:16:55,128 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:16:55,605 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:16:55,614 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:16:55,649 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
+2017-03-10 14:17:14,700 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:17:15,157 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:17:15,169 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:17:15,624 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:17:15,635 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:17:15,668 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:17:15,901 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1420822781_0001
+2017-03-10 14:17:16,319 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:17:16,321 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1420822781_0001
+2017-03-10 14:17:16,323 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:17:16,335 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:17:16,340 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:17:16,441 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:17:16,442 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1420822781_0001_m_000000_0
+2017-03-10 14:17:16,518 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:17:16,531 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:17:16,534 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:17:16,616 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:17:16,617 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:17:16,617 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:17:16,617 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:17:16,617 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:17:16,621 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:17:16,682 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:17:16,697 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:17:16,704 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:17:16,705 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1420822781_0001
+java.lang.Exception: java.lang.ArrayIndexOutOfBoundsException: 1
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.lang.ArrayIndexOutOfBoundsException: 1
+	at similarity.WordSort$Map.loadWordFreq(WordSort.java:87)
+	at similarity.WordSort$Map.setup(WordSort.java:118)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:142)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-10 14:17:17,328 INFO org.apache.hadoop.mapreduce.Job: Job job_local1420822781_0001 running in uber mode : false
+2017-03-10 14:17:17,329 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:17:17,331 INFO org.apache.hadoop.mapreduce.Job: Job job_local1420822781_0001 failed with state FAILED due to: NA
+2017-03-10 14:17:17,336 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
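
This run died in the mapper's setup: WordSort$Map.loadWordFreq (WordSort.java:87) threw ArrayIndexOutOfBoundsException: 1, which usually means a line of the word-frequency side file was split and field [1] was read from a line that yielded fewer than two tokens. WordSort.java is not part of this log, so the following is only a sketch of a tolerant loader under that assumption; the tab delimiter and the WordFreqLoader name are guesses:

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

public class WordFreqLoader {
    // Tolerant re-reading of an assumed "word<TAB>count" file; lines with fewer
    // than two fields are skipped instead of indexing parts[1], which is the
    // likely source of the ArrayIndexOutOfBoundsException: 1 above.
    public static Map<String, Integer> loadWordFreq(String path) throws IOException {
        Map<String, Integer> freq = new HashMap<String, Integer>();
        BufferedReader in = new BufferedReader(new FileReader(path));
        try {
            String line;
            while ((line = in.readLine()) != null) {
                String[] parts = line.split("\t");
                if (parts.length < 2) {
                    continue; // blank, header, or mis-delimited line
                }
                freq.put(parts[0], Integer.parseInt(parts[1].trim()));
            }
        } finally {
            in.close();
        }
        return freq;
    }
}
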
+2017-03-10 14:26:12,465 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:26:12,973 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:26:12,986 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:26:13,468 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:26:13,490 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:26:13,599 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:26:13,979 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1562971559_0001
+2017-03-10 14:26:14,383 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:26:14,385 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1562971559_0001
+2017-03-10 14:26:14,384 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:26:14,391 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:26:14,399 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:26:14,512 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:26:14,513 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1562971559_0001_m_000000_0
+2017-03-10 14:26:14,576 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:26:14,613 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:26:14,617 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:26:14,762 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:26:14,762 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:26:14,763 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:26:14,763 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:26:14,763 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:26:14,766 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:26:15,398 INFO org.apache.hadoop.mapreduce.Job: Job job_local1562971559_0001 running in uber mode : false
+2017-03-10 14:26:15,398 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:26:15,473 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:26:15,481 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:26:15,488 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:26:15,490 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1562971559_0001
+java.lang.Exception: java.lang.NullPointerException
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.lang.NullPointerException
+	at similarity.WordSort$Map$1.compare(WordSort.java:135)
+	at similarity.WordSort$Map$1.compare(WordSort.java:1)
+	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
+	at java.util.TimSort.sort(TimSort.java:189)
+	at java.util.TimSort.sort(TimSort.java:173)
+	at java.util.Arrays.sort(Arrays.java:659)
+	at java.util.Collections.sort(Collections.java:217)
+	at similarity.WordSort$Map.map(WordSort.java:131)
+	at similarity.WordSort$Map.map(WordSort.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-10 14:26:16,400 INFO org.apache.hadoop.mapreduce.Job: Job job_local1562971559_0001 failed with state FAILED due to: NA
+2017-03-10 14:26:16,402 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
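
Here the map task got past setup but Collections.sort failed: the anonymous Comparator at WordSort.java:135 threw a NullPointerException from inside TimSort, typically because it looks up a token in the loaded frequency map and the lookup returns null for a word the map has never seen. The later runs at 14:29 and 14:34 hit the same exception at lines 138 and 146. A null-safe comparator along those assumed lines; the FreqComparator name and the descending-by-count order are illustrative, since the real WordSort.java is not in this log:

import java.util.Comparator;
import java.util.Map;

public class FreqComparator implements Comparator<String> {
    private final Map<String, Integer> freq;

    public FreqComparator(Map<String, Integer> freq) {
        this.freq = freq;
    }

    // Treat words missing from 'freq' as count 0 instead of dereferencing a
    // null Integer, the usual cause of a NullPointerException thrown from
    // compare() inside Collections.sort.
    public int compare(String a, String b) {
        Integer fa = freq.get(a);
        Integer fb = freq.get(b);
        int ca = (fa == null) ? 0 : fa.intValue();
        int cb = (fb == null) ? 0 : fb.intValue();
        return Integer.compare(cb, ca); // higher count first (assumed ordering)
    }
}
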
+2017-03-10 14:29:37,807 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:29:38,356 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:29:38,361 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:29:38,865 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:29:38,874 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:29:38,909 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:29:39,202 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local705841975_0001
+2017-03-10 14:29:39,678 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:29:39,680 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local705841975_0001
+2017-03-10 14:29:39,685 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:29:39,703 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:29:39,705 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:29:39,828 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:29:39,829 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local705841975_0001_m_000000_0
+2017-03-10 14:29:39,879 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:29:39,891 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:29:39,896 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:29:39,983 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:29:39,984 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:29:39,984 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:29:39,984 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:29:39,984 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:29:39,988 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:29:40,687 INFO org.apache.hadoop.mapreduce.Job: Job job_local705841975_0001 running in uber mode : false
+2017-03-10 14:29:40,690 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:29:40,701 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:29:40,708 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:29:40,717 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:29:40,718 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local705841975_0001
+java.lang.Exception: java.lang.NullPointerException
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.lang.NullPointerException
+	at similarity.WordSort$Map$1.compare(WordSort.java:138)
+	at similarity.WordSort$Map$1.compare(WordSort.java:1)
+	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
+	at java.util.TimSort.sort(TimSort.java:189)
+	at java.util.TimSort.sort(TimSort.java:173)
+	at java.util.Arrays.sort(Arrays.java:659)
+	at java.util.Collections.sort(Collections.java:217)
+	at similarity.WordSort$Map.map(WordSort.java:134)
+	at similarity.WordSort$Map.map(WordSort.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-10 14:29:41,693 INFO org.apache.hadoop.mapreduce.Job: Job job_local705841975_0001 failed with state FAILED due to: NA
+2017-03-10 14:29:41,695 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
+2017-03-10 14:34:26,674 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:34:27,174 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:34:27,179 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:34:27,223 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount already exists
+2017-03-10 14:34:55,125 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:34:55,598 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:34:55,612 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:34:56,114 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:34:56,121 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:34:56,259 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:34:56,554 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1013376941_0001
+2017-03-10 14:34:56,980 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:34:56,982 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:34:56,983 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1013376941_0001
+2017-03-10 14:34:56,990 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:34:56,995 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:34:57,121 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:34:57,124 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1013376941_0001_m_000000_0
+2017-03-10 14:34:57,181 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:34:57,198 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:34:57,202 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:34:57,295 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:34:57,298 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:34:57,298 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:34:57,298 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:34:57,298 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:34:57,300 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:34:57,967 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:34:57,974 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:34:57,988 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:34:57,990 INFO org.apache.hadoop.mapreduce.Job: Job job_local1013376941_0001 running in uber mode : false
+2017-03-10 14:34:57,991 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:34:57,993 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1013376941_0001
+java.lang.Exception: java.lang.NullPointerException
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.lang.NullPointerException
+	at similarity.WordSort$Map$1.compare(WordSort.java:146)
+	at similarity.WordSort$Map$1.compare(WordSort.java:1)
+	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
+	at java.util.TimSort.sort(TimSort.java:189)
+	at java.util.TimSort.sort(TimSort.java:173)
+	at java.util.Arrays.sort(Arrays.java:659)
+	at java.util.Collections.sort(Collections.java:217)
+	at similarity.WordSort$Map.map(WordSort.java:142)
+	at similarity.WordSort$Map.map(WordSort.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-10 14:34:57,995 INFO org.apache.hadoop.mapreduce.Job: Job job_local1013376941_0001 failed with state FAILED due to: NA
+2017-03-10 14:34:57,998 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
+2017-03-10 14:38:51,972 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:38:52,515 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:38:52,527 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:38:53,098 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:38:53,128 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:38:53,247 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:38:53,578 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1410438889_0001
+2017-03-10 14:38:54,016 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:38:54,017 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1410438889_0001
+2017-03-10 14:38:54,024 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:38:54,064 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:38:54,065 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:38:54,219 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:38:54,220 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1410438889_0001_m_000000_0
+2017-03-10 14:38:54,279 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:38:54,293 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:38:54,296 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:38:54,396 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:38:54,396 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:38:54,396 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:38:54,397 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:38:54,397 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:38:54,400 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:38:55,024 INFO org.apache.hadoop.mapreduce.Job: Job job_local1410438889_0001 running in uber mode : false
+2017-03-10 14:38:55,027 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:38:55,119 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:38:55,119 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:38:55,119 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 325; bufvoid = 104857600
+2017-03-10 14:38:55,119 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214376(104857504); length = 21/6553600
+2017-03-10 14:38:55,128 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:38:55,137 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:38:55,141 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:38:55,141 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1410438889_0001
+java.lang.Exception: java.lang.NullPointerException
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.lang.NullPointerException
+	at java.lang.Integer.compareTo(Integer.java:1003)
+	at similarity.WordSort$Map$1.compare(WordSort.java:144)
+	at similarity.WordSort$Map$1.compare(WordSort.java:1)
+	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
+	at java.util.TimSort.sort(TimSort.java:189)
+	at java.util.TimSort.sort(TimSort.java:173)
+	at java.util.Arrays.sort(Arrays.java:659)
+	at java.util.Collections.sort(Collections.java:217)
+	at similarity.WordSort$Map.map(WordSort.java:140)
+	at similarity.WordSort$Map.map(WordSort.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-10 14:38:56,030 INFO org.apache.hadoop.mapreduce.Job: Job job_local1410438889_0001 failed with state FAILED due to: NA
+2017-03-10 14:38:56,035 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
+2017-03-10 14:40:16,992 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:40:17,522 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:40:17,536 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:40:18,047 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:40:18,055 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:40:18,089 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:40:18,383 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local483843492_0001
+2017-03-10 14:40:18,855 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:40:18,856 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local483843492_0001
+2017-03-10 14:40:18,860 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:40:18,887 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:40:18,889 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:40:19,021 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:40:19,023 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local483843492_0001_m_000000_0
+2017-03-10 14:40:19,078 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:40:19,106 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:40:19,109 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:40:19,243 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:40:19,243 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:40:19,243 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:40:19,244 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:40:19,244 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:40:19,249 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:40:19,852 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:40:19,852 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:40:19,852 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 325; bufvoid = 104857600
+2017-03-10 14:40:19,852 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214376(104857504); length = 21/6553600
+2017-03-10 14:40:19,858 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:40:19,860 INFO org.apache.hadoop.mapreduce.Job: Job job_local483843492_0001 running in uber mode : false
+2017-03-10 14:40:19,861 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:40:19,866 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:40:19,869 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:40:19,870 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local483843492_0001
+java.lang.Exception: java.lang.NullPointerException
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.lang.NullPointerException
+	at java.lang.Integer.compareTo(Integer.java:1003)
+	at similarity.WordSort$Map$1.compare(WordSort.java:144)
+	at similarity.WordSort$Map$1.compare(WordSort.java:1)
+	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
+	at java.util.TimSort.sort(TimSort.java:189)
+	at java.util.TimSort.sort(TimSort.java:173)
+	at java.util.Arrays.sort(Arrays.java:659)
+	at java.util.Collections.sort(Collections.java:217)
+	at similarity.WordSort$Map.map(WordSort.java:140)
+	at similarity.WordSort$Map.map(WordSort.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-10 14:40:20,864 INFO org.apache.hadoop.mapreduce.Job: Job job_local483843492_0001 failed with state FAILED due to: NA
+2017-03-10 14:40:20,871 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
+2017-03-10 14:41:43,006 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:41:43,480 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:41:43,485 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:41:44,008 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:41:44,042 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:41:44,079 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:41:44,328 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1726509137_0001
+2017-03-10 14:41:44,799 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:41:44,800 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1726509137_0001
+2017-03-10 14:41:44,806 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:41:44,835 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:41:44,838 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:41:44,977 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:41:44,978 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1726509137_0001_m_000000_0
+2017-03-10 14:41:45,030 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:41:45,051 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:41:45,058 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:41:45,138 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:41:45,142 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:41:45,143 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:41:45,143 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:41:45,143 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:41:45,149 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:41:45,805 INFO org.apache.hadoop.mapreduce.Job: Job job_local1726509137_0001 running in uber mode : false
+2017-03-10 14:41:45,806 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4776007; bufvoid = 104857600
+2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25753980(103015920); length = 460417/6553600
+2017-03-10 14:41:48,780 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:41:49,395 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:41:49,398 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1726509137_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:41:49,404 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:41:49,408 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1726509137_0001_m_000000_0' done.
+2017-03-10 14:41:49,409 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1726509137_0001_m_000000_0
+2017-03-10 14:41:49,409 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:41:49,411 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:41:49,411 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1726509137_0001_r_000000_0
+2017-03-10 14:41:49,416 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:41:49,417 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:41:49,419 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@70d586bc
+2017-03-10 14:41:49,429 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:41:49,435 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1726509137_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:41:49,461 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:41:49,461 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1726509137_0001_m_000000_0 decomp: 5006219 len: 2376884 to MEMORY
+2017-03-10 14:41:49,517 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5006219 bytes from map-output for attempt_local1726509137_0001_m_000000_0
+2017-03-10 14:41:49,521 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5006219, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5006219
+2017-03-10 14:41:49,523 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:41:49,524 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:41:49,524 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:41:49,529 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:41:49,529 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5006209 bytes
+2017-03-10 14:41:49,818 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:41:50,226 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5006219 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:41:50,227 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2376892 bytes from disk
+2017-03-10 14:41:50,227 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:41:50,228 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:41:50,228 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5006209 bytes
+2017-03-10 14:41:50,230 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:41:50,235 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:41:50,888 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1726509137_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:41:50,898 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:41:50,899 INFO org.apache.hadoop.mapred.Task: Task attempt_local1726509137_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:41:50,900 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1726509137_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1726509137_0001_r_000000
+2017-03-10 14:41:50,901 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:41:50,901 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1726509137_0001_r_000000_0' done.
+2017-03-10 14:41:50,901 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1726509137_0001_r_000000_0
+2017-03-10 14:41:50,901 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:41:51,822 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:41:51,822 INFO org.apache.hadoop.mapreduce.Job: Job job_local1726509137_0001 completed successfully
+2017-03-10 14:41:51,834 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+	File System Counters
+		FILE: Number of bytes read=16657772
+		FILE: Number of bytes written=12451545
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=115105
+		Map output bytes=4776007
+		Map output materialized bytes=2376884
+		Input split bytes=120
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=115105
+		Reduce shuffle bytes=2376884
+		Reduce input records=115105
+		Reduce output records=115105
+		Spilled Records=230210
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=140
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=4789771
+2017-03-10 14:54:30,023 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:54:30,544 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:54:30,565 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:54:31,208 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:54:31,222 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:54:31,311 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:54:31,680 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local520002813_0001
+2017-03-10 14:54:32,173 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:54:32,174 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local520002813_0001
+2017-03-10 14:54:32,184 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:54:32,195 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:54:32,197 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:54:32,352 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:54:32,354 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local520002813_0001_m_000000_0
+2017-03-10 14:54:32,427 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:54:32,457 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:54:32,463 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:54:32,548 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:54:32,549 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:54:32,550 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:54:32,550 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:54:32,550 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:54:32,559 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:54:33,178 INFO org.apache.hadoop.mapreduce.Job: Job job_local520002813_0001 running in uber mode : false
+2017-03-10 14:54:33,179 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 5541084; bufvoid = 104857600
+2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25753980(103015920); length = 460417/6553600
+2017-03-10 14:54:36,206 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:54:37,261 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:54:37,264 INFO org.apache.hadoop.mapred.Task: Task:attempt_local520002813_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:54:37,275 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:54:37,280 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local520002813_0001_m_000000_0' done.
+2017-03-10 14:54:37,280 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local520002813_0001_m_000000_0
+2017-03-10 14:54:37,280 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:54:37,284 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:54:37,284 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local520002813_0001_r_000000_0
+2017-03-10 14:54:37,289 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:54:37,289 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:54:37,293 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@22c4d1da
+2017-03-10 14:54:37,303 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:54:37,312 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local520002813_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:54:37,342 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:54:37,342 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local520002813_0001_m_000000_0 decomp: 5771296 len: 2500118 to MEMORY
+2017-03-10 14:54:37,405 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5771296 bytes from map-output for attempt_local520002813_0001_m_000000_0
+2017-03-10 14:54:37,408 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5771296, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5771296
+2017-03-10 14:54:37,410 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:54:37,410 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:54:37,411 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:54:37,420 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:54:37,420 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
+2017-03-10 14:54:38,188 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:54:38,209 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5771296 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:54:38,209 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2500126 bytes from disk
+2017-03-10 14:54:38,210 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:54:38,210 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:54:38,210 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
+2017-03-10 14:54:38,211 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:54:38,219 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:54:39,174 INFO org.apache.hadoop.mapred.Task: Task:attempt_local520002813_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:54:39,181 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:54:39,181 INFO org.apache.hadoop.mapred.Task: Task attempt_local520002813_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:54:39,187 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local520002813_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local520002813_0001_r_000000
+2017-03-10 14:54:39,188 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:54:39,188 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local520002813_0001_r_000000_0' done.
+2017-03-10 14:54:39,188 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local520002813_0001_r_000000_0
+2017-03-10 14:54:39,188 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:54:39,190 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:54:40,191 INFO org.apache.hadoop.mapreduce.Job: Job job_local520002813_0001 completed successfully
+2017-03-10 14:54:40,203 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
+	File System Counters
+		FILE: Number of bytes read=16904240
+		FILE: Number of bytes written=13589484
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=115105
+		Map output bytes=5541084
+		Map output materialized bytes=2500118
+		Input split bytes=120
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=115105
+		Reduce shuffle bytes=2500118
+		Reduce input records=115105
+		Reduce output records=115105
+		Spilled Records=230210
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=124
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=5560824
+	similarity.WordSort$DocLineCounter
+		NUM=124787
+2017-03-10 14:58:53,626 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:58:54,092 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:58:54,097 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:58:54,564 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:58:54,572 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:58:54,619 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:58:54,850 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local544686432_0001
+2017-03-10 14:58:55,274 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:58:55,275 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local544686432_0001
+2017-03-10 14:58:55,284 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:58:55,298 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:58:55,301 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:58:55,409 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:58:55,411 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local544686432_0001_m_000000_0
+2017-03-10 14:58:55,473 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:58:55,485 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:58:55,487 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:58:55,566 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:58:55,567 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:58:55,567 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:58:55,567 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:58:55,567 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:58:55,569 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:58:56,279 INFO org.apache.hadoop.mapreduce.Job: Job job_local544686432_0001 running in uber mode : false
+2017-03-10 14:58:56,280 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:58:58,471 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:58:58,472 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:58:58,472 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:58:58,472 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 5541084; bufvoid = 104857600
+2017-03-10 14:58:58,472 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25753980(103015920); length = 460417/6553600
+2017-03-10 14:58:58,671 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:58:59,336 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:58:59,339 INFO org.apache.hadoop.mapred.Task: Task:attempt_local544686432_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:58:59,344 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:58:59,344 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local544686432_0001_m_000000_0' done.
+2017-03-10 14:58:59,344 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local544686432_0001_m_000000_0
+2017-03-10 14:58:59,345 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:58:59,347 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:58:59,348 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local544686432_0001_r_000000_0
+2017-03-10 14:58:59,353 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:58:59,353 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:58:59,355 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@8a93430
+2017-03-10 14:58:59,364 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:58:59,370 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local544686432_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:58:59,395 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:58:59,395 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local544686432_0001_m_000000_0 decomp: 5771296 len: 2405517 to MEMORY
+2017-03-10 14:58:59,447 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5771296 bytes from map-output for attempt_local544686432_0001_m_000000_0
+2017-03-10 14:58:59,447 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5771296, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5771296
+2017-03-10 14:58:59,448 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:58:59,450 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:58:59,450 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:58:59,454 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:58:59,455 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
+2017-03-10 14:59:00,203 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5771296 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:59:00,203 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2405525 bytes from disk
+2017-03-10 14:59:00,204 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:59:00,204 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:59:00,205 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
+2017-03-10 14:59:00,205 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:59:00,210 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:59:00,290 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:59:00,890 INFO org.apache.hadoop.mapred.Task: Task:attempt_local544686432_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:59:00,895 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:59:00,895 INFO org.apache.hadoop.mapred.Task: Task attempt_local544686432_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:59:00,896 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local544686432_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local544686432_0001_r_000000
+2017-03-10 14:59:00,897 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:59:00,897 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local544686432_0001_r_000000_0' done.
+2017-03-10 14:59:00,901 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local544686432_0001_r_000000_0
+2017-03-10 14:59:00,901 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:59:01,290 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:59:01,291 INFO org.apache.hadoop.mapreduce.Job: Job job_local544686432_0001 completed successfully
+2017-03-10 14:59:01,312 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
+	File System Counters
+		FILE: Number of bytes read=16715038
+		FILE: Number of bytes written=13110192
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=115105
+		Map output bytes=5541084
+		Map output materialized bytes=2405517
+		Input split bytes=120
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=115105
+		Reduce shuffle bytes=2405517
+		Reduce input records=115105
+		Reduce output records=115105
+		Spilled Records=230210
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=113
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=5365335
+	similarity.WordSort$DocLineCounter
+		NUM=124787
+2017-03-10 16:04:16,924 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 16:04:17,595 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 16:04:17,605 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 16:04:17,661 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
+2017-03-10 16:04:38,662 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 16:04:39,236 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 16:04:39,243 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 16:04:39,836 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 16:04:39,846 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 16:04:39,896 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 16:04:40,232 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local2107526736_0001
+2017-03-10 16:04:40,744 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 16:04:40,745 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local2107526736_0001
+2017-03-10 16:04:40,753 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 16:04:40,762 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 16:04:40,772 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 16:04:40,922 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 16:04:40,925 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2107526736_0001_m_000000_0
+2017-03-10 16:04:40,998 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 16:04:41,021 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 16:04:41,024 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 16:04:41,121 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 16:04:41,747 INFO org.apache.hadoop.mapreduce.Job: Job job_local2107526736_0001 running in uber mode : false
+2017-03-10 16:04:41,749 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 16:04:44,514 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 16:04:44,515 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 16:04:44,515 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 16:04:44,515 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 5541084; bufvoid = 104857600
+2017-03-10 16:04:44,515 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25753980(103015920); length = 460417/6553600
+2017-03-10 16:04:44,850 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 16:04:45,645 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 16:04:45,648 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2107526736_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 16:04:45,653 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 16:04:45,654 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2107526736_0001_m_000000_0' done.
+2017-03-10 16:04:45,654 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2107526736_0001_m_000000_0
+2017-03-10 16:04:45,654 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 16:04:45,658 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 16:04:45,659 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2107526736_0001_r_000000_0
+2017-03-10 16:04:45,664 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 16:04:45,664 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 16:04:45,666 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@a9b03af
+2017-03-10 16:04:45,680 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 16:04:45,687 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2107526736_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 16:04:45,730 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 16:04:45,730 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2107526736_0001_m_000000_0 decomp: 5771296 len: 2404026 to MEMORY
+2017-03-10 16:04:45,758 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 16:04:45,813 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5771296 bytes from map-output for attempt_local2107526736_0001_m_000000_0
+2017-03-10 16:04:45,813 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5771296, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5771296
+2017-03-10 16:04:45,817 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 16:04:45,818 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 16:04:45,818 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 16:04:45,825 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 16:04:45,826 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
+2017-03-10 16:04:46,671 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5771296 bytes to disk to satisfy reduce memory limit
+2017-03-10 16:04:46,672 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2404034 bytes from disk
+2017-03-10 16:04:46,672 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 16:04:46,672 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 16:04:46,673 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
+2017-03-10 16:04:46,674 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 16:04:46,680 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 16:04:47,525 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2107526736_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 16:04:47,532 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 16:04:47,532 INFO org.apache.hadoop.mapred.Task: Task attempt_local2107526736_0001_r_000000_0 is allowed to commit now
+2017-03-10 16:04:47,533 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2107526736_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local2107526736_0001_r_000000
+2017-03-10 16:04:47,533 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 16:04:47,534 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2107526736_0001_r_000000_0' done.
+2017-03-10 16:04:47,534 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2107526736_0001_r_000000_0
+2017-03-10 16:04:47,534 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 16:04:47,766 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 16:04:47,767 INFO org.apache.hadoop.mapreduce.Job: Job job_local2107526736_0001 completed successfully
+2017-03-10 16:04:47,785 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
+	File System Counters
+		FILE: Number of bytes read=16712056
+		FILE: Number of bytes written=13108535
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=115105
+		Map output bytes=5541084
+		Map output materialized bytes=2404026
+		Input split bytes=120
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=115105
+		Reduce shuffle bytes=2404026
+		Reduce input records=115105
+		Reduce output records=115105
+		Spilled Records=230210
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=142
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=5365335
+	similarity.WordSort$DocLineCounter
+		NUM=124787
diff --git a/linenumber_sample b/linenumber_sample
new file mode 100644
index 0000000000000000000000000000000000000000..e37d32abba426c06b752a5e53f48c595c84e9270
--- /dev/null
+++ b/linenumber_sample
@@ -0,0 +1 @@
+1000
\ No newline at end of file
diff --git a/output/.part-r-00000.crc b/output/.part-r-00000.crc
index 3cd8bddcead5ef43f18e4e33ef6a1799e9921951..42e5f645f06fdca9d852a9b0009d8028a7d44f1c 100644
Binary files a/output/.part-r-00000.crc and b/output/.part-r-00000.crc differ
diff --git a/output/part-r-00000 b/output/part-r-00000
index 66121c09ea06a4b57d5d6a16b8ef56cc4c6c8f48..c732e9d57969bb43e4cbbf5792e47dfd1bd799a8 100644
--- a/output/part-r-00000
+++ b/output/part-r-00000
@@ -1,2 +1 @@
-be	28565
-up	8609
+2,1	1.0
diff --git a/sortedline_sample b/sortedline_sample
new file mode 100644
index 0000000000000000000000000000000000000000..c090de74548305b3ddc18fcc4d2b6d320c09979c
--- /dev/null
+++ b/sortedline_sample
@@ -0,0 +1,855 @@
+1	EBook,Complete,Works,Gutenberg,Shakespeare,William,Project,by,The,of
+2	Shakespeare,William
+4	anyone,anywhere,eBook,cost,use,This,at,no,for,with,is,of,and,the
+5	restrictions,whatsoever,copy,almost,away,give,may,You,or,no,it
+6	reuse,included,License,Gutenberg,Project,terms,under,it,of,the
+7	wwwgutenbergorg,online,eBook,or,at,this,with
+9	Details,COPYRIGHTED,Below,eBook,Gutenberg,Project,This,is,a
+10	guidelines,file,copyright,Please,follow,this,in,the
+12	Title,Complete,Works,Shakespeare,William,The,of
+14	Author,Shakespeare,William
+16	Posting,2011,September,Date,100,EBook,1
+17	1994,Release,Date,January
+19	Language,English
+22	START,EBOOK,WORKS,GUTENBERG,SHAKESPEARE,COMPLETE,PROJECT,THIS,WILLIAM,OF
+27	Produced,Future,Inc,World,Library,their,from,by,of,the
+32	100th,Etext,presented,file,Gutenberg,Project,This,by,is,and,the
+33	cooperation,Inc,World,Library,presented,their,from,with,is,in
+34	CDROMS,Future,Library,Gutenberg,Shakespeare,Project,of,and,the
+35	releases,Etexts,Domain,Public,placed,often,NOT,are,that,in,the
+37	Shakespeare
+39	implications,Etext,copyright,certain,read,has,This,should,you
+41	VERSION,WORKS,COMPLETE,THIS,WILLIAM,THE,ELECTRONIC,OF
+42	INC,WORLD,19901993,COPYRIGHT,LIBRARY,SHAKESPEARE,IS,BY,AND
+43	COLLEGE,BENEDICTINE,ILLINOIS,PROVIDED,ETEXT,GUTENBERG,PROJECT,BY,OF
+44	MACHINE,PERMISSION,READABLE,WITH,MAY,BE,COPIES,ELECTRONIC,AND
+45	YOUR,LONG,AS,SUCH,SO,OTHERS,1,DISTRIBUTED,COPIES,ARE,FOR,OR
+46	PERSONAL,ONLY,USED,USE,NOT,2,DISTRIBUTED,ARE,AND,OR
+47	INCLUDES,COMMERCIALLY,DISTRIBUTION,PROHIBITED,COMMERCIAL,ANY,BY
+48	SERVICE,MEMBERSHIP,DOWNLOAD,CHARGES,TIME,THAT,FOR,OR
+50	cooperate,World,Library,Gutenberg,Project,proud,The,with,is,to
+51	presentation,Complete,Works,Shakespeare,William,The,in,of,the
+52	HOWEVER,education,reading,entertainment,THIS,for,your,and
+53	NEITHER,NOR,PUBLIC,DOMAIN,SHAREWARE,UNDER,LIBRARY,THE,IS,AND
+54	FUTURE,PRESENTATION,CONDITIONS,NO,MAY,CHARGES,THIS,THE,OF
+55	ACCESS,MATERIAL,MADE,ENCOURAGED,TO,YOU,BE,ANY,THIS,ARE,FOR
+56	AWAY,ANYONE,ALLOWED,LIKE,GIVE,IT,BUT,NO,TO,YOU,CHARGES,ARE
+61	SMALL,PRINT,SHAKESPEARE,COMPLETE,for
+63	VERSION,WORKS,COMPLETE,THIS,WILLIAM,THE,ELECTRONIC,OF
+64	INC,WORLD,19901993,COPYRIGHT,LIBRARY,SHAKESPEARE,IS,BY
+65	PROVIDED,ETEXT,GUTENBERG,PROJECT,IS,BY,AND,OF
+66	COLLEGE,PERMISSION,BENEDICTINE,ILLINOIS,WITH
+68	etexts,unlike,etext,Gutenbergtm,Project,Since,many,other,this
+69	materials,methods,protected,copyright,since,is,you,and,the
+70	Projects,copy,reputation,effect,use,right,will,your,to,and,the
+71	limited,distribute,copyright,laws,other,by,it,is,and,the
+72	statement,Print,Small,conditions,this,of,the
+74	LICENSE,1
+76	DISTRIBUTE,ENCOURAGED,TO,YOU,MAY,ELECTRONIC,ARE,AND,A
+77	MACHINE,READABLE,LONG,AS,SUCH,ETEXT,SO,THIS,COPIES,OF
+78	YOUR,PERSONAL,ONLY,USE,NOT,OTHERS,2,1,ARE,FOR,AND,OR
+79	COMMERCIALLY,PROHIBITED,COMMERCIAL,USED,DISTRIBUTED,OR
+80	INCLUDES,SERVICE,DISTRIBUTION,DOWNLOAD,CHARGES,THAT,ANY,BY,FOR
+81	MEMBERSHIP,TIME,FOR,OR
+83	B,license,conditions,honor,subject,This,that,is,you,to,the
+84	provisions,replacement,refund,print,small,this,of,and,the
+85	statement,exact,distribute,copies,etext,this,that,you,of,and
+86	statement,Print,copies,including,Small,Such,can,this,be
+87	resulting,proprietary,compressed,including,form,any,or
+88	software,processing,hypertext,long,word,or,from,so,as
+89	EITHER
+91	displayed,readable,clearly,etext,does,1,when,The,is,and
+92	characters,intended,contain,those,other,than,by,not,the
+93	tilde,asterisk,author,although,work,of,and,the
+94	underline,punctuation,characters,used,convey,may,be,to
+95	additional,characters,author,intended,used,may,by,be,and,the
+96	indicate,hypertext,links,OR,to
+98	convertible,readily,reader,etext,2,at,no,by,The,is,the
+99	equivalent,EBCDIC,ASCII,expense,plain,form,into,or,by,the
+100	displays,program,etext,instance,case,as,for,that,is,the
+101	processors,word,OR,most,with
+103	agree,provide,3,request,You,or,at,no,on,to
+104	additional,etext,expense,copy,fee,cost,plain,or,in,a,of,the
+105	ASCII
+107	DISCLAIMER,WARRANTY,DAMAGES,LIMITED,2,OF
+109	incomplete,Defect,contain,etext,form,This,may,in,a,of,the
+110	data,inaccurate,transcription,errors,copyright,corrupt,or,a
+111	damaged,disk,virus,infringement,computer,defective,other,or,a
+112	codes,equipment,damage,read,cannot,or,But,by,your,be,that
+113	Replacement,Refund,described,Right,below,or,for,of,the
+114	etext,party,receive,Project,other,any,may,from,as,this,you,and
+115	GUTENBERGtm,disclaims,liability,etext,PROJECT,all,for,you,a,to
+116	HAVE,damages,legal,expenses,costs,fees,including,YOU,and
+117	LIABILITY,STRICT,NEGLIGENCE,REMEDIES,UNDER,NO,FOR,OR
+118	CONTRACT,INCLUDING,BREACH,BUT,WARRANTY,LIMITED,TO,NOT,OR,OF
+119	CONSEQUENTIAL,INDIRECT,PUNITIVE,INCIDENTAL,EVEN,IF,DAMAGES,OR
+120	NOTICE,POSSIBILITY,GIVE,DAMAGES,YOU,SUCH,THE,OF
+122	receiv,90,Defect,etext,discover,days,within,If,this,in,you,a,of
+123	ing,refund,paid,receive,money,any,can,if,it,you,a,of,the
+124	explanatory,sending,note,within,time,an,by,for,it,that,to,the
+125	physical,received,person,If,from,on,it,you,a
+126	medium,note,person,return,such,must,may,your,it,with,you,and
+127	alternatively,replacement,copy,choose,give,If,you,a,to
+128	electronically,received,choose,person,such,may,it,to
+129	alternatively,opportunity,receive,second,give,it,you,a,to
+130	electronically
+132	OTHERWISE,ASIS,OTHER,NO,TO,YOU,PROVIDED,ETEXT,THIS,IS
+133	MADE,EXPRESS,IMPLIED,KIND,WARRANTIES,TO,YOU,AS,ANY,ARE,OR,OF
+134	ON,MEDIUM,IT,INCLUDING,BUT,TO,MAY,BE,ETEXT,NOT,ANY,THE,OR
+135	MERCHANTABILITY,FITNESS,WARRANTIES,LIMITED,TO,FOR,OR,OF,A
+136	PARTICULAR,PURPOSE,disclaimers,states,allow,Some,do,not,of
+137	consequen,exclusion,warranties,implied,limitation,or,of,the
+138	tial,exclusions,disclaimers,damages,above,may,so,not,and,the
+139	legal,apply,rights,other,may,have,you,to,and
+141	INDEMNITY,indemnify,its,3,Project,hold,You,will,and,the
+142	lia,directors,agents,members,harmless,officers,from,all,and
+143	bility,legal,fees,including,expense,arise,cost,that,and
+144	indirectly,directly,following,any,or,from,do,that,you,of,the
+145	B,distribution,alteration,etext,cause,A,this,of
+146	modification,Defect,C,etext,addition,any,or,to,the
+148	WANT,MONEY,DONT,SEND,WHAT,HAVE,EVEN,IF,TO,4,YOU
+149	dedicated,increasing,Gutenberg,number,Project,is,of,to,the
+150	licensed,distributed,domain,freely,works,public,can,be,that,and
+151	gratefully,machine,accepts,readable,Project,form,The,in
+152	OCR,scanning,machines,software,contributions,money,time,in
+153	licenses,etexts,domain,copyright,royalty,public,free,and
+154	Pro,whatever,Money,paid,else,think,can,should,be,you,of,to
+155	ject,Benedictine,Illinois,Association,College,Gutenberg
+157	Kramer,Attorney,Print,B,Small,Charles,This,by
+158	726002026compuservecom,TEL,2122545093,Internet
+159	SMALL,PRINT,SHAKESPEARE,COMPLETE,FOR
+160	V120893,Print,Small
+162	VERSION,WORKS,COMPLETE,THIS,WILLIAM,THE,ELECTRONIC,OF
+163	INC,WORLD,19901993,COPYRIGHT,LIBRARY,SHAKESPEARE,IS,BY,AND
+164	COLLEGE,BENEDICTINE,ILLINOIS,PROVIDED,ETEXT,GUTENBERG,PROJECT,BY,OF
+165	MACHINE,PERMISSION,READABLE,WITH,MAY,BE,COPIES,ELECTRONIC,AND
+166	YOUR,LONG,AS,SUCH,SO,OTHERS,1,DISTRIBUTED,COPIES,ARE,FOR,OR
+167	PERSONAL,ONLY,USED,USE,NOT,2,DISTRIBUTED,ARE,AND,OR
+168	INCLUDES,COMMERCIALLY,DISTRIBUTION,PROHIBITED,COMMERCIAL,ANY,BY
+169	SERVICE,MEMBERSHIP,DOWNLOAD,CHARGES,TIME,THAT,FOR,OR
+174	1609
+176	SONNETS,THE
+178	Shakespeare,William,by
+182	1
+183	increase,creatures,fairest,desire,From,we
+184	thereby,beautys,rose,might,die,never,That
+185	riper,decease,time,should,But,by,as,the
+186	memory,heir,tender,might,bear,His,his
+187	contracted,bright,thine,eyes,own,But,thou,to
+188	selfsubstantial,Feedst,fuel,flame,lights,thy,with
+189	famine,abundance,Making,lies,where,a
+190	foe,cruel,self,Thy,sweet,too,thy,to
+191	ornament,worlds,fresh,art,Thou,now,that,the
+192	gaudy,herald,spring,only,And,to,the
+193	buriest,bud,content,Within,thine,own,thy
+194	niggarding,churl,makst,waste,tender,And,in
+195	glutton,Pity,else,world,or,this,be,the
+196	worlds,due,eat,grave,by,thee,To,and,the
+199	2
+200	besiege,forty,winters,brow,When,shall,thy
+201	trenches,dig,beautys,deep,field,thy,And,in
+202	gazed,youths,livery,proud,Thy,now,on,so
+203	tattered,weed,held,small,worth,Will,be,a,of
+204	asked,beauty,lies,being,where,Then,all,thy
+205	lusty,treasure,days,Where,all,thy,of,the
+206	sunken,deep,within,thine,eyes,own,say,To
+207	alleating,thriftless,praise,Were,shame,an,and
+208	deserved,beautys,praise,use,much,How,more,thy
+209	couldst,child,answer,fair,mine,This,If,thou,of
+210	count,sum,excuse,Shall,old,make,my,and
+211	Proving,succession,beauty,thine,by,his
+212	new,old,made,art,This,when,were,thou,be,to
+213	feelst,warm,cold,blood,when,see,thy,thou,it,And
+216	3
+217	viewest,glass,Look,face,tell,thy,thou,in,and,the
+218	form,another,face,Now,time,should,that,is,the
+219	renewest,repair,fresh,Whose,if,now,thou,not
+220	unbless,beguile,mother,dost,world,Thou,some,the
+221	uneared,womb,whose,where,fair,she,For,so,is
+222	Disdains,tillage,husbandry,thy,of,the
+223	fond,tomb,who,Or,so,will,he,be,is,the
+224	selflove,posterity,stop,Of,his,to
+225	glass,mothers,art,Thou,she,thee,thy,in,and
+226	Calls,April,prime,lovely,back,her,of,the
+227	windows,age,through,shalt,thine,So,see,thou,of
+228	Despite,wrinkles,golden,time,thy,this,of
+229	remembered,live,if,But,thou,be,not,to
+230	Die,image,single,dies,thine,thee,with,and
+233	4
+234	Unthrifty,loveliness,spend,why,dost,thou
+235	legacy,beautys,self,Upon,thy
+236	bequest,Natures,lend,gives,nothing,doth,but
+237	frank,lends,free,those,being,she,are,And,to
+238	niggard,beauteous,abuse,why,dost,Then,thou
+239	largess,bounteous,given,give,thee,The,to
+240	Profitless,usurer,why,use,dost,thou
+241	sums,sum,canst,live,great,So,yet,not,a,of
+242	traffic,having,self,alone,For,thy,with
+243	deceive,self,dost,sweet,Thou,thy,of
+244	calls,nature,gone,Then,how,when,thee,be,to
+245	acceptable,audit,canst,leave,What,thou
+246	tombed,unused,beauty,Thy,must,thee,be,with
+247	executor,used,lives,Which,th,be,to
+250	5
+251	frame,Those,hours,work,gentle,did,with,that
+252	gaze,dwell,lovely,eye,every,where,doth,The
+253	tyrants,same,play,Will,very,to,the
+254	unfair,excel,fairly,doth,which,And,that
+255	neverresting,leads,summer,time,For,on
+256	confounds,hideous,winter,there,To,him,and
+257	Sap,checked,frost,lusty,leaves,quite,gone,with,and
+258	oersnowed,bareness,Beauty,every,where,and
+259	distillation,summers,left,Then,were,not
+260	pent,liquid,glass,prisoner,walls,A,in,of
+261	Beautys,bereft,effect,beauty,were,with
+262	remembrance,Nor,nor,was,what,no,it
+263	distilled,winter,flowers,meet,though,they,But,with
+264	Leese,substance,lives,show,still,sweet,their,but
+267	6
+268	deface,ragged,winters,Then,hand,let,not
+269	distilled,summer,ere,In,thee,thy,thou,be
+270	vial,treasure,Make,place,sweet,some,thou
+271	selfkilled,beautys,treasure,ere,With,be,it
+272	usury,forbidden,use,That,not,is
+273	happies,loan,willing,pay,those,Which,that,the
+274	breed,self,Thats,another,thee,thy,for,to
+275	happier,ten,times,Or,one,for,be,it
+276	happier,Ten,self,times,art,were,than,thy,thou
+277	refigured,ten,times,thine,If,thee,of
+278	depart,shouldst,could,Then,death,if,what,do,thou
+279	posterity,Leaving,living,thee,in
+280	selfwilled,Be,fair,art,much,too,thou,for,not
+281	worms,conquest,deaths,heir,thine,make,To,be,and
+284	7
+285	orient,Lo,gracious,light,when,in,the
+286	Lifts,burning,each,under,eye,head,up,his
+287	newappearing,homage,Doth,sight,his,to
+288	Serving,sacred,majesty,looks,his,with
+289	climbed,steepup,hill,heavenly,having,And,the
+290	Resembling,middle,strong,age,youth,his,in
+291	adore,mortal,beauty,looks,Yet,still,his
+292	Attending,pilgrimage,golden,on,his
+293	highmost,car,pitch,weary,when,from,But,with
+294	reeleth,feeble,Like,age,day,from,he,the
+295	converted,duteous,fore,eyes,now,are,The
+296	tract,low,another,From,way,look,his,and
+297	outgoing,noon,self,So,thy,thou,in
+298	Unlooked,diest,unless,get,son,on,thou,a
+301	8
+302	hearst,sadly,Music,music,why,hear,thou,to
+303	Sweets,sweets,delights,joy,war,with,not,in
+304	receivst,gladly,lovst,Why,which,thou,not,that
+305	receivst,annoy,pleasure,else,thine,Or,with
+306	welltuned,concord,sounds,true,If,of,the
+307	unions,offend,married,ear,thine,By,do
+308	confounds,sweetly,chide,who,They,thee,do,but
+309	singleness,shouldst,parts,bear,In,thou,that,the
+310	string,Mark,husband,another,sweet,how,one,to
+311	ordering,mutual,Strikes,each,by,in
+312	Resembling,sire,happy,child,mother,and
+313	pleasing,sing,note,Who,one,do,all,in
+314	speechless,seeming,song,Whose,many,being,one
+315	Sings,single,wilt,prove,none,Thou,thee,this,to
+318	9
+319	widows,wet,eye,fear,Is,for,it,a,to
+320	consumst,single,self,life,That,thy,thou,in
+321	issueless,hap,Ah,shalt,die,if,thou,to
+322	makeless,wail,wife,world,like,thee,The,will,a
+323	widow,weep,still,world,thy,The,will,be,and
+324	behind,form,left,hast,no,That,thee,thou,of
+325	widow,private,keep,every,When,may,well
+326	childrens,husbands,shape,mind,eyes,By,her,in
+327	unthrift,spend,Look,world,doth,an,what,in,the
+328	Shifts,enjoys,place,still,world,but,for,his,it,the
+329	beautys,waste,end,world,an,hath,But,in,the
+330	destroys,user,unused,kept,so,it,And,the
+331	sits,toward,bosom,others,No,love,that,in
+332	commits,murdrous,shame,himself,such,That,on
+335	10
+336	bearst,deny,shame,any,For,love,thou,that,to
+337	unprovident,self,Who,art,thy,so,for
+338	Grant,beloved,wilt,many,art,if,thou,of
+339	evident,lovst,none,most,But,thou,that,is
+340	possessed,murdrous,hate,art,For,so,thou,with
+341	stickst,conspire,gainst,self,That,thy,thou,not,to
+342	ruinate,Seeking,roof,beauteous,that,to
+343	chief,repair,desire,Which,should,thy,be,to
+344	change,mind,thought,may,O,thy,that,my,I
+345	lodged,fairer,hate,gentle,Shall,than,love,be
+346	presence,gracious,kind,Be,thy,as,is,and
+347	kindhearted,least,self,prove,Or,at,thy,to
+348	self,Make,another,love,thee,for,me,of
+349	beauty,thine,live,still,may,or,That,thee,in
+352	11
+353	wane,growst,fast,shalt,As,so,as,thou
+354	departest,thine,In,which,one,from,thou,that,of
+355	bestowst,youngly,fresh,blood,which,thou,And,that
+356	convertest,mayst,youth,thine,call,Thou,when,from,thou
+357	Herein,increase,wisdom,lives,beauty,and
+358	decay,folly,Without,cold,age,this,and
+359	minded,cease,times,were,should,If,all,so,the
+360	threescore,year,world,away,make,would,And,the
+361	store,nature,whom,those,made,Let,hath,for,not
+362	featureless,Harsh,barrenly,perish,rude,and
+363	endowed,gave,Look,whom,best,she,more,thee
+364	bounteous,cherish,bounty,gift,shouldst,Which,thou,in
+365	carved,thereby,meant,seal,She,thee,her,for,and
+366	print,copy,shouldst,die,Thou,let,more,not,that
+369	12
+370	clock,count,tells,When,time,do,that,I,the
+371	sunk,hideous,brave,night,day,see,And,in,the
+372	violet,prime,behold,past,When,I,the
+373	silvered,sable,curls,white,oer,all,with,And
+374	lofty,trees,barren,leaves,When,see,of,I
+375	erst,canopy,herd,heat,Which,did,from,the
+376	sheaves,girded,summers,green,up,all,And,in
+377	bristly,Borne,bier,beard,white,on,with,and,the
+378	question,beauty,Then,make,do,thy,of,I
+379	wastes,among,time,go,must,That,thou,of,the
+380	beauties,sweets,forsake,themselves,Since,do,and
+381	fast,grow,others,die,see,they,as,And
+382	scythe,Times,defence,gainst,nothing,can,make,And
+383	breed,Save,takes,brave,hence,when,thee,he,him,to
+386	13
+387	self,were,love,O,are,but,your,that,you
+388	longer,self,yours,live,No,than,here,your,you
+389	prepare,Against,coming,end,should,this,you
+390	semblance,sweet,other,give,some,your,And,to
+391	lease,beauty,hold,So,which,should,that,in,you
+392	determination,Find,were,then,no,you
+393	selfs,decease,self,after,Your,again,your
+394	issue,form,bear,sweet,When,should,your
+395	decay,lets,fall,house,Who,fair,so,a,to
+396	uphold,husbandry,might,honour,Which,in
+397	stormy,gusts,winters,Against,day,of,the
+398	barren,eternal,deaths,rage,cold,And,of
+399	unthrifts,dear,none,know,love,O,but,my,you
+400	son,father,had,let,say,You,so,your,a
+403	14
+404	judgement,stars,pluck,Not,from,do,my,I,the
+405	astronomy,methinks,yet,have,And,I
+406	luck,evil,tell,or,good,But,not,of,to
+407	dearths,seasons,plagues,quality,Of,or,of
+408	minutes,brief,fortune,Nor,tell,can,to,I
+409	Pointing,thunder,rain,wind,each,his,to,and
+410	princes,Or,go,say,well,if,shall,it,with
+411	predict,oft,find,heaven,By,that,in,I
+412	derive,knowledge,thine,eyes,from,But,my,I
+413	constant,stars,read,art,such,them,And,in,I
+414	thrive,beauty,together,truth,As,shall,and
+415	convert,store,wouldst,self,If,from,thy,thou,to
+416	prognosticate,else,Or,thee,this,of,I
+417	truths,date,beautys,doom,end,Thy,is,and
+420	15
+421	consider,grows,thing,every,When,that,I
+422	Holds,perfection,moment,little,but,in,a
+423	presenteth,huge,stage,nought,shows,That,but,this
+424	comment,influence,Whereon,secret,stars,in,the
+425	plants,increase,perceive,men,When,as,that,I
+426	Cheered,checked,selfsame,sky,even,by,and,the
+427	Vaunt,decrease,sap,youthful,height,their,at,in
+428	memory,brave,wear,state,out,their,And,of
+429	inconstant,conceit,stay,Then,this,of,the
+430	Sets,rich,sight,youth,before,most,in,my,you
+431	debateth,wasteful,decay,Where,time,with
+432	sullied,change,youth,night,day,To,your,of,to
+433	Time,war,love,all,for,with,And,in,you,of
+434	engraft,takes,new,As,from,he,you,I
+437	16
+438	mightier,wherefore,way,But,do,not,you,a
+439	tyrant,Time,Make,bloody,war,upon,this
+440	fortify,decay,self,your,And,in
+441	rhyme,barren,blessed,means,With,than,more,my
+442	top,hours,happy,stand,Now,on,you,of,the
+443	unset,gardens,maiden,many,yet,And
+444	flowers,virtuous,living,wish,bear,With,would,you
+445	liker,Much,counterfeit,painted,than,your
+446	lines,repair,life,So,should,that,of,the
+447	pencil,pupil,Times,pen,Which,or,this,my
+448	inward,Neither,outward,worth,nor,fair,in
+449	self,Can,live,eyes,men,make,your,in,you,of
+450	keeps,self,still,away,give,To,your
+451	skill,drawn,live,sweet,own,must,by,your,And,you
+454	17
+455	verse,believe,Who,time,come,will,in,my,to
+456	filled,deserts,high,most,were,If,your,it,with
+457	tomb,knows,Though,heaven,yet,but,as,it,is,a
+458	hides,shows,parts,half,life,Which,your,not,and
+459	write,beauty,could,eyes,If,your,of,I,the
+460	graces,numbers,number,fresh,all,your,And,in
+461	poet,age,lies,say,come,would,The,this,to
+462	touched,touches,earthly,heavenly,faces,Such,neer
+463	yellowed,papers,age,So,should,their,with,my
+464	scorned,less,truth,tongue,Be,old,men,than,like,of
+465	termed,poets,rights,rage,true,your,be,And,a
+466	stretched,metre,antique,song,an,And,of
+467	alive,child,yours,time,some,were,But,that,of
+468	rhyme,twice,live,should,You,it,in,my,and
+471	18
+472	compare,summers,Shall,day,thee,a,to,I
+473	temperate,lovely,art,Thou,more,and
+474	Rough,darling,buds,winds,shake,May,do,of,the
+475	lease,date,summers,short,too,hath,all,And,a
+476	Sometime,shines,hot,eye,heaven,too,of,the
+477	dimmed,complexion,often,gold,his,And,is
+478	declines,sometime,every,fair,from,And
+479	untrimmed,changing,natures,chance,course,By,or
+480	fade,eternal,summer,But,shall,thy,not
+481	owst,possession,lose,Nor,fair,thou,that,of
+482	wandrest,brag,shade,Nor,death,shall,thou,his,in
+483	growst,lines,eternal,When,time,thou,in,to
+484	breathe,long,eyes,men,So,can,see,or,as
+485	gives,lives,long,life,So,thee,this,to,and
+488	19
+489	Devouring,paws,blunt,lions,Time,thou,the
+490	brood,devour,earth,sweet,own,make,her,And,the
+491	tigers,jaws,Pluck,keen,fierce,teeth,from,the
+492	longlived,phoenix,burn,blood,her,And,in,the
+493	fleetst,seasons,sorry,glad,Make,as,thou,and
+494	swiftfooted,whateer,Time,wilt,do,thou,And
+495	fading,sweets,wide,world,To,all,her,and,the
+496	crime,heinous,forbid,most,one,But,thee,I
+497	carve,brow,hours,loves,fair,O,thy,with,not,my
+498	antique,lines,pen,draw,Nor,thine,there,no,with
+499	untainted,allow,Him,course,do,thy,in
+500	succeeding,pattern,beautys,men,For,to
+501	despite,Time,worst,wrong,Yet,old,do,thy
+502	verse,young,live,ever,My,love,shall,in,my
+505	20
+506	painted,natures,womans,face,own,hand,A,with
+507	Hast,passion,mistress,master,thou,my,of,the
+508	acquainted,womans,gentle,heart,A,but,not
+509	shifting,womens,fashion,change,false,With,as,is
+510	rolling,theirs,bright,less,An,false,eye,than,more,in
+511	gazeth,Gilding,whereupon,object,it,the
+512	hues,controlling,hue,man,A,all,his,in
+513	amazeth,steals,womens,mens,souls,eyes,Which,and
+514	created,wert,woman,first,thou,for,And,a
+515	adoting,wrought,fell,Till,nature,she,thee,as
+516	defeated,addition,by,thee,And,me,of
+517	adding,purpose,thing,nothing,By,one,my,to
+518	pricked,womens,pleasure,since,out,she,But,thee,for
+519	treasure,Mine,loves,use,their,love,thy,be,and
+522	21
+523	muse,So,as,it,with,me,not,that,is
+524	Stirred,verse,painted,beauty,by,his,a,to
+525	ornament,self,use,heaven,Who,doth,for,it
+526	rehearse,every,fair,doth,his,with,And
+527	couplement,compare,Making,proud,a,of
+528	gems,seas,moon,rich,sun,earth,With,with,and
+529	Aprils,firstborn,flowers,rare,things,With,all,and
+530	rondure,hems,huge,heavens,air,That,this,in
+531	write,truly,true,let,love,O,but,me,in
+532	believe,fair,then,love,as,And,me,is,my
+533	bright,mothers,child,though,any,As,so,not
+534	candles,fixed,heavens,air,gold,those,As,in
+535	hearsay,Let,say,like,well,them,more,that,of
+536	sell,praise,purpose,will,not,that,to,I
+539	22
+540	persuade,glass,old,My,am,shall,me,not,I
+541	date,youth,long,So,one,are,as,thou,of,and
+542	furrows,behold,times,when,But,thee,in,I
+543	expiate,days,look,Then,death,should,my,I
+544	cover,beauty,doth,For,thee,all,that
+545	seemly,raiment,Is,heart,but,my,of,the
+546	breast,thine,live,doth,Which,thy,as,me,in
+547	elder,art,can,How,than,then,thou,be,I
+548	wary,thyself,therefore,love,O,so,be,of
+549	self,As,thee,but,will,for,not,my,I
+550	chary,Bearing,keep,heart,which,thy,so,will,I
+551	faring,babe,nurse,tender,ill,As,from,her
+552	Presume,slain,heart,mine,when,on,thy,not,is
+553	gavst,back,thine,again,Thou,give,me,not,to
+556	23
+557	unperfect,actor,stage,As,an,on,the
+558	beside,put,part,fear,Who,his,with,is
+559	replete,fierce,rage,thing,Or,much,some,too,with
+560	weakens,strengths,abundance,Whose,own,heart,his
+561	forget,trust,fear,So,say,for,of,to,I
+562	rite,ceremony,perfect,loves,The,of
+563	decay,strength,seem,loves,own,mine,And,in,to
+564	Oercharged,burthen,loves,might,own,mine,with,of
+565	eloquence,looks,let,then,O,be,my,the
+566	presagers,dumb,speaking,breast,And,my,of
+567	recompense,plead,look,Who,love,for,and
+568	expressed,More,tongue,than,hath,more,that
+569	silent,learn,writ,read,hath,love,what,O,to
+570	belongs,fine,wit,loves,eyes,hear,To,with,to
+573	24
+574	stelled,played,painter,Mine,eye,hath,and,the
+575	beautys,table,form,Thy,heart,in,my,of
+576	frame,wherein,held,body,tis,My,is,the
+577	painters,perspective,best,art,it,And,is
+578	painter,skill,through,see,must,For,his,you,the
+579	pictured,image,lies,find,where,true,To,your
+580	shop,bosoms,hanging,still,Which,is,in,my
+581	glazed,windows,thine,eyes,hath,That,his,with
+582	turns,done,eyes,Now,see,what,good,have,for
+583	drawn,shape,Mine,thine,eyes,thy,have,for,me,and
+584	wherethrough,windows,breast,sun,Are,my,to,the
+585	Delights,peep,gaze,therein,on,thee,to
+586	cunning,want,grace,Yet,eyes,art,their,this,to
+587	draw,They,heart,see,know,they,what,but,not,the
+590	25
+591	stars,favour,those,who,Let,their,are,with,in
+592	titles,boast,public,proud,honour,Of,and
+593	bars,triumph,Whilst,fortune,whom,such,of,I
+594	Unlooked,joy,honour,most,for,that,in,I
+595	favourites,spread,leaves,princes,Great,fair,their
+596	marigold,suns,eye,at,But,as,the
+597	buried,pride,themselves,lies,their,And,in
+598	frown,glory,die,For,they,their,at,in,a
+599	famoused,painful,warrior,fight,The,for
+600	foiled,victories,After,thousand,once,a
+601	razed,quite,book,honour,Is,from,of,the
+602	toiled,forgot,rest,which,all,he,for,And,the
+603	beloved,happy,Then,love,am,that,and,I
+604	removed,remove,Where,nor,may,be,not,I
+607	26
+608	vassalage,whom,Lord,love,in,my,of,to
+609	strongly,knit,merit,duty,Thy,hath,my
+610	embassage,written,send,thee,To,this,I
+611	witness,duty,wit,show,To,not,my,to
+612	Duty,wit,poor,great,mine,which,so,as
+613	wanting,bare,seem,May,show,words,make,it,in,to
+614	conceit,hope,thine,some,good,But,that,of,I
+615	bestow,naked,souls,thought,In,all,thy,will,it
+616	guides,whatsoever,moving,star,Till,that,my
+617	Points,graciously,aspect,fair,on,with,me
+618	tattered,apparel,puts,loving,on,And,my
+619	respect,worthy,show,sweet,To,thy,me,of
+620	boast,dare,Then,how,may,love,thee,do,to,I
+621	mayst,Till,prove,show,head,where,then,thou,me,not,my
+624	27
+625	Weary,toil,haste,bed,with,me,my,to,I
+626	respose,tired,travel,limbs,dear,The,for,with
+627	journey,begins,head,then,But,in,my,a
+628	expired,bodys,works,work,mind,when,To,my
+629	abide,thoughts,far,where,then,For,from,my,I
+630	Intend,zealous,pilgrimage,thee,a,to
+631	drooping,eyelids,wide,open,keep,And,my
+632	Looking,darkness,blind,see,which,on,do,the
+633	imaginary,Save,souls,sight,that,my
+634	sightless,Presents,shadow,view,thy,my,to
+635	ghastly,hung,jewel,night,Which,like,in,a
+636	beauteous,Makes,black,new,face,night,old,her,and
+637	limbs,Lo,mind,thus,night,day,by,my
+638	quiet,self,find,For,no,thee,for,my,and
+641	28
+642	plight,return,happy,can,How,then,in,I
+643	debarred,benefit,rest,am,That,of,the
+644	eased,oppression,days,night,When,by,not,is
+645	oppressed,night,day,But,by,and
+646	eithers,reign,enemies,each,though,And,to
+647	torture,consent,shake,hands,Do,me,in,to
+648	complain,toil,other,one,by,The,to,the
+649	toil,farther,far,off,still,How,from,thee,I
+650	bright,please,day,art,tell,thou,him,to,I,the
+651	blot,clouds,grace,dost,heaven,when,do,him,And,the
+652	swartcomplexioned,flatter,night,So,I,the
+653	gildst,twire,sparkling,stars,even,When,thou,not,the
+654	daily,sorrows,longer,draw,day,doth,But,my
+655	nightly,length,stronger,griefs,seem,night,doth,make,And
+658	29
+659	disgrace,Fortune,mens,eyes,When,with,in,and
+660	outcast,beweep,alone,state,all,my,I
+661	bootless,deaf,cries,trouble,heaven,with,And,my
+662	fate,curse,self,look,upon,And,my,and
+663	Wishing,rich,hope,like,one,more,me,in,to
+664	Featured,possessed,friends,like,him,with
+665	Desiring,scope,mans,art,this,that,and
+666	contented,enjoy,least,most,With,what,I
+667	despising,self,almost,thoughts,Yet,these,in,my
+668	Haply,state,think,then,on,thee,my,and,I
+669	arising,lark,Like,break,day,at,of,to,the
+670	hymns,sullen,sings,gate,heavens,earth,From,at
+671	remembered,brings,wealth,sweet,such,For,love,thy
+672	scorn,kings,change,state,then,That,with,my,to,I
+675	30
+676	sessions,silent,thought,sweet,When,of,to,the
+677	summon,remembrance,past,things,up,of,I
+678	sought,sigh,lack,thing,many,a,of,I,the
+679	wail,waste,woes,new,times,dear,old,with,And,my
+680	unused,flow,drown,eye,Then,can,an,to,I
+681	dateless,hid,deaths,precious,friends,night,For,in
+682	cancelled,afresh,woe,weep,loves,since,long,And
+683	vanished,expense,moan,sight,many,th,And,a,of
+684	foregone,grievances,grieve,Then,can,at,I
+685	heavily,woe,oer,tell,from,And,to
+686	forebemoaned,moan,account,sad,The,of
+687	paid,pay,new,before,Which,if,as,not,I
+688	while,dear,friend,think,if,But,on,thee,I,the
+689	restored,losses,sorrows,end,All,are,and
+692	31
+693	endeared,bosom,hearts,Thy,all,with,is
+694	lacking,supposed,dead,Which,by,have,I
+695	reigns,parts,loving,loves,there,love,all,And,and
+696	buried,thought,those,friends,which,all,And,I
+697	obsequious,tear,holy,many,How,a,and
+698	religious,stoln,Hath,dear,eye,mine,love,from
+699	interest,appear,dead,which,As,now,of,the
+700	hidden,removed,lie,things,But,thee,that,in
+701	buried,grave,live,where,doth,art,Thou,love,the
+702	Hung,trophies,lovers,gone,with,my,of,the
+703	parts,Who,give,did,their,thee,all,me,of,to
+704	due,alone,thine,many,now,That,is,of
+705	images,loved,view,Their,thee,in,I
+706	hast,they,all,thou,And,me,of,the
+709	32
+710	wellcontented,survive,day,If,thou,my
+711	churl,cover,dust,bones,death,When,shall,with,that,my
+712	resurvey,fortune,shalt,once,more,by,And
+713	deceased,lines,lover,rude,These,poor,thy,of
+714	bettring,Compare,time,them,with,of,the
+715	outstripped,pen,though,every,they,by,be,And
+716	Reserve,rhyme,their,love,them,for,not,my
+717	Exceeded,happier,height,men,by,of,the
+718	vouchsafe,loving,thought,then,O,but,this,me
+719	Muse,growing,grown,Had,age,friends,this,with,my
+720	dearer,birth,brought,had,than,A,love,this,his
+721	equipage,ranks,march,better,To,in,of
+722	poets,died,prove,since,better,But,he,and
+723	Theirs,style,read,Ill,their,love,for,his
+726	33
+727	Full,glorious,morning,seen,many,have,a,I
+728	Flatter,tops,mountain,sovereign,eye,with,the
+729	meadows,Kissing,green,golden,face,with,the
+730	Gilding,alchemy,streams,heavenly,pale,with
+731	basest,permit,Anon,ride,clouds,to,the
+732	celestial,rack,ugly,face,With,on,his
+733	forlorn,visage,hide,world,from,his,And,the
+734	Stealing,unseen,west,disgrace,this,with,to
+735	morn,shine,early,sun,Even,did,one,so,my
+736	splendour,triumphant,brow,With,on,all,my
+737	alack,hour,mine,out,one,was,But,but,he
+738	masked,region,cloud,hath,now,from,The,him,me
+739	disdaineth,whit,Yet,love,no,him,this,for,my
+740	Suns,staineth,stain,heavens,sun,world,when,may,of,the
+743	34
+744	beauteous,promise,didst,day,Why,such,thou,a
+745	cloak,travel,without,forth,make,And,me,my
+746	oertake,clouds,base,way,let,To,me,in,my
+747	bravry,Hiding,smoke,rotten,their,thy,in
+748	cloud,through,break,enough,Tis,thou,not,that,the
+749	stormbeaten,rain,dry,face,on,To,my,the
+750	salve,speak,can,such,well,man,For,no,a,of
+751	heals,cures,disgrace,wound,That,not,and,the
+752	physic,grief,shame,Nor,give,can,thy,my,to
+753	repent,loss,Though,still,yet,thou,have,I,the
+754	lends,relief,offenders,Th,weak,sorrow,but
+755	offences,cross,bears,strong,To,him,that,the
+756	sheds,pearl,Ah,tears,those,which,love,are,thy,but
+757	ransom,deeds,rich,ill,they,are,all,And,and
+760	35
+761	grieved,hast,done,No,which,more,at,thou,be,that
+762	Roses,fountains,mud,thorns,silver,have,and
+763	Clouds,eclipses,stain,moon,sun,both,and
+764	bud,loathsome,canker,sweetest,lives,And,in
+765	faults,even,All,men,make,this,in,and,I
+766	Authorizing,trespass,compare,thy,with
+767	salving,corrupting,amiss,self,My,thy
+768	Excusing,sins,than,more,are,thy
+769	sensual,sense,fault,bring,For,thy,in,to,I
+770	advocate,adverse,party,Thy,thy,is
+771	commence,plea,lawful,gainst,self,And,my,a
+772	civil,Such,hate,war,love,is,in,my,and
+773	accessary,needs,must,an,That,be,I
+774	sourly,robs,thief,sweet,which,from,To,me,that
+777	36
+778	twain,confess,two,Let,must,we,be,me,that
+779	undivided,Although,loves,one,our,are
+780	blots,remain,those,So,shall,do,with,me,that
+781	borne,Without,alone,help,by,thy,be,me
+782	respect,loves,two,In,there,one,our,but,is
+783	separable,spite,lives,Though,our,in,a
+784	alter,sole,effect,loves,though,Which,it,not
+785	delight,steal,hours,loves,Yet,sweet,doth,from,it
+786	acknowledge,evermore,may,thee,not,I
+787	bewailed,guilt,Lest,shame,should,thee,do,my
+788	kindness,public,Nor,honour,thou,with,me
+789	Unless,honour,name,take,from,thy,thou,that
+790	sort,such,love,But,thee,do,so,not,in,I
+791	report,being,mine,As,good,thy,thou,is
+794	37
+795	decrepit,delight,takes,father,As,a
+796	active,deeds,child,youth,see,do,To,his,of
+797	lame,Fortunes,dearest,spite,made,So,by,I
+798	comfort,worth,Take,truth,all,thy,my,of,and
+799	whether,wealth,birth,beauty,wit,For,or
+800	Or,any,these,or,more,all,of
+801	Entitled,crowned,parts,sit,do,thy,in
+802	engrafted,store,make,love,this,my,to,I
+803	despised,lame,poor,nor,So,then,am,not,I
+804	substance,Whilst,shadow,doth,give,such,this,that
+805	sufficed,abundance,am,That,thy,in,I
+806	glory,live,part,by,all,thy,And,a,of
+807	wish,Look,best,what,thee,that,is,in,I
+808	ten,happy,wish,times,This,then,have,me,I
+811	38
+812	invent,muse,subject,want,can,How,my,to
+813	pourst,verse,breathe,While,dost,into,thou,that,my
+814	Thine,argument,excellent,sweet,own,too
+815	rehearse,vulgar,paper,every,For,to
+816	aught,thanks,self,give,if,O,thy,me,in,the
+817	perusal,Worthy,sight,stand,against,thy
+818	whos,dumb,write,cannot,For,thee,so,that,to
+819	invention,self,light,dost,When,give,thy,thou
+820	tenth,Muse,ten,worth,times,Be,more,thou,in,the
+821	rhymers,invocate,nine,Than,those,old,which
+822	calls,bring,forth,let,on,thee,he,him,And,that
+823	Eternal,outlive,date,numbers,long,to
+824	curious,muse,slight,days,please,these,If,do,my
+825	pain,praise,thine,mine,shall,but,The,be,the
+828	39
+829	manners,sing,worth,how,may,O,thy,with,I
+830	part,better,art,When,all,thou,me,of,the
+831	self,praise,bring,own,can,mine,What,to
+832	ist,praise,own,mine,when,what,thee,but,And,I
+833	divided,Even,live,let,us,this,for
+834	single,lose,dear,name,one,love,our,And,of
+835	separation,give,may,That,by,this,I
+836	deservst,due,alone,which,That,thee,thou,to
+837	torment,absence,wouldst,prove,what,O,thou,a
+838	sour,leisure,Were,gave,leave,sweet,thy,it,not
+839	entertain,thoughts,time,love,To,with,of,the
+840	sweetly,deceive,thoughts,doth,Which,time,so,and
+841	teachest,twain,how,make,one,thou,And,that,to
+842	praising,remain,hence,who,By,doth,here,him
+845	40
+846	yea,Take,loves,take,love,them,all,my
+847	hadst,hast,before,than,then,more,What,thou
+848	mayst,call,true,No,love,thou,that,my
+849	hadst,All,thine,before,mine,was,more,thou,this
+850	receivest,Then,if,love,thou,for,my
+851	usest,blame,cannot,love,thee,thou,for,my,I
+852	deceivest,blamed,self,yet,if,But,thy,thou,be
+853	refusest,wilful,taste,self,By,what,thy,of
+854	robbery,forgive,thief,gentle,do,thy,I
+855	poverty,Although,steal,thee,all,thou,my
+856	greater,knows,grief,yet,love,it,And,is,a
+857	hates,injury,greater,known,wrong,bear,than,To
+858	Lascivious,shows,ill,grace,whom,well,all,in
+859	spites,Kill,foes,yet,must,we,be,with,me,not
+862	41
+863	commits,liberty,wrongs,Those,pretty,that
+864	absent,sometime,When,heart,am,from,thy,I
+865	befits,years,beauty,full,Thy,well,thy,and
+866	temptation,follows,still,where,art,For,thou
+867	Gentle,won,therefore,art,thou,be,to,and
+868	Beauteous,assailed,therefore,art,thou,be,to
+869	woos,womans,woman,son,when,what,And,a
+870	prevailed,sourly,till,Will,leave,her,he,have
+871	mightst,forbear,seat,Ay,yet,but,thou,me,my
+872	straying,chide,beauty,youth,thy,And,and
+873	riot,lead,even,Who,there,their,thee,in
+874	twofold,forced,break,truth,Where,art,thou,a,to
+875	Hers,tempting,beauty,by,thee,thy,her,to
+876	Thine,beauty,false,being,by,thy,me,to
+879	42
+880	grief,hast,That,all,her,thou,it,not,is,my
+881	dearly,loved,said,yet,may,her,be,it,And,I
+882	wailing,chief,hath,she,That,thee,is,my,of
+883	nearly,touches,loss,A,love,more,me,that,in
+884	Loving,offenders,excuse,ye,thus,will,I
+885	knowst,because,dost,Thou,love,her,thou,I
+886	abuse,sake,even,doth,she,so,for,And,me,my
+887	Suffring,approve,sake,friend,her,for,my,to
+888	gain,loss,lose,loves,If,thee,is,my,I
+889	losing,loss,found,friend,hath,her,And,that,my
+890	twain,Both,each,lose,find,both,other,and,I
+891	cross,sake,lay,both,on,this,for,And,me,my
+892	heres,joy,friend,one,But,are,my,and,I,the
+893	flattery,Sweet,alone,loves,then,she,but,me
+896	43
+897	wink,best,eyes,When,most,mine,see,then,do,I
+898	unrespected,view,things,day,For,they,all,the
+899	dreams,sleep,look,when,they,But,on,thee,in,I
+900	darkly,directed,bright,dark,are,And,in
+901	shadows,bright,shadow,whose,Then,doth,make,thou
+902	shadows,form,happy,show,How,would,thy
+903	clearer,clear,light,day,much,To,thy,with,the
+904	unseeing,shade,shines,eyes,When,thy,so,to
+905	blessed,eyes,made,mine,How,say,would,be,I
+906	looking,living,day,By,on,thee,in,the
+907	imperfect,shade,dead,night,fair,When,thy,in
+908	sightless,Through,heavy,sleep,stay,eyes,doth,on
+909	nights,days,All,till,see,are,thee,to,I
+910	dreams,nights,bright,days,show,when,thee,do,And,me
+913	44
+914	substance,dull,flesh,thought,were,If,my,of,the
+915	Injurious,distance,stop,way,should,not,my
+916	space,despite,brought,then,For,would,be,of,I
+917	remote,limits,far,dost,stay,From,where,thou
+918	although,foot,matter,stand,No,did,then,my
+919	farthest,removed,earth,Upon,from,thee,the
+920	jump,nimble,sea,land,thought,both,can,For,and
+921	soon,place,where,think,As,would,as,he,be,the
+922	ah,kills,thought,am,But,me,not,that,I
+923	lengths,miles,leap,large,gone,art,when,To,thou,of
+924	wrought,water,earth,much,But,so,that,of,and
+925	moan,leisure,attend,times,must,with,my,I
+926	Receiving,elements,slow,nought,by,so
+927	badges,eithers,woe,heavy,tears,But,of
+930	45
+931	purging,slight,air,fire,two,other,The,and
+932	wherever,abide,Are,both,thee,with,I
+933	desire,thought,first,other,The,my,the
+934	presentabsent,slide,swift,motion,These,with
+935	quicker,elements,gone,these,when,For,are
+936	embassy,tender,In,love,thee,of,to
+937	four,alone,being,two,made,life,My,with,of
+938	Sinks,oppressed,melancholy,down,death,with,to
+939	recured,lifes,composition,Until,be
+940	returned,messengers,swift,those,By,from,thee
+941	assured,even,back,Who,again,come,now,but
+942	recounting,health,fair,Of,thy,it,me,to
+943	longer,glad,joy,told,This,then,no,but,I
+944	grow,straight,sad,send,back,again,them,and,I
+947	46
+948	Mine,mortal,war,eye,heart,at,are,a,and
+949	divide,conquest,sight,How,thy,of,to,the
+950	pictures,bar,Mine,sight,eye,heart,would,thy,my
+951	freedom,right,eye,heart,mine,My,that,of,the
+952	plead,lie,dost,doth,heart,My,thou,him,that,in
+953	pierced,crystal,closet,eyes,never,A,with
+954	defendant,plea,deny,doth,But,that,the
+955	appearance,lies,says,fair,thy,him,And,in
+956	impanelled,title,side,To,this,is
+957	tenants,quest,thoughts,heart,A,all,of,to,the
+958	determined,verdict,their,by,And,is
+959	moiety,clear,hearts,dear,part,eyes,The,and,the
+960	outward,due,part,thus,eyes,mine,As,thy,is
+961	inward,hearts,right,heart,love,thy,And,my,of
+964	47
+965	Betwixt,league,took,eye,heart,mine,is,a,and
+966	turns,each,unto,other,doth,now,good,And,the
+967	famished,eye,look,When,mine,for,that,is,a
+968	smother,sighs,himself,Or,doth,heart,love,with,in
+969	picture,feast,loves,eye,doth,With,then,my
+970	banquet,painted,bids,heart,And,my,to,the
+971	guest,Another,hearts,eye,time,mine,is,my
+972	share,thoughts,part,doth,love,his,And,in,a,of
+973	picture,either,So,or,love,by,thy,my
+974	self,present,Thy,still,away,art,with,me
+975	farther,move,canst,thoughts,than,For,thou,not,my
+976	still,they,them,am,thee,with,And,and,I
+977	picture,sight,sleep,Or,if,they,thy,in,my
+978	Awakes,delight,hearts,eyes,heart,my,to,and
+981	48
+982	careful,took,way,How,when,was,my,I
+983	truest,bars,trifle,thrust,Each,under,to
+984	unused,use,stay,might,That,it,my,to
+985	wards,falsehood,trust,sure,hands,From,in,of
+986	trifles,jewels,whom,But,are,thou,my,to
+987	greatest,comfort,worthy,Most,grief,now,my
+988	dearest,care,only,best,Thou,mine,of,and
+989	vulgar,prey,thief,Art,left,every,of,the
+990	locked,chest,Thee,any,up,have,not,in,I
+991	Save,feel,though,where,art,thou,not,I
+992	closure,breast,Within,gentle,my,of,the
+993	whence,mayst,pleasure,From,part,come,at,thou,and
+994	stoln,thence,wilt,even,fear,thou,be,And,I
+995	thievish,proves,prize,truth,dear,For,so,for,a
+998	49
+999	Against,ever,time,come,if,that
+1000	defects,frown,When,see,on,shall,thee,my,I
\ No newline at end of file