From 63a80049615bb47f4c0aa64bf110f78fb05ef4a8 Mon Sep 17 00:00:00 2001
From: Wen Yao Jin <wen-yao.jin@student.ecp.fr>
Date: Sat, 11 Mar 2017 09:49:06 -0800
Subject: [PATCH] go

---
 hadoop.log               | 6690 ++++++++++++++++++++++++++++++--------
 hadoop.log.2017-03-10    | 1667 ++++++++++
 linenumber_sample        |    1 +
 output/.part-r-00000.crc |  Bin 41600 -> 12 bytes
 output/part-r-00000      |    3 +-
 sortedline_sample        |  855 +++++
 6 files changed, 7816 insertions(+), 1400 deletions(-)
 create mode 100644 hadoop.log.2017-03-10
 create mode 100644 linenumber_sample
 create mode 100644 sortedline_sample

diff --git a/hadoop.log b/hadoop.log
index 72b1af4..f7ca617 100644
--- a/hadoop.log
+++ b/hadoop.log
@@ -1,102 +1,1794 @@
-2017-03-10 12:58:10,580 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 12:58:11,075 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 12:58:11,087 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 12:58:11,561 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 12:58:11,594 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 12:58:11,707 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 12:58:11,996 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1458741767_0001
-2017-03-10 12:58:12,393 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 12:58:12,394 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1458741767_0001
-2017-03-10 12:58:12,399 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 12:58:12,419 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 12:58:12,422 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 12:58:12,540 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 12:58:12,543 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1458741767_0001_m_000000_0
-2017-03-10 12:58:12,607 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 12:58:12,632 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 12:58:12,640 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 12:58:12,811 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 12:58:12,823 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 12:58:12,824 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 12:58:12,824 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 12:58:12,824 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 12:58:12,832 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 12:58:13,397 INFO org.apache.hadoop.mapreduce.Job: Job job_local1458741767_0001 running in uber mode : false
-2017-03-10 12:58:13,399 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 12:58:14,983 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 12:58:14,985 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 12:58:14,985 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 12:58:14,985 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
-2017-03-10 12:58:14,985 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
-2017-03-10 12:58:16,014 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 12:58:16,798 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 12:58:16,800 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1458741767_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 12:58:16,806 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 12:58:16,806 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1458741767_0001_m_000000_0' done.
-2017-03-10 12:58:16,806 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1458741767_0001_m_000000_0
-2017-03-10 12:58:16,806 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 12:58:16,814 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 12:58:16,814 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1458741767_0001_r_000000_0
-2017-03-10 12:58:16,819 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 12:58:16,819 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 12:58:16,823 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
-2017-03-10 12:58:16,833 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 12:58:16,839 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1458741767_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 12:58:16,862 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 12:58:16,862 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1458741767_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
-2017-03-10 12:58:16,869 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1458741767_0001_m_000000_0
-2017-03-10 12:58:16,869 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
-2017-03-10 12:58:16,870 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 12:58:16,871 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 12:58:16,871 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 12:58:16,876 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 12:58:16,876 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 12:58:17,106 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
-2017-03-10 12:58:17,107 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
-2017-03-10 12:58:17,107 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 12:58:17,107 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 12:58:17,108 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 12:58:17,108 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 12:58:17,113 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 12:58:17,416 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 12:58:17,746 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1458741767_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 12:58:17,747 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 12:58:17,751 INFO org.apache.hadoop.mapred.Task: Task attempt_local1458741767_0001_r_000000_0 is allowed to commit now
-2017-03-10 12:58:17,752 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1458741767_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1458741767_0001_r_000000
-2017-03-10 12:58:17,756 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 12:58:17,756 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1458741767_0001_r_000000_0' done.
-2017-03-10 12:58:17,757 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1458741767_0001_r_000000_0
-2017-03-10 12:58:17,758 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 12:58:18,417 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 12:58:18,418 INFO org.apache.hadoop.mapreduce.Job: Job job_local1458741767_0001 completed successfully
-2017-03-10 12:58:18,427 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+2017-03-11 03:48:40,291 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 03:48:40,772 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 03:48:40,787 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 03:48:41,151 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 03:48:41,158 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 03:48:41,192 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 03:48:41,424 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1883045034_0001
+2017-03-11 03:48:41,880 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 03:48:41,882 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1883045034_0001
+2017-03-11 03:48:41,881 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 03:48:41,902 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 03:48:41,904 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 03:48:42,042 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 03:48:42,043 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1883045034_0001_m_000000_0
+2017-03-11 03:48:42,081 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 03:48:42,091 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 03:48:42,096 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-11 03:48:42,172 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 03:48:42,172 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 03:48:42,173 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 03:48:42,173 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 03:48:42,173 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 03:48:42,173 WARN org.apache.hadoop.mapred.MapTask: Unable to initialize MapOutputCollector org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+java.lang.ClassCastException: class similarity.LongPair
+	at java.lang.Class.asSubclass(Class.java:3165)
+	at org.apache.hadoop.mapred.JobConf.getOutputKeyComparator(JobConf.java:892)
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.init(MapTask.java:1005)
+	at org.apache.hadoop.mapred.MapTask.createSortingCollector(MapTask.java:402)
+	at org.apache.hadoop.mapred.MapTask.access$100(MapTask.java:81)
+	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.<init>(MapTask.java:698)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:770)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-11 03:48:42,177 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 03:48:42,178 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1883045034_0001
+java.lang.Exception: java.io.IOException: Initialization of all the collectors failed. Error in last collector was :class similarity.LongPair
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.io.IOException: Initialization of all the collectors failed. Error in last collector was :class similarity.LongPair
+	at org.apache.hadoop.mapred.MapTask.createSortingCollector(MapTask.java:414)
+	at org.apache.hadoop.mapred.MapTask.access$100(MapTask.java:81)
+	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.<init>(MapTask.java:698)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:770)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+Caused by: java.lang.ClassCastException: class similarity.LongPair
+	at java.lang.Class.asSubclass(Class.java:3165)
+	at org.apache.hadoop.mapred.JobConf.getOutputKeyComparator(JobConf.java:892)
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.init(MapTask.java:1005)
+	at org.apache.hadoop.mapred.MapTask.createSortingCollector(MapTask.java:402)
+	... 10 more
+2017-03-11 03:48:42,901 INFO org.apache.hadoop.mapreduce.Job: Job job_local1883045034_0001 running in uber mode : false
+2017-03-11 03:48:42,902 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 03:48:42,904 INFO org.apache.hadoop.mapreduce.Job: Job job_local1883045034_0001 failed with state FAILED due to: NA
+2017-03-11 03:48:42,908 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
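The first failed run (job_local1883045034_0001) dies in JobConf.getOutputKeyComparator: with no explicit sort comparator configured, Hadoop falls back to getMapOutputKeyClass().asSubclass(WritableComparable.class), so a custom map-output key such as similarity.LongPair must implement WritableComparable (or a RawComparator must be registered via job.setSortComparatorClass). The class itself is not part of this patch; the following is only a minimal sketch of a shape that would satisfy the framework, with made-up field names.

    package similarity;

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;

    import org.apache.hadoop.io.WritableComparable;

    // Sketch only: the real similarity.LongPair is not included in this patch.
    // Implementing WritableComparable lets JobConf.getOutputKeyComparator()
    // derive a default comparator instead of throwing ClassCastException.
    public class LongPair implements WritableComparable<LongPair> {

        private long first;
        private long second;

        public LongPair() { }                      // Hadoop needs a no-arg constructor

        public LongPair(long first, long second) {
            this.first = first;
            this.second = second;
        }

        @Override
        public void write(DataOutput out) throws IOException {
            out.writeLong(first);
            out.writeLong(second);
        }

        @Override
        public void readFields(DataInput in) throws IOException {
            first = in.readLong();
            second = in.readLong();
        }

        @Override
        public int compareTo(LongPair other) {
            int cmp = Long.compare(first, other.first);
            return cmp != 0 ? cmp : Long.compare(second, other.second);
        }

        @Override
        public int hashCode() {                    // keeps HashPartitioner stable
            return (int) (first * 163 + second);
        }

        @Override
        public boolean equals(Object o) {
            if (!(o instanceof LongPair)) {
                return false;
            }
            LongPair p = (LongPair) o;
            return first == p.first && second == p.second;
        }
    }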
+2017-03-11 04:03:00,838 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:03:01,367 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:03:01,370 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:03:01,795 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:03:01,802 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:03:01,938 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:03:02,191 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local590950587_0001
+2017-03-11 04:03:02,704 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:03:02,704 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local590950587_0001
+2017-03-11 04:03:02,710 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:03:02,733 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:03:02,735 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:03:02,865 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:03:02,869 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local590950587_0001_m_000000_0
+2017-03-11 04:03:02,928 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:03:02,957 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:03:02,962 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-11 04:03:03,048 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:03:03,049 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:03:03,049 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:03:03,049 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:03:03,049 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:03:03,055 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:03:03,124 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:03:03,131 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:03:03,140 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:03:03,141 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local590950587_0001
+java.lang.Exception: java.lang.NumberFormatException: For input string: ""
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.lang.NumberFormatException: For input string: ""
+	at java.lang.NumberFormatException.forInputString(NumberFormatException.java:65)
+	at java.lang.Long.parseLong(Long.java:453)
+	at java.lang.Long.parseLong(Long.java:483)
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:119)
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-11 04:03:03,731 INFO org.apache.hadoop.mapreduce.Job: Job job_local590950587_0001 running in uber mode : false
+2017-03-11 04:03:03,732 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 04:03:03,735 INFO org.apache.hadoop.mapreduce.Job: Job job_local590950587_0001 failed with state FAILED due to: NA
+2017-03-11 04:03:03,759 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
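The second run (job_local590950587_0001) fails inside similarity.NaiveApproach$Map.map (line 119) with Long.parseLong("") on an empty string, i.e. a blank line or an empty first field in the input split. The mapper source is not in this patch; the sketch below only illustrates a defensive parse of that step, and the class name, field layout, and counter names are invented for the example.

    import java.io.IOException;

    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;

    // Hypothetical defensive variant of the parsing step in
    // similarity.NaiveApproach.Map: skip malformed records instead of
    // letting Long.parseLong("") fail the whole map task.
    public class SafeParseMapper extends Mapper<LongWritable, Text, LongWritable, Text> {
        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString();
            int tab = line.indexOf('\t');
            String idField = (tab >= 0 ? line.substring(0, tab) : line).trim();
            if (idField.isEmpty()) {
                context.getCounter("NaiveApproach", "SKIPPED_EMPTY").increment(1);
                return;                            // ignore the malformed record
            }
            long lineId = Long.parseLong(idField);
            context.write(new LongWritable(lineId),
                          new Text(tab >= 0 ? line.substring(tab + 1) : ""));
        }
    }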
+2017-03-11 04:03:50,097 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:03:50,594 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:03:50,595 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:03:51,047 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:03:51,057 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:03:51,171 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:03:51,405 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1119859446_0001
+2017-03-11 04:03:51,850 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:03:51,850 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1119859446_0001
+2017-03-11 04:03:51,860 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:03:51,867 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:03:51,873 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:03:51,989 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:03:51,989 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1119859446_0001_m_000000_0
+2017-03-11 04:03:52,051 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:03:52,059 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:03:52,067 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline:0+5323735
+2017-03-11 04:03:52,149 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:03:52,149 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:03:52,149 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:03:52,150 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:03:52,150 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:03:52,154 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:03:52,186 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:03:52,195 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:03:52,205 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:03:52,206 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1119859446_0001
+java.lang.Exception: java.io.IOException: Type mismatch in value from map: expected org.apache.hadoop.io.DoubleWritable, received org.apache.hadoop.io.Text
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.io.IOException: Type mismatch in value from map: expected org.apache.hadoop.io.DoubleWritable, received org.apache.hadoop.io.Text
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.collect(MapTask.java:1078)
+	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.write(MapTask.java:715)
+	at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)
+	at org.apache.hadoop.mapreduce.lib.map.WrappedMapper$Context.write(WrappedMapper.java:112)
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:125)
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-11 04:03:52,852 INFO org.apache.hadoop.mapreduce.Job: Job job_local1119859446_0001 running in uber mode : false
+2017-03-11 04:03:52,853 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 04:03:52,855 INFO org.apache.hadoop.mapreduce.Job: Job job_local1119859446_0001 failed with state FAILED due to: NA
+2017-03-11 04:03:52,859 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
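The third run (job_local1119859446_0001) is a driver-configuration mismatch: the mapper emits Text values while the job declares DoubleWritable as the map-output value class (by default the map-output types fall back to the final output types). A hedged sketch of the relevant driver lines; 'job' and the reduce-side classes are assumptions.

    // Sketch: declare the map-output value type the mapper really emits.
    // ('job' is the org.apache.hadoop.mapreduce.Job built in the driver.)
    job.setMapOutputValueClass(Text.class);
    // The final (reduce) output value class can stay DoubleWritable:
    job.setOutputValueClass(DoubleWritable.class);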
+2017-03-11 04:06:23,828 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:06:24,303 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:06:24,329 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:06:24,793 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:06:24,801 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:06:24,917 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:06:25,235 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1773920421_0001
+2017-03-11 04:06:25,643 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:06:25,644 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1773920421_0001
+2017-03-11 04:06:25,646 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:06:25,660 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:06:25,664 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:06:25,776 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:06:25,777 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1773920421_0001_m_000000_0
+2017-03-11 04:06:25,849 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:06:25,875 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:06:25,880 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline:0+5323735
+2017-03-11 04:06:25,973 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:06:25,973 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:06:25,973 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:06:25,973 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:06:25,973 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:06:25,975 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:06:26,015 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:06:26,027 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:06:26,041 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:06:26,042 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1773920421_0001
+java.lang.Exception: java.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.LongWritable, received similarity.LongPair
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.LongWritable, received similarity.LongPair
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.collect(MapTask.java:1073)
+	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.write(MapTask.java:715)
+	at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)
+	at org.apache.hadoop.mapreduce.lib.map.WrappedMapper$Context.write(WrappedMapper.java:112)
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:126)
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-11 04:06:26,650 INFO org.apache.hadoop.mapreduce.Job: Job job_local1773920421_0001 running in uber mode : false
+2017-03-11 04:06:26,651 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 04:06:26,653 INFO org.apache.hadoop.mapreduce.Job: Job job_local1773920421_0001 failed with state FAILED due to: NA
+2017-03-11 04:06:26,658 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
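The next two runs (job_local1773920421_0001 and job_local526022282_0001) trip the same check one line earlier: the mapper now emits similarity.LongPair keys while the job still expects LongWritable, the default output key class. As above, a hedged driver-side sketch with 'job' assumed.

    // Sketch: the mapper emits similarity.LongPair keys, so the map-output
    // key class must be declared too (otherwise it defaults to the job's
    // output key class, which is LongWritable unless set).
    job.setMapOutputKeyClass(LongPair.class);
    job.setMapOutputValueClass(Text.class);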
+2017-03-11 04:07:51,343 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:07:51,835 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:07:51,844 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:07:52,378 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:07:52,387 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:07:52,426 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:07:52,660 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local526022282_0001
+2017-03-11 04:07:53,134 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:07:53,135 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local526022282_0001
+2017-03-11 04:07:53,140 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:07:53,143 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:07:53,150 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:07:53,280 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:07:53,281 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local526022282_0001_m_000000_0
+2017-03-11 04:07:53,398 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:07:53,439 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:07:53,455 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline:0+5323735
+2017-03-11 04:07:53,776 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:07:53,785 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:07:53,785 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:07:53,792 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:07:53,793 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:07:53,806 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:07:53,942 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:07:53,973 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:07:54,024 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:07:54,027 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local526022282_0001
+java.lang.Exception: java.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.LongWritable, received similarity.LongPair
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.LongWritable, received similarity.LongPair
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.collect(MapTask.java:1073)
+	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.write(MapTask.java:715)
+	at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)
+	at org.apache.hadoop.mapreduce.lib.map.WrappedMapper$Context.write(WrappedMapper.java:112)
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:126)
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-11 04:07:54,137 INFO org.apache.hadoop.mapreduce.Job: Job job_local526022282_0001 running in uber mode : false
+2017-03-11 04:07:54,138 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 04:07:54,141 INFO org.apache.hadoop.mapreduce.Job: Job job_local526022282_0001 failed with state FAILED due to: NA
+2017-03-11 04:07:54,166 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
+2017-03-11 04:11:51,758 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:11:52,358 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:11:52,361 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:11:52,853 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:11:52,874 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:11:53,013 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:11:53,344 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1568897348_0001
+2017-03-11 04:11:54,306 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:11:54,311 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1568897348_0001
+2017-03-11 04:11:54,308 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:11:54,344 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:11:54,350 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:11:54,539 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:11:54,540 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1568897348_0001_m_000000_0
+2017-03-11 04:11:54,571 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:11:54,584 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:11:54,587 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline:0+5323735
+2017-03-11 04:11:54,661 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:11:54,661 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:11:54,661 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:11:54,662 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:11:54,662 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:11:54,666 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:11:55,052 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:11:55,052 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 68406923; bufvoid = 104857600
+2017-03-11 04:11:55,053 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22344612(89378448); length = 3869785/6553600
+2017-03-11 04:11:55,053 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 72308587 kvi 18077140(72308560)
+2017-03-11 04:11:55,330 INFO org.apache.hadoop.mapreduce.Job: Job job_local1568897348_0001 running in uber mode : false
+2017-03-11 04:11:55,331 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 04:11:57,435 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:11:59,147 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 04:11:59,147 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 72308587 kv 18077140(72308560) kvi 17101736(68406944)
+2017-03-11 04:11:59,263 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:11:59,263 INFO org.apache.hadoop.mapred.MapTask: bufstart = 72308587; bufend = 33904999; bufvoid = 104857586
+2017-03-11 04:11:59,263 INFO org.apache.hadoop.mapred.MapTask: kvstart = 18077140(72308560); kvend = 13719124(54876496); length = 4358017/6553600
+2017-03-11 04:11:59,263 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 38047511 kvi 9511872(38047488)
+2017-03-11 04:12:00,610 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:03,620 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:03,852 INFO org.apache.hadoop.mapred.MapTask: Finished spill 1
+2017-03-11 04:12:03,853 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 38047511 kv 9511872(38047488) kvi 8656512(34626048)
+2017-03-11 04:12:03,995 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:12:04,000 INFO org.apache.hadoop.mapred.MapTask: bufstart = 38047511; bufend = 1671792; bufvoid = 104857600
+2017-03-11 04:12:04,000 INFO org.apache.hadoop.mapred.MapTask: kvstart = 9511872(38047488); kvend = 5660820(22643280); length = 3851053/6553600
+2017-03-11 04:12:04,000 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 5714480 kvi 1428616(5714464)
+2017-03-11 04:12:06,623 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:07,887 INFO org.apache.hadoop.mapred.MapTask: Finished spill 2
+2017-03-11 04:12:07,887 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 5714480 kv 1428616(5714464) kvi 548152(2192608)
+2017-03-11 04:12:07,981 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:12:07,981 INFO org.apache.hadoop.mapred.MapTask: bufstart = 5714480; bufend = 74891682; bufvoid = 104857600
+2017-03-11 04:12:07,981 INFO org.apache.hadoop.mapred.MapTask: kvstart = 1428616(5714464); kvend = 23965796(95863184); length = 3677221/6553600
+2017-03-11 04:12:07,982 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 78839250 kvi 19709808(78839232)
+2017-03-11 04:12:09,631 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:11,605 INFO org.apache.hadoop.mapred.MapTask: Finished spill 3
+2017-03-11 04:12:11,605 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 78839250 kv 19709808(78839232) kvi 18809832(75239328)
+2017-03-11 04:12:11,705 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:12:11,705 INFO org.apache.hadoop.mapred.MapTask: bufstart = 78839250; bufend = 42355081; bufvoid = 104857600
+2017-03-11 04:12:11,705 INFO org.apache.hadoop.mapred.MapTask: kvstart = 19709808(78839232); kvend = 15831652(63326608); length = 3878157/6553600
+2017-03-11 04:12:11,705 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 46302649 kvi 11575656(46302624)
+2017-03-11 04:12:12,632 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:15,474 INFO org.apache.hadoop.mapred.MapTask: Finished spill 4
+2017-03-11 04:12:15,474 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 46302649 kv 11575656(46302624) kvi 10588776(42355104)
+2017-03-11 04:12:15,575 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:12:15,575 INFO org.apache.hadoop.mapred.MapTask: bufstart = 46302649; bufend = 8741998; bufvoid = 104857588
+2017-03-11 04:12:15,575 INFO org.apache.hadoop.mapred.MapTask: kvstart = 11575656(46302624); kvend = 7428380(29713520); length = 4147277/6553600
+2017-03-11 04:12:15,575 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 12736558 kvi 3184132(12736528)
+2017-03-11 04:12:15,634 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:18,636 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:19,604 INFO org.apache.hadoop.mapred.MapTask: Finished spill 5
+2017-03-11 04:12:19,604 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 12736558 kv 3184132(12736528) kvi 2209768(8839072)
+2017-03-11 04:12:19,700 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:12:19,701 INFO org.apache.hadoop.mapred.MapTask: bufstart = 12736558; bufend = 80806124; bufvoid = 104857600
+2017-03-11 04:12:19,701 INFO org.apache.hadoop.mapred.MapTask: kvstart = 3184132(12736528); kvend = 25444400(101777600); length = 3954133/6553600
+2017-03-11 04:12:19,701 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 84800684 kvi 21200164(84800656)
+2017-03-11 04:12:21,645 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:23,545 INFO org.apache.hadoop.mapred.MapTask: Finished spill 6
+2017-03-11 04:12:23,546 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 84800684 kv 21200164(84800656) kvi 20269056(81076224)
+2017-03-11 04:12:23,648 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:12:23,649 INFO org.apache.hadoop.mapred.MapTask: bufstart = 84800684; bufend = 47222339; bufvoid = 104857600
+2017-03-11 04:12:23,649 INFO org.apache.hadoop.mapred.MapTask: kvstart = 21200164(84800656); kvend = 17048460(68193840); length = 4151705/6553600
+2017-03-11 04:12:23,649 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 51216899 kvi 12804220(51216880)
+2017-03-11 04:12:24,647 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:27,634 INFO org.apache.hadoop.mapred.MapTask: Finished spill 7
+2017-03-11 04:12:27,634 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 51216899 kv 12804220(51216880) kvi 11805592(47222368)
+2017-03-11 04:12:27,656 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:27,743 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:12:27,743 INFO org.apache.hadoop.mapred.MapTask: bufstart = 51216899; bufend = 13351553; bufvoid = 104857594
+2017-03-11 04:12:27,744 INFO org.apache.hadoop.mapred.MapTask: kvstart = 12804220(51216880); kvend = 8580768(34323072); length = 4223453/6553600
+2017-03-11 04:12:27,744 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 17394241 kvi 4348556(17394224)
+2017-03-11 04:12:30,658 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:31,844 INFO org.apache.hadoop.mapred.MapTask: Finished spill 8
+2017-03-11 04:12:31,845 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 17394241 kv 4348556(17394224) kvi 3337896(13351584)
+2017-03-11 04:12:31,945 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:12:31,945 INFO org.apache.hadoop.mapred.MapTask: bufstart = 17394241; bufend = 85442671; bufvoid = 104857600
+2017-03-11 04:12:31,945 INFO org.apache.hadoop.mapred.MapTask: kvstart = 4348556(17394224); kvend = 389144(1556576); length = 3959413/6553600
+2017-03-11 04:12:31,945 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 89485359 kvi 22371332(89485328)
+2017-03-11 04:12:33,663 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:12:35,791 INFO org.apache.hadoop.mapred.MapTask: Finished spill 9
+2017-03-11 04:12:35,791 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 89485359 kv 22371332(89485328) kvi 21471172(85884688)
+2017-03-11 04:12:35,883 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:12:35,883 INFO org.apache.hadoop.mapred.MapTask: bufstart = 89485359; bufend = 53575344; bufvoid = 104857593
+2017-03-11 04:12:35,884 INFO org.apache.hadoop.mapred.MapTask: kvstart = 22371332(89485328); kvend = 18636712(74546848); length = 3734621/6553600
+2017-03-11 04:12:35,884 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 57569904 kvi 14392472(57569888)
+2017-03-11 04:12:36,674 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-03-11 04:14:59,711 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:15:00,316 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:15:00,320 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:15:00,373 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
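The 04:15:00 attempt is rejected before submission because file:/home/cloudera/workspace/WordCount/output is left over from the earlier run; FileOutputFormat refuses to write into an existing output directory. Either remove it by hand (rm -r output locally, or hadoop fs -rm -r on HDFS) or have the driver clear it before submitting. A sketch of the latter, with 'conf', 'job', and 'outputDir' assumed from the driver:

    // Sketch: delete a stale output directory before submitting the job.
    // 'conf' is the job Configuration, 'outputDir' the output path string.
    Path outputPath = new Path(outputDir);
    FileSystem fs = outputPath.getFileSystem(conf);
    if (fs.exists(outputPath)) {
        fs.delete(outputPath, true);               // recursive delete
    }
    FileOutputFormat.setOutputPath(job, outputPath);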
+2017-03-11 04:15:38,000 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:15:38,582 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:15:38,588 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:15:38,943 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:15:38,952 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:15:38,984 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:15:39,209 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local398888533_0001
+2017-03-11 04:15:39,676 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:15:39,678 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local398888533_0001
+2017-03-11 04:15:39,678 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:15:39,698 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:39,700 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:15:39,814 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:15:39,818 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:39,870 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:39,884 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:39,887 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 04:15:39,971 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:15:39,971 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:15:39,971 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:15:39,971 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:15:39,971 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:15:39,974 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:15:40,007 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 04:15:40,007 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:15:40,007 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:15:40,007 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4977; bufvoid = 104857600
+2017-03-11 04:15:40,007 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214148(104856592); length = 249/6553600
+2017-03-11 04:15:40,016 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:15:40,038 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 04:15:40,046 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 04:15:40,052 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-11 04:15:40,052 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_m_000000_0' done.
+2017-03-11 04:15:40,052 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,052 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:15:40,105 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 04:15:40,105 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000000_0
+2017-03-11 04:15:40,116 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,117 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,118 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6a59828c
+2017-03-11 04:15:40,137 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,142 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,167 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 04:15:40,167 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,171 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,171 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,172 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,173 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,173 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,180 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,180 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,181 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,181 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,182 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,182 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,183 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,195 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,198 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 04:15:40,201 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 04:15:40,204 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,205 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000000_0 is allowed to commit now
+2017-03-11 04:15:40,205 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000000
+2017-03-11 04:15:40,206 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,206 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000000_0' done.
+2017-03-11 04:15:40,206 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000000_0
+2017-03-11 04:15:40,206 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000001_0
+2017-03-11 04:15:40,207 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,208 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,208 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4bc7ca6e
+2017-03-11 04:15:40,209 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,210 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,211 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,211 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,211 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,212 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,219 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,220 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,221 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,221 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,222 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,223 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,223 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,223 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,224 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,224 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,229 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000001_0 is done. And is in the process of committing
+2017-03-11 04:15:40,230 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,230 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000001_0 is allowed to commit now
+2017-03-11 04:15:40,231 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000001_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000001
+2017-03-11 04:15:40,232 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,232 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000001_0' done.
+2017-03-11 04:15:40,233 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000001_0
+2017-03-11 04:15:40,233 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000002_0
+2017-03-11 04:15:40,234 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,234 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,234 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@18e73d0
+2017-03-11 04:15:40,235 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,236 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,237 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,237 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,237 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,238 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,238 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,238 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,239 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,239 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,240 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,240 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,240 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,240 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,250 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,250 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,259 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000002_0 is done. And is in the process of committing
+2017-03-11 04:15:40,260 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,261 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000002_0 is allowed to commit now
+2017-03-11 04:15:40,262 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000002_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000002
+2017-03-11 04:15:40,264 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,264 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000002_0' done.
+2017-03-11 04:15:40,268 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000002_0
+2017-03-11 04:15:40,268 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000003_0
+2017-03-11 04:15:40,271 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,272 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,272 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5f0a2a42
+2017-03-11 04:15:40,273 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,277 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,279 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,280 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,280 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,280 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,281 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,281 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,282 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,282 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,283 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,283 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,283 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,283 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,284 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,285 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,287 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000003_0 is done. And is in the process of committing
+2017-03-11 04:15:40,288 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,288 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000003_0 is allowed to commit now
+2017-03-11 04:15:40,289 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000003_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000003
+2017-03-11 04:15:40,289 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,289 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000003_0' done.
+2017-03-11 04:15:40,290 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000003_0
+2017-03-11 04:15:40,290 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000004_0
+2017-03-11 04:15:40,291 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,291 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,291 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5f3ef269
+2017-03-11 04:15:40,292 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,293 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,294 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,295 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,295 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,295 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,296 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,296 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,297 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,297 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,297 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,297 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,297 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,297 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,298 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,299 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,301 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000004_0 is done. And is in the process of committing
+2017-03-11 04:15:40,302 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,303 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000004_0 is allowed to commit now
+2017-03-11 04:15:40,303 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000004_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000004
+2017-03-11 04:15:40,305 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,305 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000004_0' done.
+2017-03-11 04:15:40,305 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000004_0
+2017-03-11 04:15:40,305 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000005_0
+2017-03-11 04:15:40,306 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,307 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,307 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1c58f805
+2017-03-11 04:15:40,307 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,308 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,310 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,318 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,318 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,319 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,319 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,319 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,320 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,320 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,321 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,321 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,321 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,321 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,321 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,322 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,325 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000005_0 is done. And is in the process of committing
+2017-03-11 04:15:40,326 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,326 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000005_0 is allowed to commit now
+2017-03-11 04:15:40,328 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000005_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000005
+2017-03-11 04:15:40,329 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,329 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000005_0' done.
+2017-03-11 04:15:40,329 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000005_0
+2017-03-11 04:15:40,329 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000006_0
+2017-03-11 04:15:40,331 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,331 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,332 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6ed27650
+2017-03-11 04:15:40,332 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,333 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,335 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,336 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,336 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,336 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,337 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,337 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,339 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,339 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,340 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,340 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,340 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,341 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,341 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,342 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,347 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000006_0 is done. And is in the process of committing
+2017-03-11 04:15:40,349 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,349 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000006_0 is allowed to commit now
+2017-03-11 04:15:40,350 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000006_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000006
+2017-03-11 04:15:40,350 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,351 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000006_0' done.
+2017-03-11 04:15:40,351 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000006_0
+2017-03-11 04:15:40,351 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000007_0
+2017-03-11 04:15:40,352 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,352 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,353 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1e059ae6
+2017-03-11 04:15:40,354 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,355 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,356 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,356 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,357 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,357 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,357 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,357 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,358 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,358 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,359 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,359 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,359 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,359 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,359 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,360 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,368 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000007_0 is done. And is in the process of committing
+2017-03-11 04:15:40,369 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,369 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000007_0 is allowed to commit now
+2017-03-11 04:15:40,370 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000007_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000007
+2017-03-11 04:15:40,370 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,370 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000007_0' done.
+2017-03-11 04:15:40,370 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000007_0
+2017-03-11 04:15:40,370 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000008_0
+2017-03-11 04:15:40,376 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,376 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,376 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@439e91fe
+2017-03-11 04:15:40,377 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,378 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,379 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,380 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,380 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,380 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,381 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,381 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,383 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,383 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,384 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,384 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,384 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,384 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,385 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,386 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,389 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000008_0 is done. And is in the process of committing
+2017-03-11 04:15:40,392 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,392 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000008_0 is allowed to commit now
+2017-03-11 04:15:40,393 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000008_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000008
+2017-03-11 04:15:40,393 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,393 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000008_0' done.
+2017-03-11 04:15:40,394 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000008_0
+2017-03-11 04:15:40,394 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000009_0
+2017-03-11 04:15:40,395 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,395 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,395 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6556d991
+2017-03-11 04:15:40,396 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,397 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,401 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,402 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,402 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,402 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,402 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,403 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,403 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,404 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,405 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,407 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,408 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,409 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,411 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,411 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,415 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000009_0 is done. And is in the process of committing
+2017-03-11 04:15:40,416 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,416 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000009_0 is allowed to commit now
+2017-03-11 04:15:40,417 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000009_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000009
+2017-03-11 04:15:40,417 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,418 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000009_0' done.
+2017-03-11 04:15:40,418 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000009_0
+2017-03-11 04:15:40,418 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000010_0
+2017-03-11 04:15:40,419 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,419 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,419 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7d224d90
+2017-03-11 04:15:40,420 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,421 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000010_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,422 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#11 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,422 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,422 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,423 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,423 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,423 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,429 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,429 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,436 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,437 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,437 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,437 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,437 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,438 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,440 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000010_0 is done. And is in the process of committing
+2017-03-11 04:15:40,441 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,441 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000010_0 is allowed to commit now
+2017-03-11 04:15:40,443 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000010_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000010
+2017-03-11 04:15:40,444 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,444 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000010_0' done.
+2017-03-11 04:15:40,444 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000010_0
+2017-03-11 04:15:40,444 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000011_0
+2017-03-11 04:15:40,447 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,448 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,450 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@14c8c644
+2017-03-11 04:15:40,454 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,455 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000011_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,457 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#12 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,457 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,457 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,458 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,459 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,460 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,463 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,463 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,464 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,464 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,464 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,464 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,464 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,465 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,468 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000011_0 is done. And is in the process of committing
+2017-03-11 04:15:40,469 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,469 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000011_0 is allowed to commit now
+2017-03-11 04:15:40,470 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000011_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000011
+2017-03-11 04:15:40,475 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,475 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000011_0' done.
+2017-03-11 04:15:40,475 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000011_0
+2017-03-11 04:15:40,475 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000012_0
+2017-03-11 04:15:40,477 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,478 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,479 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@22be9f8f
+2017-03-11 04:15:40,480 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,481 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000012_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,482 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#13 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,483 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,483 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,483 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,484 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,484 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,485 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,485 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,485 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,486 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,486 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,486 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,486 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,486 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,491 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000012_0 is done. And is in the process of committing
+2017-03-11 04:15:40,493 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,493 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000012_0 is allowed to commit now
+2017-03-11 04:15:40,494 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000012_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000012
+2017-03-11 04:15:40,495 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,495 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000012_0' done.
+2017-03-11 04:15:40,495 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000012_0
+2017-03-11 04:15:40,495 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000013_0
+2017-03-11 04:15:40,497 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,498 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,498 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2d861958
+2017-03-11 04:15:40,501 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,503 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000013_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,510 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#14 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,511 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,511 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,511 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,511 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,512 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,514 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,514 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,514 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,515 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,515 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,515 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,516 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,516 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,518 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000013_0 is done. And is in the process of committing
+2017-03-11 04:15:40,519 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,519 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000013_0 is allowed to commit now
+2017-03-11 04:15:40,520 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000013_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000013
+2017-03-11 04:15:40,521 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,521 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000013_0' done.
+2017-03-11 04:15:40,521 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000013_0
+2017-03-11 04:15:40,521 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000014_0
+2017-03-11 04:15:40,524 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,525 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,525 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3db0ab45
+2017-03-11 04:15:40,526 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,528 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000014_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,530 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#15 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,531 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,532 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,532 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,533 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,533 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,534 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,534 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,534 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,534 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,534 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,535 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,535 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,535 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,539 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000014_0 is done. And is in the process of committing
+2017-03-11 04:15:40,543 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,544 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000014_0 is allowed to commit now
+2017-03-11 04:15:40,546 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000014_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000014
+2017-03-11 04:15:40,547 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,547 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000014_0' done.
+2017-03-11 04:15:40,547 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000014_0
+2017-03-11 04:15:40,547 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000015_0
+2017-03-11 04:15:40,548 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,548 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,549 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@b2d2e71
+2017-03-11 04:15:40,550 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,551 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000015_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,553 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#16 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,553 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,553 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,553 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,554 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,554 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,555 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,555 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,556 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,556 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,556 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,556 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,556 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,563 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,588 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000015_0 is done. And is in the process of committing
+2017-03-11 04:15:40,590 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,590 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000015_0 is allowed to commit now
+2017-03-11 04:15:40,593 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000015_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000015
+2017-03-11 04:15:40,599 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,601 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000015_0' done.
+2017-03-11 04:15:40,601 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000015_0
+2017-03-11 04:15:40,601 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000016_0
+2017-03-11 04:15:40,603 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,603 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,603 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@736d0c80
+2017-03-11 04:15:40,604 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,605 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000016_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,607 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#17 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 5105 len: 460 to MEMORY
+2017-03-11 04:15:40,608 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5105 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,608 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5105, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5105
+2017-03-11 04:15:40,609 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,611 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,611 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,612 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,614 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:15:40,618 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5105 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,620 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 468 bytes from disk
+2017-03-11 04:15:40,620 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,620 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,621 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:15:40,621 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,629 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000016_0 is done. And is in the process of committing
+2017-03-11 04:15:40,630 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,631 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000016_0 is allowed to commit now
+2017-03-11 04:15:40,631 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000016_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000016
+2017-03-11 04:15:40,632 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,632 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000016_0' done.
+2017-03-11 04:15:40,632 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000016_0
+2017-03-11 04:15:40,632 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000017_0
+2017-03-11 04:15:40,633 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,633 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,633 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@66be558a
+2017-03-11 04:15:40,634 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,635 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000017_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,636 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#18 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,636 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,637 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,637 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,637 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,637 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,638 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,638 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,639 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,639 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,639 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,639 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,639 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,640 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,643 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000017_0 is done. And is in the process of committing
+2017-03-11 04:15:40,646 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,647 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000017_0 is allowed to commit now
+2017-03-11 04:15:40,650 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000017_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000017
+2017-03-11 04:15:40,651 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,651 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000017_0' done.
+2017-03-11 04:15:40,651 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000017_0
+2017-03-11 04:15:40,651 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000018_0
+2017-03-11 04:15:40,652 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,652 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,652 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@294dcddd
+2017-03-11 04:15:40,656 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,657 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000018_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,659 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#19 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,659 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,659 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,659 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,660 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,660 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,661 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,661 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,661 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,661 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,662 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,662 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,662 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,662 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,665 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000018_0 is done. And is in the process of committing
+2017-03-11 04:15:40,666 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,666 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000018_0 is allowed to commit now
+2017-03-11 04:15:40,667 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000018_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000018
+2017-03-11 04:15:40,667 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,668 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000018_0' done.
+2017-03-11 04:15:40,668 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000018_0
+2017-03-11 04:15:40,668 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000019_0
+2017-03-11 04:15:40,668 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,669 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,669 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5afe331c
+2017-03-11 04:15:40,670 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,671 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000019_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,678 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#20 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,680 INFO org.apache.hadoop.mapreduce.Job: Job job_local398888533_0001 running in uber mode : false
+2017-03-11 04:15:40,681 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 04:15:40,682 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,682 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,683 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,683 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,683 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,685 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,685 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,685 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,686 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,686 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,686 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,686 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,686 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,694 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000019_0 is done. And is in the process of committing
+2017-03-11 04:15:40,700 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,700 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000019_0 is allowed to commit now
+2017-03-11 04:15:40,701 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000019_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000019
+2017-03-11 04:15:40,702 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,703 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000019_0' done.
+2017-03-11 04:15:40,703 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000019_0
+2017-03-11 04:15:40,703 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000020_0
+2017-03-11 04:15:40,707 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,707 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,707 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@33193e3b
+2017-03-11 04:15:40,708 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,709 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000020_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,710 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#21 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,710 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,710 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,710 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,711 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,712 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,712 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,712 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,713 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,713 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,714 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,714 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,714 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,715 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,719 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000020_0 is done. And is in the process of committing
+2017-03-11 04:15:40,721 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,721 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000020_0 is allowed to commit now
+2017-03-11 04:15:40,722 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000020_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000020
+2017-03-11 04:15:40,723 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,723 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000020_0' done.
+2017-03-11 04:15:40,723 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000020_0
+2017-03-11 04:15:40,723 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000021_0
+2017-03-11 04:15:40,724 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,724 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,724 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@8bf290b
+2017-03-11 04:15:40,726 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,729 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000021_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,731 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#22 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,731 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,731 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,731 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,732 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,732 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,733 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,733 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,734 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,734 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,734 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,734 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,735 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,735 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,740 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000021_0 is done. And is in the process of committing
+2017-03-11 04:15:40,740 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,741 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000021_0 is allowed to commit now
+2017-03-11 04:15:40,741 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000021_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000021
+2017-03-11 04:15:40,742 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,743 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000021_0' done.
+2017-03-11 04:15:40,743 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000021_0
+2017-03-11 04:15:40,743 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000022_0
+2017-03-11 04:15:40,745 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,746 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,746 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@ff2d097
+2017-03-11 04:15:40,747 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,748 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000022_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,750 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#23 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,751 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,751 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,752 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,752 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,752 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,754 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,754 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,754 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,755 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,755 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,755 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,755 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,756 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,768 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000022_0 is done. And is in the process of committing
+2017-03-11 04:15:40,769 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,769 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000022_0 is allowed to commit now
+2017-03-11 04:15:40,770 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000022_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000022
+2017-03-11 04:15:40,770 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,771 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000022_0' done.
+2017-03-11 04:15:40,771 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000022_0
+2017-03-11 04:15:40,771 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000023_0
+2017-03-11 04:15:40,772 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,772 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,772 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@502c186
+2017-03-11 04:15:40,773 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,774 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000023_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,778 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#24 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,781 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,784 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,784 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,785 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,785 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,786 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,786 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,786 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,786 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,786 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,786 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,787 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,787 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,794 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000023_0 is done. And is in the process of committing
+2017-03-11 04:15:40,797 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,801 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000023_0 is allowed to commit now
+2017-03-11 04:15:40,801 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000023_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000023
+2017-03-11 04:15:40,803 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,803 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000023_0' done.
+2017-03-11 04:15:40,803 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000023_0
+2017-03-11 04:15:40,803 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000024_0
+2017-03-11 04:15:40,807 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,808 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,808 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@ad99f2d
+2017-03-11 04:15:40,808 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,814 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000024_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,815 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#25 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,825 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,825 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,825 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,826 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,826 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,827 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,827 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,827 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,828 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,828 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,828 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,828 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,828 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,844 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000024_0 is done. And is in the process of committing
+2017-03-11 04:15:40,845 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,845 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000024_0 is allowed to commit now
+2017-03-11 04:15:40,846 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000024_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000024
+2017-03-11 04:15:40,846 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,847 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000024_0' done.
+2017-03-11 04:15:40,847 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000024_0
+2017-03-11 04:15:40,847 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000025_0
+2017-03-11 04:15:40,850 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,852 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,853 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7561f05
+2017-03-11 04:15:40,857 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,863 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000025_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,869 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#26 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,870 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,870 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,870 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,871 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,871 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,872 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,872 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,873 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,873 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,873 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,873 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,878 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,879 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,897 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000025_0 is done. And is in the process of committing
+2017-03-11 04:15:40,902 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,902 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000025_0 is allowed to commit now
+2017-03-11 04:15:40,903 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000025_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000025
+2017-03-11 04:15:40,904 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,904 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000025_0' done.
+2017-03-11 04:15:40,904 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000025_0
+2017-03-11 04:15:40,904 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000026_0
+2017-03-11 04:15:40,905 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,906 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,906 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@545e2452
+2017-03-11 04:15:40,908 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,915 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000026_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,920 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#27 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,920 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,920 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,921 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,921 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,921 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,922 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,924 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,925 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,925 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,925 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,925 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,930 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,930 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,945 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000026_0 is done. And is in the process of committing
+2017-03-11 04:15:40,946 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,947 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000026_0 is allowed to commit now
+2017-03-11 04:15:40,950 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000026_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000026
+2017-03-11 04:15:40,957 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:40,958 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000026_0' done.
+2017-03-11 04:15:40,961 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000026_0
+2017-03-11 04:15:40,962 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000027_0
+2017-03-11 04:15:40,964 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:40,965 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:40,965 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3e6abe5a
+2017-03-11 04:15:40,967 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:40,971 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000027_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:40,982 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#28 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:40,982 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:40,982 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:40,983 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:40,983 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,984 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:40,984 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,985 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,985 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:40,986 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:40,986 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:40,986 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:40,986 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:40,986 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:40,994 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000027_0 is done. And is in the process of committing
+2017-03-11 04:15:41,005 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,005 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000027_0 is allowed to commit now
+2017-03-11 04:15:41,006 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000027_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000027
+2017-03-11 04:15:41,012 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,012 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000027_0' done.
+2017-03-11 04:15:41,012 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000027_0
+2017-03-11 04:15:41,012 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000028_0
+2017-03-11 04:15:41,013 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,014 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,015 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@55662d9a
+2017-03-11 04:15:41,015 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,016 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000028_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,018 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#29 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,019 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,019 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,020 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,021 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,021 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,023 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,024 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,024 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,032 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,032 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,033 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,033 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,034 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,041 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000028_0 is done. And is in the process of committing
+2017-03-11 04:15:41,042 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,045 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000028_0 is allowed to commit now
+2017-03-11 04:15:41,046 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000028_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000028
+2017-03-11 04:15:41,047 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,047 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000028_0' done.
+2017-03-11 04:15:41,047 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000028_0
+2017-03-11 04:15:41,047 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000029_0
+2017-03-11 04:15:41,048 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,048 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,048 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2fb3260
+2017-03-11 04:15:41,049 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,049 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000029_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,061 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#30 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,061 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,062 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,062 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,064 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,064 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,065 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,065 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,065 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,066 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,066 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,066 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,066 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,066 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,071 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000029_0 is done. And is in the process of committing
+2017-03-11 04:15:41,072 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,072 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000029_0 is allowed to commit now
+2017-03-11 04:15:41,072 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000029_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000029
+2017-03-11 04:15:41,074 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,074 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000029_0' done.
+2017-03-11 04:15:41,074 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000029_0
+2017-03-11 04:15:41,074 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000030_0
+2017-03-11 04:15:41,075 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,075 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,075 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1a6a1f5b
+2017-03-11 04:15:41,076 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,076 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000030_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,077 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#31 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,078 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,078 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,078 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,079 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,079 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,080 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,080 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,081 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,081 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,081 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,081 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,081 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,082 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,086 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000030_0 is done. And is in the process of committing
+2017-03-11 04:15:41,087 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,087 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000030_0 is allowed to commit now
+2017-03-11 04:15:41,088 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000030_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000030
+2017-03-11 04:15:41,088 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,089 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000030_0' done.
+2017-03-11 04:15:41,089 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000030_0
+2017-03-11 04:15:41,089 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000031_0
+2017-03-11 04:15:41,089 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,090 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,090 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4f53314
+2017-03-11 04:15:41,091 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,093 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000031_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,099 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#32 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,099 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,099 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,102 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,103 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,103 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,110 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,110 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,111 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,111 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,111 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,111 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,112 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,112 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,118 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000031_0 is done. And is in the process of committing
+2017-03-11 04:15:41,119 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,119 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000031_0 is allowed to commit now
+2017-03-11 04:15:41,120 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000031_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000031
+2017-03-11 04:15:41,124 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,125 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000031_0' done.
+2017-03-11 04:15:41,125 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000031_0
+2017-03-11 04:15:41,125 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000032_0
+2017-03-11 04:15:41,126 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,127 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,127 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@57e13166
+2017-03-11 04:15:41,127 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,129 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000032_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,130 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#33 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,131 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,131 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,131 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,132 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,132 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,132 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,133 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,133 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,134 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,134 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,134 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,134 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,135 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,139 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000032_0 is done. And is in the process of committing
+2017-03-11 04:15:41,140 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,141 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000032_0 is allowed to commit now
+2017-03-11 04:15:41,143 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000032_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000032
+2017-03-11 04:15:41,145 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,145 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000032_0' done.
+2017-03-11 04:15:41,145 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000032_0
+2017-03-11 04:15:41,145 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000033_0
+2017-03-11 04:15:41,147 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,147 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,148 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@66a62053
+2017-03-11 04:15:41,149 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,150 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000033_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,151 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#34 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,151 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,151 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,151 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,152 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,152 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,154 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,155 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,155 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,156 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,156 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,156 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,156 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,157 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,162 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000033_0 is done. And is in the process of committing
+2017-03-11 04:15:41,163 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,163 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000033_0 is allowed to commit now
+2017-03-11 04:15:41,164 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000033_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000033
+2017-03-11 04:15:41,165 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,165 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000033_0' done.
+2017-03-11 04:15:41,165 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000033_0
+2017-03-11 04:15:41,166 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000034_0
+2017-03-11 04:15:41,166 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,167 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,167 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6dc6dc2b
+2017-03-11 04:15:41,167 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,168 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000034_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,171 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#35 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,171 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,171 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,172 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,172 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,172 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,173 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,179 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,180 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,180 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,180 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,180 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,180 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,182 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,191 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000034_0 is done. And is in the process of committing
+2017-03-11 04:15:41,192 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,192 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000034_0 is allowed to commit now
+2017-03-11 04:15:41,193 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000034_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000034
+2017-03-11 04:15:41,194 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,194 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000034_0' done.
+2017-03-11 04:15:41,194 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000034_0
+2017-03-11 04:15:41,194 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000035_0
+2017-03-11 04:15:41,195 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,196 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,196 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@347e12a
+2017-03-11 04:15:41,196 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,197 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000035_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,198 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#36 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,199 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,199 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,199 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,200 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,200 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,201 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,201 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,201 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,201 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,201 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,202 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,202 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,203 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,209 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000035_0 is done. And is in the process of committing
+2017-03-11 04:15:41,210 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,210 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000035_0 is allowed to commit now
+2017-03-11 04:15:41,211 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000035_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000035
+2017-03-11 04:15:41,219 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,220 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000035_0' done.
+2017-03-11 04:15:41,220 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000035_0
+2017-03-11 04:15:41,220 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000036_0
+2017-03-11 04:15:41,228 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,228 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,228 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4edb440a
+2017-03-11 04:15:41,240 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,242 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000036_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,247 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#37 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,252 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,252 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,252 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,253 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,253 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,254 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,254 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,255 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,256 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,256 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,256 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,256 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,257 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,272 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000036_0 is done. And is in the process of committing
+2017-03-11 04:15:41,273 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,273 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000036_0 is allowed to commit now
+2017-03-11 04:15:41,276 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000036_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000036
+2017-03-11 04:15:41,277 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,278 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000036_0' done.
+2017-03-11 04:15:41,278 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000036_0
+2017-03-11 04:15:41,278 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000037_0
+2017-03-11 04:15:41,282 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,285 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,286 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6349766d
+2017-03-11 04:15:41,290 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,294 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000037_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,300 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#38 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,300 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,300 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,301 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,301 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,301 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,302 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,302 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,303 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,303 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,303 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,303 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,303 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,304 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,314 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000037_0 is done. And is in the process of committing
+2017-03-11 04:15:41,315 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,315 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000037_0 is allowed to commit now
+2017-03-11 04:15:41,316 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000037_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000037
+2017-03-11 04:15:41,322 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,322 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000037_0' done.
+2017-03-11 04:15:41,322 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000037_0
+2017-03-11 04:15:41,323 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000038_0
+2017-03-11 04:15:41,326 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,327 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,327 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@d62d2df
+2017-03-11 04:15:41,334 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,335 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000038_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,337 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#39 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,337 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,337 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,337 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,338 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,338 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,340 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,340 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,341 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,341 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,341 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,342 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,342 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,343 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,348 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000038_0 is done. And is in the process of committing
+2017-03-11 04:15:41,348 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,349 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000038_0 is allowed to commit now
+2017-03-11 04:15:41,349 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000038_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000038
+2017-03-11 04:15:41,350 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,350 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000038_0' done.
+2017-03-11 04:15:41,350 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000038_0
+2017-03-11 04:15:41,350 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000039_0
+2017-03-11 04:15:41,351 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,352 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,352 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@eb9012c
+2017-03-11 04:15:41,353 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,354 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000039_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,355 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#40 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,356 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,356 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,359 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,360 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,360 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,362 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,363 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,364 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,365 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,365 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,365 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,366 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,366 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,370 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000039_0 is done. And is in the process of committing
+2017-03-11 04:15:41,371 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,371 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000039_0 is allowed to commit now
+2017-03-11 04:15:41,371 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000039_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000039
+2017-03-11 04:15:41,372 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,372 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000039_0' done.
+2017-03-11 04:15:41,372 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000039_0
+2017-03-11 04:15:41,372 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000040_0
+2017-03-11 04:15:41,373 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,376 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,377 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1b0243a2
+2017-03-11 04:15:41,379 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,380 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000040_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,381 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#41 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,381 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,381 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,382 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,382 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,382 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,383 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,384 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,384 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,384 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,385 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,385 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,385 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,385 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,391 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000040_0 is done. And is in the process of committing
+2017-03-11 04:15:41,396 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,397 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000040_0 is allowed to commit now
+2017-03-11 04:15:41,397 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000040_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000040
+2017-03-11 04:15:41,398 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,399 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000040_0' done.
+2017-03-11 04:15:41,399 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000040_0
+2017-03-11 04:15:41,399 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000041_0
+2017-03-11 04:15:41,399 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,400 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,400 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1590cd
+2017-03-11 04:15:41,400 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,405 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000041_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,411 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#42 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,412 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,412 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,413 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,413 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,413 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,414 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,414 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,415 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,415 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,415 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,415 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,416 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,416 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,427 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000041_0 is done. And is in the process of committing
+2017-03-11 04:15:41,429 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,429 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000041_0 is allowed to commit now
+2017-03-11 04:15:41,436 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000041_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000041
+2017-03-11 04:15:41,437 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,437 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000041_0' done.
+2017-03-11 04:15:41,437 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000041_0
+2017-03-11 04:15:41,437 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000042_0
+2017-03-11 04:15:41,438 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,438 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,441 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4fed51d
+2017-03-11 04:15:41,449 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,459 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000042_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,463 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#43 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,463 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,467 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,468 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,468 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,468 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,469 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,469 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,471 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,471 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,471 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,471 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,472 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,472 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,485 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000042_0 is done. And is in the process of committing
+2017-03-11 04:15:41,486 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,486 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000042_0 is allowed to commit now
+2017-03-11 04:15:41,490 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000042_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000042
+2017-03-11 04:15:41,490 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,490 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000042_0' done.
+2017-03-11 04:15:41,490 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000042_0
+2017-03-11 04:15:41,491 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000043_0
+2017-03-11 04:15:41,492 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,493 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,493 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@76fae43c
+2017-03-11 04:15:41,508 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,509 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000043_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,510 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#44 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,510 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,511 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,511 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,511 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,511 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,512 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,518 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,519 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,519 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,519 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,519 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,519 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,520 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,544 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000043_0 is done. And is in the process of committing
+2017-03-11 04:15:41,546 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,550 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000043_0 is allowed to commit now
+2017-03-11 04:15:41,551 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000043_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000043
+2017-03-11 04:15:41,551 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,553 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000043_0' done.
+2017-03-11 04:15:41,554 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000043_0
+2017-03-11 04:15:41,554 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000044_0
+2017-03-11 04:15:41,560 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,560 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,561 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1f9654c3
+2017-03-11 04:15:41,563 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,570 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000044_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,573 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#45 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,573 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,573 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,574 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,575 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,575 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,575 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,576 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,576 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,576 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,576 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,576 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,577 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,577 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,582 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000044_0 is done. And is in the process of committing
+2017-03-11 04:15:41,583 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,583 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000044_0 is allowed to commit now
+2017-03-11 04:15:41,584 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000044_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000044
+2017-03-11 04:15:41,587 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,587 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000044_0' done.
+2017-03-11 04:15:41,587 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000044_0
+2017-03-11 04:15:41,587 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000045_0
+2017-03-11 04:15:41,588 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,588 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,588 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4a1daaa
+2017-03-11 04:15:41,589 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,590 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000045_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,591 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#46 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,591 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,591 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,591 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,592 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,592 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,593 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,593 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,594 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,594 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,594 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,594 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,594 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,595 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,598 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000045_0 is done. And is in the process of committing
+2017-03-11 04:15:41,598 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,599 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000045_0 is allowed to commit now
+2017-03-11 04:15:41,599 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000045_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000045
+2017-03-11 04:15:41,600 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,600 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000045_0' done.
+2017-03-11 04:15:41,600 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000045_0
+2017-03-11 04:15:41,600 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000046_0
+2017-03-11 04:15:41,600 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,601 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,601 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@674612ad
+2017-03-11 04:15:41,602 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,603 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000046_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,604 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#47 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,604 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,605 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,605 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,605 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,605 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,606 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,606 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,607 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,607 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,607 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,607 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,607 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,608 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,610 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000046_0 is done. And is in the process of committing
+2017-03-11 04:15:41,611 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,611 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000046_0 is allowed to commit now
+2017-03-11 04:15:41,612 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000046_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000046
+2017-03-11 04:15:41,613 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,613 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000046_0' done.
+2017-03-11 04:15:41,613 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000046_0
+2017-03-11 04:15:41,613 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000047_0
+2017-03-11 04:15:41,620 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,620 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,621 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@46bf27
+2017-03-11 04:15:41,621 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,622 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000047_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,623 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#48 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,624 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,624 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,624 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,625 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,625 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,626 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,626 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,627 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,627 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,628 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,628 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,628 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,628 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,633 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000047_0 is done. And is in the process of committing
+2017-03-11 04:15:41,633 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,633 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000047_0 is allowed to commit now
+2017-03-11 04:15:41,635 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000047_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000047
+2017-03-11 04:15:41,635 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,635 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000047_0' done.
+2017-03-11 04:15:41,635 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000047_0
+2017-03-11 04:15:41,635 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000048_0
+2017-03-11 04:15:41,636 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,636 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,636 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3b88bc85
+2017-03-11 04:15:41,637 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,638 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000048_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,639 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#49 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,639 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,639 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,639 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,640 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,640 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,643 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,644 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,645 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,646 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,646 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,646 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,646 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,647 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,649 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000048_0 is done. And is in the process of committing
+2017-03-11 04:15:41,650 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,650 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000048_0 is allowed to commit now
+2017-03-11 04:15:41,650 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000048_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000048
+2017-03-11 04:15:41,653 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,653 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000048_0' done.
+2017-03-11 04:15:41,653 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000048_0
+2017-03-11 04:15:41,653 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local398888533_0001_r_000049_0
+2017-03-11 04:15:41,654 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:15:41,654 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:15:41,654 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4876e144
+2017-03-11 04:15:41,655 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:15:41,655 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local398888533_0001_r_000049_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:15:41,656 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#50 about to shuffle output of map attempt_local398888533_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 04:15:41,657 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local398888533_0001_m_000000_0
+2017-03-11 04:15:41,657 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 04:15:41,657 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:15:41,658 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,658 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:15:41,663 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,665 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,666 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:15:41,666 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 04:15:41,666 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:15:41,666 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:15:41,666 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 04:15:41,667 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,675 INFO org.apache.hadoop.mapred.Task: Task:attempt_local398888533_0001_r_000049_0 is done. And is in the process of committing
+2017-03-11 04:15:41,676 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:15:41,676 INFO org.apache.hadoop.mapred.Task: Task attempt_local398888533_0001_r_000049_0 is allowed to commit now
+2017-03-11 04:15:41,677 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local398888533_0001_r_000049_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local398888533_0001_r_000049
+2017-03-11 04:15:41,678 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:15:41,678 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local398888533_0001_r_000049_0' done.
+2017-03-11 04:15:41,678 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local398888533_0001_r_000049_0
+2017-03-11 04:15:41,678 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 04:15:42,684 INFO org.apache.hadoop.mapreduce.Job: Job job_local398888533_0001 completed successfully
+2017-03-11 04:15:42,802 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
 	File System Counters
-		FILE: Number of bytes read=11514350
-		FILE: Number of bytes written=1395729
+		FILE: Number of bytes read=2535091
+		FILE: Number of bytes written=13621454
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=1023494
-		Map output bytes=8925696
-		Map output materialized bytes=167092
-		Input split bytes=120
-		Combine input records=1023494
-		Combine output records=34513
-		Reduce input groups=34513
-		Reduce shuffle bytes=167092
-		Reduce input records=34513
-		Reduce output records=34513
-		Spilled Records=69026
-		Shuffled Maps =1
+		Map input records=7
+		Map output records=63
+		Map output bytes=4977
+		Map output materialized bytes=1146
+		Input split bytes=122
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=63
+		Reduce shuffle bytes=1146
+		Reduce input records=63
+		Reduce output records=0
+		Spilled Records=126
+		Shuffled Maps =50
 		Failed Shuffles=0
-		Merged Map outputs=1
-		GC time elapsed (ms)=109
+		Merged Map outputs=50
+		GC time elapsed (ms)=44
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
-		Total committed heap usage (bytes)=331227136
+		Total committed heap usage (bytes)=8446291968
 	Shuffle Errors
 		BAD_ID=0
 		CONNECTION=0
@@ -105,104 +1797,102 @@
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=454
 	File Output Format Counters 
-		Bytes Written=363879
-2017-03-10 14:05:48,287 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:05:48,833 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:05:48,841 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:05:49,279 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:05:49,288 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:05:49,331 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:05:49,610 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1590990832_0001
-2017-03-10 14:05:50,040 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:05:50,042 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1590990832_0001
-2017-03-10 14:05:50,046 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:05:50,059 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:05:50,067 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:05:50,190 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:05:50,192 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1590990832_0001_m_000000_0
-2017-03-10 14:05:50,229 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:05:50,244 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:05:50,247 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:05:50,344 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:05:51,047 INFO org.apache.hadoop.mapreduce.Job: Job job_local1590990832_0001 running in uber mode : false
-2017-03-10 14:05:51,050 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
-2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
-2017-03-10 14:05:53,471 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:05:54,161 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:05:54,164 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1590990832_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:05:54,170 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:05:54,170 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1590990832_0001_m_000000_0' done.
-2017-03-10 14:05:54,170 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1590990832_0001_m_000000_0
-2017-03-10 14:05:54,170 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:05:54,178 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:05:54,178 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1590990832_0001_r_000000_0
-2017-03-10 14:05:54,183 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:05:54,183 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:05:54,185 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
-2017-03-10 14:05:54,198 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:05:54,206 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1590990832_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:05:54,277 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:05:54,277 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1590990832_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
-2017-03-10 14:05:54,297 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1590990832_0001_m_000000_0
-2017-03-10 14:05:54,298 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
-2017-03-10 14:05:54,302 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:05:54,305 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:05:54,305 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:05:54,315 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:05:54,319 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:05:54,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:05:54,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
-2017-03-10 14:05:54,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:05:54,703 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:05:54,704 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:05:54,704 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:05:54,714 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:05:55,077 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:05:55,853 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1590990832_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:05:55,858 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:05:55,859 INFO org.apache.hadoop.mapred.Task: Task attempt_local1590990832_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:05:55,859 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1590990832_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1590990832_0001_r_000000
-2017-03-10 14:05:55,861 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:05:55,861 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1590990832_0001_r_000000_0' done.
-2017-03-10 14:05:55,861 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1590990832_0001_r_000000_0
-2017-03-10 14:05:55,861 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:05:56,079 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:05:56,080 INFO org.apache.hadoop.mapreduce.Job: Job job_local1590990832_0001 completed successfully
-2017-03-10 14:05:56,090 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+		Bytes Written=400
+2017-03-11 04:16:54,623 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:16:55,099 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:16:55,113 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:16:55,444 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:16:55,452 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:16:55,485 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:16:55,706 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1882890440_0001
+2017-03-11 04:16:56,148 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:16:56,149 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1882890440_0001
+2017-03-11 04:16:56,162 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:16:56,170 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:16:56,172 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:16:56,300 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:16:56,300 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1882890440_0001_m_000000_0
+2017-03-11 04:16:56,341 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:16:56,354 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:16:56,358 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 04:16:56,445 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:16:56,445 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:16:56,445 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:16:56,445 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:16:56,445 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:16:56,448 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:16:56,479 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 04:16:56,479 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:16:56,479 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:16:56,479 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4977; bufvoid = 104857600
+2017-03-11 04:16:56,479 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214148(104856592); length = 249/6553600
+2017-03-11 04:16:56,490 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:16:56,496 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 04:16:56,501 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1882890440_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 04:16:56,510 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-11 04:16:56,511 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1882890440_0001_m_000000_0' done.
+2017-03-11 04:16:56,511 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1882890440_0001_m_000000_0
+2017-03-11 04:16:56,511 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:16:56,514 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 04:16:56,514 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1882890440_0001_r_000000_0
+2017-03-11 04:16:56,523 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:16:56,523 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:16:56,525 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@644fdf4b
+2017-03-11 04:16:56,536 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:16:56,542 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1882890440_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:16:56,566 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 04:16:56,570 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1882890440_0001_m_000000_0 decomp: 5105 len: 460 to MEMORY
+2017-03-11 04:16:56,575 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5105 bytes from map-output for attempt_local1882890440_0001_m_000000_0
+2017-03-11 04:16:56,576 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5105, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5105
+2017-03-11 04:16:56,577 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:16:56,578 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:16:56,578 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:16:56,587 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:16:56,588 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:16:56,590 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5105 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:16:56,591 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 468 bytes from disk
+2017-03-11 04:16:56,591 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:16:56,591 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:16:56,592 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:16:56,597 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:16:56,609 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 04:16:56,615 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1882890440_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 04:16:56,617 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:16:56,617 INFO org.apache.hadoop.mapred.Task: Task attempt_local1882890440_0001_r_000000_0 is allowed to commit now
+2017-03-11 04:16:56,618 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1882890440_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1882890440_0001_r_000000
+2017-03-11 04:16:56,618 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:16:56,618 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1882890440_0001_r_000000_0' done.
+2017-03-11 04:16:56,619 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1882890440_0001_r_000000_0
+2017-03-11 04:16:56,619 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 04:16:57,150 INFO org.apache.hadoop.mapreduce.Job: Job job_local1882890440_0001 running in uber mode : false
+2017-03-11 04:16:57,151 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 04:16:57,152 INFO org.apache.hadoop.mapreduce.Job: Job job_local1882890440_0001 completed successfully
+2017-03-11 04:16:57,163 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
 	File System Counters
-		FILE: Number of bytes read=11514350
-		FILE: Number of bytes written=1395729
+		FILE: Number of bytes read=2226
+		FILE: Number of bytes written=531632
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=1023494
-		Map output bytes=8925696
-		Map output materialized bytes=167092
-		Input split bytes=120
-		Combine input records=1023494
-		Combine output records=34513
-		Reduce input groups=34513
-		Reduce shuffle bytes=167092
-		Reduce input records=34513
-		Reduce output records=34513
-		Spilled Records=69026
+		Map input records=7
+		Map output records=63
+		Map output bytes=4977
+		Map output materialized bytes=460
+		Input split bytes=122
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=63
+		Reduce shuffle bytes=460
+		Reduce input records=63
+		Reduce output records=0
+		Spilled Records=126
 		Shuffled Maps =1
 		Failed Shuffles=0
 		Merged Map outputs=1
-		GC time elapsed (ms)=80
+		GC time elapsed (ms)=28
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
@@ -215,104 +1905,106 @@
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=454
 	File Output Format Counters 
-		Bytes Written=363879
-2017-03-10 14:07:44,622 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:07:45,122 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:07:45,129 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:07:45,628 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:07:45,645 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:07:45,678 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:07:45,909 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1520504035_0001
-2017-03-10 14:07:46,336 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:07:46,338 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1520504035_0001
-2017-03-10 14:07:46,337 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:07:46,344 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:07:46,346 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:07:46,457 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:07:46,460 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1520504035_0001_m_000000_0
-2017-03-10 14:07:46,523 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:07:46,542 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:07:46,545 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:07:46,634 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:07:46,634 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:07:46,634 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:07:46,634 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:07:46,635 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:07:46,639 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:07:47,343 INFO org.apache.hadoop.mapreduce.Job: Job job_local1520504035_0001 running in uber mode : false
-2017-03-10 14:07:47,344 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
-2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
-2017-03-10 14:07:49,778 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:07:50,507 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:07:50,510 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1520504035_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:07:50,516 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:07:50,516 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1520504035_0001_m_000000_0' done.
-2017-03-10 14:07:50,516 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1520504035_0001_m_000000_0
-2017-03-10 14:07:50,516 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:07:50,523 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:07:50,523 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1520504035_0001_r_000000_0
-2017-03-10 14:07:50,529 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:07:50,529 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:07:50,531 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
-2017-03-10 14:07:50,541 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:07:50,548 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1520504035_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:07:50,573 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:07:50,574 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1520504035_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
-2017-03-10 14:07:50,580 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1520504035_0001_m_000000_0
-2017-03-10 14:07:50,581 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
-2017-03-10 14:07:50,582 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:07:50,582 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:07:50,582 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:07:50,587 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:07:50,588 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:07:50,811 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:07:50,811 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
-2017-03-10 14:07:50,812 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:07:50,812 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:07:50,813 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:07:50,813 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:07:50,818 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:07:51,355 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:07:51,511 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1520504035_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:07:51,512 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:07:51,515 INFO org.apache.hadoop.mapred.Task: Task attempt_local1520504035_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:07:51,516 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1520504035_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1520504035_0001_r_000000
-2017-03-10 14:07:51,516 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:07:51,518 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1520504035_0001_r_000000_0' done.
-2017-03-10 14:07:51,518 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1520504035_0001_r_000000_0
-2017-03-10 14:07:51,518 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:07:52,356 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:07:52,356 INFO org.apache.hadoop.mapreduce.Job: Job job_local1520504035_0001 completed successfully
-2017-03-10 14:07:52,367 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+		Bytes Written=8
+2017-03-11 04:18:08,388 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:18:08,880 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:18:08,895 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:18:08,921 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
+2017-03-11 04:18:26,782 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:18:27,289 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:18:27,292 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:18:27,758 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:18:27,784 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:18:27,882 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:18:28,141 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local596953480_0001
+2017-03-11 04:18:28,593 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:18:28,594 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local596953480_0001
+2017-03-11 04:18:28,598 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:18:28,602 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:18:28,606 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:18:28,736 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:18:28,737 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local596953480_0001_m_000000_0
+2017-03-11 04:18:28,769 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:18:28,778 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:18:28,781 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 04:18:28,861 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:18:28,861 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:18:28,861 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:18:28,861 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:18:28,861 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:18:28,866 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:18:28,901 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 04:18:28,902 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:18:28,902 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:18:28,902 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4977; bufvoid = 104857600
+2017-03-11 04:18:28,902 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214148(104856592); length = 249/6553600
+2017-03-11 04:18:28,912 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:18:28,918 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 04:18:28,920 INFO org.apache.hadoop.mapred.Task: Task:attempt_local596953480_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 04:18:28,926 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-11 04:18:28,926 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local596953480_0001_m_000000_0' done.
+2017-03-11 04:18:28,926 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local596953480_0001_m_000000_0
+2017-03-11 04:18:28,926 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:18:28,929 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 04:18:28,930 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local596953480_0001_r_000000_0
+2017-03-11 04:18:28,934 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:18:28,934 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:18:28,936 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@644fdf4b
+2017-03-11 04:18:28,946 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:18:28,952 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local596953480_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:18:28,979 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 04:18:28,980 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local596953480_0001_m_000000_0 decomp: 5105 len: 460 to MEMORY
+2017-03-11 04:18:28,985 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5105 bytes from map-output for attempt_local596953480_0001_m_000000_0
+2017-03-11 04:18:28,985 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5105, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5105
+2017-03-11 04:18:28,986 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:18:28,988 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:18:28,988 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:18:28,997 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:18:28,997 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:18:28,999 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5105 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:18:28,999 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 468 bytes from disk
+2017-03-11 04:18:29,000 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:18:29,000 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:18:29,000 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:18:29,007 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:18:29,015 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 04:18:29,029 INFO org.apache.hadoop.mapred.Task: Task:attempt_local596953480_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 04:18:29,033 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:18:29,034 INFO org.apache.hadoop.mapred.Task: Task attempt_local596953480_0001_r_000000_0 is allowed to commit now
+2017-03-11 04:18:29,034 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local596953480_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local596953480_0001_r_000000
+2017-03-11 04:18:29,035 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:18:29,035 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local596953480_0001_r_000000_0' done.
+2017-03-11 04:18:29,035 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local596953480_0001_r_000000_0
+2017-03-11 04:18:29,035 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 04:18:29,595 INFO org.apache.hadoop.mapreduce.Job: Job job_local596953480_0001 running in uber mode : false
+2017-03-11 04:18:29,597 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 04:18:29,599 INFO org.apache.hadoop.mapreduce.Job: Job job_local596953480_0001 completed successfully
+2017-03-11 04:18:29,610 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
 	File System Counters
-		FILE: Number of bytes read=11514350
-		FILE: Number of bytes written=1397073
+		FILE: Number of bytes read=2226
+		FILE: Number of bytes written=528828
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=1023494
-		Map output bytes=8925696
-		Map output materialized bytes=167092
-		Input split bytes=120
-		Combine input records=1023494
-		Combine output records=34513
-		Reduce input groups=34513
-		Reduce shuffle bytes=167092
-		Reduce input records=34513
-		Reduce output records=34513
-		Spilled Records=69026
+		Map input records=7
+		Map output records=63
+		Map output bytes=4977
+		Map output materialized bytes=460
+		Input split bytes=122
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=63
+		Reduce shuffle bytes=460
+		Reduce input records=63
+		Reduce output records=0
+		Spilled Records=126
 		Shuffled Maps =1
 		Failed Shuffles=0
 		Merged Map outputs=1
-		GC time elapsed (ms)=86
+		GC time elapsed (ms)=26
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
@@ -325,108 +2017,102 @@
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=454
 	File Output Format Counters 
-		Bytes Written=363879
-2017-03-10 14:08:46,208 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:08:46,725 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:08:46,726 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:08:46,791 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
-2017-03-10 14:09:00,496 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:09:00,991 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:09:00,992 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:09:01,486 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:09:01,504 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:09:01,622 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:09:01,930 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1768387477_0001
-2017-03-10 14:09:02,340 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:09:02,341 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1768387477_0001
-2017-03-10 14:09:02,345 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:09:02,348 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:09:02,366 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:09:02,467 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:09:02,468 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1768387477_0001_m_000000_0
-2017-03-10 14:09:02,532 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:09:02,561 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:09:02,564 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:09:02,644 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:09:02,645 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:09:02,645 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:09:02,645 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:09:02,645 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:09:02,652 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:09:03,343 INFO org.apache.hadoop.mapreduce.Job: Job job_local1768387477_0001 running in uber mode : false
-2017-03-10 14:09:03,344 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:09:04,790 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:09:04,792 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:09:04,792 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:09:04,792 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
-2017-03-10 14:09:04,792 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
-2017-03-10 14:09:05,819 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:09:06,544 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:09:06,546 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1768387477_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:09:06,552 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:09:06,552 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1768387477_0001_m_000000_0' done.
-2017-03-10 14:09:06,552 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1768387477_0001_m_000000_0
-2017-03-10 14:09:06,552 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:09:06,560 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:09:06,561 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1768387477_0001_r_000000_0
-2017-03-10 14:09:06,569 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:09:06,570 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:09:06,572 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
-2017-03-10 14:09:06,582 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:09:06,588 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1768387477_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:09:06,616 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:09:06,617 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1768387477_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
-2017-03-10 14:09:06,626 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1768387477_0001_m_000000_0
-2017-03-10 14:09:06,627 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
-2017-03-10 14:09:06,628 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:09:06,629 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:09:06,629 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:09:06,636 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:09:06,636 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:09:06,896 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:09:06,896 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
-2017-03-10 14:09:06,897 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:09:06,897 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:09:06,898 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:09:06,898 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:09:06,903 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:09:07,352 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:09:07,776 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1768387477_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:09:07,792 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:09:07,792 INFO org.apache.hadoop.mapred.Task: Task attempt_local1768387477_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:09:07,792 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1768387477_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1768387477_0001_r_000000
-2017-03-10 14:09:07,793 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:09:07,793 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1768387477_0001_r_000000_0' done.
-2017-03-10 14:09:07,793 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1768387477_0001_r_000000_0
-2017-03-10 14:09:07,793 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:09:08,353 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:09:08,354 INFO org.apache.hadoop.mapreduce.Job: Job job_local1768387477_0001 completed successfully
-2017-03-10 14:09:08,363 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+		Bytes Written=8
+2017-03-11 04:19:23,256 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:19:23,762 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:19:23,769 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:19:24,095 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:19:24,103 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:19:24,140 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:19:24,352 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local469373854_0001
+2017-03-11 04:19:24,810 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:19:24,811 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local469373854_0001
+2017-03-11 04:19:24,826 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:19:24,830 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:19:24,849 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:19:24,958 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:19:24,961 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local469373854_0001_m_000000_0
+2017-03-11 04:19:24,995 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:19:25,004 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:19:25,009 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 04:19:25,100 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:19:25,100 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:19:25,101 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:19:25,101 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:19:25,101 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:19:25,104 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:19:25,137 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 04:19:25,137 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:19:25,137 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:19:25,137 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4977; bufvoid = 104857600
+2017-03-11 04:19:25,137 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214148(104856592); length = 249/6553600
+2017-03-11 04:19:25,149 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:19:25,159 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 04:19:25,163 INFO org.apache.hadoop.mapred.Task: Task:attempt_local469373854_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 04:19:25,170 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-11 04:19:25,170 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local469373854_0001_m_000000_0' done.
+2017-03-11 04:19:25,170 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local469373854_0001_m_000000_0
+2017-03-11 04:19:25,171 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:19:25,174 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 04:19:25,174 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local469373854_0001_r_000000_0
+2017-03-11 04:19:25,184 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:19:25,184 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:19:25,186 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@644fdf4b
+2017-03-11 04:19:25,196 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:19:25,202 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local469373854_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:19:25,230 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 04:19:25,230 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local469373854_0001_m_000000_0 decomp: 5105 len: 460 to MEMORY
+2017-03-11 04:19:25,234 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5105 bytes from map-output for attempt_local469373854_0001_m_000000_0
+2017-03-11 04:19:25,234 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5105, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5105
+2017-03-11 04:19:25,235 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:19:25,235 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:19:25,236 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:19:25,241 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:19:25,243 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:19:25,247 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5105 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:19:25,247 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 468 bytes from disk
+2017-03-11 04:19:25,248 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:19:25,248 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:19:25,248 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:19:25,259 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:19:25,263 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 04:19:25,276 INFO org.apache.hadoop.mapred.Task: Task:attempt_local469373854_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 04:19:25,278 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:19:25,279 INFO org.apache.hadoop.mapred.Task: Task attempt_local469373854_0001_r_000000_0 is allowed to commit now
+2017-03-11 04:19:25,279 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local469373854_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local469373854_0001_r_000000
+2017-03-11 04:19:25,280 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:19:25,280 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local469373854_0001_r_000000_0' done.
+2017-03-11 04:19:25,285 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local469373854_0001_r_000000_0
+2017-03-11 04:19:25,285 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 04:19:25,813 INFO org.apache.hadoop.mapreduce.Job: Job job_local469373854_0001 running in uber mode : false
+2017-03-11 04:19:25,814 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 04:19:25,815 INFO org.apache.hadoop.mapreduce.Job: Job job_local469373854_0001 completed successfully
+2017-03-11 04:19:25,828 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
 	File System Counters
-		FILE: Number of bytes read=11514350
-		FILE: Number of bytes written=1397073
+		FILE: Number of bytes read=2226
+		FILE: Number of bytes written=528828
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=1023494
-		Map output bytes=8925696
-		Map output materialized bytes=167092
-		Input split bytes=120
-		Combine input records=1023494
-		Combine output records=34513
-		Reduce input groups=34513
-		Reduce shuffle bytes=167092
-		Reduce input records=34513
-		Reduce output records=34513
-		Spilled Records=69026
+		Map input records=7
+		Map output records=63
+		Map output bytes=4977
+		Map output materialized bytes=460
+		Input split bytes=122
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=63
+		Reduce shuffle bytes=460
+		Reduce input records=63
+		Reduce output records=0
+		Spilled Records=126
 		Shuffled Maps =1
 		Failed Shuffles=0
 		Merged Map outputs=1
-		GC time elapsed (ms)=76
+		GC time elapsed (ms)=23
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
@@ -439,108 +2125,115 @@
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=454
 	File Output Format Counters 
-		Bytes Written=363879
-2017-03-10 14:10:49,958 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:10:50,420 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:10:50,423 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:10:50,893 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:10:50,902 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:10:50,935 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:10:51,165 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local321623198_0001
-2017-03-10 14:10:51,576 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:10:51,577 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local321623198_0001
-2017-03-10 14:10:51,586 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:10:51,596 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:10:51,607 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:10:51,726 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:10:51,728 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local321623198_0001_m_000000_0
-2017-03-10 14:10:51,786 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:10:51,801 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:10:51,805 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:10:51,887 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:10:51,887 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:10:51,887 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:10:51,888 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:10:51,888 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:10:51,891 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:10:52,584 INFO org.apache.hadoop.mapreduce.Job: Job job_local321623198_0001 running in uber mode : false
-2017-03-10 14:10:52,587 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:10:54,371 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:10:54,373 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:10:54,373 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:10:54,374 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
-2017-03-10 14:10:54,374 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
-2017-03-10 14:10:55,678 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:10:56,656 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:10:56,659 INFO org.apache.hadoop.mapred.Task: Task:attempt_local321623198_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:10:56,666 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:10:56,666 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local321623198_0001_m_000000_0' done.
-2017-03-10 14:10:56,666 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local321623198_0001_m_000000_0
-2017-03-10 14:10:56,667 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:10:56,676 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:10:56,676 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local321623198_0001_r_000000_0
-2017-03-10 14:10:56,684 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:10:56,685 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:10:56,689 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@405d65c3
-2017-03-10 14:10:56,704 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:10:56,710 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local321623198_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:10:56,742 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:10:56,742 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local321623198_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
-2017-03-10 14:10:56,749 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local321623198_0001_m_000000_0
-2017-03-10 14:10:56,749 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
-2017-03-10 14:10:56,750 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:10:56,752 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:10:56,752 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:10:56,756 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:10:56,757 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:10:57,102 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:10:57,102 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
-2017-03-10 14:10:57,103 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:10:57,103 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:10:57,104 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:10:57,105 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:10:57,109 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:10:57,598 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:10:57,797 INFO org.apache.hadoop.mapred.Task: Task:attempt_local321623198_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:10:57,801 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:10:57,802 INFO org.apache.hadoop.mapred.Task: Task attempt_local321623198_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:10:57,802 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local321623198_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local321623198_0001_r_000000
-2017-03-10 14:10:57,803 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:10:57,803 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local321623198_0001_r_000000_0' done.
-2017-03-10 14:10:57,803 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local321623198_0001_r_000000_0
-2017-03-10 14:10:57,803 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:10:58,598 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:10:58,599 INFO org.apache.hadoop.mapreduce.Job: Job job_local321623198_0001 completed successfully
-2017-03-10 14:10:58,612 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+		Bytes Written=8
+2017-03-11 04:31:48,025 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:31:48,620 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:31:48,621 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:31:49,123 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:31:49,135 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:31:49,274 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:31:49,726 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local372885088_0001
+2017-03-11 04:31:50,486 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:31:50,487 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local372885088_0001
+2017-03-11 04:31:50,494 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:31:50,506 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:31:50,523 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:31:50,653 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:31:50,654 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local372885088_0001_m_000000_0
+2017-03-11 04:31:50,721 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:31:50,734 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:31:50,738 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 04:31:50,816 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:31:50,816 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:31:50,817 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:31:50,817 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:31:50,817 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:31:50,820 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:31:50,857 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 04:31:50,857 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:31:50,857 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:31:50,857 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4977; bufvoid = 104857600
+2017-03-11 04:31:50,857 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214148(104856592); length = 249/6553600
+2017-03-11 04:31:50,871 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:31:50,877 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 04:31:50,880 INFO org.apache.hadoop.mapred.Task: Task:attempt_local372885088_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 04:31:50,888 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-11 04:31:50,888 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local372885088_0001_m_000000_0' done.
+2017-03-11 04:31:50,888 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local372885088_0001_m_000000_0
+2017-03-11 04:31:50,889 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:31:50,892 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 04:31:50,892 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local372885088_0001_r_000000_0
+2017-03-11 04:31:50,897 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:31:50,901 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:31:50,904 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@684528a3
+2017-03-11 04:31:50,916 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:31:50,922 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local372885088_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:31:50,954 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 04:31:50,955 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local372885088_0001_m_000000_0 decomp: 5105 len: 525 to MEMORY
+2017-03-11 04:31:50,958 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5105 bytes from map-output for attempt_local372885088_0001_m_000000_0
+2017-03-11 04:31:50,958 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5105, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5105
+2017-03-11 04:31:50,959 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:31:50,961 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:31:50,961 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:31:50,970 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:31:50,970 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:31:50,972 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5105 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:31:50,972 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 533 bytes from disk
+2017-03-11 04:31:50,973 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:31:50,973 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:31:50,973 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5087 bytes
+2017-03-11 04:31:50,982 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:31:50,991 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 04:31:50,995 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 04:31:50,997 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local372885088_0001
+java.lang.Exception: java.lang.ArrayIndexOutOfBoundsException: 2
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:529)
+Caused by: java.lang.ArrayIndexOutOfBoundsException: 2
+	at similarity.NaiveApproach$Reduce.reduce(NaiveApproach.java:163)
+	at similarity.NaiveApproach$Reduce.reduce(NaiveApproach.java:1)
+	at org.apache.hadoop.mapreduce.Reducer.run(Reducer.java:171)
+	at org.apache.hadoop.mapred.ReduceTask.runNewReducer(ReduceTask.java:627)
+	at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:389)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$ReduceTaskRunnable.run(LocalJobRunner.java:319)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-11 04:31:51,488 INFO org.apache.hadoop.mapreduce.Job: Job job_local372885088_0001 running in uber mode : false
+2017-03-11 04:31:51,490 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-11 04:31:51,491 INFO org.apache.hadoop.mapreduce.Job: Job job_local372885088_0001 failed with state FAILED due to: NA
+2017-03-11 04:31:51,503 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
 	File System Counters
-		FILE: Number of bytes read=11514350
-		FILE: Number of bytes written=1394261
+		FILE: Number of bytes read=633
+		FILE: Number of bytes written=264241
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=1023494
-		Map output bytes=8925696
-		Map output materialized bytes=167092
-		Input split bytes=120
-		Combine input records=1023494
-		Combine output records=34513
-		Reduce input groups=34513
-		Reduce shuffle bytes=167092
-		Reduce input records=34513
-		Reduce output records=34513
-		Spilled Records=69026
+		Map input records=7
+		Map output records=63
+		Map output bytes=4977
+		Map output materialized bytes=525
+		Input split bytes=122
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=0
+		Reduce shuffle bytes=525
+		Reduce input records=0
+		Reduce output records=0
+		Spilled Records=63
 		Shuffled Maps =1
 		Failed Shuffles=0
 		Merged Map outputs=1
-		GC time elapsed (ms)=82
+		GC time elapsed (ms)=26
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
-		Total committed heap usage (bytes)=331227136
+		Total committed heap usage (bytes)=165613568
 	Shuffle Errors
 		BAD_ID=0
 		CONNECTION=0
@@ -549,108 +2242,115 @@
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=454
 	File Output Format Counters 
-		Bytes Written=363879
-2017-03-10 14:11:49,324 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:11:49,809 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:11:49,819 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:11:50,294 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:11:50,309 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:11:50,418 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:11:50,734 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1130190814_0001
-2017-03-10 14:11:51,124 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:11:51,125 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1130190814_0001
-2017-03-10 14:11:51,125 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:11:51,137 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:11:51,145 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:11:51,262 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:11:51,262 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1130190814_0001_m_000000_0
-2017-03-10 14:11:51,319 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:11:51,345 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:11:51,348 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:11:51,451 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:11:52,138 INFO org.apache.hadoop.mapreduce.Job: Job job_local1130190814_0001 running in uber mode : false
-2017-03-10 14:11:52,139 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:11:53,548 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:11:53,549 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:11:53,549 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:11:53,549 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
-2017-03-10 14:11:53,549 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
-2017-03-10 14:11:54,505 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:11:55,315 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:11:55,318 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1130190814_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:11:55,323 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:11:55,324 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1130190814_0001_m_000000_0' done.
-2017-03-10 14:11:55,324 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1130190814_0001_m_000000_0
-2017-03-10 14:11:55,324 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:11:55,331 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:11:55,332 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1130190814_0001_r_000000_0
-2017-03-10 14:11:55,337 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:11:55,337 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:11:55,339 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@33c40638
-2017-03-10 14:11:55,352 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:11:55,358 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1130190814_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:11:55,386 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:11:55,387 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1130190814_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
-2017-03-10 14:11:55,397 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1130190814_0001_m_000000_0
-2017-03-10 14:11:55,397 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
-2017-03-10 14:11:55,398 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:11:55,399 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:11:55,399 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:11:55,403 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:11:55,404 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:11:55,647 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:11:55,647 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
-2017-03-10 14:11:55,648 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:11:55,648 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:11:55,649 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:11:55,649 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:11:55,654 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:11:56,162 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:11:56,278 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1130190814_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:11:56,289 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:11:56,292 INFO org.apache.hadoop.mapred.Task: Task attempt_local1130190814_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:11:56,294 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1130190814_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1130190814_0001_r_000000
-2017-03-10 14:11:56,294 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:11:56,295 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1130190814_0001_r_000000_0' done.
-2017-03-10 14:11:56,295 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1130190814_0001_r_000000_0
-2017-03-10 14:11:56,295 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:11:57,163 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:11:57,163 INFO org.apache.hadoop.mapreduce.Job: Job job_local1130190814_0001 completed successfully
-2017-03-10 14:11:57,176 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+		Bytes Written=0
+2017-03-11 04:35:38,749 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:35:39,238 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:35:39,245 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:35:39,678 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:35:39,699 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:35:39,807 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:35:40,047 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local95551546_0001
+2017-03-11 04:35:40,533 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:35:40,534 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local95551546_0001
+2017-03-11 04:35:40,538 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:35:40,551 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:35:40,562 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:35:40,672 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:35:40,674 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local95551546_0001_m_000000_0
+2017-03-11 04:35:40,720 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:35:40,735 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:35:40,742 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 04:35:40,819 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:35:40,824 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:35:40,824 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:35:40,824 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:35:40,824 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:35:40,827 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:35:40,867 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 04:35:40,867 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:35:40,867 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:35:40,867 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4424; bufvoid = 104857600
+2017-03-11 04:35:40,867 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214176(104856704); length = 221/6553600
+2017-03-11 04:35:40,881 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:35:40,889 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 04:35:40,892 INFO org.apache.hadoop.mapred.Task: Task:attempt_local95551546_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 04:35:40,901 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-11 04:35:40,901 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local95551546_0001_m_000000_0' done.
+2017-03-11 04:35:40,901 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local95551546_0001_m_000000_0
+2017-03-11 04:35:40,902 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:35:40,905 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 04:35:40,905 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local95551546_0001_r_000000_0
+2017-03-11 04:35:40,910 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:35:40,910 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:35:40,913 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6dac133e
+2017-03-11 04:35:40,931 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:35:40,937 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local95551546_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:35:40,957 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 04:35:40,958 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local95551546_0001_m_000000_0 decomp: 4538 len: 489 to MEMORY
+2017-03-11 04:35:40,961 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 4538 bytes from map-output for attempt_local95551546_0001_m_000000_0
+2017-03-11 04:35:40,961 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 4538, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->4538
+2017-03-11 04:35:40,962 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:35:40,963 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:35:40,963 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:35:40,969 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:35:40,969 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4520 bytes
+2017-03-11 04:35:40,971 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 4538 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:35:40,971 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 497 bytes from disk
+2017-03-11 04:35:40,972 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:35:40,972 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:35:40,972 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4520 bytes
+2017-03-11 04:35:40,978 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:35:40,988 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 04:35:40,994 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 04:35:40,999 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local95551546_0001
+java.lang.Exception: java.lang.ArrayIndexOutOfBoundsException: 2
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:529)
+Caused by: java.lang.ArrayIndexOutOfBoundsException: 2
+	at similarity.NaiveApproach$Reduce.reduce(NaiveApproach.java:167)
+	at similarity.NaiveApproach$Reduce.reduce(NaiveApproach.java:1)
+	at org.apache.hadoop.mapreduce.Reducer.run(Reducer.java:171)
+	at org.apache.hadoop.mapred.ReduceTask.runNewReducer(ReduceTask.java:627)
+	at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:389)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$ReduceTaskRunnable.run(LocalJobRunner.java:319)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-11 04:35:41,536 INFO org.apache.hadoop.mapreduce.Job: Job job_local95551546_0001 running in uber mode : false
+2017-03-11 04:35:41,537 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-11 04:35:41,539 INFO org.apache.hadoop.mapreduce.Job: Job job_local95551546_0001 failed with state FAILED due to: NA
+2017-03-11 04:35:41,550 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
 	File System Counters
-		FILE: Number of bytes read=11514350
-		FILE: Number of bytes written=1397073
+		FILE: Number of bytes read=633
+		FILE: Number of bytes written=262799
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=1023494
-		Map output bytes=8925696
-		Map output materialized bytes=167092
-		Input split bytes=120
-		Combine input records=1023494
-		Combine output records=34513
-		Reduce input groups=34513
-		Reduce shuffle bytes=167092
-		Reduce input records=34513
-		Reduce output records=34513
-		Spilled Records=69026
+		Map input records=7
+		Map output records=56
+		Map output bytes=4424
+		Map output materialized bytes=489
+		Input split bytes=122
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=0
+		Reduce shuffle bytes=489
+		Reduce input records=0
+		Reduce output records=0
+		Spilled Records=56
 		Shuffled Maps =1
 		Failed Shuffles=0
 		Merged Map outputs=1
-		GC time elapsed (ms)=84
+		GC time elapsed (ms)=26
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
-		Total committed heap usage (bytes)=331227136
+		Total committed heap usage (bytes)=165613568
 	Shuffle Errors
 		BAD_ID=0
 		CONNECTION=0
@@ -659,108 +2359,106 @@
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=454
 	File Output Format Counters 
-		Bytes Written=363879
-2017-03-10 14:12:54,192 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:12:54,666 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:12:54,677 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:12:54,726 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
-2017-03-10 14:13:16,264 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:13:16,742 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:13:16,746 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:13:17,210 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:13:17,218 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:13:17,249 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:13:17,469 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1259538348_0001
-2017-03-10 14:13:17,909 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:13:17,911 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1259538348_0001
-2017-03-10 14:13:17,915 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:13:17,923 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:13:17,937 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:13:18,056 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:13:18,059 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1259538348_0001_m_000000_0
-2017-03-10 14:13:18,117 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:13:18,128 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:13:18,132 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:13:18,215 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:13:18,216 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:13:18,216 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:13:18,216 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:13:18,216 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:13:18,226 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:13:18,912 INFO org.apache.hadoop.mapreduce.Job: Job job_local1259538348_0001 running in uber mode : false
-2017-03-10 14:13:18,913 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:13:20,442 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:13:20,443 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:13:20,443 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:13:20,443 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
-2017-03-10 14:13:20,443 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
-2017-03-10 14:13:21,376 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:13:22,115 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:13:22,117 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1259538348_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:13:22,124 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:13:22,124 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1259538348_0001_m_000000_0' done.
-2017-03-10 14:13:22,124 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1259538348_0001_m_000000_0
-2017-03-10 14:13:22,124 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:13:22,132 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:13:22,132 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1259538348_0001_r_000000_0
-2017-03-10 14:13:22,137 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:13:22,137 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:13:22,140 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
-2017-03-10 14:13:22,151 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:13:22,156 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1259538348_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:13:22,209 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:13:22,209 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1259538348_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
-2017-03-10 14:13:22,215 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1259538348_0001_m_000000_0
-2017-03-10 14:13:22,217 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
-2017-03-10 14:13:22,219 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:13:22,220 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:13:22,224 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:13:22,229 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:13:22,230 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:13:22,458 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:13:22,459 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
-2017-03-10 14:13:22,459 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:13:22,459 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:13:22,460 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:13:22,460 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:13:22,465 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:13:22,936 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:13:23,199 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1259538348_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:13:23,200 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:13:23,201 INFO org.apache.hadoop.mapred.Task: Task attempt_local1259538348_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:13:23,201 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1259538348_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1259538348_0001_r_000000
-2017-03-10 14:13:23,202 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:13:23,202 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1259538348_0001_r_000000_0' done.
-2017-03-10 14:13:23,202 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1259538348_0001_r_000000_0
-2017-03-10 14:13:23,202 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:13:23,936 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:13:23,937 INFO org.apache.hadoop.mapreduce.Job: Job job_local1259538348_0001 completed successfully
-2017-03-10 14:13:23,948 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+		Bytes Written=0
+2017-03-11 04:37:40,474 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:37:41,035 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:37:41,040 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:37:41,085 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
+2017-03-11 04:37:59,919 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:38:00,447 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:38:00,448 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:38:00,886 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:38:00,894 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:38:01,020 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:38:01,264 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local2110978304_0001
+2017-03-11 04:38:01,718 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:38:01,719 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local2110978304_0001
+2017-03-11 04:38:01,720 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:38:01,734 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:38:01,743 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:38:01,854 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:38:01,855 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2110978304_0001_m_000000_0
+2017-03-11 04:38:01,905 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:38:01,916 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:38:01,920 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 04:38:01,995 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:38:01,995 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:38:01,995 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:38:01,995 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:38:01,996 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:38:02,005 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:38:02,048 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 04:38:02,048 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:38:02,048 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:38:02,048 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4424; bufvoid = 104857600
+2017-03-11 04:38:02,048 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214176(104856704); length = 221/6553600
+2017-03-11 04:38:02,057 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:38:02,063 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 04:38:02,066 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2110978304_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 04:38:02,072 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-11 04:38:02,072 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2110978304_0001_m_000000_0' done.
+2017-03-11 04:38:02,072 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2110978304_0001_m_000000_0
+2017-03-11 04:38:02,072 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:38:02,075 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 04:38:02,076 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2110978304_0001_r_000000_0
+2017-03-11 04:38:02,081 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:38:02,081 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:38:02,085 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6dac133e
+2017-03-11 04:38:02,100 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:38:02,104 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2110978304_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:38:02,123 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 04:38:02,123 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2110978304_0001_m_000000_0 decomp: 4538 len: 501 to MEMORY
+2017-03-11 04:38:02,128 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 4538 bytes from map-output for attempt_local2110978304_0001_m_000000_0
+2017-03-11 04:38:02,128 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 4538, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->4538
+2017-03-11 04:38:02,130 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:38:02,131 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:38:02,131 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:38:02,138 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:38:02,138 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4520 bytes
+2017-03-11 04:38:02,139 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 4538 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:38:02,140 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 509 bytes from disk
+2017-03-11 04:38:02,140 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:38:02,140 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:38:02,141 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4520 bytes
+2017-03-11 04:38:02,145 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:38:02,159 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 04:38:02,169 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2110978304_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 04:38:02,171 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:38:02,171 INFO org.apache.hadoop.mapred.Task: Task attempt_local2110978304_0001_r_000000_0 is allowed to commit now
+2017-03-11 04:38:02,172 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2110978304_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local2110978304_0001_r_000000
+2017-03-11 04:38:02,172 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:38:02,172 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2110978304_0001_r_000000_0' done.
+2017-03-11 04:38:02,173 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2110978304_0001_r_000000_0
+2017-03-11 04:38:02,173 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 04:38:02,725 INFO org.apache.hadoop.mapreduce.Job: Job job_local2110978304_0001 running in uber mode : false
+2017-03-11 04:38:02,726 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 04:38:02,728 INFO org.apache.hadoop.mapreduce.Job: Job job_local2110978304_0001 completed successfully
+2017-03-11 04:38:02,739 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
 	File System Counters
-		FILE: Number of bytes read=11514350
-		FILE: Number of bytes written=1397073
+		FILE: Number of bytes read=2308
+		FILE: Number of bytes written=531767
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=1023494
-		Map output bytes=8925696
-		Map output materialized bytes=167092
-		Input split bytes=120
-		Combine input records=1023494
-		Combine output records=34513
-		Reduce input groups=34513
-		Reduce shuffle bytes=167092
-		Reduce input records=34513
-		Reduce output records=34513
-		Spilled Records=69026
+		Map input records=7
+		Map output records=56
+		Map output bytes=4424
+		Map output materialized bytes=501
+		Input split bytes=122
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=35
+		Reduce shuffle bytes=501
+		Reduce input records=56
+		Reduce output records=1
+		Spilled Records=112
 		Shuffled Maps =1
 		Failed Shuffles=0
 		Merged Map outputs=1
-		GC time elapsed (ms)=79
+		GC time elapsed (ms)=28
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
@@ -773,104 +2471,102 @@
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=454
 	File Output Format Counters 
-		Bytes Written=363879
-2017-03-10 14:15:07,671 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:15:08,143 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:15:08,146 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:15:08,597 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:15:08,610 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:15:08,649 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:15:08,885 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local744282859_0001
-2017-03-10 14:15:09,357 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:15:09,358 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local744282859_0001
-2017-03-10 14:15:09,361 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:15:09,374 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:15:09,376 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:15:09,515 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:15:09,516 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local744282859_0001_m_000000_0
-2017-03-10 14:15:09,574 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:15:09,584 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:15:09,587 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:15:09,679 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:15:10,364 INFO org.apache.hadoop.mapreduce.Job: Job job_local744282859_0001 running in uber mode : false
-2017-03-10 14:15:10,366 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:15:11,981 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:15:11,982 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:15:11,982 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:15:11,982 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
-2017-03-10 14:15:11,982 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
-2017-03-10 14:15:13,023 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:15:13,726 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:15:13,728 INFO org.apache.hadoop.mapred.Task: Task:attempt_local744282859_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:15:13,739 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:15:13,739 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local744282859_0001_m_000000_0' done.
-2017-03-10 14:15:13,739 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local744282859_0001_m_000000_0
-2017-03-10 14:15:13,739 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:15:13,748 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:15:13,749 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local744282859_0001_r_000000_0
-2017-03-10 14:15:13,753 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:15:13,754 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:15:13,756 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@534d4113
-2017-03-10 14:15:13,766 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:15:13,772 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local744282859_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:15:13,800 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:15:13,800 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local744282859_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
-2017-03-10 14:15:13,807 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local744282859_0001_m_000000_0
-2017-03-10 14:15:13,807 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
-2017-03-10 14:15:13,809 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:15:13,813 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:15:13,814 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:15:13,819 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:15:13,819 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:15:14,078 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:15:14,079 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
-2017-03-10 14:15:14,079 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:15:14,079 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:15:14,080 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
-2017-03-10 14:15:14,080 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:15:14,085 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:15:14,378 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:15:14,760 INFO org.apache.hadoop.mapred.Task: Task:attempt_local744282859_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:15:14,761 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:15:14,761 INFO org.apache.hadoop.mapred.Task: Task attempt_local744282859_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local744282859_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local744282859_0001_r_000000
-2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local744282859_0001_r_000000_0' done.
-2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local744282859_0001_r_000000_0
-2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:15:15,379 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:15:15,380 INFO org.apache.hadoop.mapreduce.Job: Job job_local744282859_0001 completed successfully
-2017-03-10 14:15:15,390 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+		Bytes Written=20
+2017-03-11 04:42:06,971 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:42:08,074 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:42:08,087 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:42:09,013 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:42:09,048 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:42:09,250 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:42:09,647 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1746989117_0001
+2017-03-11 04:42:10,180 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:42:10,191 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:42:10,192 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1746989117_0001
+2017-03-11 04:42:10,213 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:42:10,222 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:42:10,451 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:42:10,452 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1746989117_0001_m_000000_0
+2017-03-11 04:42:10,541 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:42:10,562 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:42:10,572 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 04:42:10,712 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:42:10,712 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:42:10,712 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:42:10,713 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:42:10,713 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:42:10,718 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:42:10,770 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 04:42:10,770 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:42:10,770 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 04:42:10,771 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4424; bufvoid = 104857600
+2017-03-11 04:42:10,771 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214176(104856704); length = 221/6553600
+2017-03-11 04:42:10,789 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:42:10,804 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 04:42:10,808 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1746989117_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 04:42:10,826 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-11 04:42:10,832 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1746989117_0001_m_000000_0' done.
+2017-03-11 04:42:10,836 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1746989117_0001_m_000000_0
+2017-03-11 04:42:10,838 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:42:10,843 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 04:42:10,844 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1746989117_0001_r_000000_0
+2017-03-11 04:42:10,855 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:42:10,855 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:42:10,866 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4d83f728
+2017-03-11 04:42:10,891 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 04:42:10,899 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1746989117_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 04:42:10,943 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 04:42:10,943 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1746989117_0001_m_000000_0 decomp: 4538 len: 501 to MEMORY
+2017-03-11 04:42:10,952 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 4538 bytes from map-output for attempt_local1746989117_0001_m_000000_0
+2017-03-11 04:42:10,952 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 4538, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->4538
+2017-03-11 04:42:10,954 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 04:42:10,955 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:42:10,956 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 04:42:10,967 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:42:10,967 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4520 bytes
+2017-03-11 04:42:10,970 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 4538 bytes to disk to satisfy reduce memory limit
+2017-03-11 04:42:10,970 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 509 bytes from disk
+2017-03-11 04:42:10,971 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 04:42:10,971 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 04:42:10,972 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4520 bytes
+2017-03-11 04:42:10,979 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:42:11,006 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 04:42:11,026 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1746989117_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 04:42:11,029 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 04:42:11,030 INFO org.apache.hadoop.mapred.Task: Task attempt_local1746989117_0001_r_000000_0 is allowed to commit now
+2017-03-11 04:42:11,032 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1746989117_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1746989117_0001_r_000000
+2017-03-11 04:42:11,032 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 04:42:11,033 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1746989117_0001_r_000000_0' done.
+2017-03-11 04:42:11,033 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1746989117_0001_r_000000_0
+2017-03-11 04:42:11,033 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 04:42:11,212 INFO org.apache.hadoop.mapreduce.Job: Job job_local1746989117_0001 running in uber mode : false
+2017-03-11 04:42:11,213 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 04:42:11,215 INFO org.apache.hadoop.mapreduce.Job: Job job_local1746989117_0001 completed successfully
+2017-03-11 04:42:11,233 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
 	File System Counters
-		FILE: Number of bytes read=11514350
-		FILE: Number of bytes written=1394261
+		FILE: Number of bytes read=2308
+		FILE: Number of bytes written=531767
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=1023494
-		Map output bytes=8925696
-		Map output materialized bytes=167092
-		Input split bytes=120
-		Combine input records=1023494
-		Combine output records=34513
-		Reduce input groups=34513
-		Reduce shuffle bytes=167092
-		Reduce input records=34513
-		Reduce output records=34513
-		Spilled Records=69026
+		Map input records=7
+		Map output records=56
+		Map output bytes=4424
+		Map output materialized bytes=501
+		Input split bytes=122
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=35
+		Reduce shuffle bytes=501
+		Reduce input records=56
+		Reduce output records=1
+		Spilled Records=112
 		Shuffled Maps =1
 		Failed Shuffles=0
 		Merged Map outputs=1
-		GC time elapsed (ms)=90
+		GC time elapsed (ms)=49
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
@@ -883,269 +2579,43 @@
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=454
 	File Output Format Counters 
-		Bytes Written=363879
-2017-03-10 14:16:55,128 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:16:55,605 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:16:55,614 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:16:55,649 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
-2017-03-10 14:17:14,700 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:17:15,157 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:17:15,169 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:17:15,624 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:17:15,635 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:17:15,668 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:17:15,901 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1420822781_0001
-2017-03-10 14:17:16,319 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:17:16,321 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1420822781_0001
-2017-03-10 14:17:16,323 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:17:16,335 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:17:16,340 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:17:16,441 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:17:16,442 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1420822781_0001_m_000000_0
-2017-03-10 14:17:16,518 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:17:16,531 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:17:16,534 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:17:16,616 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:17:16,617 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:17:16,617 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:17:16,617 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:17:16,617 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:17:16,621 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:17:16,682 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:17:16,697 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:17:16,704 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:17:16,705 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1420822781_0001
-java.lang.Exception: java.lang.ArrayIndexOutOfBoundsException: 1
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
-Caused by: java.lang.ArrayIndexOutOfBoundsException: 1
-	at similarity.WordSort$Map.loadWordFreq(WordSort.java:87)
-	at similarity.WordSort$Map.setup(WordSort.java:118)
-	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:142)
-	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
-	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
-	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
-	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
-	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
-	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
-	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
-	at java.lang.Thread.run(Thread.java:745)
-2017-03-10 14:17:17,328 INFO org.apache.hadoop.mapreduce.Job: Job job_local1420822781_0001 running in uber mode : false
-2017-03-10 14:17:17,329 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:17:17,331 INFO org.apache.hadoop.mapreduce.Job: Job job_local1420822781_0001 failed with state FAILED due to: NA
-2017-03-10 14:17:17,336 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
-2017-03-10 14:26:12,465 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:26:12,973 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:26:12,986 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:26:13,468 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:26:13,490 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:26:13,599 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:26:13,979 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1562971559_0001
-2017-03-10 14:26:14,383 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:26:14,385 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1562971559_0001
-2017-03-10 14:26:14,384 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:26:14,391 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:26:14,399 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:26:14,512 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:26:14,513 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1562971559_0001_m_000000_0
-2017-03-10 14:26:14,576 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:26:14,613 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:26:14,617 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:26:14,762 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:26:14,762 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:26:14,763 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:26:14,763 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:26:14,763 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:26:14,766 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:26:15,398 INFO org.apache.hadoop.mapreduce.Job: Job job_local1562971559_0001 running in uber mode : false
-2017-03-10 14:26:15,398 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:26:15,473 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:26:15,481 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:26:15,488 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:26:15,490 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1562971559_0001
-java.lang.Exception: java.lang.NullPointerException
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
-Caused by: java.lang.NullPointerException
-	at similarity.WordSort$Map$1.compare(WordSort.java:135)
-	at similarity.WordSort$Map$1.compare(WordSort.java:1)
-	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
-	at java.util.TimSort.sort(TimSort.java:189)
-	at java.util.TimSort.sort(TimSort.java:173)
-	at java.util.Arrays.sort(Arrays.java:659)
-	at java.util.Collections.sort(Collections.java:217)
-	at similarity.WordSort$Map.map(WordSort.java:131)
-	at similarity.WordSort$Map.map(WordSort.java:1)
-	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
-	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
-	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
-	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
-	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
-	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
-	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
-	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
-	at java.lang.Thread.run(Thread.java:745)
-2017-03-10 14:26:16,400 INFO org.apache.hadoop.mapreduce.Job: Job job_local1562971559_0001 failed with state FAILED due to: NA
-2017-03-10 14:26:16,402 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
-2017-03-10 14:29:37,807 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:29:38,356 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:29:38,361 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:29:38,865 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:29:38,874 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:29:38,909 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:29:39,202 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local705841975_0001
-2017-03-10 14:29:39,678 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:29:39,680 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local705841975_0001
-2017-03-10 14:29:39,685 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:29:39,703 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:29:39,705 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:29:39,828 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:29:39,829 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local705841975_0001_m_000000_0
-2017-03-10 14:29:39,879 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:29:39,891 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:29:39,896 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:29:39,983 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:29:39,984 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:29:39,984 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:29:39,984 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:29:39,984 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:29:39,988 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:29:40,687 INFO org.apache.hadoop.mapreduce.Job: Job job_local705841975_0001 running in uber mode : false
-2017-03-10 14:29:40,690 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:29:40,701 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:29:40,708 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:29:40,717 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:29:40,718 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local705841975_0001
-java.lang.Exception: java.lang.NullPointerException
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
-Caused by: java.lang.NullPointerException
-	at similarity.WordSort$Map$1.compare(WordSort.java:138)
-	at similarity.WordSort$Map$1.compare(WordSort.java:1)
-	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
-	at java.util.TimSort.sort(TimSort.java:189)
-	at java.util.TimSort.sort(TimSort.java:173)
-	at java.util.Arrays.sort(Arrays.java:659)
-	at java.util.Collections.sort(Collections.java:217)
-	at similarity.WordSort$Map.map(WordSort.java:134)
-	at similarity.WordSort$Map.map(WordSort.java:1)
-	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
-	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
-	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
-	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
-	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
-	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
-	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
-	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
-	at java.lang.Thread.run(Thread.java:745)
-2017-03-10 14:29:41,693 INFO org.apache.hadoop.mapreduce.Job: Job job_local705841975_0001 failed with state FAILED due to: NA
-2017-03-10 14:29:41,695 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
-2017-03-10 14:34:26,674 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:34:27,174 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:34:27,179 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:34:27,223 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount already exists
-2017-03-10 14:34:55,125 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:34:55,598 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:34:55,612 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:34:56,114 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:34:56,121 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:34:56,259 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:34:56,554 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1013376941_0001
-2017-03-10 14:34:56,980 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:34:56,982 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:34:56,983 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1013376941_0001
-2017-03-10 14:34:56,990 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:34:56,995 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:34:57,121 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:34:57,124 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1013376941_0001_m_000000_0
-2017-03-10 14:34:57,181 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:34:57,198 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:34:57,202 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:34:57,295 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:34:57,298 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:34:57,298 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:34:57,298 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:34:57,298 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:34:57,300 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:34:57,967 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:34:57,974 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:34:57,988 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:34:57,990 INFO org.apache.hadoop.mapreduce.Job: Job job_local1013376941_0001 running in uber mode : false
-2017-03-10 14:34:57,991 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:34:57,993 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1013376941_0001
-java.lang.Exception: java.lang.NullPointerException
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
-Caused by: java.lang.NullPointerException
-	at similarity.WordSort$Map$1.compare(WordSort.java:146)
-	at similarity.WordSort$Map$1.compare(WordSort.java:1)
-	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
-	at java.util.TimSort.sort(TimSort.java:189)
-	at java.util.TimSort.sort(TimSort.java:173)
-	at java.util.Arrays.sort(Arrays.java:659)
-	at java.util.Collections.sort(Collections.java:217)
-	at similarity.WordSort$Map.map(WordSort.java:142)
-	at similarity.WordSort$Map.map(WordSort.java:1)
-	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
-	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
-	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
-	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
-	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
-	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
-	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
-	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
-	at java.lang.Thread.run(Thread.java:745)
-2017-03-10 14:34:57,995 INFO org.apache.hadoop.mapreduce.Job: Job job_local1013376941_0001 failed with state FAILED due to: NA
-2017-03-10 14:34:57,998 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
-2017-03-10 14:38:51,972 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:38:52,515 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:38:52,527 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:38:53,098 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:38:53,128 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:38:53,247 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:38:53,578 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1410438889_0001
-2017-03-10 14:38:54,016 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:38:54,017 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1410438889_0001
-2017-03-10 14:38:54,024 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:38:54,064 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:38:54,065 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:38:54,219 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:38:54,220 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1410438889_0001_m_000000_0
-2017-03-10 14:38:54,279 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:38:54,293 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:38:54,296 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:38:54,396 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:38:54,396 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:38:54,396 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:38:54,397 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:38:54,397 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:38:54,400 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:38:55,024 INFO org.apache.hadoop.mapreduce.Job: Job job_local1410438889_0001 running in uber mode : false
-2017-03-10 14:38:55,027 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:38:55,119 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:38:55,119 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:38:55,119 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 325; bufvoid = 104857600
-2017-03-10 14:38:55,119 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214376(104857504); length = 21/6553600
-2017-03-10 14:38:55,128 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:38:55,137 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:38:55,141 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:38:55,141 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1410438889_0001
-java.lang.Exception: java.lang.NullPointerException
+		Bytes Written=20
+	similarity.NaiveApproach$CompCounter
+		NUM=21
+2017-03-11 04:59:27,630 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 04:59:28,293 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 04:59:28,317 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 04:59:28,995 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 04:59:29,010 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 04:59:29,098 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 04:59:29,457 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1838228410_0001
+2017-03-11 04:59:30,030 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 04:59:30,031 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1838228410_0001
+2017-03-11 04:59:30,045 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 04:59:30,062 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:59:30,068 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 04:59:30,263 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 04:59:30,263 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1838228410_0001_m_000000_0
+2017-03-11 04:59:30,337 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 04:59:30,378 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 04:59:30,387 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 04:59:30,595 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 04:59:30,595 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 04:59:30,595 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 04:59:30,597 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 04:59:30,605 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 04:59:30,607 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 04:59:30,649 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 04:59:30,669 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 04:59:30,721 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 04:59:30,722 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1838228410_0001
+java.lang.Exception: java.lang.ClassCastException: org.apache.hadoop.io.Text cannot be cast to org.apache.hadoop.io.LongWritable
 	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
 	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
-Caused by: java.lang.NullPointerException
-	at java.lang.Integer.compareTo(Integer.java:1003)
-	at similarity.WordSort$Map$1.compare(WordSort.java:144)
-	at similarity.WordSort$Map$1.compare(WordSort.java:1)
-	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
-	at java.util.TimSort.sort(TimSort.java:189)
-	at java.util.TimSort.sort(TimSort.java:173)
-	at java.util.Arrays.sort(Arrays.java:659)
-	at java.util.Collections.sort(Collections.java:217)
-	at similarity.WordSort$Map.map(WordSort.java:140)
-	at similarity.WordSort$Map.map(WordSort.java:1)
+Caused by: java.lang.ClassCastException: org.apache.hadoop.io.Text cannot be cast to org.apache.hadoop.io.LongWritable
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:1)
 	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
 	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
 	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
@@ -1155,55 +2625,42 @@ Caused by: java.lang.NullPointerException
 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
 	at java.lang.Thread.run(Thread.java:745)
-2017-03-10 14:38:56,030 INFO org.apache.hadoop.mapreduce.Job: Job job_local1410438889_0001 failed with state FAILED due to: NA
-2017-03-10 14:38:56,035 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
-2017-03-10 14:40:16,992 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:40:17,522 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:40:17,536 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:40:18,047 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:40:18,055 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:40:18,089 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:40:18,383 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local483843492_0001
-2017-03-10 14:40:18,855 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:40:18,856 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local483843492_0001
-2017-03-10 14:40:18,860 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:40:18,887 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:40:18,889 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:40:19,021 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:40:19,023 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local483843492_0001_m_000000_0
-2017-03-10 14:40:19,078 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:40:19,106 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:40:19,109 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:40:19,243 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:40:19,243 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:40:19,243 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:40:19,244 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:40:19,244 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:40:19,249 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:40:19,852 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:40:19,852 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:40:19,852 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 325; bufvoid = 104857600
-2017-03-10 14:40:19,852 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214376(104857504); length = 21/6553600
-2017-03-10 14:40:19,858 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:40:19,860 INFO org.apache.hadoop.mapreduce.Job: Job job_local483843492_0001 running in uber mode : false
-2017-03-10 14:40:19,861 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:40:19,866 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:40:19,869 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:40:19,870 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local483843492_0001
-java.lang.Exception: java.lang.NullPointerException
+2017-03-11 04:59:31,033 INFO org.apache.hadoop.mapreduce.Job: Job job_local1838228410_0001 running in uber mode : false
+2017-03-11 04:59:31,034 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 04:59:31,036 INFO org.apache.hadoop.mapreduce.Job: Job job_local1838228410_0001 failed with state FAILED due to: NA
+2017-03-11 04:59:31,049 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
+2017-03-11 05:00:57,011 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 05:00:57,702 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 05:00:57,707 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 05:00:58,350 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 05:00:58,358 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 05:00:58,452 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 05:00:58,816 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1553338044_0001
+2017-03-11 05:00:59,400 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 05:00:59,401 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1553338044_0001
+2017-03-11 05:00:59,410 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 05:00:59,417 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:00:59,445 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 05:00:59,609 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 05:00:59,609 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1553338044_0001_m_000000_0
+2017-03-11 05:00:59,680 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:00:59,731 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:00:59,738 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 05:00:59,903 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 05:00:59,904 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 05:00:59,904 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 05:00:59,904 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 05:00:59,904 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 05:00:59,914 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 05:00:59,968 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 05:00:59,983 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 05:01:00,044 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 05:01:00,045 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1553338044_0001
+java.lang.Exception: java.lang.ClassCastException: org.apache.hadoop.io.Text cannot be cast to org.apache.hadoop.io.LongWritable
 	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
 	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
-Caused by: java.lang.NullPointerException
-	at java.lang.Integer.compareTo(Integer.java:1003)
-	at similarity.WordSort$Map$1.compare(WordSort.java:144)
-	at similarity.WordSort$Map$1.compare(WordSort.java:1)
-	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
-	at java.util.TimSort.sort(TimSort.java:189)
-	at java.util.TimSort.sort(TimSort.java:173)
-	at java.util.Arrays.sort(Arrays.java:659)
-	at java.util.Collections.sort(Collections.java:217)
-	at similarity.WordSort$Map.map(WordSort.java:140)
-	at similarity.WordSort$Map.map(WordSort.java:1)
+Caused by: java.lang.ClassCastException: org.apache.hadoop.io.Text cannot be cast to org.apache.hadoop.io.LongWritable
+	at similarity.NaiveApproach$Map.map(NaiveApproach.java:1)
 	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
 	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
 	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
@@ -1213,107 +2670,1438 @@ Caused by: java.lang.NullPointerException
 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
 	at java.lang.Thread.run(Thread.java:745)
-2017-03-10 14:40:20,864 INFO org.apache.hadoop.mapreduce.Job: Job job_local483843492_0001 failed with state FAILED due to: NA
-2017-03-10 14:40:20,871 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
-2017-03-10 14:41:43,006 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:41:43,480 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:41:43,485 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:41:44,008 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:41:44,042 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:41:44,079 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:41:44,328 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1726509137_0001
-2017-03-10 14:41:44,799 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:41:44,800 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1726509137_0001
-2017-03-10 14:41:44,806 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:41:44,835 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:41:44,838 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:41:44,977 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:41:44,978 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1726509137_0001_m_000000_0
-2017-03-10 14:41:45,030 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:41:45,051 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:41:45,058 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:41:45,138 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:41:45,142 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:41:45,143 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:41:45,143 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:41:45,143 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:41:45,149 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:41:45,805 INFO org.apache.hadoop.mapreduce.Job: Job job_local1726509137_0001 running in uber mode : false
-2017-03-10 14:41:45,806 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4776007; bufvoid = 104857600
-2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25753980(103015920); length = 460417/6553600
-2017-03-10 14:41:48,780 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:41:49,395 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:41:49,398 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1726509137_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:41:49,404 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:41:49,408 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1726509137_0001_m_000000_0' done.
-2017-03-10 14:41:49,409 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1726509137_0001_m_000000_0
-2017-03-10 14:41:49,409 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:41:49,411 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:41:49,411 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1726509137_0001_r_000000_0
-2017-03-10 14:41:49,416 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:41:49,417 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:41:49,419 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@70d586bc
-2017-03-10 14:41:49,429 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:41:49,435 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1726509137_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:41:49,461 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:41:49,461 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1726509137_0001_m_000000_0 decomp: 5006219 len: 2376884 to MEMORY
-2017-03-10 14:41:49,517 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5006219 bytes from map-output for attempt_local1726509137_0001_m_000000_0
-2017-03-10 14:41:49,521 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5006219, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5006219
-2017-03-10 14:41:49,523 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:41:49,524 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:41:49,524 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:41:49,529 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:41:49,529 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5006209 bytes
-2017-03-10 14:41:49,818 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:41:50,226 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5006219 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:41:50,227 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2376892 bytes from disk
-2017-03-10 14:41:50,227 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:41:50,228 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:41:50,228 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5006209 bytes
-2017-03-10 14:41:50,230 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:41:50,235 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:41:50,888 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1726509137_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:41:50,898 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:41:50,899 INFO org.apache.hadoop.mapred.Task: Task attempt_local1726509137_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:41:50,900 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1726509137_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1726509137_0001_r_000000
-2017-03-10 14:41:50,901 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:41:50,901 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1726509137_0001_r_000000_0' done.
-2017-03-10 14:41:50,901 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1726509137_0001_r_000000_0
-2017-03-10 14:41:50,901 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:41:51,822 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:41:51,822 INFO org.apache.hadoop.mapreduce.Job: Job job_local1726509137_0001 completed successfully
-2017-03-10 14:41:51,834 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+2017-03-11 05:01:00,417 INFO org.apache.hadoop.mapreduce.Job: Job job_local1553338044_0001 running in uber mode : false
+2017-03-11 05:01:00,418 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 05:01:00,420 INFO org.apache.hadoop.mapreduce.Job: Job job_local1553338044_0001 failed with state FAILED due to: NA
+2017-03-11 05:01:00,435 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
+2017-03-11 05:04:44,882 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 05:04:45,593 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 05:04:45,593 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 05:04:46,184 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 05:04:46,199 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 05:04:46,277 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 05:04:46,636 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1689988440_0001
+2017-03-11 05:04:47,203 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 05:04:47,204 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1689988440_0001
+2017-03-11 05:04:47,207 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 05:04:47,224 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:47,237 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 05:04:47,394 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 05:04:47,396 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:47,476 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:47,511 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:47,515 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 05:04:47,713 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 05:04:47,713 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 05:04:47,713 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 05:04:47,713 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 05:04:47,713 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 05:04:47,718 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 05:04:48,208 INFO org.apache.hadoop.mapreduce.Job: Job job_local1689988440_0001 running in uber mode : false
+2017-03-11 05:04:48,209 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 05:04:48,259 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 05:04:48,266 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 05:04:48,266 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 05:04:48,266 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 69006658; bufvoid = 104857600
+2017-03-11 05:04:48,266 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22720392(90881568); length = 3494005/6553600
+2017-03-11 05:04:53,498 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454 > sort
+2017-03-11 05:04:54,259 INFO org.apache.hadoop.mapreduce.Job:  map 67% reduce 0%
+2017-03-11 05:04:54,793 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 05:04:56,504 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454 > sort
+2017-03-11 05:04:58,795 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 05:04:58,801 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 05:04:58,806 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+454
+2017-03-11 05:04:58,806 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_m_000000_0' done.
+2017-03-11 05:04:58,806 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:58,807 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 05:04:58,882 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 05:04:58,882 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000000_0
+2017-03-11 05:04:58,904 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:58,904 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:58,908 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@86b66eb
+2017-03-11 05:04:58,948 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:58,959 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,027 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 05:04:59,028 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,034 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,038 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,044 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,045 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,045 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,051 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,057 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,057 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,058 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,058 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,059 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,060 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,066 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,082 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 05:04:59,085 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 05:04:59,097 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,097 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000000_0 is allowed to commit now
+2017-03-11 05:04:59,099 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000000
+2017-03-11 05:04:59,100 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,100 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000000_0' done.
+2017-03-11 05:04:59,100 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000000_0
+2017-03-11 05:04:59,100 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000001_0
+2017-03-11 05:04:59,101 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,101 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,102 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@17649784
+2017-03-11 05:04:59,112 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,117 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,121 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,121 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,127 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,127 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,128 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,128 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,131 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,131 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,132 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,133 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,133 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,133 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,134 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,134 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,144 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000001_0 is done. And is in the process of committing
+2017-03-11 05:04:59,145 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,145 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000001_0 is allowed to commit now
+2017-03-11 05:04:59,146 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000001_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000001
+2017-03-11 05:04:59,146 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,147 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000001_0' done.
+2017-03-11 05:04:59,147 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000001_0
+2017-03-11 05:04:59,147 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000002_0
+2017-03-11 05:04:59,152 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,153 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,157 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3e1f95a9
+2017-03-11 05:04:59,160 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,173 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,181 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,187 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,188 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,188 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,189 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,189 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,193 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,194 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,194 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,194 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,194 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,194 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,195 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,195 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,204 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000002_0 is done. And is in the process of committing
+2017-03-11 05:04:59,205 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,205 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000002_0 is allowed to commit now
+2017-03-11 05:04:59,206 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000002_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000002
+2017-03-11 05:04:59,210 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,210 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000002_0' done.
+2017-03-11 05:04:59,210 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000002_0
+2017-03-11 05:04:59,210 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000003_0
+2017-03-11 05:04:59,221 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,221 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,222 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@75302bb6
+2017-03-11 05:04:59,222 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,223 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,227 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,228 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,238 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,238 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,239 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,240 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,242 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,242 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,243 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,244 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,244 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,244 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,245 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,245 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,261 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000003_0 is done. And is in the process of committing
+2017-03-11 05:04:59,264 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,264 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000003_0 is allowed to commit now
+2017-03-11 05:04:59,265 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000003_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000003
+2017-03-11 05:04:59,267 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,268 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000003_0' done.
+2017-03-11 05:04:59,272 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000003_0
+2017-03-11 05:04:59,273 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000004_0
+2017-03-11 05:04:59,279 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 05:04:59,280 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,281 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,282 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@596bb944
+2017-03-11 05:04:59,283 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,287 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,295 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,296 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,296 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,296 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,298 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,298 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,299 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,299 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,302 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,303 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,303 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,304 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,308 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,308 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,318 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000004_0 is done. And is in the process of committing
+2017-03-11 05:04:59,324 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,324 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000004_0 is allowed to commit now
+2017-03-11 05:04:59,325 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000004_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000004
+2017-03-11 05:04:59,326 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,326 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000004_0' done.
+2017-03-11 05:04:59,326 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000004_0
+2017-03-11 05:04:59,326 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000005_0
+2017-03-11 05:04:59,333 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,334 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,334 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1c7419bd
+2017-03-11 05:04:59,335 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,340 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,346 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,352 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,352 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,353 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,354 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,355 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,356 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,356 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,356 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,357 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,357 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,357 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,357 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,362 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,371 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000005_0 is done. And is in the process of committing
+2017-03-11 05:04:59,373 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,373 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000005_0 is allowed to commit now
+2017-03-11 05:04:59,376 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000005_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000005
+2017-03-11 05:04:59,380 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,380 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000005_0' done.
+2017-03-11 05:04:59,380 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000005_0
+2017-03-11 05:04:59,380 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000006_0
+2017-03-11 05:04:59,387 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,388 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,388 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@222c39b9
+2017-03-11 05:04:59,390 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,394 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,398 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,402 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,407 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,407 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,408 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,408 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,417 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,418 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,418 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,418 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,419 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,419 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,420 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,424 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,439 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000006_0 is done. And is in the process of committing
+2017-03-11 05:04:59,440 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,440 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000006_0 is allowed to commit now
+2017-03-11 05:04:59,440 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000006_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000006
+2017-03-11 05:04:59,441 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,442 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000006_0' done.
+2017-03-11 05:04:59,442 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000006_0
+2017-03-11 05:04:59,443 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000007_0
+2017-03-11 05:04:59,448 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,449 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,449 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5b7b6f08
+2017-03-11 05:04:59,451 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,458 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,463 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,464 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,464 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,464 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,465 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,465 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,466 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,466 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,467 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,470 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,471 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,471 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,472 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,473 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,492 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000007_0 is done. And is in the process of committing
+2017-03-11 05:04:59,498 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,498 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000007_0 is allowed to commit now
+2017-03-11 05:04:59,500 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000007_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000007
+2017-03-11 05:04:59,500 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,501 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000007_0' done.
+2017-03-11 05:04:59,501 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000007_0
+2017-03-11 05:04:59,501 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000008_0
+2017-03-11 05:04:59,502 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,502 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,502 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@36f7b10b
+2017-03-11 05:04:59,509 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,510 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,516 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,516 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,517 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,517 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,517 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,518 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,519 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,519 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,520 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,520 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,520 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,520 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,520 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,521 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,532 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000008_0 is done. And is in the process of committing
+2017-03-11 05:04:59,535 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,536 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000008_0 is allowed to commit now
+2017-03-11 05:04:59,538 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000008_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000008
+2017-03-11 05:04:59,553 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,553 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000008_0' done.
+2017-03-11 05:04:59,555 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000008_0
+2017-03-11 05:04:59,555 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000009_0
+2017-03-11 05:04:59,558 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,559 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,559 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@22d0ce11
+2017-03-11 05:04:59,560 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,561 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,573 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,573 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,574 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,574 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,575 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,575 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,578 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,578 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,579 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,579 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,579 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,579 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,580 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,582 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,598 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000009_0 is done. And is in the process of committing
+2017-03-11 05:04:59,599 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,599 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000009_0 is allowed to commit now
+2017-03-11 05:04:59,600 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000009_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000009
+2017-03-11 05:04:59,600 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,601 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000009_0' done.
+2017-03-11 05:04:59,601 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000009_0
+2017-03-11 05:04:59,601 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000010_0
+2017-03-11 05:04:59,602 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,602 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,602 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2455dbb6
+2017-03-11 05:04:59,611 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,618 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000010_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,621 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#11 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,625 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,629 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,630 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,630 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,630 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,633 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,633 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,633 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,634 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,635 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,635 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,635 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,636 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,654 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000010_0 is done. And is in the process of committing
+2017-03-11 05:04:59,655 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,656 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000010_0 is allowed to commit now
+2017-03-11 05:04:59,657 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000010_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000010
+2017-03-11 05:04:59,657 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,657 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000010_0' done.
+2017-03-11 05:04:59,661 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000010_0
+2017-03-11 05:04:59,662 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000011_0
+2017-03-11 05:04:59,665 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,666 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,667 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@657f6c43
+2017-03-11 05:04:59,673 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,676 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000011_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,688 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#12 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,688 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,688 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,689 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,689 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,690 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,691 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,692 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,692 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,692 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,692 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,693 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,693 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,694 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,703 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000011_0 is done. And is in the process of committing
+2017-03-11 05:04:59,704 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,704 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000011_0 is allowed to commit now
+2017-03-11 05:04:59,718 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000011_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000011
+2017-03-11 05:04:59,719 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,720 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000011_0' done.
+2017-03-11 05:04:59,720 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000011_0
+2017-03-11 05:04:59,720 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000012_0
+2017-03-11 05:04:59,725 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,726 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,726 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6e88a13e
+2017-03-11 05:04:59,730 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,738 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000012_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,742 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#13 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,743 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,743 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,743 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,743 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,743 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,745 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,745 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,746 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,746 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,746 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,746 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,747 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,749 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,761 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000012_0 is done. And is in the process of committing
+2017-03-11 05:04:59,762 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,762 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000012_0 is allowed to commit now
+2017-03-11 05:04:59,763 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000012_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000012
+2017-03-11 05:04:59,764 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,764 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000012_0' done.
+2017-03-11 05:04:59,764 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000012_0
+2017-03-11 05:04:59,764 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000013_0
+2017-03-11 05:04:59,766 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,766 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,766 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@33ed55b9
+2017-03-11 05:04:59,772 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,780 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000013_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,787 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#14 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,788 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,789 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,789 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,790 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,790 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,791 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,791 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,792 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,792 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,792 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,792 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,792 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,792 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,806 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000013_0 is done. And is in the process of committing
+2017-03-11 05:04:59,807 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,807 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000013_0 is allowed to commit now
+2017-03-11 05:04:59,808 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000013_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000013
+2017-03-11 05:04:59,809 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,809 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000013_0' done.
+2017-03-11 05:04:59,809 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000013_0
+2017-03-11 05:04:59,809 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000014_0
+2017-03-11 05:04:59,815 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,815 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,816 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1197c2dd
+2017-03-11 05:04:59,816 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,824 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000014_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,838 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#15 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,838 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,839 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,839 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,839 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,839 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,841 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,842 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,843 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,844 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,844 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,844 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,844 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,844 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,856 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000014_0 is done. And is in the process of committing
+2017-03-11 05:04:59,857 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,858 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000014_0 is allowed to commit now
+2017-03-11 05:04:59,858 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000014_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000014
+2017-03-11 05:04:59,859 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,859 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000014_0' done.
+2017-03-11 05:04:59,867 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000014_0
+2017-03-11 05:04:59,867 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000015_0
+2017-03-11 05:04:59,868 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,868 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,869 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@682c1f1b
+2017-03-11 05:04:59,869 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,885 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000015_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,887 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#16 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,888 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,888 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,888 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,889 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,889 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,894 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,894 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,896 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,896 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,896 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,896 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,897 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,897 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,928 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000015_0 is done. And is in the process of committing
+2017-03-11 05:04:59,929 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,929 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000015_0 is allowed to commit now
+2017-03-11 05:04:59,930 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000015_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000015
+2017-03-11 05:04:59,930 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,930 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000015_0' done.
+2017-03-11 05:04:59,931 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000015_0
+2017-03-11 05:04:59,931 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000016_0
+2017-03-11 05:04:59,940 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:04:59,941 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:04:59,941 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@792a89f4
+2017-03-11 05:04:59,945 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:04:59,951 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000016_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:04:59,960 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#17 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:04:59,963 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:04:59,965 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:04:59,965 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:04:59,966 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,966 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:04:59,968 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,969 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,969 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:04:59,970 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:04:59,970 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:04:59,970 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:04:59,971 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:04:59,972 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,992 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000016_0 is done. And is in the process of committing
+2017-03-11 05:04:59,993 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:04:59,993 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000016_0 is allowed to commit now
+2017-03-11 05:04:59,994 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000016_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000016
+2017-03-11 05:04:59,994 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:04:59,995 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000016_0' done.
+2017-03-11 05:04:59,995 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000016_0
+2017-03-11 05:04:59,995 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000017_0
+2017-03-11 05:05:00,000 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:00,001 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:00,001 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@10bd314a
+2017-03-11 05:05:00,006 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:00,016 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000017_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:00,025 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#18 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:00,026 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:00,027 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:00,027 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:00,027 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:00,027 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:00,029 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:00,029 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:00,030 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:00,030 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:00,030 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:00,030 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:00,030 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:00,031 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:00,039 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000017_0 is done. And is in the process of committing
+2017-03-11 05:05:00,040 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:00,040 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000017_0 is allowed to commit now
+2017-03-11 05:05:00,041 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000017_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000017
+2017-03-11 05:05:00,047 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:00,047 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000017_0' done.
+2017-03-11 05:05:00,047 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000017_0
+2017-03-11 05:05:00,050 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000018_0
+2017-03-11 05:05:00,058 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:00,058 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:00,059 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6944e8c1
+2017-03-11 05:05:00,060 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:00,067 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000018_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:00,181 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#19 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 70753664 len: 2672270 to MEMORY
+2017-03-11 05:05:00,479 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 70753664 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:00,479 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 70753664, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->70753664
+2017-03-11 05:05:00,480 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:00,480 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:00,480 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:00,481 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:00,481 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 70753646 bytes
+2017-03-11 05:05:01,283 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 36%
+2017-03-11 05:05:04,836 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 70753664 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:04,837 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2672278 bytes from disk
+2017-03-11 05:05:04,837 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:04,837 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:04,840 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 70753646 bytes
+2017-03-11 05:05:04,840 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:06,069 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:06,302 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 38%
+2017-03-11 05:05:08,242 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000018_0 is done. And is in the process of committing
+2017-03-11 05:05:08,243 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,243 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000018_0 is allowed to commit now
+2017-03-11 05:05:08,244 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000018_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000018
+2017-03-11 05:05:08,250 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,250 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000018_0' done.
+2017-03-11 05:05:08,250 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000018_0
+2017-03-11 05:05:08,250 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000019_0
+2017-03-11 05:05:08,259 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,260 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,260 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4c9534e6
+2017-03-11 05:05:08,276 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,277 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000019_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,286 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#20 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,289 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,289 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,300 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,300 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,301 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,301 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,302 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,304 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,304 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,306 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,307 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,307 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,308 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,316 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000019_0 is done. And is in the process of committing
+2017-03-11 05:05:08,317 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,317 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000019_0 is allowed to commit now
+2017-03-11 05:05:08,318 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000019_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000019
+2017-03-11 05:05:08,324 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,327 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000019_0' done.
+2017-03-11 05:05:08,327 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000019_0
+2017-03-11 05:05:08,327 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000020_0
+2017-03-11 05:05:08,328 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,329 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,329 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@417ef051
+2017-03-11 05:05:08,332 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,363 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000020_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,376 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#21 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,381 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,381 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,382 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,382 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,382 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,383 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,384 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,385 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,385 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,385 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,385 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,385 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,386 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,394 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000020_0 is done. And is in the process of committing
+2017-03-11 05:05:08,395 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,397 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000020_0 is allowed to commit now
+2017-03-11 05:05:08,399 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000020_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000020
+2017-03-11 05:05:08,405 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,405 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000020_0' done.
+2017-03-11 05:05:08,405 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000020_0
+2017-03-11 05:05:08,405 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000021_0
+2017-03-11 05:05:08,406 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,406 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,406 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@284c7ff4
+2017-03-11 05:05:08,414 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,415 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000021_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,417 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#22 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,424 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,424 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,424 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,425 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,425 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,426 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,426 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,427 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,427 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,427 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,427 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,427 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,429 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,431 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000021_0 is done. And is in the process of committing
+2017-03-11 05:05:08,432 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,433 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000021_0 is allowed to commit now
+2017-03-11 05:05:08,433 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000021_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000021
+2017-03-11 05:05:08,434 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,434 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000021_0' done.
+2017-03-11 05:05:08,435 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000021_0
+2017-03-11 05:05:08,435 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000022_0
+2017-03-11 05:05:08,440 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,440 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,440 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@37871111
+2017-03-11 05:05:08,445 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,450 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000022_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,452 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#23 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,459 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,460 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,462 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,463 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,463 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,464 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,464 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,464 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,465 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,465 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,465 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,465 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,465 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,474 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000022_0 is done. And is in the process of committing
+2017-03-11 05:05:08,476 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,476 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000022_0 is allowed to commit now
+2017-03-11 05:05:08,477 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000022_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000022
+2017-03-11 05:05:08,478 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,478 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000022_0' done.
+2017-03-11 05:05:08,478 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000022_0
+2017-03-11 05:05:08,478 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000023_0
+2017-03-11 05:05:08,483 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,483 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,483 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6bd1f8d3
+2017-03-11 05:05:08,484 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,494 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000023_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,506 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#24 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,507 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,507 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,508 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,508 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,508 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,509 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,509 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,510 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,510 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,510 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,510 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,511 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,511 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,528 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000023_0 is done. And is in the process of committing
+2017-03-11 05:05:08,529 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,529 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000023_0 is allowed to commit now
+2017-03-11 05:05:08,534 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000023_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000023
+2017-03-11 05:05:08,535 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,536 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000023_0' done.
+2017-03-11 05:05:08,536 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000023_0
+2017-03-11 05:05:08,536 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000024_0
+2017-03-11 05:05:08,550 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,551 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,551 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@36d85d0d
+2017-03-11 05:05:08,552 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,564 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000024_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,568 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#25 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,568 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,568 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,576 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,577 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,577 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,578 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,578 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,579 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,579 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,579 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,579 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,580 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,580 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,593 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000024_0 is done. And is in the process of committing
+2017-03-11 05:05:08,593 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,594 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000024_0 is allowed to commit now
+2017-03-11 05:05:08,594 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000024_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000024
+2017-03-11 05:05:08,595 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,595 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000024_0' done.
+2017-03-11 05:05:08,595 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000024_0
+2017-03-11 05:05:08,595 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000025_0
+2017-03-11 05:05:08,600 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,600 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,601 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7d317302
+2017-03-11 05:05:08,607 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,616 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000025_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,624 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#26 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,625 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,625 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,625 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,626 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,626 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,627 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,628 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,628 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,628 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,628 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,628 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,629 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,629 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,644 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000025_0 is done. And is in the process of committing
+2017-03-11 05:05:08,648 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,648 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000025_0 is allowed to commit now
+2017-03-11 05:05:08,649 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000025_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000025
+2017-03-11 05:05:08,658 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,658 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000025_0' done.
+2017-03-11 05:05:08,658 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000025_0
+2017-03-11 05:05:08,661 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000026_0
+2017-03-11 05:05:08,670 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,670 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,670 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3956ed04
+2017-03-11 05:05:08,671 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,678 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000026_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,687 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#27 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,688 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,692 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,693 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,693 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,693 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,694 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,694 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,695 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,695 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,695 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,695 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,707 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,707 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,715 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000026_0 is done. And is in the process of committing
+2017-03-11 05:05:08,716 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,716 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000026_0 is allowed to commit now
+2017-03-11 05:05:08,721 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000026_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000026
+2017-03-11 05:05:08,721 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,730 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000026_0' done.
+2017-03-11 05:05:08,730 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000026_0
+2017-03-11 05:05:08,730 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000027_0
+2017-03-11 05:05:08,735 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,736 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,737 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3d7bcc32
+2017-03-11 05:05:08,744 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,748 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000027_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,765 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#28 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,765 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,767 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,767 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,769 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,770 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,770 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,771 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,771 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,771 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,771 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,772 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,772 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,772 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,785 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000027_0 is done. And is in the process of committing
+2017-03-11 05:05:08,786 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,787 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000027_0 is allowed to commit now
+2017-03-11 05:05:08,788 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000027_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000027
+2017-03-11 05:05:08,790 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,791 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000027_0' done.
+2017-03-11 05:05:08,791 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000027_0
+2017-03-11 05:05:08,791 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000028_0
+2017-03-11 05:05:08,799 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,800 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,800 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@60cfbf92
+2017-03-11 05:05:08,806 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,816 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000028_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,822 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#29 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,825 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,825 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,826 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,826 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,826 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,827 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,831 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,832 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,835 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,835 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,835 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,836 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,839 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,845 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000028_0 is done. And is in the process of committing
+2017-03-11 05:05:08,846 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,846 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000028_0 is allowed to commit now
+2017-03-11 05:05:08,847 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000028_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000028
+2017-03-11 05:05:08,848 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,848 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000028_0' done.
+2017-03-11 05:05:08,848 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000028_0
+2017-03-11 05:05:08,848 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000029_0
+2017-03-11 05:05:08,853 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,853 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,854 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4cf8744c
+2017-03-11 05:05:08,854 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,869 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000029_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,875 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#30 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,877 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,878 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,878 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,879 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,879 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,880 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,880 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,881 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,881 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,881 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,881 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,882 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,882 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,894 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000029_0 is done. And is in the process of committing
+2017-03-11 05:05:08,899 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,901 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000029_0 is allowed to commit now
+2017-03-11 05:05:08,905 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000029_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000029
+2017-03-11 05:05:08,906 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,906 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000029_0' done.
+2017-03-11 05:05:08,906 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000029_0
+2017-03-11 05:05:08,909 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000030_0
+2017-03-11 05:05:08,910 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,911 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,911 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6786091b
+2017-03-11 05:05:08,911 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,935 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000030_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:08,937 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#31 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:08,945 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:08,952 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:08,956 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:08,956 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,957 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:08,957 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,958 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,959 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:08,959 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:08,959 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:08,959 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:08,959 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:08,960 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,967 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000030_0 is done. And is in the process of committing
+2017-03-11 05:05:08,967 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:08,968 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000030_0 is allowed to commit now
+2017-03-11 05:05:08,968 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000030_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000030
+2017-03-11 05:05:08,969 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:08,969 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000030_0' done.
+2017-03-11 05:05:08,971 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000030_0
+2017-03-11 05:05:08,971 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000031_0
+2017-03-11 05:05:08,977 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:08,978 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:08,978 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@149675a1
+2017-03-11 05:05:08,989 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:08,998 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000031_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,000 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#32 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,001 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,001 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,001 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,002 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,003 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,005 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,006 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,007 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,009 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,009 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,009 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,009 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,009 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,018 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000031_0 is done. And is in the process of committing
+2017-03-11 05:05:09,019 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,019 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000031_0 is allowed to commit now
+2017-03-11 05:05:09,020 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000031_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000031
+2017-03-11 05:05:09,020 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,020 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000031_0' done.
+2017-03-11 05:05:09,020 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000031_0
+2017-03-11 05:05:09,021 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000032_0
+2017-03-11 05:05:09,027 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,027 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,028 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@68d4fa15
+2017-03-11 05:05:09,028 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,033 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000032_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,035 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#33 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,040 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,043 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,043 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,043 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,044 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,044 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,046 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,046 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,046 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,046 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,046 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,047 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,048 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,055 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000032_0 is done. And is in the process of committing
+2017-03-11 05:05:09,057 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,057 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000032_0 is allowed to commit now
+2017-03-11 05:05:09,057 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000032_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000032
+2017-03-11 05:05:09,058 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,059 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000032_0' done.
+2017-03-11 05:05:09,059 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000032_0
+2017-03-11 05:05:09,059 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000033_0
+2017-03-11 05:05:09,063 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,065 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,065 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@37420057
+2017-03-11 05:05:09,066 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,072 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000033_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,073 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#34 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,073 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,076 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,081 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,082 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,083 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,085 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,085 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,086 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,086 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,086 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,086 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,086 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,087 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,111 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000033_0 is done. And is in the process of committing
+2017-03-11 05:05:09,114 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,115 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000033_0 is allowed to commit now
+2017-03-11 05:05:09,116 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000033_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000033
+2017-03-11 05:05:09,124 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,124 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000033_0' done.
+2017-03-11 05:05:09,124 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000033_0
+2017-03-11 05:05:09,124 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000034_0
+2017-03-11 05:05:09,129 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,130 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,131 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2950ece0
+2017-03-11 05:05:09,133 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,147 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000034_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,155 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#35 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,157 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,157 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,157 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,158 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,158 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,158 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,159 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,159 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,159 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,159 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,159 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,160 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,160 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,172 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000034_0 is done. And is in the process of committing
+2017-03-11 05:05:09,174 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,174 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000034_0 is allowed to commit now
+2017-03-11 05:05:09,183 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000034_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000034
+2017-03-11 05:05:09,187 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,187 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000034_0' done.
+2017-03-11 05:05:09,187 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000034_0
+2017-03-11 05:05:09,187 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000035_0
+2017-03-11 05:05:09,191 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,191 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,191 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@24026868
+2017-03-11 05:05:09,195 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,204 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000035_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,208 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#36 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,208 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,208 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,209 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,209 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,209 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,210 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,219 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,219 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,219 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,220 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,220 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,220 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,220 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,235 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000035_0 is done. And is in the process of committing
+2017-03-11 05:05:09,239 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,240 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000035_0 is allowed to commit now
+2017-03-11 05:05:09,240 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000035_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000035
+2017-03-11 05:05:09,241 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,246 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000035_0' done.
+2017-03-11 05:05:09,246 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000035_0
+2017-03-11 05:05:09,246 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000036_0
+2017-03-11 05:05:09,255 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,256 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,256 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@68fd6f7
+2017-03-11 05:05:09,257 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,260 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000036_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,266 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#37 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,274 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,274 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,274 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,275 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,275 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,275 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,276 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,276 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,276 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,276 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,276 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,277 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,282 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,287 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000036_0 is done. And is in the process of committing
+2017-03-11 05:05:09,288 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,288 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000036_0 is allowed to commit now
+2017-03-11 05:05:09,288 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000036_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000036
+2017-03-11 05:05:09,293 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,293 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000036_0' done.
+2017-03-11 05:05:09,293 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000036_0
+2017-03-11 05:05:09,293 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000037_0
+2017-03-11 05:05:09,304 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,306 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 05:05:09,308 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,308 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@644f2668
+2017-03-11 05:05:09,309 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,309 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000037_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,320 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#38 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,322 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,322 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,322 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,322 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,322 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,323 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,324 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,324 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,324 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,324 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,324 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,325 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,325 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,336 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000037_0 is done. And is in the process of committing
+2017-03-11 05:05:09,337 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,338 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000037_0 is allowed to commit now
+2017-03-11 05:05:09,338 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000037_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000037
+2017-03-11 05:05:09,344 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,344 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000037_0' done.
+2017-03-11 05:05:09,344 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000037_0
+2017-03-11 05:05:09,344 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000038_0
+2017-03-11 05:05:09,350 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,351 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,351 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@69348fa3
+2017-03-11 05:05:09,355 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,360 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000038_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,367 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#39 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,367 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,375 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,375 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,376 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,376 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,377 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,377 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,378 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,378 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,378 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,378 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,379 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,379 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,381 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000038_0 is done. And is in the process of committing
+2017-03-11 05:05:09,382 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,382 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000038_0 is allowed to commit now
+2017-03-11 05:05:09,382 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000038_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000038
+2017-03-11 05:05:09,383 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,383 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000038_0' done.
+2017-03-11 05:05:09,383 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000038_0
+2017-03-11 05:05:09,383 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000039_0
+2017-03-11 05:05:09,384 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,384 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,385 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3e7b0986
+2017-03-11 05:05:09,385 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,394 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000039_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,397 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#40 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,397 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,398 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,398 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,398 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,399 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,399 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,400 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,400 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,400 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,400 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,400 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,401 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,401 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,407 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000039_0 is done. And is in the process of committing
+2017-03-11 05:05:09,408 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,408 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000039_0 is allowed to commit now
+2017-03-11 05:05:09,409 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000039_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000039
+2017-03-11 05:05:09,409 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,421 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000039_0' done.
+2017-03-11 05:05:09,421 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000039_0
+2017-03-11 05:05:09,422 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000040_0
+2017-03-11 05:05:09,428 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,428 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,428 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5e71a70b
+2017-03-11 05:05:09,431 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,437 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000040_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,442 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#41 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,448 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,449 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,449 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,450 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,450 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,451 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,452 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,452 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,452 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,452 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,452 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,453 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,453 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,466 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000040_0 is done. And is in the process of committing
+2017-03-11 05:05:09,467 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,468 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000040_0 is allowed to commit now
+2017-03-11 05:05:09,469 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000040_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000040
+2017-03-11 05:05:09,469 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,469 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000040_0' done.
+2017-03-11 05:05:09,469 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000040_0
+2017-03-11 05:05:09,469 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000041_0
+2017-03-11 05:05:09,470 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,471 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,471 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@170287bf
+2017-03-11 05:05:09,471 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,486 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000041_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,499 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#42 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,499 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,504 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,504 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,505 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,505 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,505 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,506 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,506 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,507 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,508 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,508 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,508 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,509 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,536 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000041_0 is done. And is in the process of committing
+2017-03-11 05:05:09,537 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,537 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000041_0 is allowed to commit now
+2017-03-11 05:05:09,540 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000041_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000041
+2017-03-11 05:05:09,540 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,540 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000041_0' done.
+2017-03-11 05:05:09,540 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000041_0
+2017-03-11 05:05:09,540 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000042_0
+2017-03-11 05:05:09,545 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,547 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,547 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4abfa6aa
+2017-03-11 05:05:09,550 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,561 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000042_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,563 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#43 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,571 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,572 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,575 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,575 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,576 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,576 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,576 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,577 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,577 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,577 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,577 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,577 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,578 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,582 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000042_0 is done. And is in the process of committing
+2017-03-11 05:05:09,583 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,583 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000042_0 is allowed to commit now
+2017-03-11 05:05:09,584 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000042_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000042
+2017-03-11 05:05:09,584 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,584 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000042_0' done.
+2017-03-11 05:05:09,584 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000042_0
+2017-03-11 05:05:09,584 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000043_0
+2017-03-11 05:05:09,595 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,595 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,595 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@41e69be8
+2017-03-11 05:05:09,605 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,611 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000043_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,618 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#44 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,618 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,624 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,625 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,625 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,625 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,627 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,627 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,629 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,629 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,629 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,629 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,629 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,633 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,646 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000043_0 is done. And is in the process of committing
+2017-03-11 05:05:09,646 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,647 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000043_0 is allowed to commit now
+2017-03-11 05:05:09,650 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000043_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000043
+2017-03-11 05:05:09,655 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,656 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000043_0' done.
+2017-03-11 05:05:09,656 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000043_0
+2017-03-11 05:05:09,656 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000044_0
+2017-03-11 05:05:09,657 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,657 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,657 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2ff68a99
+2017-03-11 05:05:09,665 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,667 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000044_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,668 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#45 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,668 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,668 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,673 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,674 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,674 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,675 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,675 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,675 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,675 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,675 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,675 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,676 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,676 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,685 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000044_0 is done. And is in the process of committing
+2017-03-11 05:05:09,685 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,685 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000044_0 is allowed to commit now
+2017-03-11 05:05:09,692 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000044_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000044
+2017-03-11 05:05:09,693 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,693 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000044_0' done.
+2017-03-11 05:05:09,693 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000044_0
+2017-03-11 05:05:09,693 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000045_0
+2017-03-11 05:05:09,694 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,697 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,697 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5d2abe5d
+2017-03-11 05:05:09,698 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,706 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000045_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,708 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#46 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,709 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,709 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,709 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,710 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,710 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,712 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,713 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,713 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,713 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,713 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,713 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,714 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,717 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,722 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000045_0 is done. And is in the process of committing
+2017-03-11 05:05:09,723 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,723 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000045_0 is allowed to commit now
+2017-03-11 05:05:09,723 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000045_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000045
+2017-03-11 05:05:09,724 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,724 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000045_0' done.
+2017-03-11 05:05:09,724 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000045_0
+2017-03-11 05:05:09,724 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000046_0
+2017-03-11 05:05:09,725 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,725 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,725 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6ab39ec9
+2017-03-11 05:05:09,731 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,735 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000046_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,738 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#47 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,751 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,751 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,752 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,752 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,752 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,753 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,753 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,753 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,754 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,754 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,754 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,754 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,755 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,760 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000046_0 is done. And is in the process of committing
+2017-03-11 05:05:09,761 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,761 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000046_0 is allowed to commit now
+2017-03-11 05:05:09,762 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000046_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000046
+2017-03-11 05:05:09,762 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,762 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000046_0' done.
+2017-03-11 05:05:09,762 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000046_0
+2017-03-11 05:05:09,763 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000047_0
+2017-03-11 05:05:09,767 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,768 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,768 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6e6fdcaf
+2017-03-11 05:05:09,769 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,773 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000047_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,783 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#48 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,784 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,786 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,786 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,787 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,787 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,787 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,787 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,788 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,788 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,788 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,788 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,788 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,789 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,795 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000047_0 is done. And is in the process of committing
+2017-03-11 05:05:09,795 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,795 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000047_0 is allowed to commit now
+2017-03-11 05:05:09,796 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000047_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000047
+2017-03-11 05:05:09,797 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,797 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000047_0' done.
+2017-03-11 05:05:09,797 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000047_0
+2017-03-11 05:05:09,797 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000048_0
+2017-03-11 05:05:09,803 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,804 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,804 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7fecdc36
+2017-03-11 05:05:09,804 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,805 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000048_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,808 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#49 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,809 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,809 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,813 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,814 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,814 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,815 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,815 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,815 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,815 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,815 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,815 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,816 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,816 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,819 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000048_0 is done. And is in the process of committing
+2017-03-11 05:05:09,819 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,820 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000048_0 is allowed to commit now
+2017-03-11 05:05:09,820 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000048_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000048
+2017-03-11 05:05:09,821 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,821 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000048_0' done.
+2017-03-11 05:05:09,821 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000048_0
+2017-03-11 05:05:09,822 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1689988440_0001_r_000049_0
+2017-03-11 05:05:09,827 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:05:09,827 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:05:09,827 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7993bafc
+2017-03-11 05:05:09,828 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:05:09,834 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1689988440_0001_r_000049_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:05:09,836 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#50 about to shuffle output of map attempt_local1689988440_0001_m_000000_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 05:05:09,837 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1689988440_0001_m_000000_0
+2017-03-11 05:05:09,837 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-03-11 05:05:09,837 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:05:09,838 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,838 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:05:09,839 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,839 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,839 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 2 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:05:09,847 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 22 bytes from disk
+2017-03-11 05:05:09,847 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:05:09,848 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:05:09,848 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-03-11 05:05:09,848 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,868 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1689988440_0001_r_000049_0 is done. And is in the process of committing
+2017-03-11 05:05:09,869 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:05:09,869 INFO org.apache.hadoop.mapred.Task: Task attempt_local1689988440_0001_r_000049_0 is allowed to commit now
+2017-03-11 05:05:09,870 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1689988440_0001_r_000049_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1689988440_0001_r_000049
+2017-03-11 05:05:09,871 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:05:09,871 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1689988440_0001_r_000049_0' done.
+2017-03-11 05:05:09,871 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1689988440_0001_r_000049_0
+2017-03-11 05:05:09,871 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 05:05:10,307 INFO org.apache.hadoop.mapreduce.Job: Job job_local1689988440_0001 completed successfully
+2017-03-11 05:05:10,494 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
 	File System Counters
-		FILE: Number of bytes read=16657772
-		FILE: Number of bytes written=12451545
+		FILE: Number of bytes read=175845044
+		FILE: Number of bytes written=235476648
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=115105
-		Map output bytes=4776007
-		Map output materialized bytes=2376884
-		Input split bytes=120
+		Map input records=7
+		Map output records=873502
+		Map output bytes=69006658
+		Map output materialized bytes=2672956
+		Input split bytes=122
 		Combine input records=0
 		Combine output records=0
-		Reduce input groups=115105
-		Reduce shuffle bytes=2376884
-		Reduce input records=115105
-		Reduce output records=115105
-		Spilled Records=230210
-		Shuffled Maps =1
+		Reduce input groups=873481
+		Reduce shuffle bytes=2672956
+		Reduce input records=873502
+		Reduce output records=1
+		Spilled Records=1747004
+		Shuffled Maps =50
 		Failed Shuffles=0
-		Merged Map outputs=1
-		GC time elapsed (ms)=140
+		Merged Map outputs=50
+		GC time elapsed (ms)=169
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
-		Total committed heap usage (bytes)=331227136
+		Total committed heap usage (bytes)=9956241408
 	Shuffle Errors
 		BAD_ID=0
 		CONNECTION=0
@@ -1322,108 +4110,122 @@ Caused by: java.lang.NullPointerException
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=454
 	File Output Format Counters 
-		Bytes Written=4789771
-2017-03-10 14:54:30,023 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:54:30,544 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:54:30,565 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:54:31,208 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:54:31,222 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:54:31,311 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:54:31,680 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local520002813_0001
-2017-03-10 14:54:32,173 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:54:32,174 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local520002813_0001
-2017-03-10 14:54:32,184 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:54:32,195 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:54:32,197 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:54:32,352 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:54:32,354 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local520002813_0001_m_000000_0
-2017-03-10 14:54:32,427 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:54:32,457 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:54:32,463 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:54:32,548 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:54:32,549 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:54:32,550 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:54:32,550 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:54:32,550 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:54:32,559 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:54:33,178 INFO org.apache.hadoop.mapreduce.Job: Job job_local520002813_0001 running in uber mode : false
-2017-03-10 14:54:33,179 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 5541084; bufvoid = 104857600
-2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25753980(103015920); length = 460417/6553600
-2017-03-10 14:54:36,206 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:54:37,261 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:54:37,264 INFO org.apache.hadoop.mapred.Task: Task:attempt_local520002813_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:54:37,275 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:54:37,280 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local520002813_0001_m_000000_0' done.
-2017-03-10 14:54:37,280 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local520002813_0001_m_000000_0
-2017-03-10 14:54:37,280 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:54:37,284 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:54:37,284 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local520002813_0001_r_000000_0
-2017-03-10 14:54:37,289 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:54:37,289 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:54:37,293 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@22c4d1da
-2017-03-10 14:54:37,303 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:54:37,312 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local520002813_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:54:37,342 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:54:37,342 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local520002813_0001_m_000000_0 decomp: 5771296 len: 2500118 to MEMORY
-2017-03-10 14:54:37,405 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5771296 bytes from map-output for attempt_local520002813_0001_m_000000_0
-2017-03-10 14:54:37,408 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5771296, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5771296
-2017-03-10 14:54:37,410 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:54:37,410 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:54:37,411 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:54:37,420 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:54:37,420 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
-2017-03-10 14:54:38,188 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:54:38,209 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5771296 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:54:38,209 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2500126 bytes from disk
-2017-03-10 14:54:38,210 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:54:38,210 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:54:38,210 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
-2017-03-10 14:54:38,211 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:54:38,219 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:54:39,174 INFO org.apache.hadoop.mapred.Task: Task:attempt_local520002813_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:54:39,181 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:54:39,181 INFO org.apache.hadoop.mapred.Task: Task attempt_local520002813_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:54:39,187 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local520002813_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local520002813_0001_r_000000
-2017-03-10 14:54:39,188 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:54:39,188 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local520002813_0001_r_000000_0' done.
-2017-03-10 14:54:39,188 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local520002813_0001_r_000000_0
-2017-03-10 14:54:39,188 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:54:39,190 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:54:40,191 INFO org.apache.hadoop.mapreduce.Job: Job job_local520002813_0001 completed successfully
-2017-03-10 14:54:40,203 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
+		Bytes Written=412
+	similarity.NaiveApproach$CompCounter
+		NUM=21
+2017-03-11 05:15:56,663 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 05:15:59,556 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 05:15:59,585 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 05:16:01,136 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 05:16:01,188 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 05:16:01,385 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 05:16:02,358 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1188539971_0001
+2017-03-11 05:16:03,356 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 05:16:03,357 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1188539971_0001
+2017-03-11 05:16:03,363 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 05:16:03,396 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:16:03,405 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 05:16:03,718 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 05:16:03,719 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1188539971_0001_m_000000_0
+2017-03-11 05:16:03,880 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:16:03,951 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:16:03,956 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447
+2017-03-11 05:16:04,364 INFO org.apache.hadoop.mapreduce.Job: Job job_local1188539971_0001 running in uber mode : false
+2017-03-11 05:16:04,377 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 05:16:04,622 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 05:16:04,622 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 05:16:04,622 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 05:16:04,622 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 05:16:04,622 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 05:16:04,651 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 05:16:05,498 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 05:16:05,501 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 05:16:05,501 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 05:16:05,501 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 68133156; bufvoid = 104857600
+2017-03-11 05:16:05,501 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22720392(90881568); length = 3494005/6553600
+2017-03-11 05:16:10,017 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 05:16:10,425 INFO org.apache.hadoop.mapreduce.Job:  map 67% reduce 0%
+2017-03-11 05:16:13,020 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 05:16:16,024 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 05:16:18,089 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 05:16:19,040 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 05:16:22,042 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 05:16:25,467 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 05:16:25,495 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1188539971_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 05:16:25,505 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447
+2017-03-11 05:16:25,505 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1188539971_0001_m_000000_0' done.
+2017-03-11 05:16:25,506 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1188539971_0001_m_000000_0
+2017-03-11 05:16:25,509 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 05:16:25,529 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 05:16:25,529 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1188539971_0001_r_000000_0
+2017-03-11 05:16:25,562 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 05:16:25,563 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 05:16:25,583 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7bfaa588
+2017-03-11 05:16:25,662 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 05:16:25,688 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1188539971_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 05:16:25,901 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 05:16:25,902 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1188539971_0001_m_000000_0 decomp: 69880162 len: 2722285 to MEMORY
+2017-03-11 05:16:26,317 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 69880162 bytes from map-output for attempt_local1188539971_0001_m_000000_0
+2017-03-11 05:16:26,340 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 69880162, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->69880162
+2017-03-11 05:16:26,345 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 05:16:26,347 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 05:16:26,347 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 05:16:26,375 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:16:26,376 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 69880144 bytes
+2017-03-11 05:16:26,461 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-11 05:16:31,564 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-03-11 05:16:32,471 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 56%
+2017-03-11 05:16:33,512 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 69880162 bytes to disk to satisfy reduce memory limit
+2017-03-11 05:16:33,514 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2722293 bytes from disk
+2017-03-11 05:16:33,528 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 05:16:33,528 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 05:16:33,529 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 69880144 bytes
+2017-03-11 05:16:33,529 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-03-11 05:16:33,574 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 05:16:34,582 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:16:35,475 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 68%
+2017-03-11 05:16:37,584 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:16:38,482 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 91%
+2017-03-11 05:16:38,777 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1188539971_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 05:16:38,778 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:16:38,778 INFO org.apache.hadoop.mapred.Task: Task attempt_local1188539971_0001_r_000000_0 is allowed to commit now
+2017-03-11 05:16:38,789 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1188539971_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1188539971_0001_r_000000
+2017-03-11 05:16:38,792 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 05:16:38,793 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1188539971_0001_r_000000_0' done.
+2017-03-11 05:16:38,793 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1188539971_0001_r_000000_0
+2017-03-11 05:16:38,793 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 05:16:39,492 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 05:16:39,492 INFO org.apache.hadoop.mapreduce.Job: Job job_local1188539971_0001 completed successfully
+2017-03-11 05:16:39,548 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
 	File System Counters
-		FILE: Number of bytes read=16904240
-		FILE: Number of bytes written=13589484
+		FILE: Number of bytes read=5445872
+		FILE: Number of bytes written=8698043
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=115105
-		Map output bytes=5541084
-		Map output materialized bytes=2500118
-		Input split bytes=120
+		Map input records=7
+		Map output records=873502
+		Map output bytes=68133156
+		Map output materialized bytes=2722285
+		Input split bytes=122
 		Combine input records=0
 		Combine output records=0
-		Reduce input groups=115105
-		Reduce shuffle bytes=2500118
-		Reduce input records=115105
-		Reduce output records=115105
-		Spilled Records=230210
+		Reduce input groups=873481
+		Reduce shuffle bytes=2722285
+		Reduce input records=873502
+		Reduce output records=1
+		Spilled Records=1747004
 		Shuffled Maps =1
 		Failed Shuffles=0
 		Merged Map outputs=1
-		GC time elapsed (ms)=124
+		GC time elapsed (ms)=250
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
-		Total committed heap usage (bytes)=331227136
+		Total committed heap usage (bytes)=378413056
 	Shuffle Errors
 		BAD_ID=0
 		CONNECTION=0
@@ -1432,110 +4234,869 @@ Caused by: java.lang.NullPointerException
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=447
 	File Output Format Counters 
-		Bytes Written=5560824
-	similarity.WordSort$DocLineCounter
-		NUM=124787
-2017-03-10 14:58:53,626 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 14:58:54,092 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 14:58:54,097 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 14:58:54,564 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 14:58:54,572 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 14:58:54,619 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 14:58:54,850 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local544686432_0001
-2017-03-10 14:58:55,274 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 14:58:55,275 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local544686432_0001
-2017-03-10 14:58:55,284 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 14:58:55,298 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:58:55,301 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 14:58:55,409 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 14:58:55,411 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local544686432_0001_m_000000_0
-2017-03-10 14:58:55,473 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:58:55,485 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:58:55,487 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 14:58:55,566 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 14:58:55,567 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 14:58:55,567 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 14:58:55,567 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 14:58:55,567 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 14:58:55,569 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 14:58:56,279 INFO org.apache.hadoop.mapreduce.Job: Job job_local544686432_0001 running in uber mode : false
-2017-03-10 14:58:56,280 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 14:58:58,471 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 14:58:58,472 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 14:58:58,472 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 14:58:58,472 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 5541084; bufvoid = 104857600
-2017-03-10 14:58:58,472 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25753980(103015920); length = 460417/6553600
-2017-03-10 14:58:58,671 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 14:58:59,336 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 14:58:59,339 INFO org.apache.hadoop.mapred.Task: Task:attempt_local544686432_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 14:58:59,344 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 14:58:59,344 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local544686432_0001_m_000000_0' done.
-2017-03-10 14:58:59,344 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local544686432_0001_m_000000_0
-2017-03-10 14:58:59,345 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 14:58:59,347 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 14:58:59,348 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local544686432_0001_r_000000_0
-2017-03-10 14:58:59,353 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 14:58:59,353 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 14:58:59,355 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@8a93430
-2017-03-10 14:58:59,364 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 14:58:59,370 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local544686432_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 14:58:59,395 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 14:58:59,395 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local544686432_0001_m_000000_0 decomp: 5771296 len: 2405517 to MEMORY
-2017-03-10 14:58:59,447 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5771296 bytes from map-output for attempt_local544686432_0001_m_000000_0
-2017-03-10 14:58:59,447 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5771296, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5771296
-2017-03-10 14:58:59,448 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 14:58:59,450 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:58:59,450 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 14:58:59,454 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:58:59,455 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
-2017-03-10 14:59:00,203 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5771296 bytes to disk to satisfy reduce memory limit
-2017-03-10 14:59:00,203 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2405525 bytes from disk
-2017-03-10 14:59:00,204 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 14:59:00,204 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 14:59:00,205 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
-2017-03-10 14:59:00,205 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:59:00,210 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 14:59:00,290 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 14:59:00,890 INFO org.apache.hadoop.mapred.Task: Task:attempt_local544686432_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 14:59:00,895 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 14:59:00,895 INFO org.apache.hadoop.mapred.Task: Task attempt_local544686432_0001_r_000000_0 is allowed to commit now
-2017-03-10 14:59:00,896 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local544686432_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local544686432_0001_r_000000
-2017-03-10 14:59:00,897 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 14:59:00,897 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local544686432_0001_r_000000_0' done.
-2017-03-10 14:59:00,901 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local544686432_0001_r_000000_0
-2017-03-10 14:59:00,901 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 14:59:01,290 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 14:59:01,291 INFO org.apache.hadoop.mapreduce.Job: Job job_local544686432_0001 completed successfully
-2017-03-10 14:59:01,312 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
+		Bytes Written=20
+	similarity.NaiveApproach$CompCounter
+		NUM=21
+2017-03-11 06:34:28,878 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 06:34:31,481 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 06:34:31,482 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 06:34:32,242 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 06:34:32,254 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 06:34:32,409 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 06:34:33,014 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local123106002_0001
+2017-03-11 06:34:33,807 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 06:34:33,808 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local123106002_0001
+2017-03-11 06:34:33,812 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 06:34:33,833 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:34:33,850 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 06:34:34,074 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 06:34:34,075 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local123106002_0001_m_000000_0
+2017-03-11 06:34:34,230 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:34:34,304 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:34:34,307 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447
+2017-03-11 06:34:34,676 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:34:34,676 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:34:34,676 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:34:34,677 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:34:34,678 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:34:34,704 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:34:34,825 INFO org.apache.hadoop.mapreduce.Job: Job job_local123106002_0001 running in uber mode : false
+2017-03-11 06:34:34,830 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 06:34:35,810 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:34:35,810 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:34:35,810 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 06:34:35,810 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 68133156; bufvoid = 104857600
+2017-03-11 06:34:35,810 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22720392(90881568); length = 3494005/6553600
+2017-03-11 06:45:21,439 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 06:45:23,459 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 06:45:23,466 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 06:45:24,206 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 06:45:24,225 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 06:45:24,358 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 06:45:24,923 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local271795742_0001
+2017-03-11 06:45:25,694 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 06:45:25,695 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local271795742_0001
+2017-03-11 06:45:25,702 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 06:45:25,715 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:45:25,734 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 06:45:25,966 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 06:45:25,971 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271795742_0001_m_000000_0
+2017-03-11 06:45:26,116 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:45:26,170 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:45:26,173 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447
+2017-03-11 06:45:26,540 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:45:26,540 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:45:26,540 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:45:26,541 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:45:26,541 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:45:26,563 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:45:26,712 INFO org.apache.hadoop.mapreduce.Job: Job job_local271795742_0001 running in uber mode : false
+2017-03-11 06:45:26,715 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 06:45:27,336 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:45:27,336 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:45:27,336 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 06:45:27,336 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 68133156; bufvoid = 104857600
+2017-03-11 06:45:27,337 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22720392(90881568); length = 3494005/6553600
+2017-03-11 06:45:32,174 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 06:45:32,731 INFO org.apache.hadoop.mapreduce.Job:  map 67% reduce 0%
+2017-03-11 06:45:35,198 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 06:48:34,358 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 06:48:36,420 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 06:48:36,423 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 06:48:36,486 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
+2017-03-11 06:48:47,258 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 06:48:49,178 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 06:48:49,194 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 06:48:49,953 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 06:48:49,982 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 06:48:50,130 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:23
+2017-03-11 06:48:50,734 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local768169073_0001
+2017-03-11 06:48:51,469 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 06:48:51,470 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local768169073_0001
+2017-03-11 06:48:51,483 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 06:48:51,514 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:48:51,539 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 06:48:51,866 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 06:48:51,868 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local768169073_0001_m_000000_0
+2017-03-11 06:48:51,980 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:48:52,036 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:48:52,044 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+20
+2017-03-11 06:48:52,442 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:48:52,443 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:48:52,443 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:48:52,443 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:48:52,443 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:48:52,462 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:48:52,498 INFO org.apache.hadoop.mapreduce.Job: Job job_local768169073_0001 running in uber mode : false
+2017-03-11 06:48:52,501 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 06:48:52,897 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:48:52,902 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:48:52,902 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 06:48:52,902 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 10606810; bufvoid = 104857600
+2017-03-11 06:48:52,902 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 06:48:54,008 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 06:48:54,942 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 06:48:54,955 INFO org.apache.hadoop.mapred.Task: Task:attempt_local768169073_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 06:48:54,980 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+20
+2017-03-11 06:48:54,980 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local768169073_0001_m_000000_0' done.
+2017-03-11 06:48:54,980 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local768169073_0001_m_000000_0
+2017-03-11 06:48:54,981 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local768169073_0001_m_000001_0
+2017-03-11 06:48:54,986 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:48:54,987 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:48:54,988 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:20+20
+2017-03-11 06:48:55,124 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:48:55,130 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:48:55,130 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:48:55,130 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:48:55,130 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:48:55,138 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:48:55,143 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:48:55,149 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:48:55,160 INFO org.apache.hadoop.mapred.Task: Task:attempt_local768169073_0001_m_000001_0 is done. And is in the process of committing
+2017-03-11 06:48:55,162 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:20+20
+2017-03-11 06:48:55,163 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local768169073_0001_m_000001_0' done.
+2017-03-11 06:48:55,163 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local768169073_0001_m_000001_0
+2017-03-11 06:48:55,163 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local768169073_0001_m_000002_0
+2017-03-11 06:48:55,166 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:48:55,166 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:48:55,168 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:40+20
+2017-03-11 06:48:55,290 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:48:55,290 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:48:55,290 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:48:55,291 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:48:55,291 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:48:55,299 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:48:55,301 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:48:55,301 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:48:55,313 INFO org.apache.hadoop.mapred.Task: Task:attempt_local768169073_0001_m_000002_0 is done. And is in the process of committing
+2017-03-11 06:48:55,331 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:40+20
+2017-03-11 06:48:55,331 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local768169073_0001_m_000002_0' done.
+2017-03-11 06:48:55,331 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local768169073_0001_m_000002_0
+2017-03-11 06:48:55,331 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local768169073_0001_m_000003_0
+2017-03-11 06:48:55,336 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:48:55,338 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:48:55,339 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:60+20
+2017-03-11 06:48:55,511 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-11 06:48:55,545 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:48:55,545 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:48:55,545 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:48:55,545 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:48:55,546 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:48:55,552 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:48:55,625 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:48:55,632 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:48:55,633 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 06:48:55,636 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 10606810; bufvoid = 104857600
+2017-03-11 06:48:55,639 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 06:55:36,696 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 06:55:39,364 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 06:55:39,372 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 06:55:40,232 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 06:55:40,270 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 06:55:40,471 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:447
+2017-03-11 06:55:41,164 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1197297442_0001
+2017-03-11 06:55:41,900 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 06:55:41,901 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1197297442_0001
+2017-03-11 06:55:41,908 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 06:55:41,938 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:41,960 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 06:55:42,690 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 06:55:42,691 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000000_0
+2017-03-11 06:55:42,840 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:42,904 INFO org.apache.hadoop.mapreduce.Job: Job job_local1197297442_0001 running in uber mode : false
+2017-03-11 06:55:42,906 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 06:55:42,924 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:42,931 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+1
+2017-03-11 06:55:43,638 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:43,638 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:43,638 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:43,638 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:43,638 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:43,667 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:44,372 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:44,372 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:44,373 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 06:55:44,373 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 10606810; bufvoid = 104857600
+2017-03-11 06:55:44,373 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 06:55:45,670 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 06:55:46,599 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 06:55:46,622 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 06:55:46,650 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+1
+2017-03-11 06:55:46,650 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000000_0' done.
+2017-03-11 06:55:46,650 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000000_0
+2017-03-11 06:55:46,650 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000001_0
+2017-03-11 06:55:46,657 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:46,658 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:46,668 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:1+1
+2017-03-11 06:55:46,961 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-11 06:55:46,996 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:46,999 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:47,000 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:47,003 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:47,004 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:47,010 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:47,023 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:47,023 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:47,031 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000001_0 is done. And is in the process of committing
+2017-03-11 06:55:47,038 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:1+1
+2017-03-11 06:55:47,039 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000001_0' done.
+2017-03-11 06:55:47,039 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000001_0
+2017-03-11 06:55:47,040 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000002_0
+2017-03-11 06:55:47,046 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:47,047 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:47,049 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:2+1
+2017-03-11 06:55:47,324 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:47,330 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:47,331 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:47,338 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:47,338 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:47,344 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:47,345 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:47,346 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:47,354 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000002_0 is done. And is in the process of committing
+2017-03-11 06:55:47,356 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:2+1
+2017-03-11 06:55:47,360 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000002_0' done.
+2017-03-11 06:55:47,360 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000002_0
+2017-03-11 06:55:47,360 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000003_0
+2017-03-11 06:55:47,364 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:47,370 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:47,371 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:3+1
+2017-03-11 06:55:47,679 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:47,687 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:47,688 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:47,688 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:47,688 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:47,688 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:47,690 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:47,690 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:47,692 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000003_0 is done. And is in the process of committing
+2017-03-11 06:55:47,699 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:3+1
+2017-03-11 06:55:47,708 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000003_0' done.
+2017-03-11 06:55:47,709 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000003_0
+2017-03-11 06:55:47,709 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000004_0
+2017-03-11 06:55:47,714 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:47,714 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:47,715 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:4+1
+2017-03-11 06:55:48,021 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:48,021 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:48,021 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:48,021 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:48,021 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:48,026 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:48,027 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:48,030 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:48,054 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000004_0 is done. And is in the process of committing
+2017-03-11 06:55:48,057 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:4+1
+2017-03-11 06:55:48,057 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000004_0' done.
+2017-03-11 06:55:48,059 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000004_0
+2017-03-11 06:55:48,060 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000005_0
+2017-03-11 06:55:48,068 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:48,069 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:48,077 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:5+1
+2017-03-11 06:55:48,380 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:48,381 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:48,381 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:48,381 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:48,381 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:48,381 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:48,383 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:48,383 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:48,385 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000005_0 is done. And is in the process of committing
+2017-03-11 06:55:48,396 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:5+1
+2017-03-11 06:55:48,396 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000005_0' done.
+2017-03-11 06:55:48,396 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000005_0
+2017-03-11 06:55:48,396 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000006_0
+2017-03-11 06:55:48,403 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:48,404 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:48,406 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:6+1
+2017-03-11 06:55:48,764 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:48,780 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:48,780 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:48,780 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:48,780 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:48,787 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:48,790 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:48,795 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:48,797 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000006_0 is done. And is in the process of committing
+2017-03-11 06:55:48,801 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:6+1
+2017-03-11 06:55:48,801 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000006_0' done.
+2017-03-11 06:55:48,801 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000006_0
+2017-03-11 06:55:48,801 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000007_0
+2017-03-11 06:55:48,810 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:48,811 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:48,812 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:7+1
+2017-03-11 06:55:49,155 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:49,160 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:49,160 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:49,161 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:49,161 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:49,170 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:49,173 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:49,173 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:49,175 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000007_0 is done. And is in the process of committing
+2017-03-11 06:55:49,192 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:7+1
+2017-03-11 06:55:49,192 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000007_0' done.
+2017-03-11 06:55:49,193 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000007_0
+2017-03-11 06:55:49,195 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000008_0
+2017-03-11 06:55:49,198 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:49,199 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:49,199 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:8+1
+2017-03-11 06:55:49,595 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:49,599 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:49,600 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:49,600 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:49,600 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:49,606 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:49,609 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:49,609 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:49,618 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000008_0 is done. And is in the process of committing
+2017-03-11 06:55:49,621 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:8+1
+2017-03-11 06:55:49,623 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000008_0' done.
+2017-03-11 06:55:49,624 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000008_0
+2017-03-11 06:55:49,630 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000009_0
+2017-03-11 06:55:49,635 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:49,635 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:49,636 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:9+1
+2017-03-11 06:55:50,136 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:50,137 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:50,141 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:50,141 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:50,141 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:50,151 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:50,152 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:50,153 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:50,170 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000009_0 is done. And is in the process of committing
+2017-03-11 06:55:50,193 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:9+1
+2017-03-11 06:55:50,201 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000009_0' done.
+2017-03-11 06:55:50,202 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000009_0
+2017-03-11 06:55:50,202 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000010_0
+2017-03-11 06:55:50,214 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:50,215 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:50,238 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:10+1
+2017-03-11 06:55:50,823 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:50,825 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:50,836 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:50,836 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:50,837 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:50,850 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:50,873 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:50,873 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:50,910 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000010_0 is done. And is in the process of committing
+2017-03-11 06:55:50,912 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:10+1
+2017-03-11 06:55:50,920 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000010_0' done.
+2017-03-11 06:55:50,921 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000010_0
+2017-03-11 06:55:50,927 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000011_0
+2017-03-11 06:55:50,956 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:50,957 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:50,971 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:11+1
+2017-03-11 06:55:51,538 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:51,539 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:51,539 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:51,539 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:51,539 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:51,544 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:51,545 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:51,545 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:51,554 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000011_0 is done. And is in the process of committing
+2017-03-11 06:55:51,569 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:11+1
+2017-03-11 06:55:51,578 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000011_0' done.
+2017-03-11 06:55:51,578 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000011_0
+2017-03-11 06:55:51,578 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000012_0
+2017-03-11 06:55:51,583 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:51,584 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:51,585 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:12+1
+2017-03-11 06:55:52,243 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:52,245 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:52,247 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:52,255 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:52,255 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:52,275 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:52,278 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:52,292 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:52,315 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000012_0 is done. And is in the process of committing
+2017-03-11 06:55:52,330 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:12+1
+2017-03-11 06:55:52,330 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000012_0' done.
+2017-03-11 06:55:52,331 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000012_0
+2017-03-11 06:55:52,331 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000013_0
+2017-03-11 06:55:52,358 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:52,358 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:52,389 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:13+1
+2017-03-11 06:55:53,147 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:53,162 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:53,162 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:53,163 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:53,163 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:53,171 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:53,215 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:53,233 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:53,244 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000013_0 is done. And is in the process of committing
+2017-03-11 06:55:53,319 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:13+1
+2017-03-11 06:55:53,320 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000013_0' done.
+2017-03-11 06:55:53,320 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000013_0
+2017-03-11 06:55:53,320 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000014_0
+2017-03-11 06:55:53,334 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:53,358 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:53,383 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:14+1
+2017-03-11 06:55:54,012 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:54,012 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:54,014 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:54,014 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:54,014 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:54,023 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:54,024 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:54,033 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:54,035 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000014_0 is done. And is in the process of committing
+2017-03-11 06:55:54,059 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:14+1
+2017-03-11 06:55:54,072 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000014_0' done.
+2017-03-11 06:55:54,072 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000014_0
+2017-03-11 06:55:54,072 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000015_0
+2017-03-11 06:55:54,089 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:54,090 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:54,091 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:15+1
+2017-03-11 06:55:54,583 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:54,583 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:54,583 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:54,584 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:54,584 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:54,592 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:54,593 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:54,593 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:54,608 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000015_0 is done. And is in the process of committing
+2017-03-11 06:55:54,610 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:15+1
+2017-03-11 06:55:54,610 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000015_0' done.
+2017-03-11 06:55:54,610 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000015_0
+2017-03-11 06:55:54,613 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000016_0
+2017-03-11 06:55:54,628 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:54,628 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:54,637 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:16+1
+2017-03-11 06:55:55,032 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:55,047 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:55,048 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:55,048 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:55,048 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:55,053 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:55,054 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:55,055 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:55,058 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000016_0 is done. And is in the process of committing
+2017-03-11 06:55:55,060 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:16+1
+2017-03-11 06:55:55,064 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000016_0' done.
+2017-03-11 06:55:55,064 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000016_0
+2017-03-11 06:55:55,064 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000017_0
+2017-03-11 06:55:55,079 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:55,085 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:55,086 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:17+1
+2017-03-11 06:55:55,500 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:55,500 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:55,500 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:55,500 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:55,500 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:55,510 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:55,511 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:55,511 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:55,513 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000017_0 is done. And is in the process of committing
+2017-03-11 06:55:55,518 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:17+1
+2017-03-11 06:55:55,518 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000017_0' done.
+2017-03-11 06:55:55,518 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000017_0
+2017-03-11 06:55:55,518 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000018_0
+2017-03-11 06:55:55,528 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:55,535 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:55,536 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:18+1
+2017-03-11 06:55:56,018 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:56,019 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:56,019 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:56,019 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:56,019 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:56,030 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:56,031 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:56,031 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:56,036 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000018_0 is done. And is in the process of committing
+2017-03-11 06:55:56,057 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:18+1
+2017-03-11 06:55:56,057 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000018_0' done.
+2017-03-11 06:55:56,066 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000018_0
+2017-03-11 06:55:56,066 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000019_0
+2017-03-11 06:55:56,078 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:56,079 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:56,080 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:19+1
+2017-03-11 06:55:56,547 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:56,547 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:56,547 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:56,547 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:56,547 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:56,557 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:56,558 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:56,558 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:56,579 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000019_0 is done. And is in the process of committing
+2017-03-11 06:55:56,581 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:19+1
+2017-03-11 06:55:56,581 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000019_0' done.
+2017-03-11 06:55:56,581 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000019_0
+2017-03-11 06:55:56,582 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000020_0
+2017-03-11 06:55:56,590 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:56,591 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:56,592 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:20+1
+2017-03-11 06:55:56,943 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:56,949 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:56,950 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:56,950 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:56,951 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:56,960 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:56,961 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:56,961 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:56,973 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000020_0 is done. And is in the process of committing
+2017-03-11 06:55:56,974 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:20+1
+2017-03-11 06:55:56,974 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000020_0' done.
+2017-03-11 06:55:56,974 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000020_0
+2017-03-11 06:55:56,974 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000021_0
+2017-03-11 06:55:56,991 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:56,992 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:56,992 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:21+1
+2017-03-11 06:55:57,401 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:57,401 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:57,405 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:57,405 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:57,405 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:57,419 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:57,420 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:57,420 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:57,431 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000021_0 is done. And is in the process of committing
+2017-03-11 06:55:57,432 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:21+1
+2017-03-11 06:55:57,437 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000021_0' done.
+2017-03-11 06:55:57,437 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000021_0
+2017-03-11 06:55:57,437 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000022_0
+2017-03-11 06:55:57,438 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:57,439 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:57,440 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:22+1
+2017-03-11 06:55:57,896 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:57,896 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:57,896 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:57,896 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:57,896 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:57,919 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:57,921 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:57,921 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:57,930 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000022_0 is done. And is in the process of committing
+2017-03-11 06:55:57,931 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:22+1
+2017-03-11 06:55:57,943 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000022_0' done.
+2017-03-11 06:55:57,943 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000022_0
+2017-03-11 06:55:57,943 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000023_0
+2017-03-11 06:55:57,952 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:57,952 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:57,953 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:23+1
+2017-03-11 06:55:58,488 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 06:55:58,488 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 06:55:58,489 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 06:55:58,489 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 06:55:58,489 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 06:55:58,497 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 06:55:58,498 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 06:55:58,499 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 06:55:58,520 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1197297442_0001_m_000023_0 is done. And is in the process of committing
+2017-03-11 06:55:58,522 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:23+1
+2017-03-11 06:55:58,536 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1197297442_0001_m_000023_0' done.
+2017-03-11 06:55:58,538 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1197297442_0001_m_000023_0
+2017-03-11 06:55:58,538 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1197297442_0001_m_000024_0
+2017-03-11 06:55:58,548 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 06:55:58,548 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 06:55:58,561 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:24+1
+2017-03-11 07:07:29,361 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 07:07:30,506 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 07:07:30,522 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 07:07:30,899 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 07:07:30,908 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 07:07:30,948 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:10
+2017-03-11 07:07:31,232 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local271824515_0001
+2017-03-11 07:07:31,617 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 07:07:31,622 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 07:07:31,622 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local271824515_0001
+2017-03-11 07:07:31,635 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:31,652 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 07:07:31,780 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 07:07:31,781 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000000_0
+2017-03-11 07:07:31,844 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:31,889 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:31,893 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+49
+2017-03-11 07:07:32,023 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:32,024 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:32,024 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:32,024 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:32,024 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:32,040 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:32,360 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:32,360 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:32,360 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:07:32,360 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 10606810; bufvoid = 104857600
+2017-03-11 07:07:32,360 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:07:32,633 INFO org.apache.hadoop.mapreduce.Job: Job job_local271824515_0001 running in uber mode : false
+2017-03-11 07:07:32,634 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 07:07:32,995 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 07:07:33,436 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:07:33,443 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 07:07:33,470 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+49
+2017-03-11 07:07:33,472 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000000_0' done.
+2017-03-11 07:07:33,472 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000000_0
+2017-03-11 07:07:33,473 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000001_0
+2017-03-11 07:07:33,479 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:33,480 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:33,481 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:49+49
+2017-03-11 07:07:33,531 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:33,532 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:33,532 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:33,532 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:33,532 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:33,532 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:33,561 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:33,561 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:33,562 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:07:33,562 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 10606810; bufvoid = 104857600
+2017-03-11 07:07:33,562 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:07:33,638 INFO org.apache.hadoop.mapreduce.Job:  map 10% reduce 0%
+2017-03-11 07:07:34,100 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:07:34,101 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000001_0 is done. And is in the process of committing
+2017-03-11 07:07:34,102 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:49+49
+2017-03-11 07:07:34,103 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000001_0' done.
+2017-03-11 07:07:34,103 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000001_0
+2017-03-11 07:07:34,103 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000002_0
+2017-03-11 07:07:34,103 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:34,104 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:34,105 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:98+49
+2017-03-11 07:07:34,142 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:34,142 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:34,142 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:34,142 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:34,142 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:34,143 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:34,161 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:34,161 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:34,162 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:07:34,162 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 10107666; bufvoid = 104857600
+2017-03-11 07:07:34,162 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:07:34,640 INFO org.apache.hadoop.mapreduce.Job:  map 20% reduce 0%
+2017-03-11 07:07:34,684 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:07:34,686 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000002_0 is done. And is in the process of committing
+2017-03-11 07:07:34,691 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:98+49
+2017-03-11 07:07:34,691 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000002_0' done.
+2017-03-11 07:07:34,691 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000002_0
+2017-03-11 07:07:34,691 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000003_0
+2017-03-11 07:07:34,697 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:34,697 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:34,699 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:147+49
+2017-03-11 07:07:34,752 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:34,753 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:34,753 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:34,753 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:34,753 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:34,754 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:34,756 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:34,756 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:34,759 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000003_0 is done. And is in the process of committing
+2017-03-11 07:07:34,761 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:147+49
+2017-03-11 07:07:34,762 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000003_0' done.
+2017-03-11 07:07:34,762 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000003_0
+2017-03-11 07:07:34,762 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000004_0
+2017-03-11 07:07:34,763 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:34,763 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:34,764 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:196+49
+2017-03-11 07:07:34,821 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:34,821 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:34,821 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:34,821 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:34,821 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:34,822 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:34,841 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:34,841 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:34,842 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:07:34,842 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9858094; bufvoid = 104857600
+2017-03-11 07:07:34,842 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:07:35,322 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:07:35,324 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000004_0 is done. And is in the process of committing
+2017-03-11 07:07:35,325 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:196+49
+2017-03-11 07:07:35,325 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000004_0' done.
+2017-03-11 07:07:35,326 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000004_0
+2017-03-11 07:07:35,326 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000005_0
+2017-03-11 07:07:35,326 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:35,327 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:35,329 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:245+49
+2017-03-11 07:07:35,370 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:35,370 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:35,370 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:35,370 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:35,370 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:35,371 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:35,388 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:35,388 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:35,388 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:07:35,388 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9358950; bufvoid = 104857600
+2017-03-11 07:07:35,388 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:07:35,641 INFO org.apache.hadoop.mapreduce.Job:  map 50% reduce 0%
+2017-03-11 07:07:35,905 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:07:35,908 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000005_0 is done. And is in the process of committing
+2017-03-11 07:07:35,909 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:245+49
+2017-03-11 07:07:35,909 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000005_0' done.
+2017-03-11 07:07:35,909 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000005_0
+2017-03-11 07:07:35,909 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000006_0
+2017-03-11 07:07:35,911 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:35,911 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:35,912 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:294+49
+2017-03-11 07:07:35,964 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:35,964 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:35,964 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:35,964 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:35,964 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:35,966 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:35,991 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:35,991 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:35,991 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:07:35,991 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8111090; bufvoid = 104857600
+2017-03-11 07:07:35,991 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:07:36,453 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:07:36,454 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000006_0 is done. And is in the process of committing
+2017-03-11 07:07:36,456 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:294+49
+2017-03-11 07:07:36,456 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000006_0' done.
+2017-03-11 07:07:36,456 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000006_0
+2017-03-11 07:07:36,456 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000007_0
+2017-03-11 07:07:36,457 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:36,457 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:36,458 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:343+49
+2017-03-11 07:07:36,502 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:36,502 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:36,502 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:36,502 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:36,502 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:36,503 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:36,523 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:36,523 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:36,523 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:07:36,523 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9483736; bufvoid = 104857600
+2017-03-11 07:07:36,523 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:07:36,644 INFO org.apache.hadoop.mapreduce.Job:  map 70% reduce 0%
+2017-03-11 07:07:37,047 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:07:37,050 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000007_0 is done. And is in the process of committing
+2017-03-11 07:07:37,051 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:343+49
+2017-03-11 07:07:37,051 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000007_0' done.
+2017-03-11 07:07:37,051 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000007_0
+2017-03-11 07:07:37,051 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000008_0
+2017-03-11 07:07:37,053 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:37,053 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:37,054 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:392+49
+2017-03-11 07:07:37,105 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:37,105 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:37,105 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:37,105 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:37,105 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:37,105 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:37,106 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:37,107 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:37,109 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000008_0 is done. And is in the process of committing
+2017-03-11 07:07:37,111 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:392+49
+2017-03-11 07:07:37,111 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000008_0' done.
+2017-03-11 07:07:37,111 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000008_0
+2017-03-11 07:07:37,111 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_m_000009_0
+2017-03-11 07:07:37,113 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:37,114 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:37,115 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:441+6
+2017-03-11 07:07:37,174 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:07:37,178 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:07:37,179 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:07:37,179 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:07:37,179 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:07:37,185 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:07:37,186 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:07:37,186 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:07:37,194 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_m_000009_0 is done. And is in the process of committing
+2017-03-11 07:07:37,195 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:441+6
+2017-03-11 07:07:37,195 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_m_000009_0' done.
+2017-03-11 07:07:37,196 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_m_000009_0
+2017-03-11 07:07:37,196 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 07:07:37,210 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 07:07:37,210 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local271824515_0001_r_000000_0
+2017-03-11 07:07:37,219 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:07:37,219 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:07:37,224 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@20af4ace
+2017-03-11 07:07:37,250 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 07:07:37,259 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local271824515_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 07:07:37,362 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 07:07:37,367 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000001_0 decomp: 10856384 len: 390723 to MEMORY
+2017-03-11 07:07:37,408 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10856384 bytes from map-output for attempt_local271824515_0001_m_000001_0
+2017-03-11 07:07:37,408 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10856384, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->10856384
+2017-03-11 07:07:37,423 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000007_0 decomp: 9733310 len: 390694 to MEMORY
+2017-03-11 07:07:37,462 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 9733310 bytes from map-output for attempt_local271824515_0001_m_000007_0
+2017-03-11 07:07:37,462 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 9733310, inMemoryMapOutputs.size() -> 2, commitMemory -> 10856384, usedMemory ->20589694
+2017-03-11 07:07:37,476 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000004_0 decomp: 10107668 len: 390714 to MEMORY
+2017-03-11 07:07:37,521 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10107668 bytes from map-output for attempt_local271824515_0001_m_000004_0
+2017-03-11 07:07:37,521 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10107668, inMemoryMapOutputs.size() -> 3, commitMemory -> 20589694, usedMemory ->30697362
+2017-03-11 07:07:37,522 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000009_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 07:07:37,528 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local271824515_0001_m_000009_0
+2017-03-11 07:07:37,528 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 4, commitMemory -> 30697362, usedMemory ->30697364
+2017-03-11 07:07:37,538 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000005_0 decomp: 9608524 len: 390704 to MEMORY
+2017-03-11 07:07:37,577 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 9608524 bytes from map-output for attempt_local271824515_0001_m_000005_0
+2017-03-11 07:07:37,582 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 9608524, inMemoryMapOutputs.size() -> 5, commitMemory -> 30697364, usedMemory ->40305888
+2017-03-11 07:07:37,584 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000008_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 07:07:37,584 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local271824515_0001_m_000008_0
+2017-03-11 07:07:37,584 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 6, commitMemory -> 40305888, usedMemory ->40305890
+2017-03-11 07:07:37,645 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000002_0 decomp: 10357240 len: 390732 to MEMORY
+2017-03-11 07:07:37,647 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-11 07:07:37,687 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10357240 bytes from map-output for attempt_local271824515_0001_m_000002_0
+2017-03-11 07:07:37,687 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10357240, inMemoryMapOutputs.size() -> 7, commitMemory -> 40305890, usedMemory ->50663130
+2017-03-11 07:07:37,693 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000000_0 decomp: 10856384 len: 390722 to MEMORY
+2017-03-11 07:07:37,743 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10856384 bytes from map-output for attempt_local271824515_0001_m_000000_0
+2017-03-11 07:07:37,744 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10856384, inMemoryMapOutputs.size() -> 8, commitMemory -> 50663130, usedMemory ->61519514
+2017-03-11 07:07:37,745 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000003_0 decomp: 2 len: 14 to MEMORY
+2017-03-11 07:07:37,746 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local271824515_0001_m_000003_0
+2017-03-11 07:07:37,746 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 9, commitMemory -> 61519514, usedMemory ->61519516
+2017-03-11 07:07:37,748 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local271824515_0001_m_000006_0 decomp: 8360664 len: 373255 to MEMORY
+2017-03-11 07:07:37,794 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 8360664 bytes from map-output for attempt_local271824515_0001_m_000006_0
+2017-03-11 07:07:37,795 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 8360664, inMemoryMapOutputs.size() -> 10, commitMemory -> 61519516, usedMemory ->69880180
+2017-03-11 07:07:37,795 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 07:07:37,800 INFO org.apache.hadoop.mapred.LocalJobRunner: 10 / 10 copied.
+2017-03-11 07:07:37,800 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 10 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 07:07:37,810 INFO org.apache.hadoop.mapred.Merger: Merging 10 sorted segments
+2017-03-11 07:07:37,813 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 7 segments left of total size: 69880048 bytes
+2017-03-11 07:07:40,634 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 10 segments, 69880180 bytes to disk to satisfy reduce memory limit
+2017-03-11 07:07:40,635 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2722292 bytes from disk
+2017-03-11 07:07:40,635 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 07:07:40,636 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 07:07:40,636 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 69880144 bytes
+2017-03-11 07:07:40,636 INFO org.apache.hadoop.mapred.LocalJobRunner: 10 / 10 copied.
+2017-03-11 07:07:40,643 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 07:07:42,562 INFO org.apache.hadoop.mapred.Task: Task:attempt_local271824515_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 07:07:42,565 INFO org.apache.hadoop.mapred.LocalJobRunner: 10 / 10 copied.
+2017-03-11 07:07:42,565 INFO org.apache.hadoop.mapred.Task: Task attempt_local271824515_0001_r_000000_0 is allowed to commit now
+2017-03-11 07:07:42,566 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local271824515_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local271824515_0001_r_000000
+2017-03-11 07:07:42,567 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 07:07:42,567 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local271824515_0001_r_000000_0' done.
+2017-03-11 07:07:42,567 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local271824515_0001_r_000000_0
+2017-03-11 07:07:42,567 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 07:07:42,659 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 07:07:42,660 INFO org.apache.hadoop.mapreduce.Job: Job job_local271824515_0001 completed successfully
+2017-03-11 07:07:42,691 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
 	File System Counters
-		FILE: Number of bytes read=16715038
-		FILE: Number of bytes written=13110192
+		FILE: Number of bytes read=5530668
+		FILE: Number of bytes written=25877978
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=115105
-		Map output bytes=5541084
-		Map output materialized bytes=2405517
-		Input split bytes=120
+		Map input records=7
+		Map output records=873502
+		Map output bytes=68133156
+		Map output materialized bytes=2717586
+		Input split bytes=1220
 		Combine input records=0
 		Combine output records=0
-		Reduce input groups=115105
-		Reduce shuffle bytes=2405517
-		Reduce input records=115105
-		Reduce output records=115105
-		Spilled Records=230210
-		Shuffled Maps =1
+		Reduce input groups=873481
+		Reduce shuffle bytes=2717586
+		Reduce input records=873502
+		Reduce output records=1
+		Spilled Records=1747004
+		Shuffled Maps =10
 		Failed Shuffles=0
-		Merged Map outputs=1
-		GC time elapsed (ms)=113
+		Merged Map outputs=10
+		GC time elapsed (ms)=378
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
-		Total committed heap usage (bytes)=331227136
+		Total committed heap usage (bytes)=2030252032
 	Shuffle Errors
 		BAD_ID=0
 		CONNECTION=0
@@ -1544,114 +5105,447 @@ Caused by: java.lang.NullPointerException
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=2265
 	File Output Format Counters 
-		Bytes Written=5365335
-	similarity.WordSort$DocLineCounter
-		NUM=124787
-2017-03-10 16:04:16,924 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 16:04:17,595 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 16:04:17,605 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 16:04:17,661 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
-2017-03-10 16:04:38,662 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-03-10 16:04:39,236 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-03-10 16:04:39,243 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-03-10 16:04:39,836 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
-2017-03-10 16:04:39,846 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-03-10 16:04:39,896 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-03-10 16:04:40,232 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local2107526736_0001
-2017-03-10 16:04:40,744 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-03-10 16:04:40,745 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local2107526736_0001
-2017-03-10 16:04:40,753 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-03-10 16:04:40,762 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 16:04:40,772 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-03-10 16:04:40,922 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-03-10 16:04:40,925 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2107526736_0001_m_000000_0
-2017-03-10 16:04:40,998 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 16:04:41,021 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 16:04:41,024 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
-2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-03-10 16:04:41,121 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-03-10 16:04:41,747 INFO org.apache.hadoop.mapreduce.Job: Job job_local2107526736_0001 running in uber mode : false
-2017-03-10 16:04:41,749 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
-2017-03-10 16:04:44,514 INFO org.apache.hadoop.mapred.LocalJobRunner: 
-2017-03-10 16:04:44,515 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-03-10 16:04:44,515 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-03-10 16:04:44,515 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 5541084; bufvoid = 104857600
-2017-03-10 16:04:44,515 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25753980(103015920); length = 460417/6553600
-2017-03-10 16:04:44,850 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
-2017-03-10 16:04:45,645 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-03-10 16:04:45,648 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2107526736_0001_m_000000_0 is done. And is in the process of committing
-2017-03-10 16:04:45,653 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-03-10 16:04:45,654 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2107526736_0001_m_000000_0' done.
-2017-03-10 16:04:45,654 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2107526736_0001_m_000000_0
-2017-03-10 16:04:45,654 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-03-10 16:04:45,658 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-03-10 16:04:45,659 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2107526736_0001_r_000000_0
-2017-03-10 16:04:45,664 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-03-10 16:04:45,664 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
-2017-03-10 16:04:45,666 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@a9b03af
-2017-03-10 16:04:45,680 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-03-10 16:04:45,687 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2107526736_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-03-10 16:04:45,730 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
-2017-03-10 16:04:45,730 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2107526736_0001_m_000000_0 decomp: 5771296 len: 2404026 to MEMORY
-2017-03-10 16:04:45,758 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
-2017-03-10 16:04:45,813 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5771296 bytes from map-output for attempt_local2107526736_0001_m_000000_0
-2017-03-10 16:04:45,813 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5771296, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5771296
-2017-03-10 16:04:45,817 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-03-10 16:04:45,818 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 16:04:45,818 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-03-10 16:04:45,825 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 16:04:45,826 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
-2017-03-10 16:04:46,671 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5771296 bytes to disk to satisfy reduce memory limit
-2017-03-10 16:04:46,672 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2404034 bytes from disk
-2017-03-10 16:04:46,672 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-03-10 16:04:46,672 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-03-10 16:04:46,673 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
-2017-03-10 16:04:46,674 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 16:04:46,680 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-03-10 16:04:47,525 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2107526736_0001_r_000000_0 is done. And is in the process of committing
-2017-03-10 16:04:47,532 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-03-10 16:04:47,532 INFO org.apache.hadoop.mapred.Task: Task attempt_local2107526736_0001_r_000000_0 is allowed to commit now
-2017-03-10 16:04:47,533 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2107526736_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local2107526736_0001_r_000000
-2017-03-10 16:04:47,533 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-03-10 16:04:47,534 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2107526736_0001_r_000000_0' done.
-2017-03-10 16:04:47,534 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2107526736_0001_r_000000_0
-2017-03-10 16:04:47,534 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-03-10 16:04:47,766 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
-2017-03-10 16:04:47,767 INFO org.apache.hadoop.mapreduce.Job: Job job_local2107526736_0001 completed successfully
-2017-03-10 16:04:47,785 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
+		Bytes Written=20
+	similarity.NaiveApproach$CompCounter
+		NUM=21
+2017-03-11 07:08:38,936 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 07:08:39,968 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 07:08:39,985 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 07:08:40,408 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 07:08:40,430 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 07:08:40,553 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:7
+2017-03-11 07:08:40,788 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local32959239_0001
+2017-03-11 07:08:41,182 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 07:08:41,184 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local32959239_0001
+2017-03-11 07:08:41,183 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 07:08:41,191 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:08:41,202 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 07:08:41,313 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 07:08:41,314 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local32959239_0001_m_000000_0
+2017-03-11 07:08:41,372 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:08:41,393 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:08:41,406 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:378+69
+2017-03-11 07:08:41,487 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:08:41,487 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:08:41,487 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:08:41,487 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:08:41,487 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:08:41,490 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:08:41,674 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:08:41,674 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:08:41,674 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:08:41,674 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9483736; bufvoid = 104857600
+2017-03-11 07:08:41,674 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:08:42,101 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 07:08:42,191 INFO org.apache.hadoop.mapreduce.Job: Job job_local32959239_0001 running in uber mode : false
+2017-03-11 07:08:42,194 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 07:08:42,440 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:08:42,443 INFO org.apache.hadoop.mapred.Task: Task:attempt_local32959239_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 07:08:42,455 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:378+69
+2017-03-11 07:08:42,455 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local32959239_0001_m_000000_0' done.
+2017-03-11 07:08:42,455 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local32959239_0001_m_000000_0
+2017-03-11 07:08:42,455 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local32959239_0001_m_000001_0
+2017-03-11 07:08:42,459 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:08:42,460 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:08:42,461 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+63
+2017-03-11 07:08:42,502 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:08:42,502 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:08:42,502 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:08:42,502 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:08:42,502 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:08:42,503 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:08:42,531 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:08:42,531 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:08:42,532 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:08:42,532 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 10606810; bufvoid = 104857600
+2017-03-11 07:08:42,532 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:08:43,039 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:08:43,040 INFO org.apache.hadoop.mapred.Task: Task:attempt_local32959239_0001_m_000001_0 is done. And is in the process of committing
+2017-03-11 07:08:43,042 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+63
+2017-03-11 07:08:43,042 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local32959239_0001_m_000001_0' done.
+2017-03-11 07:08:43,043 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local32959239_0001_m_000001_0
+2017-03-11 07:08:43,043 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local32959239_0001_m_000002_0
+2017-03-11 07:08:43,043 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:08:43,044 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:08:43,044 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:63+63
+2017-03-11 07:08:43,081 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:08:43,081 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:08:43,081 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:08:43,081 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:08:43,081 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:08:43,082 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:08:43,100 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:08:43,100 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:08:43,100 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:08:43,100 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 10606810; bufvoid = 104857600
+2017-03-11 07:08:43,100 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:08:43,199 INFO org.apache.hadoop.mapreduce.Job:  map 29% reduce 0%
+2017-03-11 07:08:43,591 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:08:43,593 INFO org.apache.hadoop.mapred.Task: Task:attempt_local32959239_0001_m_000002_0 is done. And is in the process of committing
+2017-03-11 07:08:43,595 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:63+63
+2017-03-11 07:08:43,595 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local32959239_0001_m_000002_0' done.
+2017-03-11 07:08:43,595 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local32959239_0001_m_000002_0
+2017-03-11 07:08:43,596 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local32959239_0001_m_000003_0
+2017-03-11 07:08:43,602 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:08:43,602 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:08:43,603 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:126+63
+2017-03-11 07:08:43,651 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:08:43,652 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:08:43,652 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:08:43,652 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:08:43,652 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:08:43,653 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:08:43,669 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:08:43,670 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:08:43,670 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:08:43,670 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 10107666; bufvoid = 104857600
+2017-03-11 07:08:43,670 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:08:44,147 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:08:44,148 INFO org.apache.hadoop.mapred.Task: Task:attempt_local32959239_0001_m_000003_0 is done. And is in the process of committing
+2017-03-11 07:08:44,150 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:126+63
+2017-03-11 07:08:44,150 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local32959239_0001_m_000003_0' done.
+2017-03-11 07:08:44,150 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local32959239_0001_m_000003_0
+2017-03-11 07:08:44,150 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local32959239_0001_m_000004_0
+2017-03-11 07:08:44,151 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:08:44,151 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:08:44,152 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:189+63
+2017-03-11 07:08:44,195 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:08:44,195 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:08:44,195 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:08:44,195 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:08:44,195 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:08:44,196 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:08:44,201 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-11 07:08:44,218 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:08:44,218 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:08:44,218 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:08:44,218 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9858094; bufvoid = 104857600
+2017-03-11 07:08:44,218 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:08:44,698 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:08:44,700 INFO org.apache.hadoop.mapred.Task: Task:attempt_local32959239_0001_m_000004_0 is done. And is in the process of committing
+2017-03-11 07:08:44,701 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:189+63
+2017-03-11 07:08:44,701 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local32959239_0001_m_000004_0' done.
+2017-03-11 07:08:44,701 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local32959239_0001_m_000004_0
+2017-03-11 07:08:44,701 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local32959239_0001_m_000005_0
+2017-03-11 07:08:44,703 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:08:44,703 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:08:44,705 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:252+63
+2017-03-11 07:08:44,747 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:08:44,747 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:08:44,747 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:08:44,747 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:08:44,747 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:08:44,747 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:08:44,770 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:08:44,772 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:08:44,772 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:08:44,772 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9358950; bufvoid = 104857600
+2017-03-11 07:08:44,772 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:08:45,202 INFO org.apache.hadoop.mapreduce.Job:  map 71% reduce 0%
+2017-03-11 07:08:45,253 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:08:45,255 INFO org.apache.hadoop.mapred.Task: Task:attempt_local32959239_0001_m_000005_0 is done. And is in the process of committing
+2017-03-11 07:08:45,256 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:252+63
+2017-03-11 07:08:45,256 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local32959239_0001_m_000005_0' done.
+2017-03-11 07:08:45,256 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local32959239_0001_m_000005_0
+2017-03-11 07:08:45,256 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local32959239_0001_m_000006_0
+2017-03-11 07:08:45,257 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:08:45,257 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:08:45,261 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:315+63
+2017-03-11 07:08:45,308 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:08:45,316 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:08:45,317 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:08:45,317 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:08:45,317 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:08:45,319 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:08:45,336 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:08:45,336 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:08:45,336 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:08:45,336 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8111090; bufvoid = 104857600
+2017-03-11 07:08:45,336 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715256(102861024); length = 499141/6553600
+2017-03-11 07:08:45,798 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:08:45,799 INFO org.apache.hadoop.mapred.Task: Task:attempt_local32959239_0001_m_000006_0 is done. And is in the process of committing
+2017-03-11 07:08:45,800 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:315+63
+2017-03-11 07:08:45,800 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local32959239_0001_m_000006_0' done.
+2017-03-11 07:08:45,801 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local32959239_0001_m_000006_0
+2017-03-11 07:08:45,801 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 07:08:45,803 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 07:08:45,803 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local32959239_0001_r_000000_0
+2017-03-11 07:08:45,808 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:08:45,808 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:08:45,810 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5a5fbef7
+2017-03-11 07:08:45,819 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 07:08:45,825 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local32959239_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 07:08:45,848 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 07:08:45,848 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local32959239_0001_m_000000_0 decomp: 9733310 len: 390694 to MEMORY
+2017-03-11 07:08:45,868 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 9733310 bytes from map-output for attempt_local32959239_0001_m_000000_0
+2017-03-11 07:08:45,868 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 9733310, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->9733310
+2017-03-11 07:08:45,877 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local32959239_0001_m_000003_0 decomp: 10357240 len: 390732 to MEMORY
+2017-03-11 07:08:45,900 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10357240 bytes from map-output for attempt_local32959239_0001_m_000003_0
+2017-03-11 07:08:45,900 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10357240, inMemoryMapOutputs.size() -> 2, commitMemory -> 9733310, usedMemory ->20090550
+2017-03-11 07:08:45,912 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local32959239_0001_m_000002_0 decomp: 10856384 len: 390723 to MEMORY
+2017-03-11 07:08:45,940 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10856384 bytes from map-output for attempt_local32959239_0001_m_000002_0
+2017-03-11 07:08:45,941 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10856384, inMemoryMapOutputs.size() -> 3, commitMemory -> 20090550, usedMemory ->30946934
+2017-03-11 07:08:45,952 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local32959239_0001_m_000005_0 decomp: 9608524 len: 390704 to MEMORY
+2017-03-11 07:08:45,970 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 9608524 bytes from map-output for attempt_local32959239_0001_m_000005_0
+2017-03-11 07:08:45,970 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 9608524, inMemoryMapOutputs.size() -> 4, commitMemory -> 30946934, usedMemory ->40555458
+2017-03-11 07:08:46,035 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local32959239_0001_m_000001_0 decomp: 10856384 len: 390722 to MEMORY
+2017-03-11 07:08:46,057 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10856384 bytes from map-output for attempt_local32959239_0001_m_000001_0
+2017-03-11 07:08:46,057 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10856384, inMemoryMapOutputs.size() -> 5, commitMemory -> 40555458, usedMemory ->51411842
+2017-03-11 07:08:46,060 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local32959239_0001_m_000004_0 decomp: 10107668 len: 390714 to MEMORY
+2017-03-11 07:08:46,080 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10107668 bytes from map-output for attempt_local32959239_0001_m_000004_0
+2017-03-11 07:08:46,081 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10107668, inMemoryMapOutputs.size() -> 6, commitMemory -> 51411842, usedMemory ->61519510
+2017-03-11 07:08:46,085 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local32959239_0001_m_000006_0 decomp: 8360664 len: 373255 to MEMORY
+2017-03-11 07:08:46,105 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 8360664 bytes from map-output for attempt_local32959239_0001_m_000006_0
+2017-03-11 07:08:46,105 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 8360664, inMemoryMapOutputs.size() -> 7, commitMemory -> 61519510, usedMemory ->69880174
+2017-03-11 07:08:46,105 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 07:08:46,106 INFO org.apache.hadoop.mapred.LocalJobRunner: 7 / 7 copied.
+2017-03-11 07:08:46,107 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 7 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 07:08:46,116 INFO org.apache.hadoop.mapred.Merger: Merging 7 sorted segments
+2017-03-11 07:08:46,116 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 7 segments left of total size: 69880048 bytes
+2017-03-11 07:08:46,202 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-11 07:16:05,536 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 07:16:06,648 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 07:16:06,649 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 07:16:06,696 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
+2017-03-11 07:16:44,548 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 07:16:45,572 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 07:16:45,573 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 07:16:45,909 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 07:16:45,916 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 07:16:45,964 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:100
+2017-03-11 07:16:46,184 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1601005211_0001
+2017-03-11 07:16:46,588 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 07:16:46,590 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1601005211_0001
+2017-03-11 07:16:46,592 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 07:16:46,598 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:16:46,607 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 07:16:46,834 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 07:16:46,835 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1601005211_0001_m_000000_0
+2017-03-11 07:16:46,893 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:16:46,923 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:16:46,927 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline:5270463+53272
+2017-03-11 07:16:47,103 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:16:47,104 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:16:47,104 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:16:47,104 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:16:47,104 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:16:47,119 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:16:47,595 INFO org.apache.hadoop.mapreduce.Job: Job job_local1601005211_0001 running in uber mode : false
+2017-03-11 07:16:47,596 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 07:16:47,689 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:16:47,689 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 68862070; bufvoid = 104857600
+2017-03-11 07:16:47,689 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22458400(89833600); length = 3755997/6553600
+2017-03-11 07:16:47,689 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 72632214 kvi 18158048(72632192)
+2017-03-11 07:17:11,947 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 07:17:12,980 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 07:17:12,982 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 07:17:13,031 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
+2017-03-11 07:17:29,666 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 07:17:30,685 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 07:17:30,693 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 07:17:31,123 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 07:17:31,129 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 07:17:31,905 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:126756
+2017-03-11 07:17:32,074 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local514307539_0001
+2017-03-11 07:17:32,462 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 07:17:32,463 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local514307539_0001
+2017-03-11 07:17:32,465 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 07:17:32,478 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:17:32,484 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 07:17:33,568 INFO org.apache.hadoop.mapreduce.Job: Job job_local514307539_0001 running in uber mode : false
+2017-03-11 07:17:33,573 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 07:17:55,648 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local514307539_0001
+java.lang.OutOfMemoryError: Java heap space
+	at java.util.Hashtable$Entry.clone(Hashtable.java:1052)
+	at java.util.Hashtable$Entry.clone(Hashtable.java:1052)
+	at java.util.Hashtable.clone(Hashtable.java:613)
+	at org.apache.hadoop.conf.Configuration.<init>(Configuration.java:707)
+	at org.apache.hadoop.mapred.JobConf.<init>(JobConf.java:447)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.<init>(LocalJobRunner.java:217)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.getMapTaskRunnables(LocalJobRunner.java:272)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:517)
+2017-03-11 07:17:55,650 INFO org.apache.hadoop.mapreduce.Job: Job job_local514307539_0001 failed with state FAILED due to: NA
+2017-03-11 07:17:55,725 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
+2017-03-11 07:29:33,617 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 07:29:34,538 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.max.split.size is deprecated. Instead, use mapreduce.input.fileinputformat.split.maxsize
+2017-03-11 07:29:34,669 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 07:29:34,671 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 07:29:34,998 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 07:29:35,009 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 07:29:35,043 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 07:29:35,279 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1380958930_0001
+2017-03-11 07:29:35,673 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 07:29:35,684 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 07:29:35,685 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1380958930_0001
+2017-03-11 07:29:35,690 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:29:35,695 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 07:29:35,790 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 07:29:35,792 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1380958930_0001_m_000000_0
+2017-03-11 07:29:35,851 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:29:35,883 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:29:35,887 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline:0+5323735
+2017-03-11 07:29:35,971 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:29:35,971 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:29:35,971 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:29:35,971 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:29:35,971 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:29:35,975 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:29:36,283 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:29:36,283 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 68406970; bufvoid = 104857600
+2017-03-11 07:29:36,283 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22344608(89378432); length = 3869789/6553600
+2017-03-11 07:29:36,283 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 72308634 kvi 18077152(72308608)
+2017-03-11 07:29:36,693 INFO org.apache.hadoop.mapreduce.Job: Job job_local1380958930_0001 running in uber mode : false
+2017-03-11 07:29:36,694 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 07:30:00,879 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 07:30:01,767 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.max.split.size is deprecated. Instead, use mapreduce.input.fileinputformat.split.maxsize
+2017-03-11 07:30:01,965 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 07:30:01,966 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 07:30:02,539 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 07:30:02,547 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 07:30:03,396 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:126756
+2017-03-11 07:30:03,577 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local651046040_0001
+2017-03-11 07:30:03,964 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 07:30:03,965 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local651046040_0001
+2017-03-11 07:30:03,968 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 07:30:03,973 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:30:03,979 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 07:30:04,973 INFO org.apache.hadoop.mapreduce.Job: Job job_local651046040_0001 running in uber mode : false
+2017-03-11 07:30:04,974 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 07:48:06,292 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-11 07:48:10,718 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-11 07:48:10,727 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-11 07:48:12,423 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-11 07:48:12,551 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-11 07:48:13,113 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-11 07:48:14,255 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local931272447_0001
+2017-03-11 07:48:15,831 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-11 07:48:15,832 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local931272447_0001
+2017-03-11 07:48:15,853 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-11 07:48:15,910 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:48:15,928 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-11 07:48:16,509 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-11 07:48:16,510 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local931272447_0001_m_000000_0
+2017-03-11 07:48:16,747 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:48:16,843 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:48:16,855 INFO org.apache.hadoop.mapreduce.Job: Job job_local931272447_0001 running in uber mode : false
+2017-03-11 07:48:16,860 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-11 07:48:16,875 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447
+2017-03-11 07:48:17,752 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-11 07:48:17,752 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-11 07:48:17,752 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-11 07:48:17,752 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-11 07:48:17,761 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-11 07:48:17,842 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-11 07:48:19,674 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-11 07:48:19,675 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-11 07:48:19,675 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-11 07:48:19,675 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 68133156; bufvoid = 104857600
+2017-03-11 07:48:19,675 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22720392(90881568); length = 3494005/6553600
+2017-03-11 07:48:22,914 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:23,893 INFO org.apache.hadoop.mapreduce.Job:  map 67% reduce 0%
+2017-03-11 07:48:25,919 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:28,920 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:31,922 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:34,923 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:37,928 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:40,933 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:43,934 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:46,936 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:47,095 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-11 07:48:49,941 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:48:52,942 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447 > sort
+2017-03-11 07:49:03,704 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-11 07:49:03,730 INFO org.apache.hadoop.mapred.Task: Task:attempt_local931272447_0001_m_000000_0 is done. And is in the process of committing
+2017-03-11 07:49:03,742 INFO org.apache.hadoop.mapred.LocalJobRunner: file:/home/cloudera/workspace/WordCount/sortedline_sample:0+447
+2017-03-11 07:49:03,742 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local931272447_0001_m_000000_0' done.
+2017-03-11 07:49:03,742 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local931272447_0001_m_000000_0
+2017-03-11 07:49:03,749 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-11 07:49:03,765 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-11 07:49:03,766 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local931272447_0001_r_000000_0
+2017-03-11 07:49:03,849 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-11 07:49:03,850 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-11 07:49:03,885 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@8a93430
+2017-03-11 07:49:04,010 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-11 07:49:04,052 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-11 07:49:04,101 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local931272447_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-11 07:49:04,675 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-11 07:49:04,688 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local931272447_0001_m_000000_0 decomp: 69880162 len: 2722285 to MEMORY
+2017-03-11 07:49:05,714 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 69880162 bytes from map-output for attempt_local931272447_0001_m_000000_0
+2017-03-11 07:49:05,714 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 69880162, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->69880162
+2017-03-11 07:49:05,733 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-11 07:49:05,734 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-11 07:49:05,736 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-11 07:49:05,740 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 07:49:05,763 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 69880144 bytes
+2017-03-11 07:49:09,855 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-03-11 07:49:10,019 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 41%
+2017-03-11 07:49:12,856 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-03-11 07:49:13,022 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 46%
+2017-03-11 07:49:15,858 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-03-11 07:49:16,027 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 51%
+2017-03-11 07:49:18,861 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-03-11 07:49:19,030 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 57%
+2017-03-11 07:49:21,862 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-03-11 07:49:22,033 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 62%
+2017-03-11 07:49:24,512 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 69880162 bytes to disk to satisfy reduce memory limit
+2017-03-11 07:49:24,513 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2722293 bytes from disk
+2017-03-11 07:49:24,514 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-11 07:49:24,514 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-11 07:49:24,548 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 69880144 bytes
+2017-03-11 07:49:24,548 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-03-11 07:49:24,634 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-11 07:49:24,871 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 07:49:25,035 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 67%
+2017-03-11 07:49:27,873 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 07:49:28,040 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 70%
+2017-03-11 07:49:30,875 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 07:49:31,046 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 77%
+2017-03-11 07:49:33,881 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 07:49:34,050 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 85%
+2017-03-11 07:49:36,883 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 07:49:37,054 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 92%
+2017-03-11 07:49:39,924 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 07:49:40,057 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 98%
+2017-03-11 07:49:40,708 INFO org.apache.hadoop.mapred.Task: Task:attempt_local931272447_0001_r_000000_0 is done. And is in the process of committing
+2017-03-11 07:49:40,710 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 07:49:40,710 INFO org.apache.hadoop.mapred.Task: Task attempt_local931272447_0001_r_000000_0 is allowed to commit now
+2017-03-11 07:49:40,717 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local931272447_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local931272447_0001_r_000000
+2017-03-11 07:49:40,719 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-11 07:49:40,720 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local931272447_0001_r_000000_0' done.
+2017-03-11 07:49:40,722 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local931272447_0001_r_000000_0
+2017-03-11 07:49:40,723 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-11 07:49:41,058 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-11 07:49:41,058 INFO org.apache.hadoop.mapreduce.Job: Job job_local931272447_0001 completed successfully
+2017-03-11 07:49:41,100 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
 	File System Counters
-		FILE: Number of bytes read=16712056
-		FILE: Number of bytes written=13108535
+		FILE: Number of bytes read=5445872
+		FILE: Number of bytes written=8695995
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
-		Map input records=124787
-		Map output records=115105
-		Map output bytes=5541084
-		Map output materialized bytes=2404026
-		Input split bytes=120
+		Map input records=7
+		Map output records=873502
+		Map output bytes=68133156
+		Map output materialized bytes=2722285
+		Input split bytes=122
 		Combine input records=0
 		Combine output records=0
-		Reduce input groups=115105
-		Reduce shuffle bytes=2404026
-		Reduce input records=115105
-		Reduce output records=115105
-		Spilled Records=230210
+		Reduce input groups=873481
+		Reduce shuffle bytes=2722285
+		Reduce input records=873502
+		Reduce output records=1
+		Spilled Records=1747004
 		Shuffled Maps =1
 		Failed Shuffles=0
 		Merged Map outputs=1
-		GC time elapsed (ms)=142
+		GC time elapsed (ms)=388
 		CPU time spent (ms)=0
 		Physical memory (bytes) snapshot=0
 		Virtual memory (bytes) snapshot=0
-		Total committed heap usage (bytes)=331227136
+		Total committed heap usage (bytes)=378413056
 	Shuffle Errors
 		BAD_ID=0
 		CONNECTION=0
@@ -1660,8 +5554,8 @@ Caused by: java.lang.NullPointerException
 		WRONG_MAP=0
 		WRONG_REDUCE=0
 	File Input Format Counters 
-		Bytes Read=5589886
+		Bytes Read=447
 	File Output Format Counters 
-		Bytes Written=5365335
-	similarity.WordSort$DocLineCounter
-		NUM=124787
+		Bytes Written=20
+	similarity.NaiveApproach$CompCounter
+		NUM=21
diff --git a/hadoop.log.2017-03-10 b/hadoop.log.2017-03-10
new file mode 100644
index 0000000..72b1af4
--- /dev/null
+++ b/hadoop.log.2017-03-10
@@ -0,0 +1,1667 @@
+2017-03-10 12:58:10,580 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 12:58:11,075 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 12:58:11,087 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 12:58:11,561 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 12:58:11,594 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 12:58:11,707 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 12:58:11,996 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1458741767_0001
+2017-03-10 12:58:12,393 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 12:58:12,394 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1458741767_0001
+2017-03-10 12:58:12,399 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 12:58:12,419 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 12:58:12,422 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 12:58:12,540 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 12:58:12,543 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1458741767_0001_m_000000_0
+2017-03-10 12:58:12,607 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 12:58:12,632 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 12:58:12,640 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 12:58:12,811 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 12:58:12,823 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 12:58:12,824 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 12:58:12,824 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 12:58:12,824 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 12:58:12,832 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 12:58:13,397 INFO org.apache.hadoop.mapreduce.Job: Job job_local1458741767_0001 running in uber mode : false
+2017-03-10 12:58:13,399 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 12:58:14,983 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 12:58:14,985 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 12:58:14,985 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 12:58:14,985 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
+2017-03-10 12:58:14,985 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
+2017-03-10 12:58:16,014 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 12:58:16,798 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 12:58:16,800 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1458741767_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 12:58:16,806 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 12:58:16,806 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1458741767_0001_m_000000_0' done.
+2017-03-10 12:58:16,806 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1458741767_0001_m_000000_0
+2017-03-10 12:58:16,806 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 12:58:16,814 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 12:58:16,814 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1458741767_0001_r_000000_0
+2017-03-10 12:58:16,819 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 12:58:16,819 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 12:58:16,823 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
+2017-03-10 12:58:16,833 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 12:58:16,839 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1458741767_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 12:58:16,862 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 12:58:16,862 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1458741767_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
+2017-03-10 12:58:16,869 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1458741767_0001_m_000000_0
+2017-03-10 12:58:16,869 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
+2017-03-10 12:58:16,870 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 12:58:16,871 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 12:58:16,871 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 12:58:16,876 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 12:58:16,876 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 12:58:17,106 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
+2017-03-10 12:58:17,107 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
+2017-03-10 12:58:17,107 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 12:58:17,107 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 12:58:17,108 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 12:58:17,108 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 12:58:17,113 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 12:58:17,416 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 12:58:17,746 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1458741767_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 12:58:17,747 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 12:58:17,751 INFO org.apache.hadoop.mapred.Task: Task attempt_local1458741767_0001_r_000000_0 is allowed to commit now
+2017-03-10 12:58:17,752 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1458741767_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1458741767_0001_r_000000
+2017-03-10 12:58:17,756 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 12:58:17,756 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1458741767_0001_r_000000_0' done.
+2017-03-10 12:58:17,757 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1458741767_0001_r_000000_0
+2017-03-10 12:58:17,758 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 12:58:18,417 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 12:58:18,418 INFO org.apache.hadoop.mapreduce.Job: Job job_local1458741767_0001 completed successfully
+2017-03-10 12:58:18,427 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+	File System Counters
+		FILE: Number of bytes read=11514350
+		FILE: Number of bytes written=1395729
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023494
+		Map output bytes=8925696
+		Map output materialized bytes=167092
+		Input split bytes=120
+		Combine input records=1023494
+		Combine output records=34513
+		Reduce input groups=34513
+		Reduce shuffle bytes=167092
+		Reduce input records=34513
+		Reduce output records=34513
+		Spilled Records=69026
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=109
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=363879
+2017-03-10 14:05:48,287 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:05:48,833 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:05:48,841 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:05:49,279 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:05:49,288 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:05:49,331 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:05:49,610 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1590990832_0001
+2017-03-10 14:05:50,040 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:05:50,042 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1590990832_0001
+2017-03-10 14:05:50,046 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:05:50,059 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:05:50,067 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:05:50,190 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:05:50,192 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1590990832_0001_m_000000_0
+2017-03-10 14:05:50,229 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:05:50,244 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:05:50,247 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:05:50,339 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:05:50,344 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:05:51,047 INFO org.apache.hadoop.mapreduce.Job: Job job_local1590990832_0001 running in uber mode : false
+2017-03-10 14:05:51,050 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
+2017-03-10 14:05:52,504 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
+2017-03-10 14:05:53,471 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:05:54,161 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:05:54,164 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1590990832_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:05:54,170 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:05:54,170 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1590990832_0001_m_000000_0' done.
+2017-03-10 14:05:54,170 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1590990832_0001_m_000000_0
+2017-03-10 14:05:54,170 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:05:54,178 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:05:54,178 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1590990832_0001_r_000000_0
+2017-03-10 14:05:54,183 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:05:54,183 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:05:54,185 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
+2017-03-10 14:05:54,198 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:05:54,206 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1590990832_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:05:54,277 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:05:54,277 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1590990832_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
+2017-03-10 14:05:54,297 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1590990832_0001_m_000000_0
+2017-03-10 14:05:54,298 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
+2017-03-10 14:05:54,302 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:05:54,305 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:05:54,305 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:05:54,315 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:05:54,319 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:05:54,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:05:54,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
+2017-03-10 14:05:54,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:05:54,703 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:05:54,704 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:05:54,704 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:05:54,714 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:05:55,077 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:05:55,853 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1590990832_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:05:55,858 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:05:55,859 INFO org.apache.hadoop.mapred.Task: Task attempt_local1590990832_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:05:55,859 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1590990832_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1590990832_0001_r_000000
+2017-03-10 14:05:55,861 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:05:55,861 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1590990832_0001_r_000000_0' done.
+2017-03-10 14:05:55,861 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1590990832_0001_r_000000_0
+2017-03-10 14:05:55,861 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:05:56,079 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:05:56,080 INFO org.apache.hadoop.mapreduce.Job: Job job_local1590990832_0001 completed successfully
+2017-03-10 14:05:56,090 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+	File System Counters
+		FILE: Number of bytes read=11514350
+		FILE: Number of bytes written=1395729
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023494
+		Map output bytes=8925696
+		Map output materialized bytes=167092
+		Input split bytes=120
+		Combine input records=1023494
+		Combine output records=34513
+		Reduce input groups=34513
+		Reduce shuffle bytes=167092
+		Reduce input records=34513
+		Reduce output records=34513
+		Spilled Records=69026
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=80
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=363879
+2017-03-10 14:07:44,622 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:07:45,122 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:07:45,129 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:07:45,628 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:07:45,645 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:07:45,678 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:07:45,909 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1520504035_0001
+2017-03-10 14:07:46,336 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:07:46,338 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1520504035_0001
+2017-03-10 14:07:46,337 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:07:46,344 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:07:46,346 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:07:46,457 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:07:46,460 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1520504035_0001_m_000000_0
+2017-03-10 14:07:46,523 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:07:46,542 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:07:46,545 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:07:46,634 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:07:46,634 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:07:46,634 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:07:46,634 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:07:46,635 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:07:46,639 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:07:47,343 INFO org.apache.hadoop.mapreduce.Job: Job job_local1520504035_0001 running in uber mode : false
+2017-03-10 14:07:47,344 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
+2017-03-10 14:07:48,802 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
+2017-03-10 14:07:49,778 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:07:50,507 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:07:50,510 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1520504035_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:07:50,516 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:07:50,516 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1520504035_0001_m_000000_0' done.
+2017-03-10 14:07:50,516 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1520504035_0001_m_000000_0
+2017-03-10 14:07:50,516 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:07:50,523 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:07:50,523 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1520504035_0001_r_000000_0
+2017-03-10 14:07:50,529 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:07:50,529 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:07:50,531 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
+2017-03-10 14:07:50,541 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:07:50,548 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1520504035_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:07:50,573 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:07:50,574 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1520504035_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
+2017-03-10 14:07:50,580 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1520504035_0001_m_000000_0
+2017-03-10 14:07:50,581 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
+2017-03-10 14:07:50,582 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:07:50,582 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:07:50,582 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:07:50,587 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:07:50,588 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:07:50,811 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:07:50,811 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
+2017-03-10 14:07:50,812 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:07:50,812 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:07:50,813 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:07:50,813 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:07:50,818 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:07:51,355 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:07:51,511 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1520504035_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:07:51,512 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:07:51,515 INFO org.apache.hadoop.mapred.Task: Task attempt_local1520504035_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:07:51,516 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1520504035_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1520504035_0001_r_000000
+2017-03-10 14:07:51,516 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:07:51,518 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1520504035_0001_r_000000_0' done.
+2017-03-10 14:07:51,518 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1520504035_0001_r_000000_0
+2017-03-10 14:07:51,518 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:07:52,356 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:07:52,356 INFO org.apache.hadoop.mapreduce.Job: Job job_local1520504035_0001 completed successfully
+2017-03-10 14:07:52,367 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+	File System Counters
+		FILE: Number of bytes read=11514350
+		FILE: Number of bytes written=1397073
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023494
+		Map output bytes=8925696
+		Map output materialized bytes=167092
+		Input split bytes=120
+		Combine input records=1023494
+		Combine output records=34513
+		Reduce input groups=34513
+		Reduce shuffle bytes=167092
+		Reduce input records=34513
+		Reduce output records=34513
+		Spilled Records=69026
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=86
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=363879
+2017-03-10 14:08:46,208 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:08:46,725 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:08:46,726 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:08:46,791 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
+2017-03-10 14:09:00,496 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:09:00,991 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:09:00,992 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:09:01,486 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:09:01,504 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:09:01,622 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:09:01,930 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1768387477_0001
+2017-03-10 14:09:02,340 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:09:02,341 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1768387477_0001
+2017-03-10 14:09:02,345 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:09:02,348 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:09:02,366 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:09:02,467 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:09:02,468 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1768387477_0001_m_000000_0
+2017-03-10 14:09:02,532 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:09:02,561 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:09:02,564 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:09:02,644 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:09:02,645 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:09:02,645 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:09:02,645 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:09:02,645 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:09:02,652 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:09:03,343 INFO org.apache.hadoop.mapreduce.Job: Job job_local1768387477_0001 running in uber mode : false
+2017-03-10 14:09:03,344 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:09:04,790 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:09:04,792 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:09:04,792 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:09:04,792 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
+2017-03-10 14:09:04,792 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
+2017-03-10 14:09:05,819 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:09:06,544 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:09:06,546 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1768387477_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:09:06,552 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:09:06,552 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1768387477_0001_m_000000_0' done.
+2017-03-10 14:09:06,552 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1768387477_0001_m_000000_0
+2017-03-10 14:09:06,552 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:09:06,560 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:09:06,561 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1768387477_0001_r_000000_0
+2017-03-10 14:09:06,569 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:09:06,570 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:09:06,572 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
+2017-03-10 14:09:06,582 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:09:06,588 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1768387477_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:09:06,616 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:09:06,617 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1768387477_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
+2017-03-10 14:09:06,626 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1768387477_0001_m_000000_0
+2017-03-10 14:09:06,627 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
+2017-03-10 14:09:06,628 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:09:06,629 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:09:06,629 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:09:06,636 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:09:06,636 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:09:06,896 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:09:06,896 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
+2017-03-10 14:09:06,897 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:09:06,897 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:09:06,898 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:09:06,898 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:09:06,903 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:09:07,352 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:09:07,776 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1768387477_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:09:07,792 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:09:07,792 INFO org.apache.hadoop.mapred.Task: Task attempt_local1768387477_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:09:07,792 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1768387477_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1768387477_0001_r_000000
+2017-03-10 14:09:07,793 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:09:07,793 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1768387477_0001_r_000000_0' done.
+2017-03-10 14:09:07,793 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1768387477_0001_r_000000_0
+2017-03-10 14:09:07,793 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:09:08,353 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:09:08,354 INFO org.apache.hadoop.mapreduce.Job: Job job_local1768387477_0001 completed successfully
+2017-03-10 14:09:08,363 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+	File System Counters
+		FILE: Number of bytes read=11514350
+		FILE: Number of bytes written=1397073
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023494
+		Map output bytes=8925696
+		Map output materialized bytes=167092
+		Input split bytes=120
+		Combine input records=1023494
+		Combine output records=34513
+		Reduce input groups=34513
+		Reduce shuffle bytes=167092
+		Reduce input records=34513
+		Reduce output records=34513
+		Spilled Records=69026
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=76
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=363879
+2017-03-10 14:10:49,958 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:10:50,420 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:10:50,423 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:10:50,893 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:10:50,902 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:10:50,935 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:10:51,165 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local321623198_0001
+2017-03-10 14:10:51,576 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:10:51,577 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local321623198_0001
+2017-03-10 14:10:51,586 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:10:51,596 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:10:51,607 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:10:51,726 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:10:51,728 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local321623198_0001_m_000000_0
+2017-03-10 14:10:51,786 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:10:51,801 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:10:51,805 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:10:51,887 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:10:51,887 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:10:51,887 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:10:51,888 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:10:51,888 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:10:51,891 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:10:52,584 INFO org.apache.hadoop.mapreduce.Job: Job job_local321623198_0001 running in uber mode : false
+2017-03-10 14:10:52,587 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:10:54,371 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:10:54,373 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:10:54,373 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:10:54,374 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
+2017-03-10 14:10:54,374 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
+2017-03-10 14:10:55,678 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:10:56,656 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:10:56,659 INFO org.apache.hadoop.mapred.Task: Task:attempt_local321623198_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:10:56,666 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:10:56,666 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local321623198_0001_m_000000_0' done.
+2017-03-10 14:10:56,666 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local321623198_0001_m_000000_0
+2017-03-10 14:10:56,667 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:10:56,676 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:10:56,676 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local321623198_0001_r_000000_0
+2017-03-10 14:10:56,684 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:10:56,685 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:10:56,689 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@405d65c3
+2017-03-10 14:10:56,704 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:10:56,710 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local321623198_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:10:56,742 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:10:56,742 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local321623198_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
+2017-03-10 14:10:56,749 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local321623198_0001_m_000000_0
+2017-03-10 14:10:56,749 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
+2017-03-10 14:10:56,750 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:10:56,752 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:10:56,752 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:10:56,756 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:10:56,757 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:10:57,102 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:10:57,102 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
+2017-03-10 14:10:57,103 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:10:57,103 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:10:57,104 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:10:57,105 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:10:57,109 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:10:57,598 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:10:57,797 INFO org.apache.hadoop.mapred.Task: Task:attempt_local321623198_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:10:57,801 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:10:57,802 INFO org.apache.hadoop.mapred.Task: Task attempt_local321623198_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:10:57,802 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local321623198_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local321623198_0001_r_000000
+2017-03-10 14:10:57,803 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:10:57,803 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local321623198_0001_r_000000_0' done.
+2017-03-10 14:10:57,803 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local321623198_0001_r_000000_0
+2017-03-10 14:10:57,803 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:10:58,598 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:10:58,599 INFO org.apache.hadoop.mapreduce.Job: Job job_local321623198_0001 completed successfully
+2017-03-10 14:10:58,612 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+	File System Counters
+		FILE: Number of bytes read=11514350
+		FILE: Number of bytes written=1394261
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023494
+		Map output bytes=8925696
+		Map output materialized bytes=167092
+		Input split bytes=120
+		Combine input records=1023494
+		Combine output records=34513
+		Reduce input groups=34513
+		Reduce shuffle bytes=167092
+		Reduce input records=34513
+		Reduce output records=34513
+		Spilled Records=69026
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=82
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=363879
+2017-03-10 14:11:49,324 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:11:49,809 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:11:49,819 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:11:50,294 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:11:50,309 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:11:50,418 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:11:50,734 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1130190814_0001
+2017-03-10 14:11:51,124 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:11:51,125 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1130190814_0001
+2017-03-10 14:11:51,125 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:11:51,137 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:11:51,145 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:11:51,262 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:11:51,262 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1130190814_0001_m_000000_0
+2017-03-10 14:11:51,319 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:11:51,345 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:11:51,348 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:11:51,448 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:11:51,451 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:11:52,138 INFO org.apache.hadoop.mapreduce.Job: Job job_local1130190814_0001 running in uber mode : false
+2017-03-10 14:11:52,139 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:11:53,548 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:11:53,549 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:11:53,549 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:11:53,549 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
+2017-03-10 14:11:53,549 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
+2017-03-10 14:11:54,505 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:11:55,315 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:11:55,318 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1130190814_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:11:55,323 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:11:55,324 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1130190814_0001_m_000000_0' done.
+2017-03-10 14:11:55,324 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1130190814_0001_m_000000_0
+2017-03-10 14:11:55,324 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:11:55,331 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:11:55,332 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1130190814_0001_r_000000_0
+2017-03-10 14:11:55,337 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:11:55,337 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:11:55,339 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@33c40638
+2017-03-10 14:11:55,352 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:11:55,358 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1130190814_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:11:55,386 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:11:55,387 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1130190814_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
+2017-03-10 14:11:55,397 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1130190814_0001_m_000000_0
+2017-03-10 14:11:55,397 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
+2017-03-10 14:11:55,398 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:11:55,399 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:11:55,399 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:11:55,403 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:11:55,404 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:11:55,647 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:11:55,647 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
+2017-03-10 14:11:55,648 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:11:55,648 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:11:55,649 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:11:55,649 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:11:55,654 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:11:56,162 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:11:56,278 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1130190814_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:11:56,289 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:11:56,292 INFO org.apache.hadoop.mapred.Task: Task attempt_local1130190814_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:11:56,294 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1130190814_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1130190814_0001_r_000000
+2017-03-10 14:11:56,294 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:11:56,295 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1130190814_0001_r_000000_0' done.
+2017-03-10 14:11:56,295 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1130190814_0001_r_000000_0
+2017-03-10 14:11:56,295 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:11:57,163 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:11:57,163 INFO org.apache.hadoop.mapreduce.Job: Job job_local1130190814_0001 completed successfully
+2017-03-10 14:11:57,176 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+	File System Counters
+		FILE: Number of bytes read=11514350
+		FILE: Number of bytes written=1397073
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023494
+		Map output bytes=8925696
+		Map output materialized bytes=167092
+		Input split bytes=120
+		Combine input records=1023494
+		Combine output records=34513
+		Reduce input groups=34513
+		Reduce shuffle bytes=167092
+		Reduce input records=34513
+		Reduce output records=34513
+		Spilled Records=69026
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=84
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=363879
+2017-03-10 14:12:54,192 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:12:54,666 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:12:54,677 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:12:54,726 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
+2017-03-10 14:13:16,264 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:13:16,742 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:13:16,746 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:13:17,210 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:13:17,218 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:13:17,249 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:13:17,469 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1259538348_0001
+2017-03-10 14:13:17,909 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:13:17,911 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1259538348_0001
+2017-03-10 14:13:17,915 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:13:17,923 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:13:17,937 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:13:18,056 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:13:18,059 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1259538348_0001_m_000000_0
+2017-03-10 14:13:18,117 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:13:18,128 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:13:18,132 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:13:18,215 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:13:18,216 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:13:18,216 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:13:18,216 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:13:18,216 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:13:18,226 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:13:18,912 INFO org.apache.hadoop.mapreduce.Job: Job job_local1259538348_0001 running in uber mode : false
+2017-03-10 14:13:18,913 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:13:20,442 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:13:20,443 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:13:20,443 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:13:20,443 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
+2017-03-10 14:13:20,443 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
+2017-03-10 14:13:21,376 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:13:22,115 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:13:22,117 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1259538348_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:13:22,124 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:13:22,124 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1259538348_0001_m_000000_0' done.
+2017-03-10 14:13:22,124 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1259538348_0001_m_000000_0
+2017-03-10 14:13:22,124 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:13:22,132 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:13:22,132 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1259538348_0001_r_000000_0
+2017-03-10 14:13:22,137 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:13:22,137 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:13:22,140 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f792cec
+2017-03-10 14:13:22,151 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:13:22,156 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1259538348_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:13:22,209 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:13:22,209 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1259538348_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
+2017-03-10 14:13:22,215 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local1259538348_0001_m_000000_0
+2017-03-10 14:13:22,217 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
+2017-03-10 14:13:22,219 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:13:22,220 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:13:22,224 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:13:22,229 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:13:22,230 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:13:22,458 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:13:22,459 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
+2017-03-10 14:13:22,459 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:13:22,459 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:13:22,460 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:13:22,460 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:13:22,465 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:13:22,936 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:13:23,199 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1259538348_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:13:23,200 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:13:23,201 INFO org.apache.hadoop.mapred.Task: Task attempt_local1259538348_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:13:23,201 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1259538348_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1259538348_0001_r_000000
+2017-03-10 14:13:23,202 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:13:23,202 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1259538348_0001_r_000000_0' done.
+2017-03-10 14:13:23,202 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1259538348_0001_r_000000_0
+2017-03-10 14:13:23,202 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:13:23,936 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:13:23,937 INFO org.apache.hadoop.mapreduce.Job: Job job_local1259538348_0001 completed successfully
+2017-03-10 14:13:23,948 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+	File System Counters
+		FILE: Number of bytes read=11514350
+		FILE: Number of bytes written=1397073
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023494
+		Map output bytes=8925696
+		Map output materialized bytes=167092
+		Input split bytes=120
+		Combine input records=1023494
+		Combine output records=34513
+		Reduce input groups=34513
+		Reduce shuffle bytes=167092
+		Reduce input records=34513
+		Reduce output records=34513
+		Spilled Records=69026
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=79
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=363879
+2017-03-10 14:15:07,671 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:15:08,143 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:15:08,146 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:15:08,597 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:15:08,610 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:15:08,649 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:15:08,885 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local744282859_0001
+2017-03-10 14:15:09,357 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:15:09,358 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local744282859_0001
+2017-03-10 14:15:09,361 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:15:09,374 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:15:09,376 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:15:09,515 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:15:09,516 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local744282859_0001_m_000000_0
+2017-03-10 14:15:09,574 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:15:09,584 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:15:09,587 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:15:09,674 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:15:09,679 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:15:10,364 INFO org.apache.hadoop.mapreduce.Job: Job job_local744282859_0001 running in uber mode : false
+2017-03-10 14:15:10,366 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:15:11,981 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:15:11,982 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:15:11,982 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:15:11,982 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 8925696; bufvoid = 104857600
+2017-03-10 14:15:11,982 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120424(88481696); length = 4093973/6553600
+2017-03-10 14:15:13,023 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:15:13,726 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:15:13,728 INFO org.apache.hadoop.mapred.Task: Task:attempt_local744282859_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:15:13,739 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:15:13,739 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local744282859_0001_m_000000_0' done.
+2017-03-10 14:15:13,739 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local744282859_0001_m_000000_0
+2017-03-10 14:15:13,739 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:15:13,748 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:15:13,749 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local744282859_0001_r_000000_0
+2017-03-10 14:15:13,753 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:15:13,754 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:15:13,756 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@534d4113
+2017-03-10 14:15:13,766 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:15:13,772 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local744282859_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:15:13,800 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:15:13,800 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local744282859_0001_m_000000_0 decomp: 491464 len: 167092 to MEMORY
+2017-03-10 14:15:13,807 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 491464 bytes from map-output for attempt_local744282859_0001_m_000000_0
+2017-03-10 14:15:13,807 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 491464, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->491464
+2017-03-10 14:15:13,809 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:15:13,813 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:15:13,814 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:15:13,819 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:15:13,819 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:15:14,078 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 491464 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:15:14,079 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 167100 bytes from disk
+2017-03-10 14:15:14,079 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:15:14,079 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:15:14,080 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 491459 bytes
+2017-03-10 14:15:14,080 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:15:14,085 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:15:14,378 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:15:14,760 INFO org.apache.hadoop.mapred.Task: Task:attempt_local744282859_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:15:14,761 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:15:14,761 INFO org.apache.hadoop.mapred.Task: Task attempt_local744282859_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local744282859_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local744282859_0001_r_000000
+2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local744282859_0001_r_000000_0' done.
+2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local744282859_0001_r_000000_0
+2017-03-10 14:15:14,762 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:15:15,379 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:15:15,380 INFO org.apache.hadoop.mapreduce.Job: Job job_local744282859_0001 completed successfully
+2017-03-10 14:15:15,390 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+	File System Counters
+		FILE: Number of bytes read=11514350
+		FILE: Number of bytes written=1394261
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023494
+		Map output bytes=8925696
+		Map output materialized bytes=167092
+		Input split bytes=120
+		Combine input records=1023494
+		Combine output records=34513
+		Reduce input groups=34513
+		Reduce shuffle bytes=167092
+		Reduce input records=34513
+		Reduce output records=34513
+		Spilled Records=69026
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=90
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=363879
+2017-03-10 14:16:55,128 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:16:55,605 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:16:55,614 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:16:55,649 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
+2017-03-10 14:17:14,700 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:17:15,157 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:17:15,169 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:17:15,624 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:17:15,635 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:17:15,668 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:17:15,901 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1420822781_0001
+2017-03-10 14:17:16,319 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:17:16,321 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1420822781_0001
+2017-03-10 14:17:16,323 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:17:16,335 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:17:16,340 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:17:16,441 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:17:16,442 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1420822781_0001_m_000000_0
+2017-03-10 14:17:16,518 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:17:16,531 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:17:16,534 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:17:16,616 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:17:16,617 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:17:16,617 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:17:16,617 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:17:16,617 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:17:16,621 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:17:16,682 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:17:16,697 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:17:16,704 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:17:16,705 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1420822781_0001
+java.lang.Exception: java.lang.ArrayIndexOutOfBoundsException: 1
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.lang.ArrayIndexOutOfBoundsException: 1
+	at similarity.WordSort$Map.loadWordFreq(WordSort.java:87)
+	at similarity.WordSort$Map.setup(WordSort.java:118)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:142)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-10 14:17:17,328 INFO org.apache.hadoop.mapreduce.Job: Job job_local1420822781_0001 running in uber mode : false
+2017-03-10 14:17:17,329 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:17:17,331 INFO org.apache.hadoop.mapreduce.Job: Job job_local1420822781_0001 failed with state FAILED due to: NA
+2017-03-10 14:17:17,336 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
+2017-03-10 14:26:12,465 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:26:12,973 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:26:12,986 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:26:13,468 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:26:13,490 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:26:13,599 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:26:13,979 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1562971559_0001
+2017-03-10 14:26:14,383 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:26:14,385 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1562971559_0001
+2017-03-10 14:26:14,384 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:26:14,391 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:26:14,399 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:26:14,512 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:26:14,513 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1562971559_0001_m_000000_0
+2017-03-10 14:26:14,576 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:26:14,613 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:26:14,617 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:26:14,762 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:26:14,762 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:26:14,763 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:26:14,763 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:26:14,763 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:26:14,766 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:26:15,398 INFO org.apache.hadoop.mapreduce.Job: Job job_local1562971559_0001 running in uber mode : false
+2017-03-10 14:26:15,398 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:26:15,473 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:26:15,481 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:26:15,488 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:26:15,490 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1562971559_0001
+java.lang.Exception: java.lang.NullPointerException
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.lang.NullPointerException
+	at similarity.WordSort$Map$1.compare(WordSort.java:135)
+	at similarity.WordSort$Map$1.compare(WordSort.java:1)
+	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
+	at java.util.TimSort.sort(TimSort.java:189)
+	at java.util.TimSort.sort(TimSort.java:173)
+	at java.util.Arrays.sort(Arrays.java:659)
+	at java.util.Collections.sort(Collections.java:217)
+	at similarity.WordSort$Map.map(WordSort.java:131)
+	at similarity.WordSort$Map.map(WordSort.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-10 14:26:16,400 INFO org.apache.hadoop.mapreduce.Job: Job job_local1562971559_0001 failed with state FAILED due to: NA
+2017-03-10 14:26:16,402 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
+2017-03-10 14:29:37,807 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:29:38,356 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:29:38,361 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:29:38,865 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:29:38,874 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:29:38,909 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:29:39,202 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local705841975_0001
+2017-03-10 14:29:39,678 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:29:39,680 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local705841975_0001
+2017-03-10 14:29:39,685 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:29:39,703 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:29:39,705 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:29:39,828 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:29:39,829 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local705841975_0001_m_000000_0
+2017-03-10 14:29:39,879 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:29:39,891 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:29:39,896 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:29:39,983 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:29:39,984 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:29:39,984 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:29:39,984 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:29:39,984 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:29:39,988 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:29:40,687 INFO org.apache.hadoop.mapreduce.Job: Job job_local705841975_0001 running in uber mode : false
+2017-03-10 14:29:40,690 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:29:40,701 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:29:40,708 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:29:40,717 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:29:40,718 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local705841975_0001
+java.lang.Exception: java.lang.NullPointerException
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.lang.NullPointerException
+	at similarity.WordSort$Map$1.compare(WordSort.java:138)
+	at similarity.WordSort$Map$1.compare(WordSort.java:1)
+	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
+	at java.util.TimSort.sort(TimSort.java:189)
+	at java.util.TimSort.sort(TimSort.java:173)
+	at java.util.Arrays.sort(Arrays.java:659)
+	at java.util.Collections.sort(Collections.java:217)
+	at similarity.WordSort$Map.map(WordSort.java:134)
+	at similarity.WordSort$Map.map(WordSort.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-10 14:29:41,693 INFO org.apache.hadoop.mapreduce.Job: Job job_local705841975_0001 failed with state FAILED due to: NA
+2017-03-10 14:29:41,695 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
+2017-03-10 14:34:26,674 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:34:27,174 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:34:27,179 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:34:27,223 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount already exists
+2017-03-10 14:34:55,125 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:34:55,598 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:34:55,612 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:34:56,114 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:34:56,121 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:34:56,259 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:34:56,554 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1013376941_0001
+2017-03-10 14:34:56,980 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:34:56,982 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:34:56,983 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1013376941_0001
+2017-03-10 14:34:56,990 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:34:56,995 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:34:57,121 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:34:57,124 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1013376941_0001_m_000000_0
+2017-03-10 14:34:57,181 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:34:57,198 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:34:57,202 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:34:57,295 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:34:57,298 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:34:57,298 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:34:57,298 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:34:57,298 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:34:57,300 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:34:57,967 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:34:57,974 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:34:57,988 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:34:57,990 INFO org.apache.hadoop.mapreduce.Job: Job job_local1013376941_0001 running in uber mode : false
+2017-03-10 14:34:57,991 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:34:57,993 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1013376941_0001
+java.lang.Exception: java.lang.NullPointerException
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.lang.NullPointerException
+	at similarity.WordSort$Map$1.compare(WordSort.java:146)
+	at similarity.WordSort$Map$1.compare(WordSort.java:1)
+	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
+	at java.util.TimSort.sort(TimSort.java:189)
+	at java.util.TimSort.sort(TimSort.java:173)
+	at java.util.Arrays.sort(Arrays.java:659)
+	at java.util.Collections.sort(Collections.java:217)
+	at similarity.WordSort$Map.map(WordSort.java:142)
+	at similarity.WordSort$Map.map(WordSort.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-10 14:34:57,995 INFO org.apache.hadoop.mapreduce.Job: Job job_local1013376941_0001 failed with state FAILED due to: NA
+2017-03-10 14:34:57,998 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
+2017-03-10 14:38:51,972 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:38:52,515 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:38:52,527 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:38:53,098 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:38:53,128 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:38:53,247 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:38:53,578 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1410438889_0001
+2017-03-10 14:38:54,016 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:38:54,017 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1410438889_0001
+2017-03-10 14:38:54,024 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:38:54,064 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:38:54,065 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:38:54,219 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:38:54,220 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1410438889_0001_m_000000_0
+2017-03-10 14:38:54,279 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:38:54,293 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:38:54,296 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:38:54,396 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:38:54,396 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:38:54,396 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:38:54,397 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:38:54,397 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:38:54,400 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:38:55,024 INFO org.apache.hadoop.mapreduce.Job: Job job_local1410438889_0001 running in uber mode : false
+2017-03-10 14:38:55,027 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:38:55,119 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:38:55,119 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:38:55,119 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 325; bufvoid = 104857600
+2017-03-10 14:38:55,119 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214376(104857504); length = 21/6553600
+2017-03-10 14:38:55,128 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:38:55,137 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:38:55,141 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:38:55,141 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1410438889_0001
+java.lang.Exception: java.lang.NullPointerException
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.lang.NullPointerException
+	at java.lang.Integer.compareTo(Integer.java:1003)
+	at similarity.WordSort$Map$1.compare(WordSort.java:144)
+	at similarity.WordSort$Map$1.compare(WordSort.java:1)
+	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
+	at java.util.TimSort.sort(TimSort.java:189)
+	at java.util.TimSort.sort(TimSort.java:173)
+	at java.util.Arrays.sort(Arrays.java:659)
+	at java.util.Collections.sort(Collections.java:217)
+	at similarity.WordSort$Map.map(WordSort.java:140)
+	at similarity.WordSort$Map.map(WordSort.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-10 14:38:56,030 INFO org.apache.hadoop.mapreduce.Job: Job job_local1410438889_0001 failed with state FAILED due to: NA
+2017-03-10 14:38:56,035 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
+2017-03-10 14:40:16,992 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:40:17,522 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:40:17,536 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:40:18,047 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:40:18,055 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:40:18,089 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:40:18,383 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local483843492_0001
+2017-03-10 14:40:18,855 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:40:18,856 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local483843492_0001
+2017-03-10 14:40:18,860 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:40:18,887 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:40:18,889 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:40:19,021 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:40:19,023 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local483843492_0001_m_000000_0
+2017-03-10 14:40:19,078 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:40:19,106 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:40:19,109 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:40:19,243 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:40:19,243 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:40:19,243 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:40:19,244 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:40:19,244 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:40:19,249 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:40:19,852 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:40:19,852 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:40:19,852 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 325; bufvoid = 104857600
+2017-03-10 14:40:19,852 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 26214376(104857504); length = 21/6553600
+2017-03-10 14:40:19,858 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:40:19,860 INFO org.apache.hadoop.mapreduce.Job: Job job_local483843492_0001 running in uber mode : false
+2017-03-10 14:40:19,861 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:40:19,866 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:40:19,869 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:40:19,870 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local483843492_0001
+java.lang.Exception: java.lang.NullPointerException
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
+Caused by: java.lang.NullPointerException
+	at java.lang.Integer.compareTo(Integer.java:1003)
+	at similarity.WordSort$Map$1.compare(WordSort.java:144)
+	at similarity.WordSort$Map$1.compare(WordSort.java:1)
+	at java.util.TimSort.countRunAndMakeAscending(TimSort.java:324)
+	at java.util.TimSort.sort(TimSort.java:189)
+	at java.util.TimSort.sort(TimSort.java:173)
+	at java.util.Arrays.sort(Arrays.java:659)
+	at java.util.Collections.sort(Collections.java:217)
+	at similarity.WordSort$Map.map(WordSort.java:140)
+	at similarity.WordSort$Map.map(WordSort.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-03-10 14:40:20,864 INFO org.apache.hadoop.mapreduce.Job: Job job_local483843492_0001 failed with state FAILED due to: NA
+2017-03-10 14:40:20,871 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
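[editor's note] The three failed runs above all die inside the anonymous comparator of similarity.WordSort$Map (WordSort.java:138, :146, :144), and the last two traces show the NullPointerException surfacing from java.lang.Integer.compareTo, which is what happens when the comparator hands compareTo a null Integer, typically a Map.get() miss while Collections.sort runs inside map(). Below is a hedged, self-contained sketch of a null-tolerant comparator; the class, the counts table, and the "missing entry sorts as 0" policy are assumptions for illustration, not the author's code.

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.Comparator;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class NullSafeSortSketch {
        public static void main(String[] args) {
            // word -> frequency table; "c" is deliberately missing to mimic a null lookup
            final Map<String, Integer> counts = new HashMap<String, Integer>();
            counts.put("a", 3);
            counts.put("b", 1);

            List<String> words = new ArrayList<String>();
            Collections.addAll(words, "a", "b", "c");

            // Null-tolerant comparator: a missing entry sorts as frequency 0 instead of
            // letting Integer.compareTo be invoked against a null reference.
            Collections.sort(words, new Comparator<String>() {
                @Override
                public int compare(String x, String y) {
                    int cx = counts.containsKey(x) ? counts.get(x) : 0;
                    int cy = counts.containsKey(y) ? counts.get(y) : 0;
                    return Integer.compare(cx, cy);
                }
            });

            System.out.println(words); // prints [c, b, a]
        }
    }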
+2017-03-10 14:41:43,006 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:41:43,480 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:41:43,485 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:41:44,008 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:41:44,042 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:41:44,079 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:41:44,328 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1726509137_0001
+2017-03-10 14:41:44,799 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:41:44,800 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1726509137_0001
+2017-03-10 14:41:44,806 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:41:44,835 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:41:44,838 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:41:44,977 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:41:44,978 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1726509137_0001_m_000000_0
+2017-03-10 14:41:45,030 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:41:45,051 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:41:45,058 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:41:45,138 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:41:45,142 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:41:45,143 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:41:45,143 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:41:45,143 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:41:45,149 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:41:45,805 INFO org.apache.hadoop.mapreduce.Job: Job job_local1726509137_0001 running in uber mode : false
+2017-03-10 14:41:45,806 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 4776007; bufvoid = 104857600
+2017-03-10 14:41:48,519 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25753980(103015920); length = 460417/6553600
+2017-03-10 14:41:48,780 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:41:49,395 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:41:49,398 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1726509137_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:41:49,404 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:41:49,408 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1726509137_0001_m_000000_0' done.
+2017-03-10 14:41:49,409 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1726509137_0001_m_000000_0
+2017-03-10 14:41:49,409 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:41:49,411 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:41:49,411 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1726509137_0001_r_000000_0
+2017-03-10 14:41:49,416 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:41:49,417 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:41:49,419 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@70d586bc
+2017-03-10 14:41:49,429 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:41:49,435 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1726509137_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:41:49,461 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:41:49,461 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1726509137_0001_m_000000_0 decomp: 5006219 len: 2376884 to MEMORY
+2017-03-10 14:41:49,517 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5006219 bytes from map-output for attempt_local1726509137_0001_m_000000_0
+2017-03-10 14:41:49,521 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5006219, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5006219
+2017-03-10 14:41:49,523 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:41:49,524 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:41:49,524 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:41:49,529 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:41:49,529 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5006209 bytes
+2017-03-10 14:41:49,818 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:41:50,226 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5006219 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:41:50,227 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2376892 bytes from disk
+2017-03-10 14:41:50,227 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:41:50,228 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:41:50,228 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5006209 bytes
+2017-03-10 14:41:50,230 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:41:50,235 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:41:50,888 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1726509137_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:41:50,898 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:41:50,899 INFO org.apache.hadoop.mapred.Task: Task attempt_local1726509137_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:41:50,900 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1726509137_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local1726509137_0001_r_000000
+2017-03-10 14:41:50,901 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:41:50,901 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1726509137_0001_r_000000_0' done.
+2017-03-10 14:41:50,901 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1726509137_0001_r_000000_0
+2017-03-10 14:41:50,901 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:41:51,822 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:41:51,822 INFO org.apache.hadoop.mapreduce.Job: Job job_local1726509137_0001 completed successfully
+2017-03-10 14:41:51,834 INFO org.apache.hadoop.mapreduce.Job: Counters: 33
+	File System Counters
+		FILE: Number of bytes read=16657772
+		FILE: Number of bytes written=12451545
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=115105
+		Map output bytes=4776007
+		Map output materialized bytes=2376884
+		Input split bytes=120
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=115105
+		Reduce shuffle bytes=2376884
+		Reduce input records=115105
+		Reduce output records=115105
+		Spilled Records=230210
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=140
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=4789771
+2017-03-10 14:54:30,023 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:54:30,544 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:54:30,565 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:54:31,208 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:54:31,222 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:54:31,311 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:54:31,680 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local520002813_0001
+2017-03-10 14:54:32,173 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:54:32,174 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local520002813_0001
+2017-03-10 14:54:32,184 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:54:32,195 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:54:32,197 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:54:32,352 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:54:32,354 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local520002813_0001_m_000000_0
+2017-03-10 14:54:32,427 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:54:32,457 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:54:32,463 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:54:32,548 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:54:32,549 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:54:32,550 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:54:32,550 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:54:32,550 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:54:32,559 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:54:33,178 INFO org.apache.hadoop.mapreduce.Job: Job job_local520002813_0001 running in uber mode : false
+2017-03-10 14:54:33,179 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 5541084; bufvoid = 104857600
+2017-03-10 14:54:35,891 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25753980(103015920); length = 460417/6553600
+2017-03-10 14:54:36,206 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:54:37,261 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:54:37,264 INFO org.apache.hadoop.mapred.Task: Task:attempt_local520002813_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:54:37,275 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:54:37,280 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local520002813_0001_m_000000_0' done.
+2017-03-10 14:54:37,280 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local520002813_0001_m_000000_0
+2017-03-10 14:54:37,280 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:54:37,284 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:54:37,284 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local520002813_0001_r_000000_0
+2017-03-10 14:54:37,289 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:54:37,289 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:54:37,293 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@22c4d1da
+2017-03-10 14:54:37,303 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:54:37,312 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local520002813_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:54:37,342 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:54:37,342 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local520002813_0001_m_000000_0 decomp: 5771296 len: 2500118 to MEMORY
+2017-03-10 14:54:37,405 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5771296 bytes from map-output for attempt_local520002813_0001_m_000000_0
+2017-03-10 14:54:37,408 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5771296, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5771296
+2017-03-10 14:54:37,410 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:54:37,410 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:54:37,411 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:54:37,420 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:54:37,420 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
+2017-03-10 14:54:38,188 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:54:38,209 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5771296 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:54:38,209 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2500126 bytes from disk
+2017-03-10 14:54:38,210 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:54:38,210 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:54:38,210 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
+2017-03-10 14:54:38,211 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:54:38,219 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:54:39,174 INFO org.apache.hadoop.mapred.Task: Task:attempt_local520002813_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:54:39,181 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:54:39,181 INFO org.apache.hadoop.mapred.Task: Task attempt_local520002813_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:54:39,187 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local520002813_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local520002813_0001_r_000000
+2017-03-10 14:54:39,188 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:54:39,188 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local520002813_0001_r_000000_0' done.
+2017-03-10 14:54:39,188 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local520002813_0001_r_000000_0
+2017-03-10 14:54:39,188 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:54:39,190 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:54:40,191 INFO org.apache.hadoop.mapreduce.Job: Job job_local520002813_0001 completed successfully
+2017-03-10 14:54:40,203 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
+	File System Counters
+		FILE: Number of bytes read=16904240
+		FILE: Number of bytes written=13589484
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=115105
+		Map output bytes=5541084
+		Map output materialized bytes=2500118
+		Input split bytes=120
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=115105
+		Reduce shuffle bytes=2500118
+		Reduce input records=115105
+		Reduce output records=115105
+		Spilled Records=230210
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=124
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=5560824
+	similarity.WordSort$DocLineCounter
+		NUM=124787
+2017-03-10 14:58:53,626 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 14:58:54,092 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 14:58:54,097 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 14:58:54,564 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 14:58:54,572 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 14:58:54,619 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 14:58:54,850 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local544686432_0001
+2017-03-10 14:58:55,274 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 14:58:55,275 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local544686432_0001
+2017-03-10 14:58:55,284 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 14:58:55,298 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:58:55,301 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 14:58:55,409 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 14:58:55,411 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local544686432_0001_m_000000_0
+2017-03-10 14:58:55,473 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:58:55,485 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:58:55,487 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 14:58:55,566 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 14:58:55,567 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 14:58:55,567 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 14:58:55,567 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 14:58:55,567 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 14:58:55,569 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 14:58:56,279 INFO org.apache.hadoop.mapreduce.Job: Job job_local544686432_0001 running in uber mode : false
+2017-03-10 14:58:56,280 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 14:58:58,471 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 14:58:58,472 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 14:58:58,472 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 14:58:58,472 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 5541084; bufvoid = 104857600
+2017-03-10 14:58:58,472 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25753980(103015920); length = 460417/6553600
+2017-03-10 14:58:58,671 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 14:58:59,336 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 14:58:59,339 INFO org.apache.hadoop.mapred.Task: Task:attempt_local544686432_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 14:58:59,344 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 14:58:59,344 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local544686432_0001_m_000000_0' done.
+2017-03-10 14:58:59,344 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local544686432_0001_m_000000_0
+2017-03-10 14:58:59,345 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 14:58:59,347 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 14:58:59,348 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local544686432_0001_r_000000_0
+2017-03-10 14:58:59,353 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 14:58:59,353 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 14:58:59,355 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@8a93430
+2017-03-10 14:58:59,364 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 14:58:59,370 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local544686432_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 14:58:59,395 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 14:58:59,395 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local544686432_0001_m_000000_0 decomp: 5771296 len: 2405517 to MEMORY
+2017-03-10 14:58:59,447 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5771296 bytes from map-output for attempt_local544686432_0001_m_000000_0
+2017-03-10 14:58:59,447 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5771296, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5771296
+2017-03-10 14:58:59,448 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 14:58:59,450 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:58:59,450 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 14:58:59,454 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:58:59,455 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
+2017-03-10 14:59:00,203 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5771296 bytes to disk to satisfy reduce memory limit
+2017-03-10 14:59:00,203 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2405525 bytes from disk
+2017-03-10 14:59:00,204 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 14:59:00,204 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 14:59:00,205 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
+2017-03-10 14:59:00,205 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:59:00,210 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 14:59:00,290 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 14:59:00,890 INFO org.apache.hadoop.mapred.Task: Task:attempt_local544686432_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 14:59:00,895 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 14:59:00,895 INFO org.apache.hadoop.mapred.Task: Task attempt_local544686432_0001_r_000000_0 is allowed to commit now
+2017-03-10 14:59:00,896 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local544686432_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local544686432_0001_r_000000
+2017-03-10 14:59:00,897 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 14:59:00,897 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local544686432_0001_r_000000_0' done.
+2017-03-10 14:59:00,901 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local544686432_0001_r_000000_0
+2017-03-10 14:59:00,901 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 14:59:01,290 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 14:59:01,291 INFO org.apache.hadoop.mapreduce.Job: Job job_local544686432_0001 completed successfully
+2017-03-10 14:59:01,312 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
+	File System Counters
+		FILE: Number of bytes read=16715038
+		FILE: Number of bytes written=13110192
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=115105
+		Map output bytes=5541084
+		Map output materialized bytes=2405517
+		Input split bytes=120
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=115105
+		Reduce shuffle bytes=2405517
+		Reduce input records=115105
+		Reduce output records=115105
+		Spilled Records=230210
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=113
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=5365335
+	similarity.WordSort$DocLineCounter
+		NUM=124787
+2017-03-10 16:04:16,924 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 16:04:17,595 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 16:04:17,605 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 16:04:17,661 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/WordCount/output already exists
+2017-03-10 16:04:38,662 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-03-10 16:04:39,236 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-03-10 16:04:39,243 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-03-10 16:04:39,836 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-03-10 16:04:39,846 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-03-10 16:04:39,896 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-03-10 16:04:40,232 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local2107526736_0001
+2017-03-10 16:04:40,744 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-03-10 16:04:40,745 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local2107526736_0001
+2017-03-10 16:04:40,753 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-03-10 16:04:40,762 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 16:04:40,772 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-03-10 16:04:40,922 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-03-10 16:04:40,925 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2107526736_0001_m_000000_0
+2017-03-10 16:04:40,998 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 16:04:41,021 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 16:04:41,024 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/WordCount/input/pg100.txt:0+5589886
+2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-03-10 16:04:41,117 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-03-10 16:04:41,121 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-03-10 16:04:41,747 INFO org.apache.hadoop.mapreduce.Job: Job job_local2107526736_0001 running in uber mode : false
+2017-03-10 16:04:41,749 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-03-10 16:04:44,514 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-03-10 16:04:44,515 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-03-10 16:04:44,515 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-03-10 16:04:44,515 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 5541084; bufvoid = 104857600
+2017-03-10 16:04:44,515 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25753980(103015920); length = 460417/6553600
+2017-03-10 16:04:44,850 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.deflate]
+2017-03-10 16:04:45,645 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-03-10 16:04:45,648 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2107526736_0001_m_000000_0 is done. And is in the process of committing
+2017-03-10 16:04:45,653 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-03-10 16:04:45,654 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2107526736_0001_m_000000_0' done.
+2017-03-10 16:04:45,654 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2107526736_0001_m_000000_0
+2017-03-10 16:04:45,654 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-03-10 16:04:45,658 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-03-10 16:04:45,659 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2107526736_0001_r_000000_0
+2017-03-10 16:04:45,664 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-03-10 16:04:45,664 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-03-10 16:04:45,666 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@a9b03af
+2017-03-10 16:04:45,680 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-03-10 16:04:45,687 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2107526736_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-03-10 16:04:45,730 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.deflate]
+2017-03-10 16:04:45,730 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2107526736_0001_m_000000_0 decomp: 5771296 len: 2404026 to MEMORY
+2017-03-10 16:04:45,758 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-03-10 16:04:45,813 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5771296 bytes from map-output for attempt_local2107526736_0001_m_000000_0
+2017-03-10 16:04:45,813 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5771296, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->5771296
+2017-03-10 16:04:45,817 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-03-10 16:04:45,818 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 16:04:45,818 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-03-10 16:04:45,825 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 16:04:45,826 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
+2017-03-10 16:04:46,671 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 5771296 bytes to disk to satisfy reduce memory limit
+2017-03-10 16:04:46,672 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 2404034 bytes from disk
+2017-03-10 16:04:46,672 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-03-10 16:04:46,672 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-03-10 16:04:46,673 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5771286 bytes
+2017-03-10 16:04:46,674 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 16:04:46,680 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-03-10 16:04:47,525 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2107526736_0001_r_000000_0 is done. And is in the process of committing
+2017-03-10 16:04:47,532 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-03-10 16:04:47,532 INFO org.apache.hadoop.mapred.Task: Task attempt_local2107526736_0001_r_000000_0 is allowed to commit now
+2017-03-10 16:04:47,533 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2107526736_0001_r_000000_0' to file:/home/cloudera/workspace/WordCount/output/_temporary/0/task_local2107526736_0001_r_000000
+2017-03-10 16:04:47,533 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-03-10 16:04:47,534 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2107526736_0001_r_000000_0' done.
+2017-03-10 16:04:47,534 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2107526736_0001_r_000000_0
+2017-03-10 16:04:47,534 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-03-10 16:04:47,766 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-03-10 16:04:47,767 INFO org.apache.hadoop.mapreduce.Job: Job job_local2107526736_0001 completed successfully
+2017-03-10 16:04:47,785 INFO org.apache.hadoop.mapreduce.Job: Counters: 34
+	File System Counters
+		FILE: Number of bytes read=16712056
+		FILE: Number of bytes written=13108535
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=115105
+		Map output bytes=5541084
+		Map output materialized bytes=2404026
+		Input split bytes=120
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=115105
+		Reduce shuffle bytes=2404026
+		Reduce input records=115105
+		Reduce output records=115105
+		Spilled Records=230210
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=142
+		CPU time spent (ms)=0
+		Physical memory (bytes) snapshot=0
+		Virtual memory (bytes) snapshot=0
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=5589886
+	File Output Format Counters 
+		Bytes Written=5365335
+	similarity.WordSort$DocLineCounter
+		NUM=124787
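[editor's note] The successful runs above end with a user-defined counter group, similarity.WordSort$DocLineCounter with NUM=124787, which matches "Map input records", i.e. one increment per input line of pg100.txt. A hedged sketch of the usual enum-counter pattern behind such a group follows; in the patch the enum evidently lives in similarity.WordSort, so the wrapper class and mapper types here are assumptions, while getCounter/increment are standard Hadoop API.

    import java.io.IOException;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;

    public class CounterSketch {
        // Enum nested in the job class; Hadoop reports it as <OuterClass>$DocLineCounter.
        public static enum DocLineCounter { NUM }

        public static class Map extends Mapper<LongWritable, Text, Text, Text> {
            @Override
            protected void map(LongWritable key, Text value, Context context)
                    throws IOException, InterruptedException {
                // One increment per input record; over pg100.txt this totals 124787,
                // matching the NUM value printed in the job summaries above.
                context.getCounter(DocLineCounter.NUM).increment(1);
                // ... rest of the map logic ...
            }
        }
    }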
diff --git a/linenumber_sample b/linenumber_sample
new file mode 100644
index 0000000..e37d32a
--- /dev/null
+++ b/linenumber_sample
@@ -0,0 +1 @@
+1000
\ No newline at end of file
diff --git a/output/.part-r-00000.crc b/output/.part-r-00000.crc
index 3cd8bddcead5ef43f18e4e33ef6a1799e9921951..42e5f645f06fdca9d852a9b0009d8028a7d44f1c 100644
GIT binary patch
[base85 payload omitted: forward literal of 12 bytes (new output/.part-r-00000.crc contents) followed by reverse literal of 41600 bytes (previous contents)]

diff --git a/output/part-r-00000 b/output/part-r-00000
index 66121c0..c732e9d 100644
--- a/output/part-r-00000
+++ b/output/part-r-00000
@@ -1,2 +1 @@
-be	28565
-up	8609
+2,1	1.0
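The reducer output recorded here changes from per-word counts ("be 28565", "up 8609") to a single pair-plus-score record, "2,1" mapped to 1.0, which reads like a similarity score for a pair of line ids. Assuming, purely for illustration (the patch does not say), that the score is a Jaccard-style overlap of the two lines' word sets, a value of 1.0 would mean identical word sets, as in this hedged sketch:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Hypothetical sketch: nothing in this patch confirms that the "2,1  1.0"
// record is a Jaccard score; this only illustrates one plausible reading.
public class LinePairSimilaritySketch {

    static double jaccard(Set<String> a, Set<String> b) {
        if (a.isEmpty() && b.isEmpty()) {
            return 1.0;                      // convention: two empty sets match
        }
        Set<String> intersection = new HashSet<>(a);
        intersection.retainAll(b);
        Set<String> union = new HashSet<>(a);
        union.addAll(b);
        return (double) intersection.size() / union.size();
    }

    public static void main(String[] args) {
        // Illustrative word sets only; the ids "2,1" below are not tied to
        // specific lines of sortedline_sample.
        Set<String> lineA = new HashSet<>(Arrays.asList("Shakespeare", "William"));
        Set<String> lineB = new HashSet<>(Arrays.asList("William", "Shakespeare"));
        System.out.println("2,1\t" + jaccard(lineA, lineB));  // prints 2,1<TAB>1.0
    }
}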
diff --git a/sortedline_sample b/sortedline_sample
new file mode 100644
index 0000000..c090de7
--- /dev/null
+++ b/sortedline_sample
@@ -0,0 +1,855 @@
+1	EBook,Complete,Works,Gutenberg,Shakespeare,William,Project,by,The,of
+2	Shakespeare,William
+4	anyone,anywhere,eBook,cost,use,This,at,no,for,with,is,of,and,the
+5	restrictions,whatsoever,copy,almost,away,give,may,You,or,no,it
+6	reuse,included,License,Gutenberg,Project,terms,under,it,of,the
+7	wwwgutenbergorg,online,eBook,or,at,this,with
+9	Details,COPYRIGHTED,Below,eBook,Gutenberg,Project,This,is,a
+10	guidelines,file,copyright,Please,follow,this,in,the
+12	Title,Complete,Works,Shakespeare,William,The,of
+14	Author,Shakespeare,William
+16	Posting,2011,September,Date,100,EBook,1
+17	1994,Release,Date,January
+19	Language,English
+22	START,EBOOK,WORKS,GUTENBERG,SHAKESPEARE,COMPLETE,PROJECT,THIS,WILLIAM,OF
+27	Produced,Future,Inc,World,Library,their,from,by,of,the
+32	100th,Etext,presented,file,Gutenberg,Project,This,by,is,and,the
+33	cooperation,Inc,World,Library,presented,their,from,with,is,in
+34	CDROMS,Future,Library,Gutenberg,Shakespeare,Project,of,and,the
+35	releases,Etexts,Domain,Public,placed,often,NOT,are,that,in,the
+37	Shakespeare
+39	implications,Etext,copyright,certain,read,has,This,should,you
+41	VERSION,WORKS,COMPLETE,THIS,WILLIAM,THE,ELECTRONIC,OF
+42	INC,WORLD,19901993,COPYRIGHT,LIBRARY,SHAKESPEARE,IS,BY,AND
+43	COLLEGE,BENEDICTINE,ILLINOIS,PROVIDED,ETEXT,GUTENBERG,PROJECT,BY,OF
+44	MACHINE,PERMISSION,READABLE,WITH,MAY,BE,COPIES,ELECTRONIC,AND
+45	YOUR,LONG,AS,SUCH,SO,OTHERS,1,DISTRIBUTED,COPIES,ARE,FOR,OR
+46	PERSONAL,ONLY,USED,USE,NOT,2,DISTRIBUTED,ARE,AND,OR
+47	INCLUDES,COMMERCIALLY,DISTRIBUTION,PROHIBITED,COMMERCIAL,ANY,BY
+48	SERVICE,MEMBERSHIP,DOWNLOAD,CHARGES,TIME,THAT,FOR,OR
+50	cooperate,World,Library,Gutenberg,Project,proud,The,with,is,to
+51	presentation,Complete,Works,Shakespeare,William,The,in,of,the
+52	HOWEVER,education,reading,entertainment,THIS,for,your,and
+53	NEITHER,NOR,PUBLIC,DOMAIN,SHAREWARE,UNDER,LIBRARY,THE,IS,AND
+54	FUTURE,PRESENTATION,CONDITIONS,NO,MAY,CHARGES,THIS,THE,OF
+55	ACCESS,MATERIAL,MADE,ENCOURAGED,TO,YOU,BE,ANY,THIS,ARE,FOR
+56	AWAY,ANYONE,ALLOWED,LIKE,GIVE,IT,BUT,NO,TO,YOU,CHARGES,ARE
+61	SMALL,PRINT,SHAKESPEARE,COMPLETE,for
+63	VERSION,WORKS,COMPLETE,THIS,WILLIAM,THE,ELECTRONIC,OF
+64	INC,WORLD,19901993,COPYRIGHT,LIBRARY,SHAKESPEARE,IS,BY
+65	PROVIDED,ETEXT,GUTENBERG,PROJECT,IS,BY,AND,OF
+66	COLLEGE,PERMISSION,BENEDICTINE,ILLINOIS,WITH
+68	etexts,unlike,etext,Gutenbergtm,Project,Since,many,other,this
+69	materials,methods,protected,copyright,since,is,you,and,the
+70	Projects,copy,reputation,effect,use,right,will,your,to,and,the
+71	limited,distribute,copyright,laws,other,by,it,is,and,the
+72	statement,Print,Small,conditions,this,of,the
+74	LICENSE,1
+76	DISTRIBUTE,ENCOURAGED,TO,YOU,MAY,ELECTRONIC,ARE,AND,A
+77	MACHINE,READABLE,LONG,AS,SUCH,ETEXT,SO,THIS,COPIES,OF
+78	YOUR,PERSONAL,ONLY,USE,NOT,OTHERS,2,1,ARE,FOR,AND,OR
+79	COMMERCIALLY,PROHIBITED,COMMERCIAL,USED,DISTRIBUTED,OR
+80	INCLUDES,SERVICE,DISTRIBUTION,DOWNLOAD,CHARGES,THAT,ANY,BY,FOR
+81	MEMBERSHIP,TIME,FOR,OR
+83	B,license,conditions,honor,subject,This,that,is,you,to,the
+84	provisions,replacement,refund,print,small,this,of,and,the
+85	statement,exact,distribute,copies,etext,this,that,you,of,and
+86	statement,Print,copies,including,Small,Such,can,this,be
+87	resulting,proprietary,compressed,including,form,any,or
+88	software,processing,hypertext,long,word,or,from,so,as
+89	EITHER
+91	displayed,readable,clearly,etext,does,1,when,The,is,and
+92	characters,intended,contain,those,other,than,by,not,the
+93	tilde,asterisk,author,although,work,of,and,the
+94	underline,punctuation,characters,used,convey,may,be,to
+95	additional,characters,author,intended,used,may,by,be,and,the
+96	indicate,hypertext,links,OR,to
+98	convertible,readily,reader,etext,2,at,no,by,The,is,the
+99	equivalent,EBCDIC,ASCII,expense,plain,form,into,or,by,the
+100	displays,program,etext,instance,case,as,for,that,is,the
+101	processors,word,OR,most,with
+103	agree,provide,3,request,You,or,at,no,on,to
+104	additional,etext,expense,copy,fee,cost,plain,or,in,a,of,the
+105	ASCII
+107	DISCLAIMER,WARRANTY,DAMAGES,LIMITED,2,OF
+109	incomplete,Defect,contain,etext,form,This,may,in,a,of,the
+110	data,inaccurate,transcription,errors,copyright,corrupt,or,a
+111	damaged,disk,virus,infringement,computer,defective,other,or,a
+112	codes,equipment,damage,read,cannot,or,But,by,your,be,that
+113	Replacement,Refund,described,Right,below,or,for,of,the
+114	etext,party,receive,Project,other,any,may,from,as,this,you,and
+115	GUTENBERGtm,disclaims,liability,etext,PROJECT,all,for,you,a,to
+116	HAVE,damages,legal,expenses,costs,fees,including,YOU,and
+117	LIABILITY,STRICT,NEGLIGENCE,REMEDIES,UNDER,NO,FOR,OR
+118	CONTRACT,INCLUDING,BREACH,BUT,WARRANTY,LIMITED,TO,NOT,OR,OF
+119	CONSEQUENTIAL,INDIRECT,PUNITIVE,INCIDENTAL,EVEN,IF,DAMAGES,OR
+120	NOTICE,POSSIBILITY,GIVE,DAMAGES,YOU,SUCH,THE,OF
+122	receiv,90,Defect,etext,discover,days,within,If,this,in,you,a,of
+123	ing,refund,paid,receive,money,any,can,if,it,you,a,of,the
+124	explanatory,sending,note,within,time,an,by,for,it,that,to,the
+125	physical,received,person,If,from,on,it,you,a
+126	medium,note,person,return,such,must,may,your,it,with,you,and
+127	alternatively,replacement,copy,choose,give,If,you,a,to
+128	electronically,received,choose,person,such,may,it,to
+129	alternatively,opportunity,receive,second,give,it,you,a,to
+130	electronically
+132	OTHERWISE,ASIS,OTHER,NO,TO,YOU,PROVIDED,ETEXT,THIS,IS
+133	MADE,EXPRESS,IMPLIED,KIND,WARRANTIES,TO,YOU,AS,ANY,ARE,OR,OF
+134	ON,MEDIUM,IT,INCLUDING,BUT,TO,MAY,BE,ETEXT,NOT,ANY,THE,OR
+135	MERCHANTABILITY,FITNESS,WARRANTIES,LIMITED,TO,FOR,OR,OF,A
+136	PARTICULAR,PURPOSE,disclaimers,states,allow,Some,do,not,of
+137	consequen,exclusion,warranties,implied,limitation,or,of,the
+138	tial,exclusions,disclaimers,damages,above,may,so,not,and,the
+139	legal,apply,rights,other,may,have,you,to,and
+141	INDEMNITY,indemnify,its,3,Project,hold,You,will,and,the
+142	lia,directors,agents,members,harmless,officers,from,all,and
+143	bility,legal,fees,including,expense,arise,cost,that,and
+144	indirectly,directly,following,any,or,from,do,that,you,of,the
+145	B,distribution,alteration,etext,cause,A,this,of
+146	modification,Defect,C,etext,addition,any,or,to,the
+148	WANT,MONEY,DONT,SEND,WHAT,HAVE,EVEN,IF,TO,4,YOU
+149	dedicated,increasing,Gutenberg,number,Project,is,of,to,the
+150	licensed,distributed,domain,freely,works,public,can,be,that,and
+151	gratefully,machine,accepts,readable,Project,form,The,in
+152	OCR,scanning,machines,software,contributions,money,time,in
+153	licenses,etexts,domain,copyright,royalty,public,free,and
+154	Pro,whatever,Money,paid,else,think,can,should,be,you,of,to
+155	ject,Benedictine,Illinois,Association,College,Gutenberg
+157	Kramer,Attorney,Print,B,Small,Charles,This,by
+158	726002026compuservecom,TEL,2122545093,Internet
+159	SMALL,PRINT,SHAKESPEARE,COMPLETE,FOR
+160	V120893,Print,Small
+162	VERSION,WORKS,COMPLETE,THIS,WILLIAM,THE,ELECTRONIC,OF
+163	INC,WORLD,19901993,COPYRIGHT,LIBRARY,SHAKESPEARE,IS,BY,AND
+164	COLLEGE,BENEDICTINE,ILLINOIS,PROVIDED,ETEXT,GUTENBERG,PROJECT,BY,OF
+165	MACHINE,PERMISSION,READABLE,WITH,MAY,BE,COPIES,ELECTRONIC,AND
+166	YOUR,LONG,AS,SUCH,SO,OTHERS,1,DISTRIBUTED,COPIES,ARE,FOR,OR
+167	PERSONAL,ONLY,USED,USE,NOT,2,DISTRIBUTED,ARE,AND,OR
+168	INCLUDES,COMMERCIALLY,DISTRIBUTION,PROHIBITED,COMMERCIAL,ANY,BY
+169	SERVICE,MEMBERSHIP,DOWNLOAD,CHARGES,TIME,THAT,FOR,OR
+174	1609
+176	SONNETS,THE
+178	Shakespeare,William,by
+182	1
+183	increase,creatures,fairest,desire,From,we
+184	thereby,beautys,rose,might,die,never,That
+185	riper,decease,time,should,But,by,as,the
+186	memory,heir,tender,might,bear,His,his
+187	contracted,bright,thine,eyes,own,But,thou,to
+188	selfsubstantial,Feedst,fuel,flame,lights,thy,with
+189	famine,abundance,Making,lies,where,a
+190	foe,cruel,self,Thy,sweet,too,thy,to
+191	ornament,worlds,fresh,art,Thou,now,that,the
+192	gaudy,herald,spring,only,And,to,the
+193	buriest,bud,content,Within,thine,own,thy
+194	niggarding,churl,makst,waste,tender,And,in
+195	glutton,Pity,else,world,or,this,be,the
+196	worlds,due,eat,grave,by,thee,To,and,the
+199	2
+200	besiege,forty,winters,brow,When,shall,thy
+201	trenches,dig,beautys,deep,field,thy,And,in
+202	gazed,youths,livery,proud,Thy,now,on,so
+203	tattered,weed,held,small,worth,Will,be,a,of
+204	asked,beauty,lies,being,where,Then,all,thy
+205	lusty,treasure,days,Where,all,thy,of,the
+206	sunken,deep,within,thine,eyes,own,say,To
+207	alleating,thriftless,praise,Were,shame,an,and
+208	deserved,beautys,praise,use,much,How,more,thy
+209	couldst,child,answer,fair,mine,This,If,thou,of
+210	count,sum,excuse,Shall,old,make,my,and
+211	Proving,succession,beauty,thine,by,his
+212	new,old,made,art,This,when,were,thou,be,to
+213	feelst,warm,cold,blood,when,see,thy,thou,it,And
+216	3
+217	viewest,glass,Look,face,tell,thy,thou,in,and,the
+218	form,another,face,Now,time,should,that,is,the
+219	renewest,repair,fresh,Whose,if,now,thou,not
+220	unbless,beguile,mother,dost,world,Thou,some,the
+221	uneared,womb,whose,where,fair,she,For,so,is
+222	Disdains,tillage,husbandry,thy,of,the
+223	fond,tomb,who,Or,so,will,he,be,is,the
+224	selflove,posterity,stop,Of,his,to
+225	glass,mothers,art,Thou,she,thee,thy,in,and
+226	Calls,April,prime,lovely,back,her,of,the
+227	windows,age,through,shalt,thine,So,see,thou,of
+228	Despite,wrinkles,golden,time,thy,this,of
+229	remembered,live,if,But,thou,be,not,to
+230	Die,image,single,dies,thine,thee,with,and
+233	4
+234	Unthrifty,loveliness,spend,why,dost,thou
+235	legacy,beautys,self,Upon,thy
+236	bequest,Natures,lend,gives,nothing,doth,but
+237	frank,lends,free,those,being,she,are,And,to
+238	niggard,beauteous,abuse,why,dost,Then,thou
+239	largess,bounteous,given,give,thee,The,to
+240	Profitless,usurer,why,use,dost,thou
+241	sums,sum,canst,live,great,So,yet,not,a,of
+242	traffic,having,self,alone,For,thy,with
+243	deceive,self,dost,sweet,Thou,thy,of
+244	calls,nature,gone,Then,how,when,thee,be,to
+245	acceptable,audit,canst,leave,What,thou
+246	tombed,unused,beauty,Thy,must,thee,be,with
+247	executor,used,lives,Which,th,be,to
+250	5
+251	frame,Those,hours,work,gentle,did,with,that
+252	gaze,dwell,lovely,eye,every,where,doth,The
+253	tyrants,same,play,Will,very,to,the
+254	unfair,excel,fairly,doth,which,And,that
+255	neverresting,leads,summer,time,For,on
+256	confounds,hideous,winter,there,To,him,and
+257	Sap,checked,frost,lusty,leaves,quite,gone,with,and
+258	oersnowed,bareness,Beauty,every,where,and
+259	distillation,summers,left,Then,were,not
+260	pent,liquid,glass,prisoner,walls,A,in,of
+261	Beautys,bereft,effect,beauty,were,with
+262	remembrance,Nor,nor,was,what,no,it
+263	distilled,winter,flowers,meet,though,they,But,with
+264	Leese,substance,lives,show,still,sweet,their,but
+267	6
+268	deface,ragged,winters,Then,hand,let,not
+269	distilled,summer,ere,In,thee,thy,thou,be
+270	vial,treasure,Make,place,sweet,some,thou
+271	selfkilled,beautys,treasure,ere,With,be,it
+272	usury,forbidden,use,That,not,is
+273	happies,loan,willing,pay,those,Which,that,the
+274	breed,self,Thats,another,thee,thy,for,to
+275	happier,ten,times,Or,one,for,be,it
+276	happier,Ten,self,times,art,were,than,thy,thou
+277	refigured,ten,times,thine,If,thee,of
+278	depart,shouldst,could,Then,death,if,what,do,thou
+279	posterity,Leaving,living,thee,in
+280	selfwilled,Be,fair,art,much,too,thou,for,not
+281	worms,conquest,deaths,heir,thine,make,To,be,and
+284	7
+285	orient,Lo,gracious,light,when,in,the
+286	Lifts,burning,each,under,eye,head,up,his
+287	newappearing,homage,Doth,sight,his,to
+288	Serving,sacred,majesty,looks,his,with
+289	climbed,steepup,hill,heavenly,having,And,the
+290	Resembling,middle,strong,age,youth,his,in
+291	adore,mortal,beauty,looks,Yet,still,his
+292	Attending,pilgrimage,golden,on,his
+293	highmost,car,pitch,weary,when,from,But,with
+294	reeleth,feeble,Like,age,day,from,he,the
+295	converted,duteous,fore,eyes,now,are,The
+296	tract,low,another,From,way,look,his,and
+297	outgoing,noon,self,So,thy,thou,in
+298	Unlooked,diest,unless,get,son,on,thou,a
+301	8
+302	hearst,sadly,Music,music,why,hear,thou,to
+303	Sweets,sweets,delights,joy,war,with,not,in
+304	receivst,gladly,lovst,Why,which,thou,not,that
+305	receivst,annoy,pleasure,else,thine,Or,with
+306	welltuned,concord,sounds,true,If,of,the
+307	unions,offend,married,ear,thine,By,do
+308	confounds,sweetly,chide,who,They,thee,do,but
+309	singleness,shouldst,parts,bear,In,thou,that,the
+310	string,Mark,husband,another,sweet,how,one,to
+311	ordering,mutual,Strikes,each,by,in
+312	Resembling,sire,happy,child,mother,and
+313	pleasing,sing,note,Who,one,do,all,in
+314	speechless,seeming,song,Whose,many,being,one
+315	Sings,single,wilt,prove,none,Thou,thee,this,to
+318	9
+319	widows,wet,eye,fear,Is,for,it,a,to
+320	consumst,single,self,life,That,thy,thou,in
+321	issueless,hap,Ah,shalt,die,if,thou,to
+322	makeless,wail,wife,world,like,thee,The,will,a
+323	widow,weep,still,world,thy,The,will,be,and
+324	behind,form,left,hast,no,That,thee,thou,of
+325	widow,private,keep,every,When,may,well
+326	childrens,husbands,shape,mind,eyes,By,her,in
+327	unthrift,spend,Look,world,doth,an,what,in,the
+328	Shifts,enjoys,place,still,world,but,for,his,it,the
+329	beautys,waste,end,world,an,hath,But,in,the
+330	destroys,user,unused,kept,so,it,And,the
+331	sits,toward,bosom,others,No,love,that,in
+332	commits,murdrous,shame,himself,such,That,on
+335	10
+336	bearst,deny,shame,any,For,love,thou,that,to
+337	unprovident,self,Who,art,thy,so,for
+338	Grant,beloved,wilt,many,art,if,thou,of
+339	evident,lovst,none,most,But,thou,that,is
+340	possessed,murdrous,hate,art,For,so,thou,with
+341	stickst,conspire,gainst,self,That,thy,thou,not,to
+342	ruinate,Seeking,roof,beauteous,that,to
+343	chief,repair,desire,Which,should,thy,be,to
+344	change,mind,thought,may,O,thy,that,my,I
+345	lodged,fairer,hate,gentle,Shall,than,love,be
+346	presence,gracious,kind,Be,thy,as,is,and
+347	kindhearted,least,self,prove,Or,at,thy,to
+348	self,Make,another,love,thee,for,me,of
+349	beauty,thine,live,still,may,or,That,thee,in
+352	11
+353	wane,growst,fast,shalt,As,so,as,thou
+354	departest,thine,In,which,one,from,thou,that,of
+355	bestowst,youngly,fresh,blood,which,thou,And,that
+356	convertest,mayst,youth,thine,call,Thou,when,from,thou
+357	Herein,increase,wisdom,lives,beauty,and
+358	decay,folly,Without,cold,age,this,and
+359	minded,cease,times,were,should,If,all,so,the
+360	threescore,year,world,away,make,would,And,the
+361	store,nature,whom,those,made,Let,hath,for,not
+362	featureless,Harsh,barrenly,perish,rude,and
+363	endowed,gave,Look,whom,best,she,more,thee
+364	bounteous,cherish,bounty,gift,shouldst,Which,thou,in
+365	carved,thereby,meant,seal,She,thee,her,for,and
+366	print,copy,shouldst,die,Thou,let,more,not,that
+369	12
+370	clock,count,tells,When,time,do,that,I,the
+371	sunk,hideous,brave,night,day,see,And,in,the
+372	violet,prime,behold,past,When,I,the
+373	silvered,sable,curls,white,oer,all,with,And
+374	lofty,trees,barren,leaves,When,see,of,I
+375	erst,canopy,herd,heat,Which,did,from,the
+376	sheaves,girded,summers,green,up,all,And,in
+377	bristly,Borne,bier,beard,white,on,with,and,the
+378	question,beauty,Then,make,do,thy,of,I
+379	wastes,among,time,go,must,That,thou,of,the
+380	beauties,sweets,forsake,themselves,Since,do,and
+381	fast,grow,others,die,see,they,as,And
+382	scythe,Times,defence,gainst,nothing,can,make,And
+383	breed,Save,takes,brave,hence,when,thee,he,him,to
+386	13
+387	self,were,love,O,are,but,your,that,you
+388	longer,self,yours,live,No,than,here,your,you
+389	prepare,Against,coming,end,should,this,you
+390	semblance,sweet,other,give,some,your,And,to
+391	lease,beauty,hold,So,which,should,that,in,you
+392	determination,Find,were,then,no,you
+393	selfs,decease,self,after,Your,again,your
+394	issue,form,bear,sweet,When,should,your
+395	decay,lets,fall,house,Who,fair,so,a,to
+396	uphold,husbandry,might,honour,Which,in
+397	stormy,gusts,winters,Against,day,of,the
+398	barren,eternal,deaths,rage,cold,And,of
+399	unthrifts,dear,none,know,love,O,but,my,you
+400	son,father,had,let,say,You,so,your,a
+403	14
+404	judgement,stars,pluck,Not,from,do,my,I,the
+405	astronomy,methinks,yet,have,And,I
+406	luck,evil,tell,or,good,But,not,of,to
+407	dearths,seasons,plagues,quality,Of,or,of
+408	minutes,brief,fortune,Nor,tell,can,to,I
+409	Pointing,thunder,rain,wind,each,his,to,and
+410	princes,Or,go,say,well,if,shall,it,with
+411	predict,oft,find,heaven,By,that,in,I
+412	derive,knowledge,thine,eyes,from,But,my,I
+413	constant,stars,read,art,such,them,And,in,I
+414	thrive,beauty,together,truth,As,shall,and
+415	convert,store,wouldst,self,If,from,thy,thou,to
+416	prognosticate,else,Or,thee,this,of,I
+417	truths,date,beautys,doom,end,Thy,is,and
+420	15
+421	consider,grows,thing,every,When,that,I
+422	Holds,perfection,moment,little,but,in,a
+423	presenteth,huge,stage,nought,shows,That,but,this
+424	comment,influence,Whereon,secret,stars,in,the
+425	plants,increase,perceive,men,When,as,that,I
+426	Cheered,checked,selfsame,sky,even,by,and,the
+427	Vaunt,decrease,sap,youthful,height,their,at,in
+428	memory,brave,wear,state,out,their,And,of
+429	inconstant,conceit,stay,Then,this,of,the
+430	Sets,rich,sight,youth,before,most,in,my,you
+431	debateth,wasteful,decay,Where,time,with
+432	sullied,change,youth,night,day,To,your,of,to
+433	Time,war,love,all,for,with,And,in,you,of
+434	engraft,takes,new,As,from,he,you,I
+437	16
+438	mightier,wherefore,way,But,do,not,you,a
+439	tyrant,Time,Make,bloody,war,upon,this
+440	fortify,decay,self,your,And,in
+441	rhyme,barren,blessed,means,With,than,more,my
+442	top,hours,happy,stand,Now,on,you,of,the
+443	unset,gardens,maiden,many,yet,And
+444	flowers,virtuous,living,wish,bear,With,would,you
+445	liker,Much,counterfeit,painted,than,your
+446	lines,repair,life,So,should,that,of,the
+447	pencil,pupil,Times,pen,Which,or,this,my
+448	inward,Neither,outward,worth,nor,fair,in
+449	self,Can,live,eyes,men,make,your,in,you,of
+450	keeps,self,still,away,give,To,your
+451	skill,drawn,live,sweet,own,must,by,your,And,you
+454	17
+455	verse,believe,Who,time,come,will,in,my,to
+456	filled,deserts,high,most,were,If,your,it,with
+457	tomb,knows,Though,heaven,yet,but,as,it,is,a
+458	hides,shows,parts,half,life,Which,your,not,and
+459	write,beauty,could,eyes,If,your,of,I,the
+460	graces,numbers,number,fresh,all,your,And,in
+461	poet,age,lies,say,come,would,The,this,to
+462	touched,touches,earthly,heavenly,faces,Such,neer
+463	yellowed,papers,age,So,should,their,with,my
+464	scorned,less,truth,tongue,Be,old,men,than,like,of
+465	termed,poets,rights,rage,true,your,be,And,a
+466	stretched,metre,antique,song,an,And,of
+467	alive,child,yours,time,some,were,But,that,of
+468	rhyme,twice,live,should,You,it,in,my,and
+471	18
+472	compare,summers,Shall,day,thee,a,to,I
+473	temperate,lovely,art,Thou,more,and
+474	Rough,darling,buds,winds,shake,May,do,of,the
+475	lease,date,summers,short,too,hath,all,And,a
+476	Sometime,shines,hot,eye,heaven,too,of,the
+477	dimmed,complexion,often,gold,his,And,is
+478	declines,sometime,every,fair,from,And
+479	untrimmed,changing,natures,chance,course,By,or
+480	fade,eternal,summer,But,shall,thy,not
+481	owst,possession,lose,Nor,fair,thou,that,of
+482	wandrest,brag,shade,Nor,death,shall,thou,his,in
+483	growst,lines,eternal,When,time,thou,in,to
+484	breathe,long,eyes,men,So,can,see,or,as
+485	gives,lives,long,life,So,thee,this,to,and
+488	19
+489	Devouring,paws,blunt,lions,Time,thou,the
+490	brood,devour,earth,sweet,own,make,her,And,the
+491	tigers,jaws,Pluck,keen,fierce,teeth,from,the
+492	longlived,phoenix,burn,blood,her,And,in,the
+493	fleetst,seasons,sorry,glad,Make,as,thou,and
+494	swiftfooted,whateer,Time,wilt,do,thou,And
+495	fading,sweets,wide,world,To,all,her,and,the
+496	crime,heinous,forbid,most,one,But,thee,I
+497	carve,brow,hours,loves,fair,O,thy,with,not,my
+498	antique,lines,pen,draw,Nor,thine,there,no,with
+499	untainted,allow,Him,course,do,thy,in
+500	succeeding,pattern,beautys,men,For,to
+501	despite,Time,worst,wrong,Yet,old,do,thy
+502	verse,young,live,ever,My,love,shall,in,my
+505	20
+506	painted,natures,womans,face,own,hand,A,with
+507	Hast,passion,mistress,master,thou,my,of,the
+508	acquainted,womans,gentle,heart,A,but,not
+509	shifting,womens,fashion,change,false,With,as,is
+510	rolling,theirs,bright,less,An,false,eye,than,more,in
+511	gazeth,Gilding,whereupon,object,it,the
+512	hues,controlling,hue,man,A,all,his,in
+513	amazeth,steals,womens,mens,souls,eyes,Which,and
+514	created,wert,woman,first,thou,for,And,a
+515	adoting,wrought,fell,Till,nature,she,thee,as
+516	defeated,addition,by,thee,And,me,of
+517	adding,purpose,thing,nothing,By,one,my,to
+518	pricked,womens,pleasure,since,out,she,But,thee,for
+519	treasure,Mine,loves,use,their,love,thy,be,and
+522	21
+523	muse,So,as,it,with,me,not,that,is
+524	Stirred,verse,painted,beauty,by,his,a,to
+525	ornament,self,use,heaven,Who,doth,for,it
+526	rehearse,every,fair,doth,his,with,And
+527	couplement,compare,Making,proud,a,of
+528	gems,seas,moon,rich,sun,earth,With,with,and
+529	Aprils,firstborn,flowers,rare,things,With,all,and
+530	rondure,hems,huge,heavens,air,That,this,in
+531	write,truly,true,let,love,O,but,me,in
+532	believe,fair,then,love,as,And,me,is,my
+533	bright,mothers,child,though,any,As,so,not
+534	candles,fixed,heavens,air,gold,those,As,in
+535	hearsay,Let,say,like,well,them,more,that,of
+536	sell,praise,purpose,will,not,that,to,I
+539	22
+540	persuade,glass,old,My,am,shall,me,not,I
+541	date,youth,long,So,one,are,as,thou,of,and
+542	furrows,behold,times,when,But,thee,in,I
+543	expiate,days,look,Then,death,should,my,I
+544	cover,beauty,doth,For,thee,all,that
+545	seemly,raiment,Is,heart,but,my,of,the
+546	breast,thine,live,doth,Which,thy,as,me,in
+547	elder,art,can,How,than,then,thou,be,I
+548	wary,thyself,therefore,love,O,so,be,of
+549	self,As,thee,but,will,for,not,my,I
+550	chary,Bearing,keep,heart,which,thy,so,will,I
+551	faring,babe,nurse,tender,ill,As,from,her
+552	Presume,slain,heart,mine,when,on,thy,not,is
+553	gavst,back,thine,again,Thou,give,me,not,to
+556	23
+557	unperfect,actor,stage,As,an,on,the
+558	beside,put,part,fear,Who,his,with,is
+559	replete,fierce,rage,thing,Or,much,some,too,with
+560	weakens,strengths,abundance,Whose,own,heart,his
+561	forget,trust,fear,So,say,for,of,to,I
+562	rite,ceremony,perfect,loves,The,of
+563	decay,strength,seem,loves,own,mine,And,in,to
+564	Oercharged,burthen,loves,might,own,mine,with,of
+565	eloquence,looks,let,then,O,be,my,the
+566	presagers,dumb,speaking,breast,And,my,of
+567	recompense,plead,look,Who,love,for,and
+568	expressed,More,tongue,than,hath,more,that
+569	silent,learn,writ,read,hath,love,what,O,to
+570	belongs,fine,wit,loves,eyes,hear,To,with,to
+573	24
+574	stelled,played,painter,Mine,eye,hath,and,the
+575	beautys,table,form,Thy,heart,in,my,of
+576	frame,wherein,held,body,tis,My,is,the
+577	painters,perspective,best,art,it,And,is
+578	painter,skill,through,see,must,For,his,you,the
+579	pictured,image,lies,find,where,true,To,your
+580	shop,bosoms,hanging,still,Which,is,in,my
+581	glazed,windows,thine,eyes,hath,That,his,with
+582	turns,done,eyes,Now,see,what,good,have,for
+583	drawn,shape,Mine,thine,eyes,thy,have,for,me,and
+584	wherethrough,windows,breast,sun,Are,my,to,the
+585	Delights,peep,gaze,therein,on,thee,to
+586	cunning,want,grace,Yet,eyes,art,their,this,to
+587	draw,They,heart,see,know,they,what,but,not,the
+590	25
+591	stars,favour,those,who,Let,their,are,with,in
+592	titles,boast,public,proud,honour,Of,and
+593	bars,triumph,Whilst,fortune,whom,such,of,I
+594	Unlooked,joy,honour,most,for,that,in,I
+595	favourites,spread,leaves,princes,Great,fair,their
+596	marigold,suns,eye,at,But,as,the
+597	buried,pride,themselves,lies,their,And,in
+598	frown,glory,die,For,they,their,at,in,a
+599	famoused,painful,warrior,fight,The,for
+600	foiled,victories,After,thousand,once,a
+601	razed,quite,book,honour,Is,from,of,the
+602	toiled,forgot,rest,which,all,he,for,And,the
+603	beloved,happy,Then,love,am,that,and,I
+604	removed,remove,Where,nor,may,be,not,I
+607	26
+608	vassalage,whom,Lord,love,in,my,of,to
+609	strongly,knit,merit,duty,Thy,hath,my
+610	embassage,written,send,thee,To,this,I
+611	witness,duty,wit,show,To,not,my,to
+612	Duty,wit,poor,great,mine,which,so,as
+613	wanting,bare,seem,May,show,words,make,it,in,to
+614	conceit,hope,thine,some,good,But,that,of,I
+615	bestow,naked,souls,thought,In,all,thy,will,it
+616	guides,whatsoever,moving,star,Till,that,my
+617	Points,graciously,aspect,fair,on,with,me
+618	tattered,apparel,puts,loving,on,And,my
+619	respect,worthy,show,sweet,To,thy,me,of
+620	boast,dare,Then,how,may,love,thee,do,to,I
+621	mayst,Till,prove,show,head,where,then,thou,me,not,my
+624	27
+625	Weary,toil,haste,bed,with,me,my,to,I
+626	respose,tired,travel,limbs,dear,The,for,with
+627	journey,begins,head,then,But,in,my,a
+628	expired,bodys,works,work,mind,when,To,my
+629	abide,thoughts,far,where,then,For,from,my,I
+630	Intend,zealous,pilgrimage,thee,a,to
+631	drooping,eyelids,wide,open,keep,And,my
+632	Looking,darkness,blind,see,which,on,do,the
+633	imaginary,Save,souls,sight,that,my
+634	sightless,Presents,shadow,view,thy,my,to
+635	ghastly,hung,jewel,night,Which,like,in,a
+636	beauteous,Makes,black,new,face,night,old,her,and
+637	limbs,Lo,mind,thus,night,day,by,my
+638	quiet,self,find,For,no,thee,for,my,and
+641	28
+642	plight,return,happy,can,How,then,in,I
+643	debarred,benefit,rest,am,That,of,the
+644	eased,oppression,days,night,When,by,not,is
+645	oppressed,night,day,But,by,and
+646	eithers,reign,enemies,each,though,And,to
+647	torture,consent,shake,hands,Do,me,in,to
+648	complain,toil,other,one,by,The,to,the
+649	toil,farther,far,off,still,How,from,thee,I
+650	bright,please,day,art,tell,thou,him,to,I,the
+651	blot,clouds,grace,dost,heaven,when,do,him,And,the
+652	swartcomplexioned,flatter,night,So,I,the
+653	gildst,twire,sparkling,stars,even,When,thou,not,the
+654	daily,sorrows,longer,draw,day,doth,But,my
+655	nightly,length,stronger,griefs,seem,night,doth,make,And
+658	29
+659	disgrace,Fortune,mens,eyes,When,with,in,and
+660	outcast,beweep,alone,state,all,my,I
+661	bootless,deaf,cries,trouble,heaven,with,And,my
+662	fate,curse,self,look,upon,And,my,and
+663	Wishing,rich,hope,like,one,more,me,in,to
+664	Featured,possessed,friends,like,him,with
+665	Desiring,scope,mans,art,this,that,and
+666	contented,enjoy,least,most,With,what,I
+667	despising,self,almost,thoughts,Yet,these,in,my
+668	Haply,state,think,then,on,thee,my,and,I
+669	arising,lark,Like,break,day,at,of,to,the
+670	hymns,sullen,sings,gate,heavens,earth,From,at
+671	remembered,brings,wealth,sweet,such,For,love,thy
+672	scorn,kings,change,state,then,That,with,my,to,I
+675	30
+676	sessions,silent,thought,sweet,When,of,to,the
+677	summon,remembrance,past,things,up,of,I
+678	sought,sigh,lack,thing,many,a,of,I,the
+679	wail,waste,woes,new,times,dear,old,with,And,my
+680	unused,flow,drown,eye,Then,can,an,to,I
+681	dateless,hid,deaths,precious,friends,night,For,in
+682	cancelled,afresh,woe,weep,loves,since,long,And
+683	vanished,expense,moan,sight,many,th,And,a,of
+684	foregone,grievances,grieve,Then,can,at,I
+685	heavily,woe,oer,tell,from,And,to
+686	forebemoaned,moan,account,sad,The,of
+687	paid,pay,new,before,Which,if,as,not,I
+688	while,dear,friend,think,if,But,on,thee,I,the
+689	restored,losses,sorrows,end,All,are,and
+692	31
+693	endeared,bosom,hearts,Thy,all,with,is
+694	lacking,supposed,dead,Which,by,have,I
+695	reigns,parts,loving,loves,there,love,all,And,and
+696	buried,thought,those,friends,which,all,And,I
+697	obsequious,tear,holy,many,How,a,and
+698	religious,stoln,Hath,dear,eye,mine,love,from
+699	interest,appear,dead,which,As,now,of,the
+700	hidden,removed,lie,things,But,thee,that,in
+701	buried,grave,live,where,doth,art,Thou,love,the
+702	Hung,trophies,lovers,gone,with,my,of,the
+703	parts,Who,give,did,their,thee,all,me,of,to
+704	due,alone,thine,many,now,That,is,of
+705	images,loved,view,Their,thee,in,I
+706	hast,they,all,thou,And,me,of,the
+709	32
+710	wellcontented,survive,day,If,thou,my
+711	churl,cover,dust,bones,death,When,shall,with,that,my
+712	resurvey,fortune,shalt,once,more,by,And
+713	deceased,lines,lover,rude,These,poor,thy,of
+714	bettring,Compare,time,them,with,of,the
+715	outstripped,pen,though,every,they,by,be,And
+716	Reserve,rhyme,their,love,them,for,not,my
+717	Exceeded,happier,height,men,by,of,the
+718	vouchsafe,loving,thought,then,O,but,this,me
+719	Muse,growing,grown,Had,age,friends,this,with,my
+720	dearer,birth,brought,had,than,A,love,this,his
+721	equipage,ranks,march,better,To,in,of
+722	poets,died,prove,since,better,But,he,and
+723	Theirs,style,read,Ill,their,love,for,his
+726	33
+727	Full,glorious,morning,seen,many,have,a,I
+728	Flatter,tops,mountain,sovereign,eye,with,the
+729	meadows,Kissing,green,golden,face,with,the
+730	Gilding,alchemy,streams,heavenly,pale,with
+731	basest,permit,Anon,ride,clouds,to,the
+732	celestial,rack,ugly,face,With,on,his
+733	forlorn,visage,hide,world,from,his,And,the
+734	Stealing,unseen,west,disgrace,this,with,to
+735	morn,shine,early,sun,Even,did,one,so,my
+736	splendour,triumphant,brow,With,on,all,my
+737	alack,hour,mine,out,one,was,But,but,he
+738	masked,region,cloud,hath,now,from,The,him,me
+739	disdaineth,whit,Yet,love,no,him,this,for,my
+740	Suns,staineth,stain,heavens,sun,world,when,may,of,the
+743	34
+744	beauteous,promise,didst,day,Why,such,thou,a
+745	cloak,travel,without,forth,make,And,me,my
+746	oertake,clouds,base,way,let,To,me,in,my
+747	bravry,Hiding,smoke,rotten,their,thy,in
+748	cloud,through,break,enough,Tis,thou,not,that,the
+749	stormbeaten,rain,dry,face,on,To,my,the
+750	salve,speak,can,such,well,man,For,no,a,of
+751	heals,cures,disgrace,wound,That,not,and,the
+752	physic,grief,shame,Nor,give,can,thy,my,to
+753	repent,loss,Though,still,yet,thou,have,I,the
+754	lends,relief,offenders,Th,weak,sorrow,but
+755	offences,cross,bears,strong,To,him,that,the
+756	sheds,pearl,Ah,tears,those,which,love,are,thy,but
+757	ransom,deeds,rich,ill,they,are,all,And,and
+760	35
+761	grieved,hast,done,No,which,more,at,thou,be,that
+762	Roses,fountains,mud,thorns,silver,have,and
+763	Clouds,eclipses,stain,moon,sun,both,and
+764	bud,loathsome,canker,sweetest,lives,And,in
+765	faults,even,All,men,make,this,in,and,I
+766	Authorizing,trespass,compare,thy,with
+767	salving,corrupting,amiss,self,My,thy
+768	Excusing,sins,than,more,are,thy
+769	sensual,sense,fault,bring,For,thy,in,to,I
+770	advocate,adverse,party,Thy,thy,is
+771	commence,plea,lawful,gainst,self,And,my,a
+772	civil,Such,hate,war,love,is,in,my,and
+773	accessary,needs,must,an,That,be,I
+774	sourly,robs,thief,sweet,which,from,To,me,that
+777	36
+778	twain,confess,two,Let,must,we,be,me,that
+779	undivided,Although,loves,one,our,are
+780	blots,remain,those,So,shall,do,with,me,that
+781	borne,Without,alone,help,by,thy,be,me
+782	respect,loves,two,In,there,one,our,but,is
+783	separable,spite,lives,Though,our,in,a
+784	alter,sole,effect,loves,though,Which,it,not
+785	delight,steal,hours,loves,Yet,sweet,doth,from,it
+786	acknowledge,evermore,may,thee,not,I
+787	bewailed,guilt,Lest,shame,should,thee,do,my
+788	kindness,public,Nor,honour,thou,with,me
+789	Unless,honour,name,take,from,thy,thou,that
+790	sort,such,love,But,thee,do,so,not,in,I
+791	report,being,mine,As,good,thy,thou,is
+794	37
+795	decrepit,delight,takes,father,As,a
+796	active,deeds,child,youth,see,do,To,his,of
+797	lame,Fortunes,dearest,spite,made,So,by,I
+798	comfort,worth,Take,truth,all,thy,my,of,and
+799	whether,wealth,birth,beauty,wit,For,or
+800	Or,any,these,or,more,all,of
+801	Entitled,crowned,parts,sit,do,thy,in
+802	engrafted,store,make,love,this,my,to,I
+803	despised,lame,poor,nor,So,then,am,not,I
+804	substance,Whilst,shadow,doth,give,such,this,that
+805	sufficed,abundance,am,That,thy,in,I
+806	glory,live,part,by,all,thy,And,a,of
+807	wish,Look,best,what,thee,that,is,in,I
+808	ten,happy,wish,times,This,then,have,me,I
+811	38
+812	invent,muse,subject,want,can,How,my,to
+813	pourst,verse,breathe,While,dost,into,thou,that,my
+814	Thine,argument,excellent,sweet,own,too
+815	rehearse,vulgar,paper,every,For,to
+816	aught,thanks,self,give,if,O,thy,me,in,the
+817	perusal,Worthy,sight,stand,against,thy
+818	whos,dumb,write,cannot,For,thee,so,that,to
+819	invention,self,light,dost,When,give,thy,thou
+820	tenth,Muse,ten,worth,times,Be,more,thou,in,the
+821	rhymers,invocate,nine,Than,those,old,which
+822	calls,bring,forth,let,on,thee,he,him,And,that
+823	Eternal,outlive,date,numbers,long,to
+824	curious,muse,slight,days,please,these,If,do,my
+825	pain,praise,thine,mine,shall,but,The,be,the
+828	39
+829	manners,sing,worth,how,may,O,thy,with,I
+830	part,better,art,When,all,thou,me,of,the
+831	self,praise,bring,own,can,mine,What,to
+832	ist,praise,own,mine,when,what,thee,but,And,I
+833	divided,Even,live,let,us,this,for
+834	single,lose,dear,name,one,love,our,And,of
+835	separation,give,may,That,by,this,I
+836	deservst,due,alone,which,That,thee,thou,to
+837	torment,absence,wouldst,prove,what,O,thou,a
+838	sour,leisure,Were,gave,leave,sweet,thy,it,not
+839	entertain,thoughts,time,love,To,with,of,the
+840	sweetly,deceive,thoughts,doth,Which,time,so,and
+841	teachest,twain,how,make,one,thou,And,that,to
+842	praising,remain,hence,who,By,doth,here,him
+845	40
+846	yea,Take,loves,take,love,them,all,my
+847	hadst,hast,before,than,then,more,What,thou
+848	mayst,call,true,No,love,thou,that,my
+849	hadst,All,thine,before,mine,was,more,thou,this
+850	receivest,Then,if,love,thou,for,my
+851	usest,blame,cannot,love,thee,thou,for,my,I
+852	deceivest,blamed,self,yet,if,But,thy,thou,be
+853	refusest,wilful,taste,self,By,what,thy,of
+854	robbery,forgive,thief,gentle,do,thy,I
+855	poverty,Although,steal,thee,all,thou,my
+856	greater,knows,grief,yet,love,it,And,is,a
+857	hates,injury,greater,known,wrong,bear,than,To
+858	Lascivious,shows,ill,grace,whom,well,all,in
+859	spites,Kill,foes,yet,must,we,be,with,me,not
+862	41
+863	commits,liberty,wrongs,Those,pretty,that
+864	absent,sometime,When,heart,am,from,thy,I
+865	befits,years,beauty,full,Thy,well,thy,and
+866	temptation,follows,still,where,art,For,thou
+867	Gentle,won,therefore,art,thou,be,to,and
+868	Beauteous,assailed,therefore,art,thou,be,to
+869	woos,womans,woman,son,when,what,And,a
+870	prevailed,sourly,till,Will,leave,her,he,have
+871	mightst,forbear,seat,Ay,yet,but,thou,me,my
+872	straying,chide,beauty,youth,thy,And,and
+873	riot,lead,even,Who,there,their,thee,in
+874	twofold,forced,break,truth,Where,art,thou,a,to
+875	Hers,tempting,beauty,by,thee,thy,her,to
+876	Thine,beauty,false,being,by,thy,me,to
+879	42
+880	grief,hast,That,all,her,thou,it,not,is,my
+881	dearly,loved,said,yet,may,her,be,it,And,I
+882	wailing,chief,hath,she,That,thee,is,my,of
+883	nearly,touches,loss,A,love,more,me,that,in
+884	Loving,offenders,excuse,ye,thus,will,I
+885	knowst,because,dost,Thou,love,her,thou,I
+886	abuse,sake,even,doth,she,so,for,And,me,my
+887	Suffring,approve,sake,friend,her,for,my,to
+888	gain,loss,lose,loves,If,thee,is,my,I
+889	losing,loss,found,friend,hath,her,And,that,my
+890	twain,Both,each,lose,find,both,other,and,I
+891	cross,sake,lay,both,on,this,for,And,me,my
+892	heres,joy,friend,one,But,are,my,and,I,the
+893	flattery,Sweet,alone,loves,then,she,but,me
+896	43
+897	wink,best,eyes,When,most,mine,see,then,do,I
+898	unrespected,view,things,day,For,they,all,the
+899	dreams,sleep,look,when,they,But,on,thee,in,I
+900	darkly,directed,bright,dark,are,And,in
+901	shadows,bright,shadow,whose,Then,doth,make,thou
+902	shadows,form,happy,show,How,would,thy
+903	clearer,clear,light,day,much,To,thy,with,the
+904	unseeing,shade,shines,eyes,When,thy,so,to
+905	blessed,eyes,made,mine,How,say,would,be,I
+906	looking,living,day,By,on,thee,in,the
+907	imperfect,shade,dead,night,fair,When,thy,in
+908	sightless,Through,heavy,sleep,stay,eyes,doth,on
+909	nights,days,All,till,see,are,thee,to,I
+910	dreams,nights,bright,days,show,when,thee,do,And,me
+913	44
+914	substance,dull,flesh,thought,were,If,my,of,the
+915	Injurious,distance,stop,way,should,not,my
+916	space,despite,brought,then,For,would,be,of,I
+917	remote,limits,far,dost,stay,From,where,thou
+918	although,foot,matter,stand,No,did,then,my
+919	farthest,removed,earth,Upon,from,thee,the
+920	jump,nimble,sea,land,thought,both,can,For,and
+921	soon,place,where,think,As,would,as,he,be,the
+922	ah,kills,thought,am,But,me,not,that,I
+923	lengths,miles,leap,large,gone,art,when,To,thou,of
+924	wrought,water,earth,much,But,so,that,of,and
+925	moan,leisure,attend,times,must,with,my,I
+926	Receiving,elements,slow,nought,by,so
+927	badges,eithers,woe,heavy,tears,But,of
+930	45
+931	purging,slight,air,fire,two,other,The,and
+932	wherever,abide,Are,both,thee,with,I
+933	desire,thought,first,other,The,my,the
+934	presentabsent,slide,swift,motion,These,with
+935	quicker,elements,gone,these,when,For,are
+936	embassy,tender,In,love,thee,of,to
+937	four,alone,being,two,made,life,My,with,of
+938	Sinks,oppressed,melancholy,down,death,with,to
+939	recured,lifes,composition,Until,be
+940	returned,messengers,swift,those,By,from,thee
+941	assured,even,back,Who,again,come,now,but
+942	recounting,health,fair,Of,thy,it,me,to
+943	longer,glad,joy,told,This,then,no,but,I
+944	grow,straight,sad,send,back,again,them,and,I
+947	46
+948	Mine,mortal,war,eye,heart,at,are,a,and
+949	divide,conquest,sight,How,thy,of,to,the
+950	pictures,bar,Mine,sight,eye,heart,would,thy,my
+951	freedom,right,eye,heart,mine,My,that,of,the
+952	plead,lie,dost,doth,heart,My,thou,him,that,in
+953	pierced,crystal,closet,eyes,never,A,with
+954	defendant,plea,deny,doth,But,that,the
+955	appearance,lies,says,fair,thy,him,And,in
+956	impanelled,title,side,To,this,is
+957	tenants,quest,thoughts,heart,A,all,of,to,the
+958	determined,verdict,their,by,And,is
+959	moiety,clear,hearts,dear,part,eyes,The,and,the
+960	outward,due,part,thus,eyes,mine,As,thy,is
+961	inward,hearts,right,heart,love,thy,And,my,of
+964	47
+965	Betwixt,league,took,eye,heart,mine,is,a,and
+966	turns,each,unto,other,doth,now,good,And,the
+967	famished,eye,look,When,mine,for,that,is,a
+968	smother,sighs,himself,Or,doth,heart,love,with,in
+969	picture,feast,loves,eye,doth,With,then,my
+970	banquet,painted,bids,heart,And,my,to,the
+971	guest,Another,hearts,eye,time,mine,is,my
+972	share,thoughts,part,doth,love,his,And,in,a,of
+973	picture,either,So,or,love,by,thy,my
+974	self,present,Thy,still,away,art,with,me
+975	farther,move,canst,thoughts,than,For,thou,not,my
+976	still,they,them,am,thee,with,And,and,I
+977	picture,sight,sleep,Or,if,they,thy,in,my
+978	Awakes,delight,hearts,eyes,heart,my,to,and
+981	48
+982	careful,took,way,How,when,was,my,I
+983	truest,bars,trifle,thrust,Each,under,to
+984	unused,use,stay,might,That,it,my,to
+985	wards,falsehood,trust,sure,hands,From,in,of
+986	trifles,jewels,whom,But,are,thou,my,to
+987	greatest,comfort,worthy,Most,grief,now,my
+988	dearest,care,only,best,Thou,mine,of,and
+989	vulgar,prey,thief,Art,left,every,of,the
+990	locked,chest,Thee,any,up,have,not,in,I
+991	Save,feel,though,where,art,thou,not,I
+992	closure,breast,Within,gentle,my,of,the
+993	whence,mayst,pleasure,From,part,come,at,thou,and
+994	stoln,thence,wilt,even,fear,thou,be,And,I
+995	thievish,proves,prize,truth,dear,For,so,for,a
+998	49
+999	Against,ever,time,come,if,that
+1000	defects,frown,When,see,on,shall,thee,my,I
\ No newline at end of file
-- 
GitLab