From 722dcc4892d064c21c64f9769f766fc919a6a5a8 Mon Sep 17 00:00:00 2001 From: cloudera_vm <cloudera@quickstart.cloudera> Date: Sat, 18 Feb 2017 09:40:59 -0800 Subject: [PATCH] Q1.iii with BZip2 compression and 10 reducers, Snappy or Gzip are not working... --- Assign1/.classpath | 4 + Assign1/bin/.gitignore | 4 - Assign1/hadoop.log | 4193 +++++++++++++++++++++++ Assign1/output_Q1.iii/._SUCCESS.crc | Bin 0 -> 8 bytes Assign1/output_Q1.iii/.part-r-00000.crc | Bin 0 -> 12 bytes Assign1/output_Q1.iii/.part-r-00001.crc | Bin 0 -> 12 bytes Assign1/output_Q1.iii/.part-r-00002.crc | Bin 0 -> 12 bytes Assign1/output_Q1.iii/.part-r-00003.crc | Bin 0 -> 12 bytes Assign1/output_Q1.iii/.part-r-00004.crc | Bin 0 -> 12 bytes Assign1/output_Q1.iii/.part-r-00005.crc | Bin 0 -> 12 bytes Assign1/output_Q1.iii/.part-r-00006.crc | Bin 0 -> 12 bytes Assign1/output_Q1.iii/.part-r-00007.crc | Bin 0 -> 12 bytes Assign1/output_Q1.iii/.part-r-00008.crc | Bin 0 -> 12 bytes Assign1/output_Q1.iii/.part-r-00009.crc | Bin 0 -> 12 bytes Assign1/output_Q1.iii/_SUCCESS | 0 Assign1/output_Q1.iii/part-r-00000 | 6 + Assign1/output_Q1.iii/part-r-00001 | 15 + Assign1/output_Q1.iii/part-r-00002 | 12 + Assign1/output_Q1.iii/part-r-00003 | 6 + Assign1/output_Q1.iii/part-r-00004 | 5 + Assign1/output_Q1.iii/part-r-00005 | 5 + Assign1/output_Q1.iii/part-r-00006 | 7 + Assign1/output_Q1.iii/part-r-00007 | 8 + Assign1/output_Q1.iii/part-r-00008 | 10 + Assign1/output_Q1.iii/part-r-00009 | 3 + Assign1/output_Q1.iii/stopwords.csv | 76 + Assign1/output_Q1.iii/stopwords.csv~ | 77 + Assign1/src/Question1/Stopword_iii.java | 92 + 28 files changed, 4519 insertions(+), 4 deletions(-) create mode 100644 Assign1/output_Q1.iii/._SUCCESS.crc create mode 100644 Assign1/output_Q1.iii/.part-r-00000.crc create mode 100644 Assign1/output_Q1.iii/.part-r-00001.crc create mode 100644 Assign1/output_Q1.iii/.part-r-00002.crc create mode 100644 Assign1/output_Q1.iii/.part-r-00003.crc create mode 100644 
Assign1/output_Q1.iii/.part-r-00004.crc create mode 100644 Assign1/output_Q1.iii/.part-r-00005.crc create mode 100644 Assign1/output_Q1.iii/.part-r-00006.crc create mode 100644 Assign1/output_Q1.iii/.part-r-00007.crc create mode 100644 Assign1/output_Q1.iii/.part-r-00008.crc create mode 100644 Assign1/output_Q1.iii/.part-r-00009.crc create mode 100644 Assign1/output_Q1.iii/_SUCCESS create mode 100644 Assign1/output_Q1.iii/part-r-00000 create mode 100644 Assign1/output_Q1.iii/part-r-00001 create mode 100644 Assign1/output_Q1.iii/part-r-00002 create mode 100644 Assign1/output_Q1.iii/part-r-00003 create mode 100644 Assign1/output_Q1.iii/part-r-00004 create mode 100644 Assign1/output_Q1.iii/part-r-00005 create mode 100644 Assign1/output_Q1.iii/part-r-00006 create mode 100644 Assign1/output_Q1.iii/part-r-00007 create mode 100644 Assign1/output_Q1.iii/part-r-00008 create mode 100644 Assign1/output_Q1.iii/part-r-00009 create mode 100644 Assign1/output_Q1.iii/stopwords.csv create mode 100644 Assign1/output_Q1.iii/stopwords.csv~ create mode 100644 Assign1/src/Question1/Stopword_iii.java diff --git a/Assign1/.classpath b/Assign1/.classpath index d757e2d..fdd0d4e 100644 --- a/Assign1/.classpath +++ b/Assign1/.classpath @@ -53,5 +53,9 @@ <classpathentry kind="lib" path="/usr/lib/hadoop/client-0.20/slf4j-api.jar"/> <classpathentry kind="lib" path="/usr/lib/hadoop/client-0.20/slf4j-api-1.7.5.jar"/> <classpathentry kind="lib" path="/usr/lib/hadoop/client-0.20/slf4j-log4j12.jar"/> + <classpathentry kind="lib" path="/usr/lib/hadoop/client-0.20/apacheds-i18n.jar"/> + <classpathentry kind="lib" path="/usr/lib/hadoop/client-0.20/snappy-java.jar"/> + <classpathentry kind="lib" path="/usr/lib/hadoop/client-0.20/snappy-java-1.0.4.1.jar"/> + <classpathentry kind="lib" path="/usr/lib/hadoop-0.20-mapreduce/lib/snappy-java-1.0.4.1.jar"/> <classpathentry kind="output" path="bin"/> </classpath> diff --git a/Assign1/bin/.gitignore b/Assign1/bin/.gitignore index eaf919f..5085f45 100644 --- 
a/Assign1/bin/.gitignore +++ b/Assign1/bin/.gitignore @@ -1,5 +1 @@ /Question1/ -/StubDriver.class -/StubMapper.class -/StubReducer.class -/StubTest.class diff --git a/Assign1/hadoop.log b/Assign1/hadoop.log index b0e409d..288c3b7 100644 --- a/Assign1/hadoop.log +++ b/Assign1/hadoop.log @@ -4421,3 +4421,4196 @@ Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.Http Bytes Read=26057874 File Output Format Counters Bytes Written=862 +2017-02-18 08:10:46,450 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +2017-02-18 08:10:48,908 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id +2017-02-18 08:10:48,937 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId= +2017-02-18 08:10:51,128 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String). 
+2017-02-18 08:10:51,224 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3 +2017-02-18 08:10:51,664 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3 +2017-02-18 08:10:53,086 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local2083140530_0001 +2017-02-18 08:10:54,977 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ +2017-02-18 08:10:54,988 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local2083140530_0001 +2017-02-18 08:10:54,999 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null +2017-02-18 08:10:55,060 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:10:55,072 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter +2017-02-18 08:10:55,531 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks +2017-02-18 08:10:55,533 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_m_000000_0 +2017-02-18 08:10:55,777 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:10:55,921 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:10:55,926 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935 +2017-02-18 08:10:55,999 INFO org.apache.hadoop.mapreduce.Job: Job job_local2083140530_0001 running in uber mode : false +2017-02-18 08:10:56,004 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% +2017-02-18 08:10:57,831 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 08:10:57,831 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 08:10:57,832 INFO org.apache.hadoop.mapred.MapTask: 
soft limit at 83886080 +2017-02-18 08:10:57,832 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 08:10:57,832 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 08:10:57,929 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 08:10:58,049 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 08:11:01,985 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:11:02,018 INFO org.apache.hadoop.mapreduce.Job: map 1% reduce 0% +2017-02-18 08:11:04,989 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:11:05,027 INFO org.apache.hadoop.mapreduce.Job: map 5% reduce 0% +2017-02-18 08:11:07,994 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:11:08,039 INFO org.apache.hadoop.mapreduce.Job: map 11% reduce 0% +2017-02-18 08:11:10,996 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:11:11,048 INFO org.apache.hadoop.mapreduce.Job: map 16% reduce 0% +2017-02-18 08:11:14,000 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:11:14,037 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:11:14,044 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 08:11:14,045 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 08:11:14,046 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600 +2017-02-18 08:11:14,046 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600 +2017-02-18 08:11:14,052 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0% +2017-02-18 08:11:17,004 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:20,010 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 
08:11:23,012 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:26,014 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:29,016 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:32,024 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:35,034 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:38,040 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:40,745 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 08:11:40,810 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2083140530_0001_m_000000_0 is done. And is in the process of committing +2017-02-18 08:11:40,824 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 08:11:40,827 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2083140530_0001_m_000000_0' done. +2017-02-18 08:11:40,832 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2083140530_0001_m_000000_0 +2017-02-18 08:11:40,834 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_m_000001_0 +2017-02-18 08:11:40,840 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:11:40,841 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:11:40,861 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889 +2017-02-18 08:11:41,154 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 08:11:41,280 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 08:11:41,288 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 08:11:41,289 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 08:11:41,290 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 
+2017-02-18 08:11:41,290 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 08:11:41,298 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 08:11:41,312 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 08:11:45,711 INFO org.apache.hadoop.mapred.LocalJobRunner: +2017-02-18 08:11:45,730 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 08:11:45,731 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 08:11:45,732 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 +2017-02-18 08:11:45,733 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 +2017-02-18 08:11:46,174 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0% +2017-02-18 08:11:46,865 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:47,176 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0% +2017-02-18 08:11:49,867 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:52,868 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:53,319 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 08:11:53,334 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2083140530_0001_m_000001_0 is done. And is in the process of committing +2017-02-18 08:11:53,341 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 08:11:53,346 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2083140530_0001_m_000001_0' done. 
+2017-02-18 08:11:53,348 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2083140530_0001_m_000001_0 +2017-02-18 08:11:53,348 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_m_000002_0 +2017-02-18 08:11:53,363 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:11:53,364 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:11:53,381 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050 +2017-02-18 08:11:53,771 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 08:11:53,780 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 08:11:53,781 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 08:11:53,782 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 08:11:53,782 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 08:11:53,795 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 08:11:53,802 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 08:11:54,199 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 08:11:56,809 INFO org.apache.hadoop.mapred.LocalJobRunner: +2017-02-18 08:11:56,821 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 08:11:56,822 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 08:11:56,823 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600 +2017-02-18 08:11:56,823 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600 +2017-02-18 08:11:57,211 
INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0% +2017-02-18 08:11:59,395 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:12:00,220 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0% +2017-02-18 08:12:02,396 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:12:02,603 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 08:12:02,632 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2083140530_0001_m_000002_0 is done. And is in the process of committing +2017-02-18 08:12:02,651 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 08:12:02,651 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2083140530_0001_m_000002_0' done. +2017-02-18 08:12:02,651 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2083140530_0001_m_000002_0 +2017-02-18 08:12:02,654 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. +2017-02-18 08:12:02,808 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks +2017-02-18 08:12:02,815 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000000_0 +2017-02-18 08:12:02,925 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:12:02,926 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:12:02,964 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@79758369 +2017-02-18 08:12:03,119 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:12:03,152 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 
08:12:03,245 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 08:12:03,435 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:12:03,505 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 61 bytes from map-output for attempt_local2083140530_0001_m_000000_0 +2017-02-18 08:12:03,516 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 61, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->61 +2017-02-18 08:12:03,538 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 08:12:03,556 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local2083140530_0001_m_000002_0 +2017-02-18 08:12:03,557 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 2, commitMemory -> 61, usedMemory ->92 +2017-02-18 08:12:03,562 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 30 len: 34 to MEMORY +2017-02-18 08:12:03,575 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 30 bytes from map-output for attempt_local2083140530_0001_m_000001_0 +2017-02-18 08:12:03,578 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 30, inMemoryMapOutputs.size() -> 3, commitMemory -> 92, usedMemory ->122 +2017-02-18 08:12:03,580 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 08:12:03,581 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 08:12:03,581 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 08:12:03,649 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 08:12:03,650 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 104 bytes +2017-02-18 08:12:03,656 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 122 bytes to disk to satisfy reduce memory limit +2017-02-18 08:12:03,661 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 122 bytes from disk +2017-02-18 08:12:03,685 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 08:12:03,685 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 08:12:03,686 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 110 bytes +2017-02-18 08:12:03,688 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 08:12:03,785 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000001_0 +2017-02-18 08:12:03,810 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:12:03,820 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:12:03,820 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5b4e1b74 +2017-02-18 08:12:03,828 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:12:03,849 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:12:03,857 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:12:03,862 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 154 bytes from map-output for attempt_local2083140530_0001_m_000000_0 +2017-02-18 08:12:03,866 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 154, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->154 +2017-02-18 08:12:03,878 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 08:12:03,880 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 39 bytes from map-output for attempt_local2083140530_0001_m_000002_0 +2017-02-18 08:12:03,885 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 
39, inMemoryMapOutputs.size() -> 2, commitMemory -> 154, usedMemory ->193 +2017-02-18 08:12:03,889 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 69 len: 73 to MEMORY +2017-02-18 08:12:03,903 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 69 bytes from map-output for attempt_local2083140530_0001_m_000001_0 +2017-02-18 08:12:03,909 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 69, inMemoryMapOutputs.size() -> 3, commitMemory -> 193, usedMemory ->262 +2017-02-18 08:12:03,911 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 08:12:03,912 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 08:12:03,912 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 08:12:03,916 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 08:12:03,916 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 253 bytes +2017-02-18 08:12:03,917 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 262 bytes to disk to satisfy reduce memory limit +2017-02-18 08:12:03,918 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 262 bytes from disk +2017-02-18 08:12:03,925 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 08:12:03,927 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 08:12:03,928 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 255 bytes +2017-02-18 08:12:03,930 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 08:12:03,986 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000002_0 +2017-02-18 08:12:03,989 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:12:03,989 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:12:03,989 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1a6a1f5b +2017-02-18 08:12:03,994 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:12:04,019 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:12:04,034 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:12:04,036 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 112 bytes from map-output for attempt_local2083140530_0001_m_000000_0 +2017-02-18 08:12:04,040 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 112, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->112 +2017-02-18 08:12:04,056 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 08:12:04,058 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 51 bytes from map-output for attempt_local2083140530_0001_m_000002_0 +2017-02-18 08:12:04,078 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 
51, inMemoryMapOutputs.size() -> 2, commitMemory -> 112, usedMemory ->163 +2017-02-18 08:12:04,080 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 41 len: 45 to MEMORY +2017-02-18 08:12:04,087 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 41 bytes from map-output for attempt_local2083140530_0001_m_000001_0 +2017-02-18 08:12:04,088 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 41, inMemoryMapOutputs.size() -> 3, commitMemory -> 163, usedMemory ->204 +2017-02-18 08:12:04,088 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 08:12:04,089 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 08:12:04,089 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 08:12:04,091 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 08:12:04,091 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 186 bytes +2017-02-18 08:12:04,107 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 204 bytes to disk to satisfy reduce memory limit +2017-02-18 08:12:04,108 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 204 bytes from disk +2017-02-18 08:12:04,108 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 08:12:04,108 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 08:12:04,109 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 194 bytes +2017-02-18 08:12:04,109 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 08:12:04,151 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000003_0 +2017-02-18 08:12:04,168 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:12:04,168 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:12:04,169 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7934bf83 +2017-02-18 08:12:04,175 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:12:04,196 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:12:04,214 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:12:04,215 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 63 bytes from map-output for attempt_local2083140530_0001_m_000000_0 +2017-02-18 08:12:04,225 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 63, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->63 +2017-02-18 08:12:04,227 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 08:12:04,232 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local2083140530_0001_m_000002_0 +2017-02-18 08:12:04,241 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, 
inMemoryMapOutputs.size() -> 2, commitMemory -> 63, usedMemory ->74 +2017-02-18 08:12:04,246 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 11 len: 15 to MEMORY +2017-02-18 08:12:04,256 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local2083140530_0001_m_000001_0 +2017-02-18 08:12:04,260 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 74, usedMemory ->85 +2017-02-18 08:12:04,261 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 08:12:04,262 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 08:12:04,262 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 08:12:04,269 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 08:12:04,269 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 69 bytes +2017-02-18 08:12:04,270 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 85 bytes to disk to satisfy reduce memory limit +2017-02-18 08:12:04,270 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 85 bytes from disk +2017-02-18 08:12:04,281 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 08:12:04,283 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 08:12:04,285 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 75 bytes +2017-02-18 08:12:04,286 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 08:12:04,335 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000004_0 +2017-02-18 08:12:04,341 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:12:04,342 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:12:04,342 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4c122623 +2017-02-18 08:12:04,351 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:12:04,370 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:12:04,378 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:12:04,392 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 52 bytes from map-output for attempt_local2083140530_0001_m_000000_0 +2017-02-18 08:12:04,392 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 52, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->52 +2017-02-18 08:12:04,394 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 08:12:04,407 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local2083140530_0001_m_000002_0 +2017-02-18 08:12:04,419 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, 
inMemoryMapOutputs.size() -> 2, commitMemory -> 52, usedMemory ->83 +2017-02-18 08:12:04,421 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 33 len: 37 to MEMORY +2017-02-18 08:12:04,435 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 33 bytes from map-output for attempt_local2083140530_0001_m_000001_0 +2017-02-18 08:12:04,435 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 33, inMemoryMapOutputs.size() -> 3, commitMemory -> 83, usedMemory ->116 +2017-02-18 08:12:04,436 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 08:12:04,437 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 08:12:04,437 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 08:12:04,438 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 08:12:04,438 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 100 bytes +2017-02-18 08:12:04,445 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 116 bytes to disk to satisfy reduce memory limit +2017-02-18 08:12:04,446 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 116 bytes from disk +2017-02-18 08:12:04,446 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 08:12:04,446 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 08:12:04,446 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 106 bytes +2017-02-18 08:12:04,454 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 08:12:04,498 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000005_0 +2017-02-18 08:12:04,506 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:12:04,508 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:12:04,508 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7adb5354 +2017-02-18 08:12:04,526 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:12:04,540 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:12:04,552 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:12:04,566 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 54 bytes from map-output for attempt_local2083140530_0001_m_000000_0 +2017-02-18 08:12:04,566 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 54, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->54 +2017-02-18 08:12:04,576 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 08:12:04,581 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local2083140530_0001_m_000002_0 +2017-02-18 08:12:04,586 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, 
inMemoryMapOutputs.size() -> 2, commitMemory -> 54, usedMemory ->77 +2017-02-18 08:12:04,589 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 23 len: 27 to MEMORY +2017-02-18 08:12:04,601 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local2083140530_0001_m_000001_0 +2017-02-18 08:12:04,604 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 3, commitMemory -> 77, usedMemory ->100 +2017-02-18 08:12:04,605 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 08:12:04,606 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 08:12:04,606 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 08:12:04,608 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 08:12:04,608 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 82 bytes +2017-02-18 08:12:04,610 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 100 bytes to disk to satisfy reduce memory limit +2017-02-18 08:12:04,611 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 100 bytes from disk +2017-02-18 08:12:04,613 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 08:12:04,613 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 08:12:04,620 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 90 bytes +2017-02-18 08:12:04,621 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 08:12:04,663 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000006_0 +2017-02-18 08:12:04,684 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:12:04,685 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:12:04,686 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4ab6b2ab +2017-02-18 08:12:04,692 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:12:04,706 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:12:04,719 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:12:04,724 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 72 bytes from map-output for attempt_local2083140530_0001_m_000000_0 +2017-02-18 08:12:04,746 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 72, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->72 +2017-02-18 08:12:04,749 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 08:12:04,760 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 28 bytes from map-output for attempt_local2083140530_0001_m_000002_0 +2017-02-18 08:12:04,760 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 28, 
inMemoryMapOutputs.size() -> 2, commitMemory -> 72, usedMemory ->100 +2017-02-18 08:12:04,762 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 28 len: 32 to MEMORY +2017-02-18 08:12:04,779 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 28 bytes from map-output for attempt_local2083140530_0001_m_000001_0 +2017-02-18 08:12:04,779 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 28, inMemoryMapOutputs.size() -> 3, commitMemory -> 100, usedMemory ->128 +2017-02-18 08:12:04,785 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 08:12:04,786 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 08:12:04,787 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 08:12:04,788 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 08:12:04,788 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 110 bytes +2017-02-18 08:12:04,794 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 128 bytes to disk to satisfy reduce memory limit +2017-02-18 08:12:04,795 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 128 bytes from disk +2017-02-18 08:12:04,795 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 08:12:04,795 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 08:12:04,796 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 116 bytes +2017-02-18 08:12:04,808 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 08:12:04,868 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000007_0 +2017-02-18 08:12:04,873 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:12:04,874 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:12:04,874 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2b2083d9 +2017-02-18 08:12:04,877 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:12:04,894 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:12:04,916 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 08:12:04,921 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 88 bytes from map-output for attempt_local2083140530_0001_m_000000_0 +2017-02-18 08:12:04,924 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 88, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->88 +2017-02-18 08:12:04,930 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 2 len: 6 to MEMORY +2017-02-18 08:12:04,942 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local2083140530_0001_m_000002_0 +2017-02-18 08:12:04,944 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, 
inMemoryMapOutputs.size() -> 2, commitMemory -> 88, usedMemory ->90 +2017-02-18 08:12:04,946 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 11 len: 15 to MEMORY +2017-02-18 08:12:04,955 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local2083140530_0001_m_000001_0 +2017-02-18 08:12:04,972 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 90, usedMemory ->101 +2017-02-18 08:12:04,973 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 08:12:04,973 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 08:12:04,974 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 08:12:04,977 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 08:12:04,977 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 87 bytes +2017-02-18 08:12:04,978 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 101 bytes to disk to satisfy reduce memory limit +2017-02-18 08:12:04,978 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 101 bytes from disk +2017-02-18 08:12:04,979 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 08:12:04,979 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 08:12:04,986 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 90 bytes +2017-02-18 08:12:04,987 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 08:12:05,044 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000008_0 +2017-02-18 08:12:05,049 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:12:05,050 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:12:05,050 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@d62d2df +2017-02-18 08:12:05,055 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:12:05,074 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:12:05,085 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 08:12:05,096 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 98 bytes from map-output for attempt_local2083140530_0001_m_000000_0 +2017-02-18 08:12:05,097 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 98, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->98 +2017-02-18 08:12:05,100 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 49 len: 53 to MEMORY +2017-02-18 08:12:05,105 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 49 bytes from map-output for attempt_local2083140530_0001_m_000002_0 +2017-02-18 08:12:05,109 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 49, 
inMemoryMapOutputs.size() -> 2, commitMemory -> 98, usedMemory ->147 +2017-02-18 08:12:05,112 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 49 len: 53 to MEMORY +2017-02-18 08:12:05,135 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 49 bytes from map-output for attempt_local2083140530_0001_m_000001_0 +2017-02-18 08:12:05,136 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 49, inMemoryMapOutputs.size() -> 3, commitMemory -> 147, usedMemory ->196 +2017-02-18 08:12:05,139 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 08:12:05,140 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 08:12:05,140 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 08:12:05,147 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 08:12:05,158 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 184 bytes +2017-02-18 08:12:05,160 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 196 bytes to disk to satisfy reduce memory limit +2017-02-18 08:12:05,160 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 196 bytes from disk +2017-02-18 08:12:05,160 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 08:12:05,161 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 08:12:05,161 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 188 bytes +2017-02-18 08:12:05,162 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 08:12:05,228 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000009_0 +2017-02-18 08:12:05,240 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:12:05,241 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:12:05,241 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4066a7f5 +2017-02-18 08:12:05,245 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:12:05,269 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:12:05,281 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 22 len: 26 to MEMORY +2017-02-18 08:12:05,296 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local2083140530_0001_m_000000_0 +2017-02-18 08:12:05,296 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->22 +2017-02-18 08:12:05,298 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 2 len: 6 to MEMORY +2017-02-18 08:12:05,316 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local2083140530_0001_m_000002_0 +2017-02-18 08:12:05,316 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, 
inMemoryMapOutputs.size() -> 2, commitMemory -> 22, usedMemory ->24 +2017-02-18 08:12:05,331 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 13 len: 17 to MEMORY +2017-02-18 08:12:05,339 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local2083140530_0001_m_000001_0 +2017-02-18 08:12:05,342 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 3, commitMemory -> 24, usedMemory ->37 +2017-02-18 08:12:05,346 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 08:12:05,347 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 08:12:05,347 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 08:12:05,349 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 08:12:05,350 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 22 bytes +2017-02-18 08:12:05,351 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 37 bytes to disk to satisfy reduce memory limit +2017-02-18 08:12:05,354 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 37 bytes from disk +2017-02-18 08:12:05,357 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 08:12:05,359 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 08:12:05,360 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 27 bytes +2017-02-18 08:12:05,368 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 08:12:05,420 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete. +2017-02-18 08:12:05,476 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local2083140530_0001 +java.lang.Exception: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support. + at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489) + at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:556) +Caused by: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support. + at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65) + at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134) + at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150) + at org.apache.hadoop.io.compress.CompressionCodec$Util.createOutputStreamWithCodecPool(CompressionCodec.java:131) + at org.apache.hadoop.io.compress.SnappyCodec.createOutputStream(SnappyCodec.java:99) + at org.apache.hadoop.mapreduce.lib.output.TextOutputFormat.getRecordWriter(TextOutputFormat.java:136) + at org.apache.hadoop.mapred.ReduceTask$NewTrackingRecordWriter.<init>(ReduceTask.java:540) + at org.apache.hadoop.mapred.ReduceTask.runNewReducer(ReduceTask.java:614) + at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:389) + at org.apache.hadoop.mapred.LocalJobRunner$Job$ReduceTaskRunnable.run(LocalJobRunner.java:346) + at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) + at java.util.concurrent.FutureTask.run(FutureTask.java:262) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) + at java.lang.Thread.run(Thread.java:745) +2017-02-18 08:12:06,253 INFO org.apache.hadoop.mapreduce.Job: Job job_local2083140530_0001 failed with state 
FAILED due to: NA +2017-02-18 08:12:06,525 INFO org.apache.hadoop.mapreduce.Job: Counters: 30 + File System Counters + FILE: Number of bytes read=63678066 + FILE: Number of bytes written=838216 + FILE: Number of read operations=0 + FILE: Number of large read operations=0 + FILE: Number of write operations=0 + Map-Reduce Framework + Map input records=507535 + Map output records=4678719 + Map output bytes=43638689 + Map output materialized bytes=1471 + Input split bytes=351 + Combine input records=4678719 + Combine output records=131 + Reduce input groups=0 + Reduce shuffle bytes=1471 + Reduce input records=0 + Reduce output records=0 + Spilled Records=131 + Shuffled Maps =30 + Failed Shuffles=0 + Merged Map outputs=30 + GC time elapsed (ms)=951 + Total committed heap usage (bytes)=576008192 + Shuffle Errors + BAD_ID=0 + CONNECTION=0 + IO_ERROR=0 + WRONG_LENGTH=0 + WRONG_MAP=0 + WRONG_REDUCE=0 + File Input Format Counters + Bytes Read=26057874 + File Output Format Counters + Bytes Written=0 +2017-02-18 08:51:02,416 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +2017-02-18 08:51:02,998 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress +2017-02-18 08:51:03,001 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec +2017-02-18 08:51:04,900 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id +2017-02-18 08:51:04,911 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId= +2017-02-18 08:51:07,102 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String). 
+2017-02-18 08:51:07,231 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3 +2017-02-18 08:51:07,733 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3 +2017-02-18 08:51:09,552 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local986354165_0001 +2017-02-18 08:51:11,300 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ +2017-02-18 08:51:11,301 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local986354165_0001 +2017-02-18 08:51:11,322 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null +2017-02-18 08:51:11,377 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:51:11,379 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter +2017-02-18 08:51:11,841 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks +2017-02-18 08:51:11,843 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local986354165_0001_m_000000_0 +2017-02-18 08:51:12,153 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:51:12,283 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:51:12,304 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935 +2017-02-18 08:51:12,322 INFO org.apache.hadoop.mapreduce.Job: Job job_local986354165_0001 running in uber mode : false +2017-02-18 08:51:12,350 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% +2017-02-18 08:51:15,290 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 08:51:15,290 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 08:51:15,290 INFO org.apache.hadoop.mapred.MapTask: soft 
limit at 83886080 +2017-02-18 08:51:15,290 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 08:51:15,290 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 08:51:15,352 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 08:51:15,451 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 08:51:21,274 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:51:21,379 INFO org.apache.hadoop.mapreduce.Job: map 4% reduce 0% +2017-02-18 08:51:24,298 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:51:24,389 INFO org.apache.hadoop.mapreduce.Job: map 9% reduce 0% +2017-02-18 08:51:27,309 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:51:27,399 INFO org.apache.hadoop.mapreduce.Job: map 15% reduce 0% +2017-02-18 08:51:30,311 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:51:30,406 INFO org.apache.hadoop.mapreduce.Job: map 21% reduce 0% +2017-02-18 08:51:30,786 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:51:30,789 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 08:51:30,790 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 08:51:30,791 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600 +2017-02-18 08:51:30,791 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600 +2017-02-18 08:51:33,314 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:51:33,416 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0% +2017-02-18 08:51:36,318 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:51:39,322 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:51:42,328 
INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:51:45,334 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:51:48,337 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:51:50,854 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 08:51:50,855 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 0 kv 26214396(104857584) kvi 14765620(59062480) +2017-02-18 08:51:50,855 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 08:51:50,855 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600 +2017-02-18 08:51:50,855 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600 +2017-02-18 08:51:51,341 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:51:54,345 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:51:57,347 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:52:00,348 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:52:03,356 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:52:06,360 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:52:09,366 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:52:09,648 INFO org.apache.hadoop.mapred.MapTask: Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@f6a4c4a +java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support. 
+ at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65) + at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134) + at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150) + at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165) + at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114) + at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97) + at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606) + at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486) + at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723) + at org.apache.hadoop.mapred.MapTask.closeQuietly(MapTask.java:2016) + at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:797) + at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341) + at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270) + at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) + at java.util.concurrent.FutureTask.run(FutureTask.java:262) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) + at java.lang.Thread.run(Thread.java:745) +2017-02-18 08:52:09,699 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local986354165_0001_m_000001_0 +2017-02-18 08:52:09,706 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:52:09,707 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:52:09,710 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889 +2017-02-18 08:52:11,414 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 08:52:11,482 INFO 
org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 08:52:11,482 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 08:52:11,482 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 08:52:11,482 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 08:52:11,494 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 08:52:11,551 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 08:52:12,367 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:52:15,633 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:52:15,716 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:52:16,551 INFO org.apache.hadoop.mapreduce.Job: map 35% reduce 0% +2017-02-18 08:52:17,568 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:52:17,571 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 08:52:17,571 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 08:52:17,571 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 +2017-02-18 08:52:17,571 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 +2017-02-18 08:52:18,723 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:52:19,559 INFO org.apache.hadoop.mapreduce.Job: map 44% reduce 0% +2017-02-18 08:52:21,725 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:52:23,449 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 08:52:23,463 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 0 kv 26214396(104857584) kvi 22120620(88482480) +2017-02-18 08:52:23,463 INFO org.apache.hadoop.mapred.MapTask: Spilling 
map output +2017-02-18 08:52:23,463 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 +2017-02-18 08:52:23,464 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 +2017-02-18 08:52:24,726 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:52:27,732 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:52:28,910 INFO org.apache.hadoop.mapred.MapTask: Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@4cb8cd94 +java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support. + at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65) + at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134) + at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150) + at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165) + at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114) + at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97) + at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606) + at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486) + at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723) + at org.apache.hadoop.mapred.MapTask.closeQuietly(MapTask.java:2016) + at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:797) + at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341) + at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270) + at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) + at java.util.concurrent.FutureTask.run(FutureTask.java:262) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) + at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) + at java.lang.Thread.run(Thread.java:745) +2017-02-18 08:52:28,933 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local986354165_0001_m_000002_0 +2017-02-18 08:52:29,198 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:52:29,199 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:52:29,242 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050 +2017-02-18 08:52:29,745 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 08:52:29,747 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 08:52:29,747 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 08:52:29,748 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 08:52:29,748 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 08:52:29,756 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 08:52:29,784 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 08:52:35,797 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:52:38,522 INFO org.apache.hadoop.mapred.LocalJobRunner: +2017-02-18 08:52:38,548 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 08:52:38,548 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 08:52:38,550 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600 +2017-02-18 08:52:38,550 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600 +2017-02-18 08:52:38,800 INFO 
org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:52:40,425 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:52:41,425 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0% +2017-02-18 08:52:43,433 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:52:45,067 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 08:52:45,078 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 0 kv 26214396(104857584) kvi 23042072(92168288) +2017-02-18 08:52:45,079 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 08:52:45,079 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600 +2017-02-18 08:52:45,079 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600 +2017-02-18 08:52:46,435 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:52:48,811 INFO org.apache.hadoop.mapred.MapTask: Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@601a1ecd +java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support. 
+ at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65) + at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134) + at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150) + at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165) + at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114) + at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97) + at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606) + at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486) + at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723) + at org.apache.hadoop.mapred.MapTask.closeQuietly(MapTask.java:2016) + at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:797) + at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341) + at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270) + at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) + at java.util.concurrent.FutureTask.run(FutureTask.java:262) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) + at java.lang.Thread.run(Thread.java:745) +2017-02-18 08:52:48,935 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. +2017-02-18 08:52:49,063 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local986354165_0001 +java.lang.Exception: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support. + at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489) + at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:549) +Caused by: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support. 
+ at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65) + at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134) + at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150) + at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165) + at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114) + at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97) + at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606) + at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486) + at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723) + at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:793) + at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341) + at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270) + at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) + at java.util.concurrent.FutureTask.run(FutureTask.java:262) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) + at java.lang.Thread.run(Thread.java:745) +2017-02-18 08:52:49,437 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:52:49,458 INFO org.apache.hadoop.mapreduce.Job: Job job_local986354165_0001 failed with state FAILED due to: NA +2017-02-18 08:52:49,693 INFO org.apache.hadoop.mapreduce.Job: Counters: 18 + File System Counters + FILE: Number of bytes read=70901752 + FILE: Number of bytes written=829530 + FILE: Number of read operations=0 + FILE: Number of large read operations=0 + FILE: Number of write operations=0 + Map-Reduce Framework + Map input records=507535 + Map output records=4678719 + Map output bytes=43638689 + Map output materialized bytes=0 + Input split bytes=351 + Combine input records=0 + Combine output 
records=0 + Spilled Records=0 + Failed Shuffles=0 + Merged Map outputs=0 + GC time elapsed (ms)=11806 + Total committed heap usage (bytes)=1413275648 + File Input Format Counters + Bytes Read=26057874 +2017-02-18 08:56:05,243 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +2017-02-18 08:56:05,794 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress +2017-02-18 08:56:05,837 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec +2017-02-18 08:56:08,084 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id +2017-02-18 08:56:08,126 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId= +2017-02-18 08:56:11,092 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String). 
+2017-02-18 08:56:11,184 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3 +2017-02-18 08:56:11,584 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3 +2017-02-18 08:56:13,206 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1323619014_0001 +2017-02-18 08:56:15,020 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ +2017-02-18 08:56:15,022 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1323619014_0001 +2017-02-18 08:56:15,035 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null +2017-02-18 08:56:15,105 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:56:15,119 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter +2017-02-18 08:56:15,525 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks +2017-02-18 08:56:15,526 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_m_000000_0 +2017-02-18 08:56:15,791 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:56:15,934 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:56:15,969 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935 +2017-02-18 08:56:16,025 INFO org.apache.hadoop.mapreduce.Job: Job job_local1323619014_0001 running in uber mode : false +2017-02-18 08:56:16,028 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% +2017-02-18 08:56:16,562 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 08:56:16,568 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 08:56:16,569 INFO org.apache.hadoop.mapred.MapTask: 
soft limit at 83886080 +2017-02-18 08:56:16,569 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 08:56:16,570 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 08:56:16,609 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 08:56:16,664 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 08:56:21,875 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:56:22,086 INFO org.apache.hadoop.mapreduce.Job: map 3% reduce 0% +2017-02-18 08:56:24,895 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:56:25,136 INFO org.apache.hadoop.mapreduce.Job: map 9% reduce 0% +2017-02-18 08:56:27,896 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:56:28,148 INFO org.apache.hadoop.mapreduce.Job: map 15% reduce 0% +2017-02-18 08:56:30,900 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:56:31,158 INFO org.apache.hadoop.mapreduce.Job: map 21% reduce 0% +2017-02-18 08:56:31,565 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:56:31,574 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 08:56:31,575 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 08:56:31,576 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600 +2017-02-18 08:56:31,576 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600 +2017-02-18 08:56:33,907 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:56:34,166 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0% +2017-02-18 08:56:36,912 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:56:39,916 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 
08:56:42,923 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:56:45,925 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:56:48,935 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:56:50,241 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:56:50,247 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:56:51,938 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:56:52,095 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:56:52,099 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:56:52,923 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:56:52,933 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:56:53,960 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:56:53,965 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:56:54,324 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:56:54,329 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:56:54,752 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:56:54,785 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:56:54,941 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:56:55,192 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:56:55,203 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:56:55,743 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:56:55,745 WARN 
org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:56:56,127 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:56:56,137 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:56:56,899 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:56:56,912 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:56:57,187 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 08:56:57,253 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1323619014_0001_m_000000_0 is done. And is in the process of committing +2017-02-18 08:56:57,256 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 08:56:57,261 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1323619014_0001_m_000000_0' done. +2017-02-18 08:56:57,262 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1323619014_0001_m_000000_0 +2017-02-18 08:56:57,263 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_m_000001_0 +2017-02-18 08:56:57,270 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:56:57,271 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:56:57,273 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889 +2017-02-18 08:56:57,664 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 08:56:57,669 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 08:56:57,670 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 08:56:57,670 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 08:56:57,671 INFO org.apache.hadoop.mapred.MapTask: kvstart = 
26214396; length = 6553600 +2017-02-18 08:56:57,678 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 08:56:57,702 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 08:56:58,254 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 08:57:01,849 INFO org.apache.hadoop.mapred.LocalJobRunner: +2017-02-18 08:57:01,863 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 08:57:01,864 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 08:57:01,864 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 +2017-02-18 08:57:01,864 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 +2017-02-18 08:57:02,264 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0% +2017-02-18 08:57:03,300 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:57:04,281 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0% +2017-02-18 08:57:06,304 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:57:07,559 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:07,567 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:07,699 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:07,728 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:08,017 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:08,021 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:08,230 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:08,232 WARN org.apache.hadoop.mapred.IFile: Could 
not obtain compressor from CodecPool +2017-02-18 08:57:08,382 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:08,388 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:08,528 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:08,548 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:08,670 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:08,700 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:08,852 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:08,893 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:09,003 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:09,020 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:09,263 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:09,270 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:09,309 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:57:09,394 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 08:57:09,411 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1323619014_0001_m_000001_0 is done. And is in the process of committing +2017-02-18 08:57:09,416 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 08:57:09,419 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1323619014_0001_m_000001_0' done. 
+2017-02-18 08:57:09,420 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1323619014_0001_m_000001_0 +2017-02-18 08:57:09,421 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_m_000002_0 +2017-02-18 08:57:09,430 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:57:09,431 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:57:09,432 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050 +2017-02-18 08:57:09,807 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 08:57:09,823 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 08:57:09,827 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 08:57:09,827 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 08:57:09,827 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 08:57:09,836 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 08:57:09,850 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 08:57:10,305 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 08:57:12,866 INFO org.apache.hadoop.mapred.LocalJobRunner: +2017-02-18 08:57:12,871 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 08:57:12,871 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 08:57:12,871 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600 +2017-02-18 08:57:12,871 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600 +2017-02-18 08:57:13,315 
INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0% +2017-02-18 08:57:15,444 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:57:16,322 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0% +2017-02-18 08:57:17,038 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:17,042 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:17,166 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:17,180 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:17,329 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:17,341 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:17,558 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:17,563 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:17,684 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:17,684 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:17,820 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:17,847 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:17,931 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:17,969 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:18,111 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:18,127 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:18,225 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 
08:57:18,239 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:18,449 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:57:18,461 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 08:57:18,472 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 08:57:18,520 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 08:57:18,583 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1323619014_0001_m_000002_0 is done. And is in the process of committing +2017-02-18 08:57:18,590 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 08:57:18,594 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1323619014_0001_m_000002_0' done. +2017-02-18 08:57:18,595 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1323619014_0001_m_000002_0 +2017-02-18 08:57:18,597 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. 
+2017-02-18 08:57:18,686 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks +2017-02-18 08:57:18,692 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000000_0 +2017-02-18 08:57:18,737 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:57:18,738 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:57:18,760 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3a6d125b +2017-02-18 08:57:18,891 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:57:18,919 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:57:19,167 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,216 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,290 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000001_0 +2017-02-18 08:57:19,296 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:57:19,297 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:57:19,297 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3115d34f +2017-02-18 08:57:19,298 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,299 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,301 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,308 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,310 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,307 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:57:19,315 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,317 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,337 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,339 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,340 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 08:57:19,350 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,351 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,361 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to 
MEMORY +2017-02-18 08:57:19,358 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,349 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:57:19,363 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,375 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000002_0 +2017-02-18 08:57:19,384 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:57:19,385 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:57:19,385 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4f3ed32a +2017-02-18 08:57:19,390 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,392 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,395 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,393 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,400 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,402 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,397 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, 
mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:57:19,407 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,409 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,409 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,422 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,422 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,424 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,434 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:57:19,424 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,443 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,443 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,444 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,447 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 
08:57:19,457 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,457 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,458 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,459 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,445 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,463 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,468 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000003_0 +2017-02-18 08:57:19,482 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,483 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,483 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,485 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,486 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,487 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,495 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,494 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:57:19,496 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:57:19,496 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4edb440a +2017-02-18 08:57:19,498 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,500 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,488 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,512 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,514 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,508 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,516 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,516 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,516 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,518 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 
08:57:19,526 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,528 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,528 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,529 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,525 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,520 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,547 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,547 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,548 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,549 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,550 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,550 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,551 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,551 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,518 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:57:19,553 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,554 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,555 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,565 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,567 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,567 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,568 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,568 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,570 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,576 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 
decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,578 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,603 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,605 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,599 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,616 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,618 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,618 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,619 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,619 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,620 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,621 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,622 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,622 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,593 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,582 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:57:19,578 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,623 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,623 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:19,624 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,650 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,653 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000004_0 +2017-02-18 08:57:19,652 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,665 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:19,666 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,666 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:19,667 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,668 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: 
localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:19,653 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,668 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,669 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,664 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:57:19,674 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:57:19,674 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@448d87fc +2017-02-18 08:57:19,677 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,678 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,679 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,685 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,687 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,687 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,691 INFO 
org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:57:19,680 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,693 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,694 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,694 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,695 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,695 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,685 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,684 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,697 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,698 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,698 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,699 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,699 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,700 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:19,701 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,714 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:19,715 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,714 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,733 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,735 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,713 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,735 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,737 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,709 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,737 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,738 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor 
[.gz] +2017-02-18 08:57:19,729 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:57:19,739 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:19,740 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,754 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:19,756 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,750 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,777 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:19,749 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,749 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,749 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,781 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,784 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 
to MEMORY +2017-02-18 08:57:19,785 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,783 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,789 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,790 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,791 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,792 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,792 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,782 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,793 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,794 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,789 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:19,786 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,785 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000005_0 +2017-02-18 08:57:19,795 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:19,804 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,822 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:19,826 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,821 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,833 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,835 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,818 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,835 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,837 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,837 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,838 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,838 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,839 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor 
[.gz] +2017-02-18 08:57:19,839 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,841 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,841 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,842 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,817 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,843 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:19,833 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:57:19,845 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:57:19,846 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@68f559e5 +2017-02-18 08:57:19,847 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,847 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:19,848 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,863 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:19,865 INFO org.apache.hadoop.io.compress.CodecPool: Got 
brand-new decompressor [.gz] +2017-02-18 08:57:19,862 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:57:19,855 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,881 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,881 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,852 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:19,849 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,888 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,889 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,890 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,891 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,891 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,893 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 
08:57:19,893 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:19,895 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,895 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:19,896 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,897 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:19,897 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,897 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:19,899 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,896 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:19,918 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,918 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:19,919 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,919 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to 
MEMORY +2017-02-18 08:57:19,921 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,921 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:19,922 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,922 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:19,911 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,951 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,953 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,953 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,954 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,954 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,955 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,956 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,957 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,957 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,958 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,958 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,959 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,959 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,961 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,961 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,962 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:19,962 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:19,910 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,011 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,011 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:19,965 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] 
+2017-02-18 08:57:20,012 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,013 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,014 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,015 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,015 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,016 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,016 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,017 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,017 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,019 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,019 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,020 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,020 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 
len: 67 to MEMORY +2017-02-18 08:57:20,021 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,021 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:19,964 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,023 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:19,912 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,025 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,026 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,026 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,027 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,028 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,029 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,029 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,030 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,030 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,031 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,032 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:19,933 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:57:19,923 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,063 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,064 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,065 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,065 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,067 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,067 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,068 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,068 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle 
output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,069 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,069 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,070 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,070 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,072 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,089 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,113 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,115 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,115 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,075 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,116 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,117 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,117 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 
56 to MEMORY +2017-02-18 08:57:20,074 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000006_0 +2017-02-18 08:57:20,074 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,119 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,073 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,119 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,073 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,120 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,125 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,140 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,142 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,142 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,140 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,144 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,145 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,146 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,147 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,147 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,148 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,148 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,149 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,149 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,139 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,151 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,152 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,138 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,166 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,166 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about 
to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,167 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,167 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,138 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,169 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,138 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:57:20,170 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:57:20,170 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@35fbcbad +2017-02-18 08:57:20,126 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,171 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,153 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,172 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,173 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,176 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: 
MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:57:20,187 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,190 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,191 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,190 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,189 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,204 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,205 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,206 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,207 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,207 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,208 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,208 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,209 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,209 
INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,210 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,211 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,212 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,212 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,189 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,212 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,213 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,214 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,215 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,215 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,188 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,216 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY 
+2017-02-18 08:57:20,217 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,217 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,223 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:57:20,234 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,238 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,237 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,243 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,245 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,245 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,246 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,247 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,248 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,248 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 
len: 158 to MEMORY +2017-02-18 08:57:20,249 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,249 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,250 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,250 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,252 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,236 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,323 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,234 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,324 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,326 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,326 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,327 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,327 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,328 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,328 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,329 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,330 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,331 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,331 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,333 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,333 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,335 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,301 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,335 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,300 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,298 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,252 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to 
shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,338 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,339 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000007_0 +2017-02-18 08:57:20,340 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,342 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,342 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,343 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,343 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,345 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,345 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,346 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,346 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,349 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,360 INFO 
org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:57:20,377 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:57:20,377 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4e2d66d5 +2017-02-18 08:57:20,357 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,357 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,351 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,380 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,350 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,381 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,383 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,383 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,384 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,385 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,385 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle 
output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,386 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,387 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,388 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,391 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,389 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,409 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,411 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,411 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,412 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,414 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,409 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,401 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:57:20,400 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,415 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,416 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,416 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,417 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,418 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,419 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,419 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,399 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,420 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,398 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,421 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,395 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,422 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to 
shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,423 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,427 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,428 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,433 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,457 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,468 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,470 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,462 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:57:20,461 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,487 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,489 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,489 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to 
MEMORY +2017-02-18 08:57:20,490 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,490 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,461 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,492 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,494 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,494 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,495 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,495 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,497 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,497 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,498 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,498 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,499 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,500 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,501 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,501 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,460 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,503 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,459 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,504 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,506 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,506 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,507 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,507 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,509 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,509 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 
08:57:20,510 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,510 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,512 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,512 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,458 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,512 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,514 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,514 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,515 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,515 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,517 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,517 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,518 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,519 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: 
localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,525 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,477 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,553 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 08:57:20,471 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,554 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,556 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,556 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,557 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,558 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,559 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,559 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,560 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,560 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map 
attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,562 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,562 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,563 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000008_0 +2017-02-18 08:57:20,565 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,565 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 08:57:20,567 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,542 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,581 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,534 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,582 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,519 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,580 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:57:20,584 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:57:20,585 INFO 
org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@239de86 +2017-02-18 08:57:20,567 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,586 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 08:57:20,592 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,590 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,608 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,609 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,610 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,611 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,611 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,590 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,612 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,589 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,614 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,587 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,614 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,608 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:57:20,616 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 08:57:20,606 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,627 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,633 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,633 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,641 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,642 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,644 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,669 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,670 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,668 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,654 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:57:20,650 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,697 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,699 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,699 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,701 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,701 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,649 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,702 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,704 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,704 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle 
output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,705 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,705 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,707 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,707 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,648 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,708 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,647 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,709 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,647 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,710 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 08:57:20,645 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,710 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,645 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new 
decompressor [.gz] +2017-02-18 08:57:20,711 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,712 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 08:57:20,714 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000009_0 +2017-02-18 08:57:20,718 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,723 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:57:20,724 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:57:20,725 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@236e4a57 +2017-02-18 08:57:20,735 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:57:20,736 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,737 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,746 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,746 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,748 
INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,777 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,779 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,776 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:57:20,767 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,813 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,815 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,815 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,767 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,816 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,766 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,817 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,765 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,818 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to 
shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 08:57:20,764 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,818 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,763 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,819 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,763 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,820 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 08:57:20,762 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,821 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,786 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,822 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 22 len: 26 to MEMORY +2017-02-18 08:57:20,824 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,830 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,831 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: 
localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,833 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,834 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,835 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,835 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,837 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,866 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,866 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,848 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete. 
+2017-02-18 08:57:20,847 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,846 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,875 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,845 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,876 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 08:57:20,845 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,876 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,843 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,877 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,842 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,878 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 08:57:20,841 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,878 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,879 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,880 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 22 len: 26 to MEMORY +2017-02-18 08:57:20,893 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1323619014_0001 +java.lang.Exception: org.apache.hadoop.mapreduce.task.reduce.Shuffle$ShuffleError: error in shuffle in localfetcher#1 + at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489) + at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:556) +Caused by: org.apache.hadoop.mapreduce.task.reduce.Shuffle$ShuffleError: error in shuffle in localfetcher#1 + at org.apache.hadoop.mapreduce.task.reduce.Shuffle.run(Shuffle.java:134) + at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:376) + at org.apache.hadoop.mapred.LocalJobRunner$Job$ReduceTaskRunnable.run(LocalJobRunner.java:346) + at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) + at java.util.concurrent.FutureTask.run(FutureTask.java:262) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) + at java.lang.Thread.run(Thread.java:745) +Caused by: java.io.IOException: not a gzip file + at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.processBasicHeader(BuiltInGzipDecompressor.java:496) + at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.executeHeaderState(BuiltInGzipDecompressor.java:257) + at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.decompress(BuiltInGzipDecompressor.java:186) + at org.apache.hadoop.io.compress.DecompressorStream.decompress(DecompressorStream.java:91) + at 
org.apache.hadoop.io.compress.DecompressorStream.read(DecompressorStream.java:85) + at org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:199) + at org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput.shuffle(InMemoryMapOutput.java:97) + at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.copyMapOutput(LocalFetcher.java:157) + at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.doCopy(LocalFetcher.java:102) + at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.run(LocalFetcher.java:85) +2017-02-18 08:57:21,357 INFO org.apache.hadoop.mapreduce.Job: Job job_local1323619014_0001 failed with state FAILED due to: NA +2017-02-18 08:57:21,653 INFO org.apache.hadoop.mapreduce.Job: Counters: 18 + File System Counters + FILE: Number of bytes read=63678066 + FILE: Number of bytes written=838861 + FILE: Number of read operations=0 + FILE: Number of large read operations=0 + FILE: Number of write operations=0 + Map-Reduce Framework + Map input records=507535 + Map output records=4678719 + Map output bytes=43638689 + Map output materialized bytes=1471 + Input split bytes=351 + Combine input records=4678719 + Combine output records=131 + Spilled Records=131 + Failed Shuffles=0 + Merged Map outputs=0 + GC time elapsed (ms)=740 + Total committed heap usage (bytes)=576008192 + File Input Format Counters + Bytes Read=26057874 +2017-02-18 09:08:49,683 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +2017-02-18 09:08:50,130 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress +2017-02-18 09:08:50,137 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec +2017-02-18 09:08:51,720 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. 
Instead, use dfs.metrics.session-id +2017-02-18 09:08:51,730 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId= +2017-02-18 09:08:53,605 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String). +2017-02-18 09:08:53,644 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3 +2017-02-18 09:08:54,021 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3 +2017-02-18 09:08:55,329 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local996311227_0001 +2017-02-18 09:08:57,090 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ +2017-02-18 09:08:57,092 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local996311227_0001 +2017-02-18 09:08:57,103 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null +2017-02-18 09:08:57,143 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:08:57,159 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter +2017-02-18 09:08:57,575 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks +2017-02-18 09:08:57,576 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_m_000000_0 +2017-02-18 09:08:57,771 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:08:57,879 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:08:57,893 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935 +2017-02-18 09:08:58,340 INFO org.apache.hadoop.mapreduce.Job: Job job_local996311227_0001 running in uber mode : false 
+2017-02-18 09:08:58,347 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% +2017-02-18 09:08:58,596 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 09:08:58,596 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 09:08:58,597 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 09:08:58,610 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 09:08:58,611 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 09:08:58,647 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 09:08:58,679 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 09:09:03,869 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:09:04,390 INFO org.apache.hadoop.mapreduce.Job: map 3% reduce 0% +2017-02-18 09:09:06,885 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:09:07,419 INFO org.apache.hadoop.mapreduce.Job: map 9% reduce 0% +2017-02-18 09:09:09,890 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:09:10,424 INFO org.apache.hadoop.mapreduce.Job: map 15% reduce 0% +2017-02-18 09:09:12,894 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:09:13,244 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:09:13,250 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:09:13,251 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:09:13,252 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600 +2017-02-18 09:09:13,252 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600 +2017-02-18 09:09:13,444 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0% 
+2017-02-18 09:09:15,897 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:18,901 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:21,904 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:24,905 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:27,909 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:30,913 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:31,712 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:31,734 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:33,465 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:33,473 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:33,917 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:34,179 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:34,188 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:35,151 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:35,178 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:35,531 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:35,550 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:35,999 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:36,009 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:36,386 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:36,392 WARN org.apache.hadoop.mapred.IFile: Could not obtain 
compressor from CodecPool +2017-02-18 09:09:36,923 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:36,927 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:36,939 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:37,310 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:37,323 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:38,170 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:38,184 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:38,475 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 09:09:38,501 INFO org.apache.hadoop.mapred.Task: Task:attempt_local996311227_0001_m_000000_0 is done. And is in the process of committing +2017-02-18 09:09:38,509 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 09:09:38,519 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local996311227_0001_m_000000_0' done. 
+2017-02-18 09:09:38,521 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local996311227_0001_m_000000_0 +2017-02-18 09:09:38,521 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_m_000001_0 +2017-02-18 09:09:38,528 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:09:38,529 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:09:38,530 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889 +2017-02-18 09:09:38,533 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 09:09:38,948 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 09:09:38,955 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 09:09:38,956 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 09:09:38,956 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 09:09:38,957 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 09:09:38,963 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 09:09:38,973 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 09:09:43,485 INFO org.apache.hadoop.mapred.LocalJobRunner: +2017-02-18 09:09:43,506 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:09:43,507 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:09:43,507 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 +2017-02-18 09:09:43,507 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 +2017-02-18 09:09:43,562 INFO 
org.apache.hadoop.mapreduce.Job: map 33% reduce 0% +2017-02-18 09:09:44,574 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:45,580 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0% +2017-02-18 09:09:47,578 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:49,441 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:49,455 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:49,619 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:49,639 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:49,937 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:49,941 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:50,166 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:50,188 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:50,368 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:50,368 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:50,504 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:50,519 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:50,582 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:50,676 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:50,676 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:50,934 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:50,936 WARN org.apache.hadoop.mapred.IFile: Could 
not obtain compressor from CodecPool +2017-02-18 09:09:51,060 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:51,065 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:51,325 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:51,334 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:51,476 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 09:09:51,490 INFO org.apache.hadoop.mapred.Task: Task:attempt_local996311227_0001_m_000001_0 is done. And is in the process of committing +2017-02-18 09:09:51,503 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 09:09:51,515 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local996311227_0001_m_000001_0' done. +2017-02-18 09:09:51,516 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local996311227_0001_m_000001_0 +2017-02-18 09:09:51,516 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_m_000002_0 +2017-02-18 09:09:51,529 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:09:51,530 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:09:51,543 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050 +2017-02-18 09:09:51,816 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 09:09:51,966 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 09:09:51,975 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 09:09:51,976 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 09:09:51,976 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 09:09:51,977 INFO 
org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 09:09:51,982 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 09:09:51,984 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 09:09:54,908 INFO org.apache.hadoop.mapred.LocalJobRunner: +2017-02-18 09:09:54,916 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:09:54,917 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:09:54,917 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600 +2017-02-18 09:09:54,918 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600 +2017-02-18 09:09:55,827 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0% +2017-02-18 09:09:57,551 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:57,840 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0% +2017-02-18 09:09:59,089 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:59,121 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:59,208 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:59,253 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:59,388 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:59,416 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:59,593 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:59,606 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:59,714 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new 
compressor [.gz] +2017-02-18 09:09:59,730 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:59,861 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:59,865 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:59,988 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:59,989 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:10:00,128 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:10:00,146 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:10:00,228 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:10:00,241 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:10:00,468 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:10:00,470 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:10:00,518 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 09:10:00,555 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:10:00,584 INFO org.apache.hadoop.mapred.Task: Task:attempt_local996311227_0001_m_000002_0 is done. And is in the process of committing +2017-02-18 09:10:00,586 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 09:10:00,586 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local996311227_0001_m_000002_0' done. +2017-02-18 09:10:00,587 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local996311227_0001_m_000002_0 +2017-02-18 09:10:00,587 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. 
+2017-02-18 09:10:00,680 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks +2017-02-18 09:10:00,681 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000000_0 +2017-02-18 09:10:00,727 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:10:00,728 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:10:00,749 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@10c8fecc +2017-02-18 09:10:00,843 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 09:10:00,866 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:10:00,891 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:10:01,153 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,178 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,253 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000001_0 +2017-02-18 09:10:01,258 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:10:01,259 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:10:01,260 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@553beecf +2017-02-18 09:10:01,262 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,262 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,264 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,272 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,272 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:10:01,274 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,284 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,286 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,295 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,296 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,304 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,306 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,302 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,290 INFO 
org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:10:01,326 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,327 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000002_0 +2017-02-18 09:10:01,336 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:10:01,337 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:10:01,337 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@605afad3 +2017-02-18 09:10:01,343 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:10:01,344 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,345 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,343 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,348 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,349 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,350 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new 
decompressor [.gz] +2017-02-18 09:10:01,355 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,356 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,351 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,365 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,366 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,366 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,367 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,368 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,381 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,383 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,378 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,376 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:10:01,404 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 
len: 35 to MEMORY +2017-02-18 09:10:01,405 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,406 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,385 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,407 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,408 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000003_0 +2017-02-18 09:10:01,421 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:10:01,423 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,423 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,424 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,424 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,426 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,426 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,427 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,427 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to 
shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,428 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,428 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,430 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,430 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,431 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,433 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,433 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,434 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,434 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,436 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,436 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,437 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,437 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: 
localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,438 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,438 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,440 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,440 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,446 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:10:01,447 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@673f2af1 +2017-02-18 09:10:01,455 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,456 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,457 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,458 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:10:01,464 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,466 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,472 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,474 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,509 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,510 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,510 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,509 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,508 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,481 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:10:01,513 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,514 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,514 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,516 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,514 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,517 INFO 
org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000004_0 +2017-02-18 09:10:01,521 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,538 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,514 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,538 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,540 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,540 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,543 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,543 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,544 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,536 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:10:01,558 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:10:01,558 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3a668605 +2017-02-18 09:10:01,559 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: 
localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,549 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,543 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,562 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,562 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,563 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,564 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,564 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,565 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,575 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:10:01,580 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,568 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,580 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map 
attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,566 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,580 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,582 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,582 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,584 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,584 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,585 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,585 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,586 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,586 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,566 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,588 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,588 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle 
output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,589 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,589 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,591 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,591 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,592 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,593 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,594 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,615 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,617 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,595 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,627 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,629 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,629 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY 
+2017-02-18 09:10:01,630 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,630 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,631 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,627 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:10:01,637 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,638 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,640 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,659 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,653 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,661 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,663 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,650 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,678 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY 
+2017-02-18 09:10:01,679 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,679 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,680 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,681 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,648 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,681 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,642 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,682 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,682 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,683 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,683 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,690 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,697 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,696 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting 
task: attempt_local996311227_0001_r_000005_0 +2017-02-18 09:10:01,696 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,708 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,710 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,710 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,695 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,727 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,729 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,729 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,730 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,730 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,693 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,731 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,733 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,733 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,734 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,734 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,735 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,735 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,737 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,737 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,738 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,738 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,739 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,739 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,740 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,741 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 
09:10:01,727 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,727 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,727 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:10:01,744 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,745 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,791 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,796 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,797 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,795 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,798 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,799 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,794 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,799 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,800 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,801 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,793 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:10:01,802 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6349766d +2017-02-18 09:10:01,792 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,803 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,804 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,805 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,806 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,812 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:10:01,819 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,820 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,808 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,832 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output 
of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,833 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,807 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,841 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,807 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,842 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,807 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,843 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,844 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,844 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,846 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,846 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,852 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,854 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] 
+2017-02-18 09:10:01,841 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,868 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,869 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,870 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,867 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:10:01,862 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,854 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,882 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,882 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,883 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,883 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,885 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,885 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY 
+2017-02-18 09:10:01,886 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,886 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,854 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,878 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,870 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,907 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:01,909 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,909 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:01,910 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,910 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:01,911 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000006_0 +2017-02-18 09:10:01,912 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,913 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,923 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,923 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,925 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,925 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,926 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,926 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,927 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,927 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,928 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,929 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,930 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,930 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 
09:10:01,936 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,936 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,937 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,937 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,938 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,939 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,940 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,940 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,942 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,942 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,943 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,943 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,944 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,945 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: 
localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,946 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,946 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,949 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,949 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,956 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:10:01,956 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:10:01,957 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4260af0b +2017-02-18 09:10:01,958 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,958 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:01,968 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:10:01,973 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,983 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,984 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,979 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,985 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,976 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,986 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:01,975 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,986 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,974 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,986 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,992 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,995 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,997 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,995 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,997 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY 
+2017-02-18 09:10:01,993 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,999 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,000 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,000 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,002 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,997 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,024 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,026 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,026 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,027 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,016 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,015 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,028 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,029 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,029 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,030 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,030 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,015 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,015 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,011 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,032 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,003 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:10:02,027 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,033 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,042 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,047 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,074 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,058 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,057 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,075 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,056 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,076 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,052 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,076 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,050 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,076 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,049 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,076 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,049 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: 
attempt_local996311227_0001_r_000007_0 +2017-02-18 09:10:02,077 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,089 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,095 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,096 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,095 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,102 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,108 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,094 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,115 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,117 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,117 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,118 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,118 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,119 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,119 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,120 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,120 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,092 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,123 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,125 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,125 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,126 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,126 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,127 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,128 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,114 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 
09:10:02,101 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,173 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,174 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,174 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,099 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,175 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,098 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,176 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,177 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,177 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,178 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,178 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,180 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,180 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: 
localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,156 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:10:02,181 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:10:02,182 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@74eef9db +2017-02-18 09:10:02,183 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,184 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,192 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:10:02,193 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,194 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,189 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,204 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,205 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,205 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,206 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,206 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,208 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,208 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,209 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,209 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,210 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,210 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,188 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,211 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,186 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,212 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,185 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,212 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to 
shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,212 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,203 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,213 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,199 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,216 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,226 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:10:02,256 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,265 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY +2017-02-18 09:10:02,258 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,266 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,267 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,264 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,268 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to 
shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,269 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,263 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,269 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,262 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,271 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,273 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,273 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,274 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,274 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,260 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,275 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,259 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,275 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to 
MEMORY +2017-02-18 09:10:02,276 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000008_0 +2017-02-18 09:10:02,282 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,283 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,286 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,286 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,294 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:10:02,304 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:10:02,305 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@eb9012c +2017-02-18 09:10:02,307 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,316 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,318 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,318 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,337 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,316 INFO org.apache.hadoop.io.compress.CodecPool: 
Got brand-new decompressor [.gz] +2017-02-18 09:10:02,362 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,364 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,364 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,365 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,365 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,315 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,367 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,368 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,368 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,370 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,370 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,313 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,371 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map 
attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,372 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,373 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,374 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,374 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,375 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,376 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,377 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,377 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,312 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,379 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,380 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,380 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,310 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 
09:10:02,381 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY +2017-02-18 09:10:02,310 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,382 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,309 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,383 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,334 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:10:02,386 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,386 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,387 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,389 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,389 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY +2017-02-18 09:10:02,388 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,391 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map 
attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,392 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,392 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,394 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,407 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,409 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,407 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,409 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,410 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,403 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,430 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,395 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,434 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,434 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 
09:10:02,395 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,435 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY +2017-02-18 09:10:02,436 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,436 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY +2017-02-18 09:10:02,438 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,438 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY +2017-02-18 09:10:02,395 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,395 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,440 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,442 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,443 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,444 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,430 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] 
+2017-02-18 09:10:02,425 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,414 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:10:02,451 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,452 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,453 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,453 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,454 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,454 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,456 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,456 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,457 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,457 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,459 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] 
+2017-02-18 09:10:02,459 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,460 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,460 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,537 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,537 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,539 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,539 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,539 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,539 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,540 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,540 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,540 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,540 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 
to MEMORY +2017-02-18 09:10:02,540 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,540 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY +2017-02-18 09:10:02,541 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 49 len: 53 to MEMORY +2017-02-18 09:10:02,541 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,541 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,542 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,542 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,544 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,568 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,570 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,559 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,588 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,590 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,590 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,554 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,591 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 49 len: 53 to MEMORY +2017-02-18 09:10:02,593 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,593 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 49 len: 53 to MEMORY +2017-02-18 09:10:02,594 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,553 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,595 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,596 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,596 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,597 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,598 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,599 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,599 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to 
shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,600 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,601 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,553 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,601 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,603 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,603 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,604 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,604 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,606 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,606 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,552 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,607 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,549 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor 
[.gz] +2017-02-18 09:10:02,607 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,548 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000009_0 +2017-02-18 09:10:02,548 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,609 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY +2017-02-18 09:10:02,545 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,610 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,591 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,594 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 49 len: 53 to MEMORY +2017-02-18 09:10:02,642 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:10:02,643 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:10:02,643 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@45b028db +2017-02-18 09:10:02,657 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:10:02,662 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,667 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,668 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,669 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,669 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,667 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,671 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,673 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,674 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,675 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,675 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,677 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,677 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,678 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] 
+2017-02-18 09:10:02,678 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,666 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,680 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY +2017-02-18 09:10:02,665 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,680 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,664 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,680 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,663 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,681 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,681 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 49 len: 53 to MEMORY +2017-02-18 09:10:02,691 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,692 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,716 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new 
decompressor [.gz] +2017-02-18 09:10:02,724 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,725 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,723 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,726 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY +2017-02-18 09:10:02,728 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,722 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,728 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,730 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,721 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,730 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,733 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,719 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,751 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,753 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,753 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,755 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,755 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,756 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,756 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,758 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,758 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,759 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,759 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,718 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,761 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 49 len: 53 to MEMORY +2017-02-18 09:10:02,717 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,763 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 
09:10:02,765 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,765 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,766 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,766 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,768 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,768 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,769 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,769 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,771 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,744 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,744 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY +2017-02-18 09:10:02,743 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,735 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] 
+2017-02-18 09:10:02,726 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,833 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 09:10:02,841 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:10:02,842 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,843 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,843 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,844 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,845 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 49 len: 53 to MEMORY +2017-02-18 09:10:02,848 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,856 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY +2017-02-18 09:10:02,857 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete. 
+2017-02-18 09:10:02,849 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,871 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 49 len: 53 to MEMORY +2017-02-18 09:10:02,855 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,872 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,854 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,872 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,853 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,873 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,851 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,873 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,851 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,874 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY +2017-02-18 09:10:02,850 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,874 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: 
localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,862 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,875 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,876 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local996311227_0001 +java.lang.Exception: org.apache.hadoop.mapreduce.task.reduce.Shuffle$ShuffleError: error in shuffle in localfetcher#1 + at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489) + at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:556) +Caused by: org.apache.hadoop.mapreduce.task.reduce.Shuffle$ShuffleError: error in shuffle in localfetcher#1 + at org.apache.hadoop.mapreduce.task.reduce.Shuffle.run(Shuffle.java:134) + at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:376) + at org.apache.hadoop.mapred.LocalJobRunner$Job$ReduceTaskRunnable.run(LocalJobRunner.java:346) + at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) + at java.util.concurrent.FutureTask.run(FutureTask.java:262) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) + at java.lang.Thread.run(Thread.java:745) +Caused by: java.io.IOException: not a gzip file + at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.processBasicHeader(BuiltInGzipDecompressor.java:496) + at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.executeHeaderState(BuiltInGzipDecompressor.java:257) + at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.decompress(BuiltInGzipDecompressor.java:186) + at org.apache.hadoop.io.compress.DecompressorStream.decompress(DecompressorStream.java:91) + at 
org.apache.hadoop.io.compress.DecompressorStream.read(DecompressorStream.java:85) + at org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:199) + at org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput.shuffle(InMemoryMapOutput.java:97) + at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.copyMapOutput(LocalFetcher.java:157) + at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.doCopy(LocalFetcher.java:102) + at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.run(LocalFetcher.java:85) +2017-02-18 09:10:03,867 INFO org.apache.hadoop.mapreduce.Job: Job job_local996311227_0001 failed with state FAILED due to: NA +2017-02-18 09:10:04,136 INFO org.apache.hadoop.mapreduce.Job: Counters: 18 + File System Counters + FILE: Number of bytes read=63678066 + FILE: Number of bytes written=834451 + FILE: Number of read operations=0 + FILE: Number of large read operations=0 + FILE: Number of write operations=0 + Map-Reduce Framework + Map input records=507535 + Map output records=4678719 + Map output bytes=43638689 + Map output materialized bytes=1471 + Input split bytes=351 + Combine input records=4678719 + Combine output records=131 + Spilled Records=131 + Failed Shuffles=0 + Merged Map outputs=0 + GC time elapsed (ms)=847 + Total committed heap usage (bytes)=576008192 + File Input Format Counters + Bytes Read=26057874 +2017-02-18 09:26:49,408 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +2017-02-18 09:26:49,749 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress +2017-02-18 09:26:49,775 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec +2017-02-18 09:26:51,050 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. 
Instead, use dfs.metrics.session-id +2017-02-18 09:26:51,065 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId= +2017-02-18 09:26:52,559 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String). +2017-02-18 09:26:52,648 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3 +2017-02-18 09:26:52,917 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3 +2017-02-18 09:26:53,914 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local751599384_0001 +2017-02-18 09:26:55,334 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ +2017-02-18 09:26:55,335 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local751599384_0001 +2017-02-18 09:26:55,353 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null +2017-02-18 09:26:55,413 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:26:55,421 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter +2017-02-18 09:26:55,831 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks +2017-02-18 09:26:55,832 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_m_000000_0 +2017-02-18 09:26:56,054 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:26:56,169 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:26:56,178 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935 +2017-02-18 09:26:56,622 INFO org.apache.hadoop.mapreduce.Job: Job job_local751599384_0001 running in uber mode : false 
+2017-02-18 09:26:56,624 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% +2017-02-18 09:26:56,926 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 09:26:56,926 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 09:26:56,926 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 09:26:56,926 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 09:26:56,926 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 09:26:56,956 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 09:26:57,006 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 09:27:02,132 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:27:02,662 INFO org.apache.hadoop.mapreduce.Job: map 6% reduce 0% +2017-02-18 09:27:05,153 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:27:05,670 INFO org.apache.hadoop.mapreduce.Job: map 14% reduce 0% +2017-02-18 09:27:08,158 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:27:08,237 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:27:08,244 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:27:08,245 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:27:08,245 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600 +2017-02-18 09:27:08,246 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600 +2017-02-18 09:27:08,689 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0% +2017-02-18 09:27:11,159 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:27:14,169 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort 
+2017-02-18 09:27:17,178 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:27:20,180 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:27:22,469 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:22,475 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:23,187 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:27:24,128 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:24,134 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:24,753 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:24,755 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:25,552 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:25,553 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:25,883 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:25,892 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:26,203 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:27:26,249 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:26,269 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:26,579 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:26,583 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:26,992 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:27,005 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 
09:27:27,304 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:27,308 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:27,911 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:27,916 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:28,128 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 09:27:28,155 INFO org.apache.hadoop.mapred.Task: Task:attempt_local751599384_0001_m_000000_0 is done. And is in the process of committing +2017-02-18 09:27:28,161 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 09:27:28,164 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local751599384_0001_m_000000_0' done. +2017-02-18 09:27:28,166 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local751599384_0001_m_000000_0 +2017-02-18 09:27:28,167 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_m_000001_0 +2017-02-18 09:27:28,174 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:27:28,175 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:27:28,177 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889 +2017-02-18 09:27:28,463 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 09:27:28,474 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 09:27:28,475 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 09:27:28,475 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 09:27:28,476 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 09:27:28,482 INFO org.apache.hadoop.mapred.MapTask: 
Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 09:27:28,494 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 09:27:28,789 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 09:27:31,526 INFO org.apache.hadoop.mapred.LocalJobRunner: +2017-02-18 09:27:31,534 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:27:31,535 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:27:31,535 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 +2017-02-18 09:27:31,536 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 +2017-02-18 09:27:31,805 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0% +2017-02-18 09:27:34,203 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:27:34,817 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0% +2017-02-18 09:27:35,818 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:35,836 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:35,941 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:35,952 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:36,167 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:36,176 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:36,302 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:36,330 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:36,439 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:36,444 WARN 
org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:36,566 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:36,573 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:36,670 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:36,693 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:36,829 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:36,837 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:36,925 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:36,932 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:37,125 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:37,127 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:37,198 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 09:27:37,205 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:27:37,217 INFO org.apache.hadoop.mapred.Task: Task:attempt_local751599384_0001_m_000001_0 is done. And is in the process of committing +2017-02-18 09:27:37,222 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 09:27:37,224 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local751599384_0001_m_000001_0' done. 
+2017-02-18 09:27:37,225 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local751599384_0001_m_000001_0 +2017-02-18 09:27:37,226 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_m_000002_0 +2017-02-18 09:27:37,233 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:27:37,238 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:27:37,250 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050 +2017-02-18 09:27:37,543 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 09:27:37,547 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 09:27:37,548 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 09:27:37,548 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 09:27:37,548 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 09:27:37,554 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 09:27:37,556 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 09:27:37,828 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 09:27:39,830 INFO org.apache.hadoop.mapred.LocalJobRunner: +2017-02-18 09:27:39,852 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:27:39,853 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:27:39,854 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600 +2017-02-18 09:27:39,854 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600 +2017-02-18 09:27:40,851 INFO 
org.apache.hadoop.mapreduce.Job: map 67% reduce 0% +2017-02-18 09:27:42,899 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:42,902 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:43,006 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:43,018 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:43,164 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:43,164 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:43,270 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:27:43,357 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:43,372 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:43,467 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:43,482 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:43,607 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:43,635 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:43,736 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:43,746 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:43,863 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0% +2017-02-18 09:27:43,872 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:43,896 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:43,981 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:44,003 
WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:44,206 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:27:44,209 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:27:44,256 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 09:27:44,279 INFO org.apache.hadoop.mapred.Task: Task:attempt_local751599384_0001_m_000002_0 is done. And is in the process of committing +2017-02-18 09:27:44,285 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 09:27:44,288 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local751599384_0001_m_000002_0' done. +2017-02-18 09:27:44,289 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local751599384_0001_m_000002_0 +2017-02-18 09:27:44,290 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. +2017-02-18 09:27:44,365 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks +2017-02-18 09:27:44,365 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000000_0 +2017-02-18 09:27:44,412 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:27:44,414 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:27:44,442 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@53133050 +2017-02-18 09:27:44,518 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:27:44,568 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:27:44,731 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,763 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:44,802 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000001_0 +2017-02-18 09:27:44,808 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:27:44,809 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:27:44,809 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2ee007c9 +2017-02-18 09:27:44,813 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:27:44,814 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,815 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:44,816 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,816 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:44,817 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,820 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:44,827 INFO org.apache.hadoop.io.compress.CodecPool: 
Got brand-new decompressor [.gz] +2017-02-18 09:27:44,837 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:44,841 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,837 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:27:44,853 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,858 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:44,860 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,866 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 09:27:44,858 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:44,872 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000002_0 +2017-02-18 09:27:44,874 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,875 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:44,877 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,884 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:44,887 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,887 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:44,888 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,888 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:44,890 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,890 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:44,891 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,891 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:44,884 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:27:44,893 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:27:44,893 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2912ee3c +2017-02-18 09:27:44,896 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:44,897 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,903 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, 
maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:27:44,904 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:44,905 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,911 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:44,919 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,927 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:44,929 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,927 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:27:44,924 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,937 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:44,939 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,939 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:44,940 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,940 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: 
localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:44,941 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,941 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:44,942 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:44,944 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,933 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,960 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:44,961 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000003_0 +2017-02-18 09:27:44,969 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:27:44,969 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:27:44,969 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@584ca76 +2017-02-18 09:27:44,971 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,972 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:44,973 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,977 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:44,995 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,995 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:44,996 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,996 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:44,997 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,998 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:44,999 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:44,999 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,000 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,000 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,001 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,002 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY 
+2017-02-18 09:27:44,977 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:27:44,977 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,004 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,005 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,005 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,006 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,007 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,008 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,008 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,009 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,009 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,010 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,011 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map 
attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,012 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,055 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,056 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,055 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,033 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:27:45,013 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,058 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,058 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,059 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,060 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000004_0 +2017-02-18 09:27:45,061 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,070 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,068 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output 
Committer Algorithm version is 1 +2017-02-18 09:27:45,074 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:27:45,074 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7490b4d0 +2017-02-18 09:27:45,070 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,074 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,075 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,070 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,076 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,077 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,080 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,080 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,080 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,080 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,081 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to 
MEMORY +2017-02-18 09:27:45,078 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,086 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,089 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,089 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,090 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,100 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,100 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,101 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,091 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:27:45,108 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,094 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,093 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,109 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,110 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,111 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,111 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,112 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,114 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,115 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,114 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,131 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,134 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,124 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,140 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,140 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,119 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,141 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about 
to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,145 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,146 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,135 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,147 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,148 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,158 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,174 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,157 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,174 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,176 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,156 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,154 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:27:45,150 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about 
to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,178 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,178 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,179 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,179 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,180 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,181 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,182 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,174 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,200 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,202 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,202 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,202 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000005_0 +2017-02-18 09:27:45,196 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map 
attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,191 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,190 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,203 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,208 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,211 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,222 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,224 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,224 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,225 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,225 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,226 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,226 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,228 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 
09:27:45,228 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,229 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,229 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,210 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,209 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,231 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,232 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,233 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,234 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,234 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,235 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,235 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,236 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,243 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: 
localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,237 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,251 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,251 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,237 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:27:45,252 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:27:45,253 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3e35379c +2017-02-18 09:27:45,255 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,255 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,257 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,257 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,237 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,265 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, 
ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:27:45,278 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,280 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,280 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,281 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,281 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,282 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,282 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,283 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,284 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,285 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,285 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,286 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,286 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of 
map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,287 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,287 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,289 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,289 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,290 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,324 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,305 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:27:45,300 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,326 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,328 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,328 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,329 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,329 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 
09:27:45,330 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,330 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,331 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,331 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,299 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,332 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,334 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,334 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,335 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,335 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,337 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,337 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,291 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,338 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: 
localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,340 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,340 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,341 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,341 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,291 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,342 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,343 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,346 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,353 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,357 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,358 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,357 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,370 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,371 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,371 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,356 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000006_0 +2017-02-18 09:27:45,356 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,373 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,355 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,385 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,386 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,386 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,388 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,384 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,389 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,390 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new 
decompressor [.gz] +2017-02-18 09:27:45,391 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,384 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,394 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,395 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,395 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,396 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,396 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,397 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,398 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,383 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,400 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,400 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,401 INFO org.apache.hadoop.io.compress.CodecPool: Got 
brand-new decompressor [.gz] +2017-02-18 09:27:45,401 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,383 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:27:45,369 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,377 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,403 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,403 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,410 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,417 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,422 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,425 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,422 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,430 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,432 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 
09:27:45,432 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,421 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:27:45,432 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@43e0ff38 +2017-02-18 09:27:45,420 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,433 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,435 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,435 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,436 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,436 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,419 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,437 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,418 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,438 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,429 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,439 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,440 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,460 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,465 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:27:45,464 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,464 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,472 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,474 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,474 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,475 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,475 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,477 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,477 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,478 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,478 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,479 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,480 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,481 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,481 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,463 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,482 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,484 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,462 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,485 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,461 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 
09:27:45,485 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,486 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,507 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:27:45,537 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,540 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,540 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,546 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,539 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,548 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,550 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,550 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,539 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,552 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to 
shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,553 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,571 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,571 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,574 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,591 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,593 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,584 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,604 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,578 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,605 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,607 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,607 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 
56 to MEMORY +2017-02-18 09:27:45,608 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,608 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,610 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,610 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,611 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,611 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,577 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,612 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,613 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,613 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,576 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,614 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,616 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,616 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,617 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,617 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,619 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,619 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,620 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,620 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,622 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,575 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,673 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,575 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,675 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,676 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,676 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to 
shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,677 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,678 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,679 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,679 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,680 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,680 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,682 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000007_0 +2017-02-18 09:27:45,684 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,684 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,686 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,666 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,637 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,634 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,692 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,693 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,693 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,695 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,695 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,696 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,696 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,632 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,698 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,700 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,700 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,701 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,701 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 
09:27:45,690 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:27:45,703 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:27:45,703 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@77e7c326 +2017-02-18 09:27:45,686 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,705 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,706 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,707 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,706 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,708 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,709 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,709 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,710 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,712 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 
09:27:45,713 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,720 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,721 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,720 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,738 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,719 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,738 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,740 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,740 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,741 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,741 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,719 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,717 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:27:45,713 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,743 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,743 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,743 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,744 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,744 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,744 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,755 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,775 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,777 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,762 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,784 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,785 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 
09:27:45,786 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,787 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,787 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,788 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,789 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,790 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,790 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,791 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,791 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,761 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,792 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,793 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,794 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to 
MEMORY +2017-02-18 09:27:45,760 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,794 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,758 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,795 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,757 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,795 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,795 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,797 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,784 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,797 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,798 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,875 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,783 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000007_0 Thread started: EventFetcher for fetching Map 
Completion Events +2017-02-18 09:27:45,880 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,861 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,888 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,888 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,890 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,890 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,886 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,917 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:45,918 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000008_0 +2017-02-18 09:27:45,920 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,886 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,920 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,884 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,922 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map 
attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,923 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,923 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,925 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,925 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,883 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,926 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,928 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,928 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,929 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,929 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,931 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,931 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,932 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to 
shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:45,934 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,904 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,891 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,935 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,937 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,937 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,890 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,939 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,939 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,941 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,941 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,942 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,946 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: 
localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:45,935 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,934 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,947 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,947 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,952 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,958 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,958 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,960 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,957 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,960 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,961 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,962 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,964 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,956 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,972 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:45,955 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:27:45,975 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:27:45,975 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7659657a +2017-02-18 09:27:45,955 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,976 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,977 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,977 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,979 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,979 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,980 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,980 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,954 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 
09:27:45,982 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,983 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,983 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,985 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,985 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,952 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,986 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,987 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,988 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,989 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,989 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,991 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,991 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to 
MEMORY +2017-02-18 09:27:45,968 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,992 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,963 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,963 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,993 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,993 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:45,993 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:46,000 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:27:46,023 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,030 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:46,030 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,032 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map 
attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:46,034 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,029 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,034 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:46,036 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,028 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,036 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:46,037 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,038 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:46,039 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,039 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:46,040 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,041 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:46,026 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,042 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to 
shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:46,043 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,043 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:46,045 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,045 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:46,025 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,046 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:46,048 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,048 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:46,049 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,049 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:46,051 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,051 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:46,052 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: 
localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:46,053 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,053 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:46,054 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:46,058 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,059 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:46,060 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,060 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:46,075 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,076 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:46,105 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,105 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:46,115 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,104 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,116 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:46,117 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,117 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:46,103 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,118 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:46,119 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,120 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:46,121 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,121 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:46,102 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,122 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:46,124 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,124 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to 
shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:46,125 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,125 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:46,127 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,127 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:46,128 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,128 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:46,101 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,100 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,100 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,099 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,080 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:27:46,107 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,175 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,176 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 
58 to MEMORY +2017-02-18 09:27:46,176 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:46,179 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,180 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:46,181 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,181 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:46,176 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:46,184 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,184 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:46,186 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,186 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:46,187 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,187 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:46,189 INFO org.apache.hadoop.io.compress.CodecPool: 
Got brand-new decompressor [.gz] +2017-02-18 09:27:46,189 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:46,176 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:46,176 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 09:27:46,194 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,194 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 09:27:46,195 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,196 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 09:27:46,197 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,197 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 09:27:46,199 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,199 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 09:27:46,200 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,200 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 09:27:46,202 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,176 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,254 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:46,256 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,256 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:46,257 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,257 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:46,258 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,258 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:46,260 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,260 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:46,261 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,261 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to 
shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:46,175 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:46,235 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,263 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:46,223 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,263 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:46,203 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,264 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:46,203 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,265 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:46,203 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,265 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:46,202 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY 
+2017-02-18 09:27:46,202 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000009_0 +2017-02-18 09:27:46,267 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 09:27:46,286 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:27:46,287 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:27:46,287 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@18084038 +2017-02-18 09:27:46,291 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:27:46,305 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:27:46,320 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,320 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 09:27:46,322 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,323 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:46,323 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,324 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map 
attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:46,324 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,324 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:46,325 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,325 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:46,326 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,326 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:46,326 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,327 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:46,327 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,327 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:46,328 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,329 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:46,333 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 
09:27:46,333 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 09:27:46,345 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,350 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:46,350 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,352 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:46,353 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,353 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:46,354 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,354 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:46,356 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,356 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:46,357 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,357 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 
to MEMORY +2017-02-18 09:27:46,349 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,358 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:46,360 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,360 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:46,361 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,361 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:46,349 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,362 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:46,363 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,363 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:46,364 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,365 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:46,366 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,366 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:46,367 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,367 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:46,368 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,368 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:46,370 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,348 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,370 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:46,371 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,371 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:46,346 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,413 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:46,373 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,413 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to 
shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:46,351 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,414 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:46,415 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,415 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:46,416 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,416 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:46,419 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,419 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 09:27:46,420 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,420 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 09:27:46,421 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,421 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 09:27:46,508 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: 
localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:46,509 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,509 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:46,510 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,510 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:46,511 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,518 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:46,522 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,523 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:46,524 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,524 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:46,525 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,525 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:46,520 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,526 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 22 len: 26 to MEMORY +2017-02-18 09:27:46,527 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete. +2017-02-18 09:27:46,530 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local751599384_0001 +java.lang.Exception: org.apache.hadoop.mapreduce.task.reduce.Shuffle$ShuffleError: error in shuffle in localfetcher#1 + at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489) + at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:556) +Caused by: org.apache.hadoop.mapreduce.task.reduce.Shuffle$ShuffleError: error in shuffle in localfetcher#1 + at org.apache.hadoop.mapreduce.task.reduce.Shuffle.run(Shuffle.java:134) + at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:376) + at org.apache.hadoop.mapred.LocalJobRunner$Job$ReduceTaskRunnable.run(LocalJobRunner.java:346) + at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) + at java.util.concurrent.FutureTask.run(FutureTask.java:262) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) + at java.lang.Thread.run(Thread.java:745) +Caused by: java.io.IOException: not a gzip file + at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.processBasicHeader(BuiltInGzipDecompressor.java:496) + at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.executeHeaderState(BuiltInGzipDecompressor.java:257) + at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.decompress(BuiltInGzipDecompressor.java:186) + at org.apache.hadoop.io.compress.DecompressorStream.decompress(DecompressorStream.java:91) + at 
org.apache.hadoop.io.compress.DecompressorStream.read(DecompressorStream.java:85) + at org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:199) + at org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput.shuffle(InMemoryMapOutput.java:97) + at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.copyMapOutput(LocalFetcher.java:157) + at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.doCopy(LocalFetcher.java:102) + at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.run(LocalFetcher.java:85) +2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,557 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,558 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,558 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,559 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,559 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 
decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,560 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,560 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:46,912 INFO org.apache.hadoop.mapreduce.Job: Job job_local751599384_0001 failed with state FAILED due to: NA +2017-02-18 09:27:47,155 INFO org.apache.hadoop.mapreduce.Job: Counters: 18 + File System Counters + FILE: Number of bytes read=63678066 + FILE: Number of bytes written=834451 + FILE: Number of read operations=0 + FILE: Number of large read operations=0 + FILE: Number of write operations=0 + Map-Reduce Framework + Map input records=507535 + Map output records=4678719 + Map output bytes=43638689 + Map output materialized bytes=1471 + Input split bytes=351 + Combine input records=4678719 + Combine output records=131 + Spilled Records=131 + Failed Shuffles=0 + Merged Map outputs=0 + GC time elapsed (ms)=663 + Total committed heap usage (bytes)=576008192 + File Input Format Counters + Bytes Read=26057874 +2017-02-18 09:32:52,271 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +2017-02-18 09:32:52,676 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress +2017-02-18 09:32:52,678 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. 
Instead, use mapreduce.map.output.compress.codec +2017-02-18 09:32:53,961 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id +2017-02-18 09:32:53,971 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId= +2017-02-18 09:32:55,424 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String). +2017-02-18 09:32:55,476 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3 +2017-02-18 09:32:55,773 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3 +2017-02-18 09:32:56,770 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local963140535_0001 +2017-02-18 09:32:58,243 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ +2017-02-18 09:32:58,245 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local963140535_0001 +2017-02-18 09:32:58,257 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null +2017-02-18 09:32:58,292 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:32:58,293 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter +2017-02-18 09:32:58,640 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks +2017-02-18 09:32:58,641 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local963140535_0001_m_000000_0 +2017-02-18 09:32:58,823 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:32:58,889 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:32:58,893 INFO org.apache.hadoop.mapred.MapTask: Processing split: 
file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935 +2017-02-18 09:32:59,248 INFO org.apache.hadoop.mapreduce.Job: Job job_local963140535_0001 running in uber mode : false +2017-02-18 09:32:59,269 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% +2017-02-18 09:32:59,522 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 09:32:59,538 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 09:32:59,538 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 09:32:59,538 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 09:32:59,539 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 09:32:59,569 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 09:32:59,582 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 09:33:04,889 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:33:05,319 INFO org.apache.hadoop.mapreduce.Job: map 6% reduce 0% +2017-02-18 09:33:07,900 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:33:08,340 INFO org.apache.hadoop.mapreduce.Job: map 14% reduce 0% +2017-02-18 09:33:10,903 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:33:10,935 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:33:10,937 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:33:10,938 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:33:10,939 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600 +2017-02-18 09:33:10,939 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600 +2017-02-18 09:33:11,347 INFO org.apache.hadoop.mapreduce.Job: map 
22% reduce 0% +2017-02-18 09:33:13,912 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:33:16,917 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:33:19,923 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:33:22,930 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:33:25,935 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:33:27,123 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:33:27,129 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 0 kv 26214396(104857584) kvi 14765620(59062480) +2017-02-18 09:33:27,129 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:33:27,129 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600 +2017-02-18 09:33:27,129 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600 +2017-02-18 09:33:28,944 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:33:31,946 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:33:34,957 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:33:37,968 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:33:40,042 INFO org.apache.hadoop.mapred.MapTask: Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@f6a4c4a +java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support. 
+ at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65) + at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134) + at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150) + at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165) + at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114) + at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97) + at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606) + at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486) + at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723) + at org.apache.hadoop.mapred.MapTask.closeQuietly(MapTask.java:2016) + at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:797) + at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341) + at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270) + at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) + at java.util.concurrent.FutureTask.run(FutureTask.java:262) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) + at java.lang.Thread.run(Thread.java:745) +2017-02-18 09:33:40,081 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local963140535_0001_m_000001_0 +2017-02-18 09:33:40,091 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:33:40,091 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:33:40,096 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889 +2017-02-18 09:33:40,579 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 09:33:40,589 INFO 
org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 09:33:40,592 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 09:33:40,594 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 09:33:40,595 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 09:33:40,602 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 09:33:40,611 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 09:33:40,971 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:33:43,769 INFO org.apache.hadoop.mapred.LocalJobRunner: +2017-02-18 09:33:43,774 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:33:43,774 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:33:43,774 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 +2017-02-18 09:33:43,774 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 +2017-02-18 09:33:43,979 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:33:46,116 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:33:46,528 INFO org.apache.hadoop.mapreduce.Job: map 44% reduce 0% +2017-02-18 09:33:47,986 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:33:47,992 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 0 kv 26214396(104857584) kvi 22120620(88482480) +2017-02-18 09:33:47,993 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:33:47,993 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 +2017-02-18 09:33:47,993 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length 
= 4093773/6553600 +2017-02-18 09:33:49,120 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:33:51,563 INFO org.apache.hadoop.mapred.MapTask: Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@e5c48f1 +java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support. + at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65) + at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134) + at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150) + at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165) + at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114) + at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97) + at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606) + at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486) + at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723) + at org.apache.hadoop.mapred.MapTask.closeQuietly(MapTask.java:2016) + at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:797) + at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341) + at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270) + at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) + at java.util.concurrent.FutureTask.run(FutureTask.java:262) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) + at java.lang.Thread.run(Thread.java:745) +2017-02-18 09:33:51,577 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local963140535_0001_m_000002_0 +2017-02-18 09:33:51,588 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version 
is 1 +2017-02-18 09:33:51,595 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:33:51,599 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050 +2017-02-18 09:33:51,954 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 09:33:51,962 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 09:33:51,962 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 09:33:51,963 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 09:33:51,963 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 09:33:51,973 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 09:33:51,974 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 09:33:52,131 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:33:54,795 INFO org.apache.hadoop.mapred.LocalJobRunner: +2017-02-18 09:33:54,806 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:33:54,806 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:33:54,806 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600 +2017-02-18 09:33:54,806 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600 +2017-02-18 09:33:55,134 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:33:57,609 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:33:57,930 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0% +2017-02-18 09:33:57,940 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:33:57,945 INFO org.apache.hadoop.mapred.MapTask: (RESET) 
equator 0 kv 26214396(104857584) kvi 23042072(92168288) +2017-02-18 09:33:57,946 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:33:57,947 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600 +2017-02-18 09:33:57,947 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600 +2017-02-18 09:34:00,613 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:34:00,657 INFO org.apache.hadoop.mapred.MapTask: Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@2d14b355 +java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support. + at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65) + at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134) + at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150) + at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165) + at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114) + at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97) + at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606) + at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486) + at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723) + at org.apache.hadoop.mapred.MapTask.closeQuietly(MapTask.java:2016) + at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:797) + at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341) + at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270) + at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) + at java.util.concurrent.FutureTask.run(FutureTask.java:262) + at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) + at java.lang.Thread.run(Thread.java:745) +2017-02-18 09:34:00,664 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. +2017-02-18 09:34:00,673 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local963140535_0001 +java.lang.Exception: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support. + at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489) + at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:549) +Caused by: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support. + at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65) + at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134) + at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150) + at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165) + at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114) + at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97) + at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606) + at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486) + at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723) + at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:793) + at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341) + at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270) + at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) + at java.util.concurrent.FutureTask.run(FutureTask.java:262) + at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) + at java.lang.Thread.run(Thread.java:745) +2017-02-18 09:34:00,943 INFO org.apache.hadoop.mapreduce.Job: Job job_local963140535_0001 failed with state FAILED due to: NA +2017-02-18 09:34:01,055 INFO org.apache.hadoop.mapreduce.Job: Counters: 18 + File System Counters + FILE: Number of bytes read=73722745 + FILE: Number of bytes written=829530 + FILE: Number of read operations=0 + FILE: Number of large read operations=0 + FILE: Number of write operations=0 + Map-Reduce Framework + Map input records=507535 + Map output records=4678719 + Map output bytes=43638689 + Map output materialized bytes=0 + Input split bytes=351 + Combine input records=0 + Combine output records=0 + Spilled Records=0 + Failed Shuffles=0 + Merged Map outputs=0 + GC time elapsed (ms)=1944 + Total committed heap usage (bytes)=1413292032 + File Input Format Counters + Bytes Read=26057874 +2017-02-18 09:36:42,891 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +2017-02-18 09:36:43,290 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress +2017-02-18 09:36:43,295 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec +2017-02-18 09:36:44,625 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id +2017-02-18 09:36:44,637 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId= +2017-02-18 09:36:46,271 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String). 
+2017-02-18 09:36:46,307 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3 +2017-02-18 09:36:46,597 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3 +2017-02-18 09:36:47,633 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local2056867727_0001 +2017-02-18 09:36:49,105 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ +2017-02-18 09:36:49,107 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local2056867727_0001 +2017-02-18 09:36:49,118 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null +2017-02-18 09:36:49,153 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:36:49,169 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter +2017-02-18 09:36:49,480 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks +2017-02-18 09:36:49,483 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:36:49,685 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:36:49,784 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:36:49,807 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935 +2017-02-18 09:36:50,167 INFO org.apache.hadoop.mapreduce.Job: Job job_local2056867727_0001 running in uber mode : false +2017-02-18 09:36:50,176 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% +2017-02-18 09:36:50,435 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 09:36:50,458 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 09:36:50,459 INFO org.apache.hadoop.mapred.MapTask: 
soft limit at 83886080 +2017-02-18 09:36:50,460 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 09:36:50,460 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 09:36:50,487 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 09:36:50,528 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 09:36:55,778 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:36:56,207 INFO org.apache.hadoop.mapreduce.Job: map 6% reduce 0% +2017-02-18 09:36:58,787 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:36:59,218 INFO org.apache.hadoop.mapreduce.Job: map 14% reduce 0% +2017-02-18 09:37:01,669 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:37:01,672 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:37:01,672 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:37:01,672 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600 +2017-02-18 09:37:01,672 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600 +2017-02-18 09:37:01,789 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:02,230 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0% +2017-02-18 09:37:04,793 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:07,797 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:10,807 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:13,808 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:16,815 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:17,664 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor 
[.bz2] +2017-02-18 09:37:19,819 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:22,831 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:23,735 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 09:37:23,769 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_m_000000_0 is done. And is in the process of committing +2017-02-18 09:37:23,775 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 09:37:23,776 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_m_000000_0' done. +2017-02-18 09:37:23,776 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:23,776 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:23,787 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:23,789 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:23,793 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889 +2017-02-18 09:37:24,086 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 09:37:24,095 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 09:37:24,095 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 09:37:24,095 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 09:37:24,095 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 09:37:24,101 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 09:37:24,115 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 09:37:24,384 INFO 
org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 09:37:27,137 INFO org.apache.hadoop.mapred.LocalJobRunner: +2017-02-18 09:37:27,148 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:37:27,149 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:37:27,149 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 +2017-02-18 09:37:27,149 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 +2017-02-18 09:37:27,402 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0% +2017-02-18 09:37:29,799 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:30,413 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0% +2017-02-18 09:37:32,802 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:33,153 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 09:37:33,163 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_m_000001_0 is done. And is in the process of committing +2017-02-18 09:37:33,168 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 09:37:33,171 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_m_000001_0' done. 
+2017-02-18 09:37:33,172 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:33,173 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:33,179 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:33,180 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:33,187 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050 +2017-02-18 09:37:33,424 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 09:37:33,475 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 09:37:33,481 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 09:37:33,487 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 09:37:33,489 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 09:37:33,489 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 09:37:33,497 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 09:37:33,499 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 09:37:35,611 INFO org.apache.hadoop.mapred.LocalJobRunner: +2017-02-18 09:37:35,620 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:37:35,621 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:37:35,622 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600 +2017-02-18 09:37:35,622 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600 +2017-02-18 09:37:36,442 
INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0% +2017-02-18 09:37:39,198 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:39,462 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0% +2017-02-18 09:37:40,255 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 09:37:40,263 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_m_000002_0 is done. And is in the process of committing +2017-02-18 09:37:40,269 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 09:37:40,272 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_m_000002_0' done. +2017-02-18 09:37:40,273 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:40,274 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. +2017-02-18 09:37:40,342 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks +2017-02-18 09:37:40,343 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000000_0 +2017-02-18 09:37:40,390 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:40,391 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:40,405 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7018394a +2017-02-18 09:37:40,468 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 09:37:40,490 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:40,534 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 
09:37:40,699 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.bz2] +2017-02-18 09:37:40,703 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 61 len: 103 to MEMORY +2017-02-18 09:37:40,740 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 61 bytes from map-output for attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:40,767 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 61, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->61 +2017-02-18 09:37:40,777 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 30 len: 78 to MEMORY +2017-02-18 09:37:40,785 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 30 bytes from map-output for attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:40,789 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 30, inMemoryMapOutputs.size() -> 2, commitMemory -> 61, usedMemory ->91 +2017-02-18 09:37:40,794 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 31 len: 82 to MEMORY +2017-02-18 09:37:40,800 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:40,814 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 3, commitMemory -> 91, usedMemory ->122 +2017-02-18 09:37:40,817 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:40,818 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:40,819 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:40,851 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:40,865 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 104 bytes +2017-02-18 09:37:40,909 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 122 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:40,921 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 153 bytes from disk +2017-02-18 09:37:40,926 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:40,931 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:40,935 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 110 bytes +2017-02-18 09:37:40,938 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:40,984 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords +2017-02-18 09:37:40,987 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000000_0 is done. And is in the process of committing +2017-02-18 09:37:40,988 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:40,988 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000000_0 is allowed to commit now +2017-02-18 09:37:40,989 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000000 +2017-02-18 09:37:41,004 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:41,008 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000000_0' done. +2017-02-18 09:37:41,009 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000000_0 +2017-02-18 09:37:41,010 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000001_0 +2017-02-18 09:37:41,018 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:41,019 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:41,020 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@21fdb35f +2017-02-18 09:37:41,027 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:41,048 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:37:41,060 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 154 len: 171 to MEMORY +2017-02-18 09:37:41,065 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 154 bytes from map-output for 
attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:41,071 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 154, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->154 +2017-02-18 09:37:41,075 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 69 len: 110 to MEMORY +2017-02-18 09:37:41,084 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 69 bytes from map-output for attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:41,088 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 69, inMemoryMapOutputs.size() -> 2, commitMemory -> 154, usedMemory ->223 +2017-02-18 09:37:41,094 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 39 len: 85 to MEMORY +2017-02-18 09:37:41,100 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 39 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:41,114 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 39, inMemoryMapOutputs.size() -> 3, commitMemory -> 223, usedMemory ->262 +2017-02-18 09:37:41,117 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:41,117 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:41,118 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:41,119 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:41,119 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 253 bytes +2017-02-18 09:37:41,136 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 262 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:41,149 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 238 bytes from disk +2017-02-18 09:37:41,151 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:41,152 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:41,155 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 255 bytes +2017-02-18 09:37:41,159 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:41,201 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000001_0 is done. And is in the process of committing +2017-02-18 09:37:41,204 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:41,204 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000001_0 is allowed to commit now +2017-02-18 09:37:41,208 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000001 +2017-02-18 09:37:41,215 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:41,216 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000001_0' done. 
+2017-02-18 09:37:41,216 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000001_0 +2017-02-18 09:37:41,217 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000002_0 +2017-02-18 09:37:41,237 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:41,238 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:41,238 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@16f5d08e +2017-02-18 09:37:41,242 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:41,259 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:37:41,268 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 112 len: 146 to MEMORY +2017-02-18 09:37:41,277 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 112 bytes from map-output for attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:41,286 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 112, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->112 +2017-02-18 09:37:41,290 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 41 len: 81 to MEMORY +2017-02-18 09:37:41,299 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 41 bytes from map-output for attempt_local2056867727_0001_m_000001_0 
+2017-02-18 09:37:41,303 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 41, inMemoryMapOutputs.size() -> 2, commitMemory -> 112, usedMemory ->153 +2017-02-18 09:37:41,306 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 51 len: 94 to MEMORY +2017-02-18 09:37:41,313 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 51 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:41,318 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 51, inMemoryMapOutputs.size() -> 3, commitMemory -> 153, usedMemory ->204 +2017-02-18 09:37:41,319 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:41,321 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:41,322 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:41,323 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:41,323 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 186 bytes +2017-02-18 09:37:41,370 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 204 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:41,374 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 205 bytes from disk +2017-02-18 09:37:41,377 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:41,378 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:41,380 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total 
size: 194 bytes +2017-02-18 09:37:41,385 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:41,423 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000002_0 is done. And is in the process of committing +2017-02-18 09:37:41,433 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:41,434 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000002_0 is allowed to commit now +2017-02-18 09:37:41,439 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000002 +2017-02-18 09:37:41,450 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:41,454 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000002_0' done. +2017-02-18 09:37:41,455 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000002_0 +2017-02-18 09:37:41,455 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000003_0 +2017-02-18 09:37:41,466 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:41,472 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:41,472 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100% +2017-02-18 09:37:41,476 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7fae5c75 +2017-02-18 09:37:41,484 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:41,502 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: 
attempt_local2056867727_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:37:41,508 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 63 len: 105 to MEMORY +2017-02-18 09:37:41,515 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 63 bytes from map-output for attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:41,520 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 63, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->63 +2017-02-18 09:37:41,528 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 11 len: 57 to MEMORY +2017-02-18 09:37:41,531 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:41,537 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 63, usedMemory ->74 +2017-02-18 09:37:41,559 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 11 len: 59 to MEMORY +2017-02-18 09:37:41,565 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:41,573 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 74, usedMemory ->85 +2017-02-18 09:37:41,575 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. 
Returning +2017-02-18 09:37:41,576 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:41,577 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:41,578 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:41,578 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 69 bytes +2017-02-18 09:37:41,605 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 85 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:41,610 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 128 bytes from disk +2017-02-18 09:37:41,611 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:41,612 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:41,616 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 75 bytes +2017-02-18 09:37:41,622 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:41,670 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000003_0 is done. And is in the process of committing +2017-02-18 09:37:41,672 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:41,673 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000003_0 is allowed to commit now +2017-02-18 09:37:41,678 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000003 +2017-02-18 09:37:41,683 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:41,685 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000003_0' done. +2017-02-18 09:37:41,686 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000003_0 +2017-02-18 09:37:41,686 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000004_0 +2017-02-18 09:37:41,695 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:41,696 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:41,696 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2f5fea9c +2017-02-18 09:37:41,711 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:41,723 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:37:41,737 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 52 len: 101 to MEMORY +2017-02-18 09:37:41,753 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 52 bytes from map-output for 
attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:41,756 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 52, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->52 +2017-02-18 09:37:41,760 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 33 len: 82 to MEMORY +2017-02-18 09:37:41,776 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 33 bytes from map-output for attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:41,781 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 33, inMemoryMapOutputs.size() -> 2, commitMemory -> 52, usedMemory ->85 +2017-02-18 09:37:41,784 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 31 len: 79 to MEMORY +2017-02-18 09:37:41,813 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:41,820 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 3, commitMemory -> 85, usedMemory ->116 +2017-02-18 09:37:41,820 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:41,821 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:41,821 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:41,907 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:41,907 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 100 bytes +2017-02-18 09:37:41,951 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 116 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:41,967 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 147 bytes from disk +2017-02-18 09:37:41,968 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:41,968 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:41,969 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 106 bytes +2017-02-18 09:37:41,976 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:42,014 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000004_0 is done. And is in the process of committing +2017-02-18 09:37:42,017 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:42,029 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000004_0 is allowed to commit now +2017-02-18 09:37:42,031 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000004 +2017-02-18 09:37:42,040 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:42,041 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000004_0' done. 
+2017-02-18 09:37:42,041 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000004_0 +2017-02-18 09:37:42,041 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000005_0 +2017-02-18 09:37:42,059 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:42,060 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:42,068 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@70b91162 +2017-02-18 09:37:42,077 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:42,107 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:37:42,117 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 54 len: 100 to MEMORY +2017-02-18 09:37:42,128 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 54 bytes from map-output for attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:42,143 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 54, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->54 +2017-02-18 09:37:42,146 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 23 len: 69 to MEMORY +2017-02-18 09:37:42,162 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local2056867727_0001_m_000001_0 
+2017-02-18 09:37:42,166 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 2, commitMemory -> 54, usedMemory ->77 +2017-02-18 09:37:42,188 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 23 len: 68 to MEMORY +2017-02-18 09:37:42,194 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:42,195 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 3, commitMemory -> 77, usedMemory ->100 +2017-02-18 09:37:42,203 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:42,204 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:42,204 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:42,206 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:42,206 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 82 bytes +2017-02-18 09:37:42,226 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 100 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:42,245 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 130 bytes from disk +2017-02-18 09:37:42,247 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:42,247 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:42,249 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 
90 bytes +2017-02-18 09:37:42,266 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:42,307 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000005_0 is done. And is in the process of committing +2017-02-18 09:37:42,313 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:42,313 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000005_0 is allowed to commit now +2017-02-18 09:37:42,314 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000005 +2017-02-18 09:37:42,326 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:42,349 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000005_0' done. +2017-02-18 09:37:42,350 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000005_0 +2017-02-18 09:37:42,350 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000006_0 +2017-02-18 09:37:42,358 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:42,358 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:42,359 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3d144d62 +2017-02-18 09:37:42,368 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:42,387 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events 
+2017-02-18 09:37:42,409 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 72 len: 119 to MEMORY +2017-02-18 09:37:42,415 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 72 bytes from map-output for attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:42,418 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 72, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->72 +2017-02-18 09:37:42,422 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 28 len: 75 to MEMORY +2017-02-18 09:37:42,429 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 28 bytes from map-output for attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:42,443 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 28, inMemoryMapOutputs.size() -> 2, commitMemory -> 72, usedMemory ->100 +2017-02-18 09:37:42,445 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 28 len: 74 to MEMORY +2017-02-18 09:37:42,460 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 28 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:42,466 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 28, inMemoryMapOutputs.size() -> 3, commitMemory -> 100, usedMemory ->128 +2017-02-18 09:37:42,467 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:42,468 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:42,468 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:42,483 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60% +2017-02-18 09:37:42,532 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:42,533 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 110 bytes +2017-02-18 09:37:42,582 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 128 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:42,583 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 155 bytes from disk +2017-02-18 09:37:42,583 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:42,583 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:42,593 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 116 bytes +2017-02-18 09:37:42,603 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:42,656 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000006_0 is done. And is in the process of committing +2017-02-18 09:37:42,658 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:42,658 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000006_0 is allowed to commit now +2017-02-18 09:37:42,659 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000006 +2017-02-18 09:37:42,673 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:42,673 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000006_0' done. +2017-02-18 09:37:42,674 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000006_0 +2017-02-18 09:37:42,681 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000007_0 +2017-02-18 09:37:42,691 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:42,692 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:42,693 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@a82b79d +2017-02-18 09:37:42,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:42,726 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:37:42,740 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 88 len: 120 to MEMORY +2017-02-18 09:37:42,750 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 88 bytes from map-output for 
attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:42,757 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 88, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->88 +2017-02-18 09:37:42,769 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 11 len: 55 to MEMORY +2017-02-18 09:37:42,771 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:42,795 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 88, usedMemory ->99 +2017-02-18 09:37:42,797 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:37:42,812 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:42,813 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 99, usedMemory ->101 +2017-02-18 09:37:42,824 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:42,825 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:42,825 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:42,826 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:42,827 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 87 bytes +2017-02-18 09:37:42,858 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 101 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:42,877 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 140 bytes from disk +2017-02-18 09:37:42,877 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:42,878 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:42,879 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 90 bytes +2017-02-18 09:37:42,888 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:42,929 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000007_0 is done. And is in the process of committing +2017-02-18 09:37:42,931 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:42,931 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000007_0 is allowed to commit now +2017-02-18 09:37:42,934 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000007 +2017-02-18 09:37:42,963 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:42,964 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000007_0' done. 
+2017-02-18 09:37:42,964 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000007_0 +2017-02-18 09:37:42,964 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000008_0 +2017-02-18 09:37:42,972 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:42,973 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:42,973 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@132393b4 +2017-02-18 09:37:42,983 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:42,996 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:37:43,008 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 98 len: 134 to MEMORY +2017-02-18 09:37:43,014 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 98 bytes from map-output for attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:43,024 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 98, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->98 +2017-02-18 09:37:43,028 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 49 len: 92 to MEMORY +2017-02-18 09:37:43,043 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 49 bytes from map-output for attempt_local2056867727_0001_m_000001_0 
+2017-02-18 09:37:43,044 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 49, inMemoryMapOutputs.size() -> 2, commitMemory -> 98, usedMemory ->147 +2017-02-18 09:37:43,054 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 49 len: 97 to MEMORY +2017-02-18 09:37:43,067 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 49 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:43,073 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 49, inMemoryMapOutputs.size() -> 3, commitMemory -> 147, usedMemory ->196 +2017-02-18 09:37:43,082 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:43,083 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:43,083 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:43,086 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:43,086 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 184 bytes +2017-02-18 09:37:43,101 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 196 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:43,131 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 196 bytes from disk +2017-02-18 09:37:43,136 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:43,137 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:43,140 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total 
size: 188 bytes +2017-02-18 09:37:43,141 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:43,191 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000008_0 is done. And is in the process of committing +2017-02-18 09:37:43,196 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:43,196 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000008_0 is allowed to commit now +2017-02-18 09:37:43,197 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000008 +2017-02-18 09:37:43,205 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:43,207 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000008_0' done. +2017-02-18 09:37:43,223 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000008_0 +2017-02-18 09:37:43,223 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000009_0 +2017-02-18 09:37:43,232 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:43,233 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:43,233 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@54930708 +2017-02-18 09:37:43,256 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:43,270 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events 
+2017-02-18 09:37:43,287 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 22 len: 69 to MEMORY +2017-02-18 09:37:43,310 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:43,310 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->22 +2017-02-18 09:37:43,329 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 13 len: 58 to MEMORY +2017-02-18 09:37:43,330 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:43,338 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 2, commitMemory -> 22, usedMemory ->35 +2017-02-18 09:37:43,340 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:37:43,364 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:43,365 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 35, usedMemory ->37 +2017-02-18 09:37:43,368 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:43,370 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:43,370 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:43,371 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:43,372 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 22 bytes +2017-02-18 09:37:43,379 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 37 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:43,412 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 93 bytes from disk +2017-02-18 09:37:43,413 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:43,413 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:43,417 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 27 bytes +2017-02-18 09:37:43,433 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:43,468 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000009_0 is done. And is in the process of committing +2017-02-18 09:37:43,471 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:43,471 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000009_0 is allowed to commit now +2017-02-18 09:37:43,473 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000009 +2017-02-18 09:37:43,484 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:43,487 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000009_0' done. 
+2017-02-18 09:37:43,487 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000009_0 +2017-02-18 09:37:43,488 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete. +2017-02-18 09:37:43,493 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100% +2017-02-18 09:37:43,633 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local2056867727_0001 +java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest + at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573) +Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest + at java.net.URLClassLoader$1.run(URLClassLoader.java:366) + at java.net.URLClassLoader$1.run(URLClassLoader.java:355) + at java.security.AccessController.doPrivileged(Native Method) + at java.net.URLClassLoader.findClass(URLClassLoader.java:354) + at java.lang.ClassLoader.loadClass(ClassLoader.java:425) + at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308) + at java.lang.ClassLoader.loadClass(ClassLoader.java:358) + ... 
1 more +2017-02-18 09:37:44,501 INFO org.apache.hadoop.mapreduce.Job: Job job_local2056867727_0001 failed with state FAILED due to: NA +2017-02-18 09:37:44,719 INFO org.apache.hadoop.mapreduce.Job: Counters: 30 + File System Counters + FILE: Number of bytes read=324416757 + FILE: Number of bytes written=3669220 + FILE: Number of read operations=0 + FILE: Number of large read operations=0 + FILE: Number of write operations=0 + Map-Reduce Framework + Map input records=507535 + Map output records=4678719 + Map output bytes=43638689 + Map output materialized bytes=2645 + Input split bytes=351 + Combine input records=4678719 + Combine output records=131 + Reduce input groups=77 + Reduce shuffle bytes=2645 + Reduce input records=131 + Reduce output records=77 + Spilled Records=262 + Shuffled Maps =30 + Failed Shuffles=0 + Merged Map outputs=30 + GC time elapsed (ms)=709 + Total committed heap usage (bytes)=2551959552 + Shuffle Errors + BAD_ID=0 + CONNECTION=0 + IO_ERROR=0 + WRONG_LENGTH=0 + WRONG_MAP=0 + WRONG_REDUCE=0 + File Input Format Counters + Bytes Read=26057874 + File Output Format Counters + Bytes Written=862 diff --git a/Assign1/output_Q1.iii/._SUCCESS.crc b/Assign1/output_Q1.iii/._SUCCESS.crc new file mode 100644 index 0000000000000000000000000000000000000000..3b7b044936a890cd8d651d349a752d819d71d22c GIT binary patch literal 8 PcmYc;N@ieSU}69O2$TUk literal 0 HcmV?d00001 diff --git a/Assign1/output_Q1.iii/.part-r-00000.crc b/Assign1/output_Q1.iii/.part-r-00000.crc new file mode 100644 index 0000000000000000000000000000000000000000..4fedb91d1eb6ac2fb6a27375c9548e6c7033c6db GIT binary patch literal 12 TcmYc;N@ieSU}AW)Y~4u!6-NX_ literal 0 HcmV?d00001 diff --git a/Assign1/output_Q1.iii/.part-r-00001.crc b/Assign1/output_Q1.iii/.part-r-00001.crc new file mode 100644 index 0000000000000000000000000000000000000000..be4dc603d312ce0eb53a535bc046c408bb51ebdb GIT binary patch literal 12 TcmYc;N@ieSU}9)HdHFT~6m$eQ literal 0 HcmV?d00001 diff --git 
a/Assign1/output_Q1.iii/.part-r-00002.crc b/Assign1/output_Q1.iii/.part-r-00002.crc new file mode 100644 index 0000000000000000000000000000000000000000..b92426178bd5e1bb3aeea4697267bedc9d2ecdd1 GIT binary patch literal 12 TcmYc;N@ieSU}AWolJ^||6JG<Y literal 0 HcmV?d00001 diff --git a/Assign1/output_Q1.iii/.part-r-00003.crc b/Assign1/output_Q1.iii/.part-r-00003.crc new file mode 100644 index 0000000000000000000000000000000000000000..677833c1b40a26109bb334033b86d88dee26ddd0 GIT binary patch literal 12 TcmYc;N@ieSU}EU~Q1S`@6jlTl literal 0 HcmV?d00001 diff --git a/Assign1/output_Q1.iii/.part-r-00004.crc b/Assign1/output_Q1.iii/.part-r-00004.crc new file mode 100644 index 0000000000000000000000000000000000000000..7fc6a7656b02cdd23d2426d0596215efc87ef6ec GIT binary patch literal 12 TcmYc;N@ieSU}ErK?A`<b5NiU# literal 0 HcmV?d00001 diff --git a/Assign1/output_Q1.iii/.part-r-00005.crc b/Assign1/output_Q1.iii/.part-r-00005.crc new file mode 100644 index 0000000000000000000000000000000000000000..60a0bcaff7a38f6ce9977344859f49752ec1e668 GIT binary patch literal 12 TcmYc;N@ieSU}A96<5&Rz56S{t literal 0 HcmV?d00001 diff --git a/Assign1/output_Q1.iii/.part-r-00006.crc b/Assign1/output_Q1.iii/.part-r-00006.crc new file mode 100644 index 0000000000000000000000000000000000000000..91aba40925d99e4170b989e8b19998a00ee74770 GIT binary patch literal 12 TcmYc;N@ieSU}Cs*Q(^%C6e9zk literal 0 HcmV?d00001 diff --git a/Assign1/output_Q1.iii/.part-r-00007.crc b/Assign1/output_Q1.iii/.part-r-00007.crc new file mode 100644 index 0000000000000000000000000000000000000000..b02535967566691c72a1bac6a4e8cdb82cffed61 GIT binary patch literal 12 TcmYc;N@ieSU}7lb<gx<*53vFL literal 0 HcmV?d00001 diff --git a/Assign1/output_Q1.iii/.part-r-00008.crc b/Assign1/output_Q1.iii/.part-r-00008.crc new file mode 100644 index 0000000000000000000000000000000000000000..ee46171c9b8fb9345c192817f7e21ae0e388a033 GIT binary patch literal 12 TcmYc;N@ieSU}6Y-t`G(Q5)T6D literal 0 HcmV?d00001 diff --git 
a/Assign1/output_Q1.iii/.part-r-00009.crc b/Assign1/output_Q1.iii/.part-r-00009.crc new file mode 100644 index 0000000000000000000000000000000000000000..f01ba08157dd0322cd847b04ecbc8433ff23a714 GIT binary patch literal 12 TcmYc;N@ieSU}D%M*ZCI!643*h literal 0 HcmV?d00001 diff --git a/Assign1/output_Q1.iii/_SUCCESS b/Assign1/output_Q1.iii/_SUCCESS new file mode 100644 index 0000000..e69de29 diff --git a/Assign1/output_Q1.iii/part-r-00000 b/Assign1/output_Q1.iii/part-r-00000 new file mode 100644 index 0000000..4de60e8 --- /dev/null +++ b/Assign1/output_Q1.iii/part-r-00000 @@ -0,0 +1,6 @@ +about 6121 +be 27239 +by 15659 +her 21272 +up 7138 +you 35121 diff --git a/Assign1/output_Q1.iii/part-r-00001 b/Assign1/output_Q1.iii/part-r-00001 new file mode 100644 index 0000000..b139bbd --- /dev/null +++ b/Assign1/output_Q1.iii/part-r-00001 @@ -0,0 +1,15 @@ + 202317 +been 6037 +down 4100 +got 4047 +have 23991 +is 37433 +me 11514 +my 22712 +she 19935 +their 6428 +them 5581 +this 18019 +upon 4160 +we 13668 +will 11037 diff --git a/Assign1/output_Q1.iii/part-r-00002 b/Assign1/output_Q1.iii/part-r-00002 new file mode 100644 index 0000000..e1d13e3 --- /dev/null +++ b/Assign1/output_Q1.iii/part-r-00002 @@ -0,0 +1,12 @@ +but 30329 +do 5427 +had 24180 +it 50587 +like 4217 +no 7952 +on 12904 +the 208421 +then 6525 +thy 4028 +was 49624 +which 7409 diff --git a/Assign1/output_Q1.iii/part-r-00003 b/Assign1/output_Q1.iii/part-r-00003 new file mode 100644 index 0000000..34e98f3 --- /dev/null +++ b/Assign1/output_Q1.iii/part-r-00003 @@ -0,0 +1,6 @@ +are 7566 +as 29531 +into 4263 +one 9926 +our 4088 +they 14089 diff --git a/Assign1/output_Q1.iii/part-r-00004 b/Assign1/output_Q1.iii/part-r-00004 new file mode 100644 index 0000000..35e78f9 --- /dev/null +++ b/Assign1/output_Q1.iii/part-r-00004 @@ -0,0 +1,5 @@ +all 11971 +at 18706 +of 119701 +that 54938 +your 11160 diff --git a/Assign1/output_Q1.iii/part-r-00005 b/Assign1/output_Q1.iii/part-r-00005 new file mode 100644 index 0000000..cb6a433 --- 
/dev/null +++ b/Assign1/output_Q1.iii/part-r-00005 @@ -0,0 +1,5 @@ +his 34380 +out 7891 +when 8507 +who 4428 +with 34665 diff --git a/Assign1/output_Q1.iii/part-r-00006 b/Assign1/output_Q1.iii/part-r-00006 new file mode 100644 index 0000000..3859676 --- /dev/null +++ b/Assign1/output_Q1.iii/part-r-00006 @@ -0,0 +1,7 @@ +could 6614 +he 44747 +i 72836 +in 69179 +or 8610 +were 9682 +would 9567 diff --git a/Assign1/output_Q1.iii/part-r-00007 b/Assign1/output_Q1.iii/part-r-00007 new file mode 100644 index 0000000..8e7777a --- /dev/null +++ b/Assign1/output_Q1.iii/part-r-00007 @@ -0,0 +1,8 @@ +from 9433 +has 5077 +its 4523 +other 4002 +so 13588 +some 4462 +there 8909 +what 5911 diff --git a/Assign1/output_Q1.iii/part-r-00008 b/Assign1/output_Q1.iii/part-r-00008 new file mode 100644 index 0000000..3dffa45 --- /dev/null +++ b/Assign1/output_Q1.iii/part-r-00008 @@ -0,0 +1,10 @@ +a 99209 +an 8104 +and 167100 +for 36130 +if 7613 +it. 4557 +not 32386 +said 5039 +see 4020 +to 114272 diff --git a/Assign1/output_Q1.iii/part-r-00009 b/Assign1/output_Q1.iii/part-r-00009 new file mode 100644 index 0000000..2c6f5ad --- /dev/null +++ b/Assign1/output_Q1.iii/part-r-00009 @@ -0,0 +1,3 @@ +any 5067 +him 8425 +thou 5138 diff --git a/Assign1/output_Q1.iii/stopwords.csv b/Assign1/output_Q1.iii/stopwords.csv new file mode 100644 index 0000000..cc6b854 --- /dev/null +++ b/Assign1/output_Q1.iii/stopwords.csv @@ -0,0 +1,76 @@ +about +be +by +her +up +you +been +down +got +have +is +me +my +she +their +them +this +upon +we +will +but +do +had +it +like +no +on +the +then +thy +was +which +are +as +into +one +our +they +all +at +of +that +your +his +out +when +who +with +could +he +i +in +or +were +would +from +has +its +other +so +some +there +what +a +an +and +for +if +it. 
package Question1;

import java.io.IOException;
import java.util.Arrays;
import java.util.Locale;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.BZip2Codec;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * Word-count job that emits only words occurring more than
 * {@link Reduce#THRESHOLD} times, using BZip2-compressed intermediate
 * (map) output and 10 reduce tasks.
 *
 * <p>Usage: {@code Stopword_iii <input>... <output>} — every argument
 * except the last is an input path; the last is the output directory.
 */
public class Stopword_iii extends Configured implements Tool {

    public static void main(String[] args) throws Exception {
        System.out.println(Arrays.toString(args));
        int res = ToolRunner.run(new Configuration(), new Stopword_iii(), args);
        System.exit(res);
    }

    /**
     * Configures and runs the job.
     *
     * @param args input path(s) followed by a single output path
     * @return 0 on job success, 1 on job failure, -1 on bad arguments
     */
    @Override
    public int run(String[] args) throws Exception {
        System.out.println(Arrays.toString(args));
        if (args.length < 2) {
            System.err.println("Usage: Stopword_iii <input>... <output>");
            return -1;
        }

        // Job.getInstance replaces the deprecated new Job(Configuration, String) ctor.
        Job job = Job.getInstance(getConf(), "Stopword_iii");
        job.setJarByClass(Stopword_iii.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        job.setMapperClass(Map.class);
        // BUG FIX: the reducer filters out sums <= THRESHOLD, so it must NOT be
        // reused as the combiner. A combiner runs on per-mapper PARTIAL sums and
        // would silently drop any word whose count only crosses the threshold
        // once all splits are merged. Use a plain summing combiner instead
        // (pure summation is associative/commutative, satisfying the combiner
        // contract).
        job.setCombinerClass(Sum.class);
        job.setReducerClass(Reduce.class);

        // Compress intermediate map output with BZip2. The mapred.* property
        // names are deprecated; these are the current mapreduce.* equivalents.
        job.getConfiguration().setBoolean("mapreduce.map.output.compress", true);
        job.getConfiguration().setClass("mapreduce.map.output.compress.codec",
                BZip2Codec.class, CompressionCodec.class);

        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);

        job.setNumReduceTasks(10);

        // Every argument but the last is an input path (generalizes the previous
        // hard-coded args[0..2]); the final argument is the output directory.
        for (int i = 0; i < args.length - 1; i++) {
            FileInputFormat.addInputPath(job, new Path(args[i]));
        }
        FileOutputFormat.setOutputPath(job, new Path(args[args.length - 1]));

        // BUG FIX: propagate job status. The original always returned 0, so a
        // failed job (e.g. the NoClassDefFoundError recorded in hadoop.log in
        // this same patch) still exited with a success code.
        return job.waitForCompletion(true) ? 0 : 1;
    }

    /** Tokenizes each line on whitespace and emits (lowercased word, 1). */
    public static class Map extends Mapper<LongWritable, Text, Text, IntWritable> {
        private static final IntWritable ONE = new IntWritable(1);
        private final Text word = new Text();

        @Override
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            for (String token : value.toString().split("\\s+")) {
                // BUG FIX: split("\\s+") yields a leading "" for empty lines or
                // lines starting with whitespace; skipping it prevents the bogus
                // empty-word count visible in the previous output (part-r-00001).
                if (token.isEmpty()) {
                    continue;
                }
                // Locale.ROOT keeps lowercasing deterministic regardless of the
                // JVM's default locale (e.g. the Turkish dotless-i problem).
                word.set(token.toLowerCase(Locale.ROOT));
                context.write(word, ONE);
            }
        }
    }

    /** Combiner: sums partial counts WITHOUT applying the frequency threshold. */
    public static class Sum extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            context.write(key, new IntWritable(sum));
        }
    }

    /** Final reducer: sums counts, keeping only words seen more than THRESHOLD times. */
    public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable> {
        // Frequency cutoff for the stop-word list (named to replace the magic 4000).
        private static final int THRESHOLD = 4000;

        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            if (sum > THRESHOLD) {
                context.write(key, new IntWritable(sum));
            }
        }
    }
}